Python toposort.toposort() Examples

The following are 23 code examples of toposort.toposort(), taken from open-source projects. Each example notes the source file, project, and license it comes from. You may also want to check out the other available functions and classes of the toposort module.
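For reference, toposort() takes a dict that maps each item to the set of items it depends on and yields sets of items, where every item in a set depends only on items in earlier sets. A minimal usage sketch (the numbers are arbitrary):

from toposort import toposort, toposort_flatten

# each key depends on the items in its value set
deps = {
    2: {11},
    9: {11, 8},
    10: {11, 3},
    11: {7, 5},
    8: {7, 3},
}

print(list(toposort(deps)))
# [{3, 5, 7}, {8, 11}, {2, 9, 10}]

print(toposort_flatten(deps))
# [3, 5, 7, 8, 11, 2, 9, 10]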
Example #1
Source File: msg.py    From bonnet with GNU General Public License v3.0
def tf_toposort(ts, within_ops=None):
    all_ops = ge.get_forward_walk_ops(
        [x.op for x in ts], within_ops=within_ops)

    deps = {}
    for op in all_ops:
        for o in op.outputs:
            deps[o] = set(op.inputs)
    sorted_ts = toposort(deps)

    # only keep the tensors from our original list
    ts_sorted_lists = []
    for l in sorted_ts:
        keep = list(set(l).intersection(ts))
        if keep:
            ts_sorted_lists.append(keep)

    return ts_sorted_lists 
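This function (repeated almost verbatim in several of the projects below) builds a dependency dict mapping each tensor to the set of tensors its producing op consumes, topologically sorts it, and keeps only the originally requested tensors in each level. A small sketch of the same filter-by-level pattern using plain Python objects (all names here are made up for illustration):

from toposort import toposort

# hypothetical dependency graph: item -> set of items it depends on
deps = {"loss": {"logits"}, "logits": {"hidden"}, "hidden": {"x", "w"}}

# the subset of items we actually care about
ts = {"x", "hidden", "loss"}

ts_sorted_lists = []
for level in toposort(deps):
    keep = list(level & ts)  # keep only the requested items in this level
    if keep:
        ts_sorted_lists.append(keep)

print(ts_sorted_lists)  # [['x'], ['hidden'], ['loss']]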
Example #2
Source File: memory_saving_gradients.py    From pix2pix-flow with MIT License
def tf_toposort(ts, within_ops=None):
    all_ops = ge.get_forward_walk_ops(
        [x.op for x in ts], within_ops=within_ops)

    deps = {}
    for op in all_ops:
        for o in op.outputs:
            deps[o] = set(op.inputs)
    sorted_ts = toposort(deps)

    # only keep the tensors from our original list
    ts_sorted_lists = []
    for l in sorted_ts:
        keep = list(set(l).intersection(ts))
        if keep:
            ts_sorted_lists.append(keep)

    return ts_sorted_lists 
Example #3
Source File: memory_saving_gradients.py    From faceswap with GNU General Public License v3.0
def tf_toposort(ts_inp, within_ops=None):
    """ Tensorflow topological sort """
    all_ops = ge.get_forward_walk_ops([x.op for x in ts_inp], within_ops=within_ops)

    deps = {}
    for tf_op in all_ops:
        for outp in tf_op.outputs:
            deps[outp] = set(tf_op.inputs)
    sorted_ts = toposort(deps)

    # only keep the tensors from our original list
    ts_sorted_lists = []
    for lst in sorted_ts:
        keep = list(set(lst).intersection(ts_inp))
        if keep:
            ts_sorted_lists.append(keep)
    return ts_sorted_lists 
Example #4
Source File: memory_saving_gradients.py    From dfc2019 with MIT License
def tf_toposort(ts, within_ops=None):
    all_ops = ge.get_forward_walk_ops([x.op for x in ts], within_ops=within_ops)

    deps = {}
    for op in all_ops:
        for o in op.outputs:
            deps[o] = set(op.inputs)
    sorted_ts = toposort(deps)

    # only keep the tensors from our original list
    ts_sorted_lists = []
    for l in sorted_ts:
        keep = list(set(l).intersection(ts))
        if keep:
            ts_sorted_lists.append(keep)

    return ts_sorted_lists 
Example #5
Source File: memory_saving_gradients.py    From DeepV2D with BSD 3-Clause "New" or "Revised" License
def tf_toposort(ts, within_ops=None):
    all_ops = ge.get_forward_walk_ops([x.op for x in ts], within_ops=within_ops)

    deps = {}
    for op in all_ops:
        for o in op.outputs:
            deps[o] = set(op.inputs)
    sorted_ts = toposort(deps)

    # only keep the tensors from our original list
    ts_sorted_lists = []
    for l in sorted_ts:
        keep = list(set(l).intersection(ts))
        if keep:
            ts_sorted_lists.append(keep)

    return ts_sorted_lists 
Example #6
Source File: memory_saving_gradients.py    From kaggle-rsna18 with MIT License
def tf_toposort(ts, within_ops=None):
    all_ops = ge.get_forward_walk_ops([x.op for x in ts], within_ops=within_ops)

    deps = {}
    for op in all_ops:
        for o in op.outputs:
            deps[o] = set(op.inputs)
    sorted_ts = toposort(deps)

    # only keep the tensors from our original list
    ts_sorted_lists = []
    for l in sorted_ts:
        keep = list(set(l).intersection(ts))
        if keep:
            ts_sorted_lists.append(keep)

    return ts_sorted_lists 
Example #7
Source File: memory_saving_gradients.py    From glow with MIT License
def tf_toposort(ts, within_ops=None):
    all_ops = ge.get_forward_walk_ops(
        [x.op for x in ts], within_ops=within_ops)

    deps = {}
    for op in all_ops:
        for o in op.outputs:
            deps[o] = set(op.inputs)
    sorted_ts = toposort(deps)

    # only keep the tensors from our original list
    ts_sorted_lists = []
    for l in sorted_ts:
        keep = list(set(l).intersection(ts))
        if keep:
            ts_sorted_lists.append(keep)

    return ts_sorted_lists 
Example #8
Source File: memory_saving_gradients.py    From gradient-checkpointing with MIT License
def tf_toposort(ts, within_ops=None):
    all_ops = ge.get_forward_walk_ops([x.op for x in ts], within_ops=within_ops)

    deps = {}
    for op in all_ops:
        for o in op.outputs:
            deps[o] = set(op.inputs)
    sorted_ts = toposort(deps)

    # only keep the tensors from our original list
    ts_sorted_lists = []
    for l in sorted_ts:
        keep = list(set(l).intersection(ts))
        if keep:
            ts_sorted_lists.append(keep)

    return ts_sorted_lists 
Example #9
Source File: memory_saving_gradients.py    From sp-society-camera-model-identification with GNU General Public License v3.0
def tf_toposort(ts, within_ops=None):
    all_ops = ge.get_forward_walk_ops([x.op for x in ts], within_ops=within_ops)

    deps = {}
    for op in all_ops:
        for o in op.outputs:
            deps[o] = set(op.inputs)
    sorted_ts = toposort(deps)

    # only keep the tensors from our original list
    ts_sorted_lists = []
    for l in sorted_ts:
        keep = list(set(l).intersection(ts))
        if keep:
            ts_sorted_lists.append(keep)

    return ts_sorted_lists 
Example #10
Source File: models.py    From symfit with GNU General Public License v2.0
def _init_from_dict(self, model_dict):
        """
        Initiate self from a model_dict to make sure attributes such as vars, params are available.

        Creates lists of alphabetically sorted independent vars, dependent vars, sigma vars, and parameters.
        Finally it creates a signature for this model so it can be called nicely. This signature only contains
        independent vars and params, as one would expect.

        :param model_dict: dict of (dependent_var, expression) pairs.
        """
        sort_func = lambda symbol: symbol.name
        self.model_dict = OrderedDict(sorted(model_dict.items(),
                                             key=lambda i: sort_func(i[0])))
        # Everything at the bottom of the toposort is independent, at the top
        # dependent, and the rest interdependent.
        ordered = list(toposort(self.connectivity_mapping))
        independent = sorted(ordered.pop(0), key=sort_func)
        self.dependent_vars = sorted(ordered.pop(-1), key=sort_func)
        self.interdependent_vars = sorted(
            [item for items in ordered for item in items],
            key=sort_func
        )
        # `independent` contains both params and vars, needs to be separated
        self.independent_vars = [s for s in independent if
                                 not isinstance(s, Parameter) and not s in self]
        self.params = [s for s in independent if isinstance(s, Parameter)]

        try:
            assert not any(isinstance(var, Parameter)
                           for var in self.dependent_vars)
            assert not any(isinstance(var, Parameter)
                           for var in self.interdependent_vars)
        except AssertionError:
            raise ModelError('`Parameter`\'s can not feature in the role '
                             'of `Variable`')
        # Make a Variable object corresponding to each dependent var.
        self.sigmas = {var: Variable(name='sigma_{}'.format(var.name))
                       for var in self.dependent_vars} 
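As the comment above notes, the first group yielded by toposort holds the fully independent symbols (variables and parameters), the last group holds the dependent variables, and everything in between is interdependent. A hedged illustration with plain strings standing in for sympy symbols (the mapping is made up):

from toposort import toposort

# hypothetical connectivity mapping: each dependent symbol -> the symbols
# appearing in its defining expression
connectivity_mapping = {
    "z": {"y", "b"},  # z = f(y, b)
    "y": {"x", "a"},  # y = g(x, a)
}

print(list(toposort(connectivity_mapping)))
# [{'x', 'a', 'b'}, {'y'}, {'z'}]  (ordering within each set may vary)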
Example #11
Source File: scheduler.py    From PsyNeuLink with Apache License 2.0
def _init_consideration_queue_from_graph(self, graph):
        self.dependency_dict, self.removed_dependencies, self.structural_dependencies = graph.prune_feedback_edges()
        self.consideration_queue = list(toposort(self.dependency_dict)) 
Example #12
Source File: dfgraph.py    From checkmate with Apache License 2.0
def topological_order_fwd(self):
        adj_set = {k: set(v) for k, v in self.adj_list_fwd.items()}
        topo_sets = list(toposort(adj_set))
        return [x for topo_set in topo_sets for x in topo_set] 
Example #13
Source File: dfgraph.py    From checkmate with Apache License 2.0
def topological_order(self):
        adj_set = {k: set(v) for k, v in self.adj_list.items()}
        topo_sets = list(toposort(adj_set))
        return [x for topo_set in topo_sets for x in topo_set] 
Example #14
Source File: graph_builder.py    From checkmate with Apache License 2.0
def make_graph(self) -> DFGraph:
        """
        Build the DFGraph given the dependency structure described in the problem
        :return: a DFGraph instance corresponding to your problem description
        """
        # step 1 -- toposort graph and allocate node positions as a dict({0, ..., n} -> UUID)
        edge_list = [(source, dest) for dest, sources in self.arguments.items() for source in sources]
        topo_order = list(reversed([x for st in toposort(edge_to_adj_list(edge_list)) for x in st]))
        topo_order = [v for v in set(self.nodes.values()) - set(topo_order)] + topo_order  # add isolated nodes
        uuid2topo = {uuid: topo_idx for topo_idx, uuid in enumerate(topo_order)}

        # step 2 -- map builder data-structures to node position indexed data-structures
        vertex_list = list(uuid2topo.values())
        cost_cpu = dict((uuid2topo[idx], self.costs_cpu[idx]) for idx in self.costs_cpu.keys())
        cost_ram = dict((uuid2topo[idx], self.costs_ram[idx]) for idx in self.costs_ram.keys())
        arg_list = {uuid2topo[key]: [uuid2topo[arg] for arg in args] for key, args in self.arguments.items()}
        names = {uuid2topo[idx]: name for (name, idx) in self.nodes.items()}
        bwd_node_set = set(uuid2topo[v] for v in self.nodes.values() if v in self.backward_nodes)

        # step 3 -- make DFGraph
        return DFGraph(
            v=vertex_list,
            args=arg_list,
            backward_nodes=bwd_node_set,
            node_names=names,
            cost_cpu=cost_cpu,
            cost_ram=cost_ram,
            cost_ram_parameters=self.parameter_cost,
        ) 
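In step 1, edge_to_adj_list is a checkmate helper that is not shown here; the sketch below is only an assumption about its behaviour (mapping each node to the set of nodes that consume its output), made to illustrate why the flattened toposort result is reversed to obtain a forward order:

from toposort import toposort

# hypothetical edge list: (source, dest) means dest consumes source's output
edge_list = [("a", "b"), ("b", "c"), ("a", "c")]

# assumed equivalent of edge_to_adj_list: node -> set of consumers
adj = {}
for source, dest in edge_list:
    adj.setdefault(source, set()).add(dest)

# toposort treats the values as dependencies, so consumers are emitted first;
# reversing the flattened order puts producers before their consumers
topo_order = list(reversed([x for level in toposort(adj) for x in level]))
print(topo_order)  # ['a', 'b', 'c']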
Example #15
Source File: models.py    From symfit with GNU General Public License v2.0
def ordered_symbols(self):
        """
        :return: list of all symbols in this model, topologically sorted so they
            can be evaluated in the correct order.

            Within each group of equal priority symbols, we sort by the order of
            the derivative.
        """
        key_func = lambda s: [isinstance(s, sympy.Derivative),
                              isinstance(s, sympy.Derivative) and s.derivative_count]
        symbols = []
        for symbol in toposort(self.connectivity_mapping):
            symbols.extend(sorted(symbol, key=key_func))

        return symbols 
Example #16
Source File: job.py    From cc-utils with Apache License 2.0
def ordered_steps(self):
        dependencies = {
            step.name: step.depends() for step in self.steps()
        }
        try:
            result = list(toposort.toposort(dependencies))
        except toposort.CircularDependencyError as de:
            # remove circular dependencies caused by synthetic steps
            # (custom steps' dependencies should "win")
            for step_name, step_dependencies in de.data.items():
                step = self.step(step_name)
                if not step.is_synthetic:
                    continue # only patch away synthetic steps' dependencies
                for step_dependency_name in step_dependencies:
                    step_dependency = self.step(step_dependency_name)
                    if step_dependency.is_synthetic:
                        continue # leave dependencies between synthetic steps
                    # patch out dependency from synthetic step to custom step
                    dependencies[step_name].remove(step_dependency_name)
            # try again - if there is still a cyclic dependency, this is probably caused
            # by a user error - so let it propagate
            result = toposort.toposort(dependencies)

        # result contains sets of step names in the correct execution order;
        # the steps within each set can/should be parallelised
        return result 
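toposort raises toposort.CircularDependencyError when the dependency dict contains a cycle; as the handler above relies on, the unsortable remainder of the graph is available on the exception's data attribute. A minimal sketch (the step names are made up):

import toposort

deps = {"deploy": {"build"}, "build": {"test"}, "test": {"deploy"}}  # a cycle

try:
    list(toposort.toposort(deps))
except toposort.CircularDependencyError as err:
    # err.data holds the part of the graph that could not be sorted, e.g.
    # {'deploy': {'build'}, 'build': {'test'}, 'test': {'deploy'}}
    print(err.data)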
Example #17
Source File: linearize_test.py    From gradient-checkpointing with MIT License
def test_toposort():
  tf.reset_default_graph()
  nodes = util.make_caterpillar_graph(length=2)
  graph = linearize_lib.get_graph()
  initial = list(toposort(graph))[0]
  assert len(initial) == 1
  assert list(initial)[0].name == 'merge2' 
Example #18
Source File: topo_orderer.py    From condenser with MIT License
def __prepare_topsort_input(relationships, tables):
    dep_breaks = config_reader.get_dependency_breaks()
    deps = dict()
    for r in relationships:
        p = r['fk_table']
        c = r['target_table']

        # break circular dependency
        dep_break_found = False
        for dep_break in dep_breaks:
            if p == dep_break.fk_table and c == dep_break.target_table:
                dep_break_found = True
                break

        if dep_break_found:
            continue

        # toposort ignores self circularities for some reason, but we cannot
        if p == c:
            raise ValueError('Circular dependency, {} depends on itself!'.format(p))

        if tables is not None and len(tables) > 0 and (p not in tables or c not in tables):
            continue

        if p in deps:
            deps[p].add(c)
        else:
            deps[p] = set()
            deps[p].add(c)

    return deps 
Example #19
Source File: topo_orderer.py    From condenser with MIT License
def get_topological_order_by_tables(relationships, tables):
    topsort_input = __prepare_topsort_input(relationships, tables)
    return list(toposort(topsort_input)) 
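A hedged sketch of the kind of input __prepare_topsort_input builds and the grouping get_topological_order_by_tables returns (the table names are made up); each group of tables can be processed once all groups before it are done:

from toposort import toposort

# hypothetical relationships in the shape the functions above expect
relationships = [
    {'fk_table': 'orders',      'target_table': 'customers'},
    {'fk_table': 'order_items', 'target_table': 'orders'},
    {'fk_table': 'order_items', 'target_table': 'products'},
]

deps = {}
for r in relationships:
    deps.setdefault(r['fk_table'], set()).add(r['target_table'])

print(list(toposort(deps)))
# [{'customers', 'products'}, {'orders'}, {'order_items'}]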
Example #20
Source File: utils.py    From dagster with Apache License 2.0
def toposort_flatten(data):
    return [item for level in toposort(data) for item in level] 
Example #21
Source File: utils.py    From dagster with Apache License 2.0
def toposort(data):
    return [sorted(list(level)) for level in toposort_.toposort(data)] 
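Sorting each level makes the output deterministic, which plain toposort does not guarantee because it yields unordered sets. A usage sketch of the two dagster helpers above (the dependency dict is made up):

import toposort as toposort_

def toposort(data):
    # same as the dagster helper above: sort each level for determinism
    return [sorted(list(level)) for level in toposort_.toposort(data)]

def toposort_flatten(data):
    return [item for level in toposort(data) for item in level]

deps = {"c": {"a", "b"}, "d": {"c"}}
print(toposort(deps))          # [['a', 'b'], ['c'], ['d']]
print(toposort_flatten(deps))  # ['a', 'b', 'c', 'd']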
Example #22
Source File: preference.py    From cmdb with GNU General Public License v2.0
def get_relation_view():
        _views = PreferenceRelationView.get_by(to_dict=True)
        views = []
        if current_app.config.get("USE_ACL"):
            for i in _views:
                try:
                    if ACLManager().has_permission(i.get('name'),
                                                   ResourceTypeEnum.RELATION_VIEW,
                                                   PermEnum.READ):
                        views.append(i)
                except AbortException:
                    pass
        else:
            views = _views

        view2cr_ids = dict()
        result = dict()
        name2id = list()
        for view in views:
            view2cr_ids.setdefault(view['name'], []).extend(json.loads(view['cr_ids']))
            name2id.append([view['name'], view['id']])

        id2type = dict()
        for view_name in view2cr_ids:
            for i in view2cr_ids[view_name]:
                id2type[i['parent_id']] = None
                id2type[i['child_id']] = None
            topo = {i['child_id']: {i['parent_id']} for i in view2cr_ids[view_name]}
            leaf = list(set(toposort.toposort_flatten(topo)) - set([j for i in topo.values() for j in i]))

            leaf2show_types = {i: [t['child_id'] for t in CITypeRelation.get_by(parent_id=i)] for i in leaf}
            node2show_types = copy.deepcopy(leaf2show_types)

            def _find_parent(_node_id):
                parents = topo.get(_node_id, {})
                for parent in parents:
                    node2show_types.setdefault(parent, []).extend(node2show_types.get(_node_id, []))
                    _find_parent(parent)
                if not parents:
                    return

            for l in leaf:
                _find_parent(l)

            for node_id in node2show_types:
                node2show_types[node_id] = [CITypeCache.get(i).to_dict() for i in set(node2show_types[node_id])]

            result[view_name] = dict(topo=list(map(list, toposort.toposort(topo))),
                                     topo_flatten=list(toposort.toposort_flatten(topo)),
                                     leaf=leaf,
                                     leaf2show_types=leaf2show_types,
                                     node2show_types=node2show_types,
                                     show_types=[CITypeCache.get(j).to_dict()
                                                 for i in leaf2show_types.values() for j in i])

        for type_id in id2type:
            id2type[type_id] = CITypeCache.get(type_id).to_dict()

        return result, id2type, sorted(name2id, key=lambda x: x[1]) 
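The leaf computation above exploits the shape of topo ({child_id: {parent_id}}): toposort_flatten(topo) returns every node, while the union of the values contains only parents, so the set difference is exactly the childless (leaf) nodes. A small hedged illustration with made-up ids:

import toposort

# hypothetical child_id -> {parent_id} mapping
topo = {2: {1}, 3: {1}, 4: {2}}

all_nodes = set(toposort.toposort_flatten(topo))   # {1, 2, 3, 4}
parents = {p for ps in topo.values() for p in ps}  # {1, 2}
leaf = sorted(all_nodes - parents)
print(leaf)  # [3, 4]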
Example #23
Source File: scheduler.py    From PsyNeuLink with Apache License 2.0
def __init__(
        self,
        composition=None,
        graph=None,
        conditions=None,
        termination_conds={
            TimeScale.RUN: Never(),
            TimeScale.TRIAL: AllHaveRun(),
        },
        default_execution_id=None,
        **kwargs
    ):
        """
        :param self:
        :param composition: (Composition) - the Composition this scheduler is scheduling for
        :param conditions: (ConditionSet) - a :keyword:`ConditionSet` to be scheduled
        """
        self.conditions = ConditionSet(conditions)

        # stores the in order list of self.run's yielded outputs
        self.consideration_queue = []
        self.default_termination_conds = Scheduler._parse_termination_conditions(termination_conds)
        self._termination_conds = termination_conds.copy()

        self.cycle_nodes = set()

        if composition is not None:
            self.nodes = [vert.component for vert in composition.graph_processing.vertices]
            self._init_consideration_queue_from_graph(composition.graph_processing)
            if default_execution_id is None:
                default_execution_id = composition.default_execution_id
        elif graph is not None:
            try:
                self.nodes = [vert.component for vert in graph.vertices]
                self._init_consideration_queue_from_graph(graph)
            except AttributeError:
                self.consideration_queue = list(toposort(graph))
                self.nodes = []
                for consideration_set in self.consideration_queue:
                    for node in consideration_set:
                        self.nodes.append(node)
        else:
            raise SchedulerError('Must instantiate a Scheduler with either a Composition (kwarg composition) '
                                 'or a graph dependency dict (kwarg graph)')

        self.default_execution_id = default_execution_id
        self.execution_list = {self.default_execution_id: []}
        self.clocks = {self.default_execution_id: Clock()}
        self.counts_total = {}
        self.counts_useable = {}
        self._init_counts(execution_id=self.default_execution_id)
        self.date_creation = datetime.datetime.now()
        self.date_last_run_end = None

    # the consideration queue is the ordered list of sets of nodes in the graph, by the
    # order in which they should be checked to ensure that all parents have a chance to run before their children
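When a plain dependency dict is passed as graph, the consideration queue is simply the list of toposort levels: each set contains nodes whose parents all appear in earlier sets. A hedged sketch with made-up node names:

from toposort import toposort

# hypothetical graph dependency dict: node -> set of nodes it depends on
graph = {"B": {"A"}, "C": {"A"}, "D": {"B", "C"}}

consideration_queue = list(toposort(graph))
print(consideration_queue)
# [{'A'}, {'B', 'C'}, {'D'}]: A is considered before B and C, which precede D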