Python itertools.combinations() Examples
The following are 30 code examples of itertools.combinations(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module itertools, or try the search function.
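itertools.combinations(iterable, r) yields every r-length subsequence of the input iterable, in the order the elements appear, without repeating elements. Before the project examples, here is a minimal illustrative sketch of that behavior (the input values are made up for demonstration):

import itertools

# All 2-element subsets of a 3-element list, emitted in input order.
for pair in itertools.combinations(['a', 'b', 'c'], 2):
    print(pair)

# Prints:
# ('a', 'b')
# ('a', 'c')
# ('b', 'c')

Most of the examples below use exactly this pairwise form, combinations(items, 2), to enumerate all unordered pairs of qubits, graph nodes, classes, or files.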
Example #1
Source File: find_centroid.py From Localization with MIT License | 7 votes |
def canInd(P, ni):
    l = len(P)
    ind = range(l)
    if ni < 2:
        return [[xx] for xx in ind]
    if ni >= l:
        return [ind]
    im = intersection_matrix(P)
    can = []
    for w in combinations(ind, ni):
        fg = True
        for i in w:
            for j in w:
                if im[i, j] == 0:
                    fg = False
                    break
            if not fg:
                break
        if fg:
            can.append(list(w))
    return can
Example #2
Source File: fonts-subset-support.py From gftools with Apache License 2.0 | 6 votes |
def _LeastSimilarCoverage(files, subset):
    """Returns pair of fonts having inconsistent coverage for a subset.

    Args:
      files: List of font files
      subset: Name of subset
    Returns:
      3 tuple of (file1, file2, number of codepoints difference)
    """
    worst = (None, None, 0)
    subsetcps = fonts.CodepointsInSubset(subset, True)
    for pair in itertools.combinations(files, 2):
        inconsistency = _InconsistentSubsetSupport(pair[0], pair[1], subsetcps)
        if inconsistency > worst[2]:
            worst = (pair[0], pair[1], inconsistency)
    return worst
Example #3
Source File: nasbench_space.py From deep_architect with MIT License | 6 votes |
def create_cell_generator(num_nodes):
    h_connections = [
        Bool(name='in_%d_%d' % (in_id, out_id))
        for (in_id, out_id) in itertools.combinations(range(num_nodes + 2), 2)
    ]
    cell_ops = [
        D(['conv1', 'conv3', 'max3'], name='node_%d' % i)
        for i in range(num_nodes)
    ]

    def generate(filters):
        return cell(
            lambda channels: mo.siso_sequential(
                [conv2d(D([channels]), D([1])),
                 batch_normalization(),
                 relu()]),
            lambda num_inputs, node_id, channels: intermediate_node_fn(
                num_inputs, node_id, channels, cell_ops),
            concat, h_connections, 5, filters)

    return generate
Example #4
Source File: Features.py From ConvLab with MIT License | 6 votes |
def get_ngrams(sentence, max_length, skip_ngrams=False, add_tags=True):
    # return ngrams of length up to max_length as found in sentence.
    out = []
    words = sentence.split()
    if add_tags:
        words = ["<s>"] + words + ["</s>"]
    if not skip_ngrams:
        for i in range(len(words)):
            for n in range(1, min(max_length + 1, len(words) - i + 1)):
                this_ngram = " ".join(words[i:i + n])
                out.append((this_ngram, []))
    else:
        for n in range(1, max_length + 1):
            subsets = set(itertools.combinations(range(len(words)), n))
            for subset in subsets:
                subset = sorted(subset)
                dists = [(subset[i] - subset[i - 1]) for i in range(1, len(subset))]
                out.append((" ".join([words[j] for j in subset]), dists))
    return out
Example #5
Source File: qubitgraph.py From pyGSTi with Apache License 2.0 | 6 votes |
def connected_combos(self, possible_nodes, size):
    """
    Computes the number of different connected subsets of
    `possible_nodes` containing `size` nodes.

    Parameters
    ----------
    possible_nodes : list
        A list of node (qubit) labels.

    size : int
        The size of the connected subsets being sought (counted).

    Returns
    -------
    int
    """
    count = 0
    for selected_nodes in _itertools.combinations(possible_nodes, size):
        if self.are_glob_connected(selected_nodes):
            count += 1
    return count
Example #6
Source File: test_numeric.py From recruit with Apache License 2.0 | 6 votes |
def test_count_nonzero_axis_consistent(self):
    # Check that the axis behaviour for valid axes in
    # non-special cases is consistent (and therefore
    # correct) by checking it against an integer array
    # that is then cast to the generic object dtype
    from itertools import combinations, permutations

    axis = (0, 1, 2, 3)
    size = (5, 5, 5, 5)
    msg = "Mismatch for axis: %s"

    rng = np.random.RandomState(1234)
    m = rng.randint(-100, 100, size=size)
    n = m.astype(object)

    for length in range(len(axis)):
        for combo in combinations(axis, length):
            for perm in permutations(combo):
                assert_equal(
                    np.count_nonzero(m, axis=perm),
                    np.count_nonzero(n, axis=perm),
                    err_msg=msg % (perm,))
Example #7
Source File: collocations.py From razzy-spinner with GNU General Public License v3.0 | 6 votes |
def from_words(cls, words, window_size=3):
    """Construct a TrigramCollocationFinder for all trigrams in the given
    sequence.
    """
    if window_size < 3:
        raise ValueError("Specify window_size at least 3")

    wfd = FreqDist()
    wildfd = FreqDist()
    bfd = FreqDist()
    tfd = FreqDist()

    for window in ngrams(words, window_size, pad_right=True):
        w1 = window[0]
        if w1 is None:
            continue
        for w2, w3 in _itertools.combinations(window[1:], 2):
            wfd[w1] += 1
            if w2 is None:
                continue
            bfd[(w1, w2)] += 1
            if w3 is None:
                continue
            wildfd[(w1, w3)] += 1
            tfd[(w1, w2, w3)] += 1

    return cls(wfd, bfd, wildfd, tfd)
Example #8
Source File: processorspec.py From pyGSTi with Apache License 2.0 | 6 votes |
def get_all_connected_sets(self, n):
    """
    Returns all connected sets of `n` qubits.

    Note that for a large device this will often be an unreasonably
    large number of sets of qubits, and so the run-time of this
    method will be unreasonable.

    Parameters
    ----------
    n : int
        The number of qubits within each set.

    Returns
    -------
    list
        All sets of `n` connected qubits.
    """
    connectedqubits = []
    for combo in _iter.combinations(self.qubit_labels, n):
        if self.qubitgraph.subgraph(list(combo)).are_glob_connected(combo):
            connectedqubits.append(combo)

    return connectedqubits

# Note: Below method gets all subgraphs up to full graph size.
Example #9
Source File: causal_search.py From whynot with MIT License | 6 votes |
def _find_skeleton(self, data, variable_types):
    """
    For each pair of nodes, run a conditional independence test over
    larger and larger conditioning sets to try to find a set that
    d-separates the pair. If such a set exists, cut the edge between
    the nodes. If not, keep the edge.
    """
    self.separating_sets = {}
    if not self.max_k:
        self.max_k = len(self._g.nodes) + 1
    for N in range(self.max_k + 1):
        for (x, y) in list(self._g.edges()):
            x_neighbors = list(self._g.neighbors(x))
            y_neighbors = list(self._g.neighbors(y))
            z_candidates = list(set(x_neighbors + y_neighbors) - set([x, y]))
            for z in itertools.combinations(z_candidates, N):
                test = self.independence_test([y], [x], list(z),
                                              data, self.alpha)
                if test.independent():
                    self._g.remove_edge(x, y)
                    self.separating_sets[(x, y)] = z
                    break
Example #10
Source File: randomcircuit.py From pyGSTi with Apache License 2.0 | 6 votes |
def find_all_sets_of_compatible_twoQgates(edgelist, n, gatename='Gcnot',
                                          aslabel=False):
    """
    todo.

    n : int
        the number of two-qubit gates to have in the set.
    """
    co2Qgates = []

    # Go for all combinations of n two-qubit gates from the edgelist.
    for npairs in _itertools.combinations(edgelist, n):
        # Make a list of the qubits involved in the gates
        flat_list = [item for sublist in npairs for item in sublist]
        # If no qubit is involved in more than one gate we accept the combination
        if len(flat_list) == len(set(flat_list)):
            if aslabel:
                co2Qgates.append([_lbl.Label(gatename, pair) for pair in npairs])
            else:
                co2Qgates.append([gatename + ':' + pair[0] + ':' + pair[1]
                                  for pair in npairs])

    return co2Qgates
Example #11
Source File: nqubitconstruction.py From pyGSTi with Apache License 2.0 | 6 votes |
def connected_combos(self, possible_indices, size):
    count = 0
    for selected_inds in _itertools.combinations(possible_indices, size):
        if self.are_connected(selected_inds):
            count += 1
    return count

# def remove(self, node):
#     """ Remove all references to node """
#     for n, cxns in self._graph.iteritems():
#         try:
#             cxns.remove(node)
#         except KeyError:
#             pass
#     try:
#         del self._graph[node]
#     except KeyError:
#         pass
Example #12
Source File: swap_network_trotter.py From OpenFermion-Cirq with Apache License 2.0 | 6 votes |
def params(self) -> Iterable[sympy.Symbol]:
    """The parameters of the ansatz."""
    for i in range(self.iterations):
        for p in range(len(self.qubits)):
            if (self.include_all_z or not numpy.isclose(
                    self.hamiltonian.one_body[p, p], 0)):
                yield LetterWithSubscripts('U', p, i)
        for p, q in itertools.combinations(range(len(self.qubits)), 2):
            if (self.include_all_xxyy or not numpy.isclose(
                    self.hamiltonian.one_body[p, q].real, 0)):
                yield LetterWithSubscripts('T', p, q, i)
            if (self.include_all_yxxy or not numpy.isclose(
                    self.hamiltonian.one_body[p, q].imag, 0)):
                yield LetterWithSubscripts('W', p, q, i)
            if (self.include_all_cz or not numpy.isclose(
                    self.hamiltonian.two_body[p, q], 0)):
                yield LetterWithSubscripts('V', p, q, i)
Example #13
Source File: configtypes.py From qutebrowser with GNU General Public License v3.0 | 6 votes |
def complete(self) -> _Completions:
    valid_values = self.valtype.valid_values
    if valid_values is None:
        return None

    out = []
    # Single value completions
    for value in valid_values:
        desc = valid_values.descriptions.get(value, "")
        out.append((json.dumps([value]), desc))

    combinables = self.combinable_values
    if combinables is None:
        combinables = list(valid_values)

    # Generate combinations of each possible value combination
    for size in range(2, len(combinables) + 1):
        for combination in itertools.combinations(combinables, size):
            out.append((json.dumps(combination), ''))
    return out
Example #14
Source File: LinearRegression.py From fuku-ml with MIT License | 6 votes |
def init_W(self, mode='normal'):
    self.W = {}

    if (self.status != 'load_train_data') and (self.status != 'train'):
        print("Please load train data first.")
        return self.W

    self.status = 'init'

    self.data_num = len(self.train_Y)
    self.data_demension = len(self.train_X[0])
    self.class_list = list(itertools.combinations(np.unique(self.train_Y), 2))

    for class_item in self.class_list:
        self.W[class_item] = np.zeros(self.data_demension)

    return self.W
Example #15
Source File: build_audio_database.py From speech_separation with MIT License | 6 votes |
def generate_dataset(sample_range, repo_path, num_speaker=2):
    '''
    A function to generate dataset
    :param sample_range: range of the sample to create the dataset
    :param repo_path: audio repository
    :param num_speaker: number of speakers to separate
    :return: X_data, y_data
    '''
    audio_path_list = generate_path_list(sample_range, repo_path)
    num_data = 0
    combinations = itertools.combinations(audio_path_list, num_speaker)
    for combo in combinations:
        num_data += 1
        generate_mix_sample(combo, num_speaker)
    print('number of the data generated: ', num_data)
Example #16
Source File: SupportVectorMachine.py From fuku-ml with MIT License | 6 votes |
def init_W(self, mode='normal'):
    self.W = {}

    if (self.status != 'load_train_data') and (self.status != 'train'):
        print("Please load train data first.")
        return self.W

    self.status = 'init'

    self.data_num = len(self.train_Y)
    self.data_demension = len(self.train_X[0])
    self.class_list = list(itertools.combinations(np.unique(self.train_Y), 2))

    for class_item in self.class_list:
        self.W[class_item] = np.zeros(self.data_demension)

    return self.W
Example #17
Source File: KernelRidgeRegression.py From fuku-ml with MIT License | 6 votes |
def init_W(self, mode='normal'):
    self.W = {}

    if (self.status != 'load_train_data') and (self.status != 'train'):
        print("Please load train data first.")
        return self.W

    self.status = 'init'

    self.data_num = len(self.train_Y)
    self.data_demension = len(self.train_X[0])
    self.class_list = list(itertools.combinations(np.unique(self.train_Y), 2))

    for class_item in self.class_list:
        self.W[class_item] = np.zeros(self.data_demension)

    return self.W
Example #18
Source File: twisst.py From twisst with GNU General Public License v3.0 | 6 votes |
def makeTopoDict(taxonNames, topos=None, outgroup=None):
    output = {}
    output["topos"] = allTopos(taxonNames, []) if topos is None else topos
    if outgroup:
        for topo in output["topos"]:
            topo.set_outgroup(outgroup)
    output["n"] = len(output["topos"])
    pairs = list(itertools.combinations(taxonNames, 2))
    pairsNumeric = list(itertools.combinations(range(len(taxonNames)), 2))
    output["pairsOfPairs"] = [y for y in itertools.combinations(pairs, 2)
                              if pairsDisjoint(y[0], y[1])]
    output["pairsOfPairsNumeric"] = [y for y in itertools.combinations(pairsNumeric, 2)
                                     if pairsDisjoint(y[0], y[1])]
    output["chainsDisjoint"] = []
    for tree in output["topos"]:
        rootLeafChains = makeRootLeafChainDict(tree)
        leafLeafChains = makeLeafLeafChainDict(rootLeafChains, pairs)
        for pair in pairs:
            leafLeafChains[pair[0]][pair[1]].setSet()
        output["chainsDisjoint"].append(
            checkDisjointChains(leafLeafChains, output["pairsOfPairs"]))
    return output
Example #19
Source File: RidgeRegression.py From fuku-ml with MIT License | 6 votes |
def init_W(self, mode='normal'):
    self.W = {}

    if (self.status != 'load_train_data') and (self.status != 'train'):
        print("Please load train data first.")
        return self.W

    self.status = 'init'

    self.data_num = len(self.train_Y)
    self.data_demension = len(self.train_X[0])
    self.class_list = list(itertools.combinations(np.unique(self.train_Y), 2))

    for class_item in self.class_list:
        self.W[class_item] = np.zeros(self.data_demension)

    return self.W
Example #20
Source File: test_pyeclib_api.py From pyeclib with BSD 2-Clause "Simplified" License | 5 votes |
def test_greedy_decode_reconstruct_combination(self):
    # the testing spec is defined at the get_pyeclib_testspec() method;
    # if you want to test other parameters or backends, you can add
    # the spec you want to test there.
    pyeclib_drivers = self.get_pyeclib_testspec()
    orig_data = os.urandom(1024 ** 2)
    for pyeclib_driver in pyeclib_drivers:
        encoded = pyeclib_driver.encode(orig_data)
        # make all fragments into (index, frag_data) format to feed
        # to combinations
        frags = [(i, frag) for i, frag in enumerate(encoded)]
        num_frags = pyeclib_driver.k + pyeclib_driver.m
        if pyeclib_driver.ec_type == PyECLib_EC_Types.flat_xor_hd:
            # flat_xor_hd is guaranteed to work with 2 or 3 failures
            tolerable_failures = pyeclib_driver.hd - 1
        else:
            # ... while others can tolerate more
            tolerable_failures = pyeclib_driver.m
        for check_frags_tuples in combinations(
                frags, num_frags - tolerable_failures):
            # extract check_frags_tuples from [(index, data bytes), ...]
            # to [index, index, ...] and [data bytes, data bytes, ...]
            indexes, check_frags = zip(*check_frags_tuples)
            decoded = pyeclib_driver.decode(check_frags)
            self.assertEqual(
                orig_data, decoded,
                "assertion fail in decode %s from:%s" %
                (pyeclib_driver, indexes))
            holes = [index for index in range(num_frags)
                     if index not in indexes]
            for hole in holes:
                reconed = pyeclib_driver.reconstruct(
                    check_frags, [hole])[0]
                self.assertEqual(
                    frags[hole][1], reconed,
                    "assertion fail in reconstruct %s target:%s "
                    "from:%s" % (pyeclib_driver, hole, indexes))
Example #21
Source File: test_constr_crp.py From cgpm with Apache License 2.0 | 5 votes |
def test_all_enemies():
    N, alpha = 13, 1.4
    Cd = []
    Ci = list(itertools.combinations(range(N), 2))
    Rd = Ri = {}
    Z = gu.simulate_crp_constrained(
        N, alpha, Cd, Ci, Rd, Ri, rng=gu.gen_rng(0))
    assert vu.validate_crp_constrained_partition(Z, Cd, Ci, Rd, Ri)
Example #22
Source File: test_dependence_constraints.py From cgpm with Apache License 2.0 | 5 votes |
def test_naive_bayes_independence():
    rng = gu.gen_rng(1)
    D = rng.normal(size=(10, 1))
    T = np.repeat(D, 10, axis=1)
    Ci = list(itertools.combinations(range(10), 2))
    state = State(T, cctypes=['normal'] * 10, Ci=Ci, rng=rng)
    state.transition(N=10, progress=0)
    vu.validate_crp_constrained_partition(state.Zv(), [], Ci, {}, {})
Example #23
Source File: validation.py From cgpm with Apache License 2.0 | 5 votes |
def validate_crp_constrained_partition(Zv, Cd, Ci, Rd, Ri):
    """Only tests the outer CRP partition Zv."""
    valid = True
    N = len(Zv)
    for block in Cd:
        valid = valid and all(Zv[block[0]] == Zv[b] for b in block)
        for a, b in it.combinations(block, 2):
            valid = valid and check_compatible_customers(Cd, Ci, Ri, Rd, a, b)
    for a, b in Ci:
        valid = valid and not Zv[a] == Zv[b]
    return valid
Example #24
Source File: multiclass.py From MKLpy with GNU General Public License v3.0 | 5 votes |
def _generate_tasks(self, Y):
    tasks = {}
    for cp, cn in combinations(self.classes_, 2):
        idx_pos = [i for i, y in enumerate(Y) if y == cp]
        idx_neg = [i for i, y in enumerate(Y) if y == cn]
        tasks[(cp, cn)] = {'idx_pos': idx_pos, 'idx_neg': idx_neg}
    return tasks
Example #25
Source File: genetic_space.py From deep_architect with MIT License | 5 votes |
def generate_stage(stage_num, num_nodes, filters, filter_size):
    h_connections = [
        Bool(name='%d_in_%d_%d' % (stage_num, in_id, out_id))
        for (in_id, out_id) in itertools.combinations(range(1, num_nodes + 1), 2)
    ]

    return genetic_stage(
        lambda: mo.siso_sequential([
            conv2d(D([filters]), D([filter_size])),
            batch_normalization(),
            relu()
        ]),
        lambda num_inputs: intermediate_node_fn(num_inputs, filters),
        lambda num_inputs: intermediate_node_fn(num_inputs, filters),
        h_connections, num_nodes)
Example #26
Source File: evaluation.py From Clustering with MIT License | 5 votes |
def count_correct_pairs(cluster, labels_lookup):
    """
    Given a cluster, count the number of pairs that belong to the same
    label and the total number of pairs.
    """
    total_pairs = 0
    correct_pairs = 0
    pairs = combinations(cluster, 2)
    for f1, f2 in pairs:
        if labels_lookup[f1] == labels_lookup[f2]:
            correct_pairs += 1
        total_pairs += 1
    return correct_pairs, total_pairs
Example #27
Source File: causal_search.py From whynot with MIT License | 5 votes |
def _orient_colliders(self):
    for v_i, v_j in self._g.edges():
        self._g[v_i][v_j]["arrows"] = []
    for v_c in self._g.nodes():
        for (v_a, v_b) in itertools.combinations(self._g.neighbors(v_c), 2):
            if not self._g.has_edge(v_a, v_b):
                if v_c not in self.separating_set(v_a, v_b):
                    self._g[v_a][v_c]["arrows"].append(v_c)
                    self._g[v_b][v_c]["arrows"].append(v_c)
Example #28
Source File: test_client.py From pylxd with Apache License 2.0 | 5 votes |
def test_events_type_filter(self):
    """The websocket client can filter events by type."""
    an_client = client.Client()

    # from the itertools recipes documentation
    def powerset(types):
        from itertools import chain, combinations
        pwset = [combinations(types, r) for r in range(len(types) + 1)]
        return chain.from_iterable(pwset)

    event_path = '/1.0/events'
    for types in powerset(client.EventType):
        ws_client = an_client.events(event_types=set(types))
        actual_resource = parse.urlparse(ws_client.resource)
        expect_resource = parse.urlparse(event_path)
        if types and client.EventType.All not in types:
            type_csl = ','.join([t.value for t in types])
            query = parse.parse_qs(expect_resource.query)
            query.update({'type': type_csl})
            qs = parse.urlencode(query)
            expect_resource = expect_resource._replace(query=qs)

        self.assertEqual(expect_resource.path, actual_resource.path)

        if types and client.EventType.All not in types:
            qdict = parse.parse_qs(expect_resource.query)
            expect_types = set(qdict['type'][0].split(','))
            qdict = parse.parse_qs(actual_resource.query)
            actual_types = set(qdict['type'][0].split(','))
            self.assertEqual(expect_types, actual_types)
        else:
            self.assertEqual(expect_resource.query, actual_resource.query)
Example #29
Source File: causal_search.py From whynot with MIT License | 5 votes |
def _build_g(self, variable_types):
    """
    This initializes a complete graph over the variables. We'll run
    independence tests on the complete graph to cut edges by trying
    to find separating sets.
    """
    self._g = nx.Graph()
    self._g.add_nodes_from(variable_types.keys())
    for var, var_type in variable_types.items():
        self._g.nodes[var]["type"] = var_type
    edges_to_add = []
    for (node_a, node_b) in itertools.combinations(self._g.nodes.keys(), 2):
        edges_to_add.append((node_a, node_b))
    self._g.add_edges_from(edges_to_add, marked=False)
Example #30
Source File: causal_search.py From whynot with MIT License | 5 votes |
def _apply_recursion_rule_1(self):
    added_arrows = False
    for c in self._g.nodes():
        for (a, b) in itertools.combinations(self._g.neighbors(c), 2):
            if not self._g.has_edge(a, b):
                if (
                    c in self._g[a][c]["arrows"]
                    and c not in self._g[b][c]["arrows"]
                    and not (
                        b in self._g[b][c]["arrows"]
                        and self._g[b][c]["marked"]
                    )
                ):
                    self._g[b][c]["arrows"].append(b)
                    self._g[b][c]["marked"] = True
                    added_arrows = True
                if (
                    c in self._g[b][c]["arrows"]
                    and c not in self._g[a][c]["arrows"]
                    and not (
                        a in self._g[a][c]["arrows"]
                        and self._g[a][c]["marked"]
                    )
                ):
                    self._g[a][c]["arrows"].append(a)
                    self._g[a][c]["marked"] = True
                    added_arrows = True
    return added_arrows