it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet Java Examples
The following examples show how to use
it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet.
You can vote up the ones you like or vote down the ones you don't like,
and follow the links above each example to visit the original project or source file. You may also check out the related API usage on the sidebar.
Example #1
Source File: FastutilSetHelper.java From presto with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") public static Set<?> toFastutilHashSet(Set<?> set, Type type, Metadata metadata) { // 0.25 as the load factor is chosen because the argument set is assumed to be small (<10000), // and the return set is assumed to be read-heavy. // The performance of InCodeGenerator heavily depends on the load factor being small. Class<?> javaElementType = type.getJavaType(); if (javaElementType == long.class) { return new LongOpenCustomHashSet((Collection<Long>) set, 0.25f, new LongStrategy(metadata, type)); } if (javaElementType == double.class) { return new DoubleOpenCustomHashSet((Collection<Double>) set, 0.25f, new DoubleStrategy(metadata, type)); } if (javaElementType == boolean.class) { return new BooleanOpenHashSet((Collection<Boolean>) set, 0.25f); } else if (!type.getJavaType().isPrimitive()) { return new ObjectOpenCustomHashSet<>(set, 0.25f, new ObjectStrategy(metadata, type)); } else { throw new UnsupportedOperationException("Unsupported native type in set: " + type.getJavaType() + " with type " + type.getTypeSignature()); } }
Example #2
Source File: ORDERLhsRhs.java From metanome-algorithms with Apache License 2.0 | 6 votes |
private Set<byte[]> extend(final byte[] lhs, final byte[] rhs) { final Set<byte[]> extendedRhsCandidates = new ObjectOpenCustomHashSet<>(ByteArrays.HASH_STRATEGY); if (lhs.length + rhs.length > this.level - 1) { // extended candidates would have more columns than this level allows, // return empty extended candidates return extendedRhsCandidates; } for (final byte[] singleColumn : this.singleColumnPermutations) { if (ByteArrayPermutations.disjoint(singleColumn, lhs) && ByteArrayPermutations.disjoint(singleColumn, rhs)) { extendedRhsCandidates.add(ByteArrayPermutations.join(rhs, singleColumn)); } } return extendedRhsCandidates; }
Example #3
Source File: Session.java From parity with Apache License 2.0 | 5 votes |
Session(SocketChannel channel, OrderBooks books) { this.transport = new SoupBinTCPServer(channel, POE.MAX_INBOUND_MESSAGE_LENGTH, new POEServerParser(this), this); this.orders = new Object2ObjectOpenCustomHashMap<>(HASH_STRATEGY); this.orderIds = new ObjectOpenCustomHashSet<>(HASH_STRATEGY); this.books = books; this.terminated = false; }
Example #4
Source File: FastutilSetHelper.java From presto with Apache License 2.0 | 4 votes |
public static boolean in(Object objectValue, ObjectOpenCustomHashSet<?> set) { return set.contains(objectValue); }
Example #5
Source File: CallGraphGenerator.java From fasten with Apache License 2.0 | 4 votes |
/** Generates <code>np</code> call graphs. Each call graph is obtained using {@link #preferentialAttachmentDAG(int, int, IntegerDistribution, RandomGenerator)} (with
 * specified initial graph size (<code>initialGraphSizeDistribution</code>), graph size (<code>graphSizeDistribution</code>), outdegree distribution (<code>outdegreeDistribution</code>).
 * Then a dependency DAG is generated between the call graphs, once more using {@link #preferentialAttachmentDAG(int, int, IntegerDistribution, RandomGenerator)} (this
 * time the initial graph size is 1, whereas the outdegree distribution is <code>outdegreeDistribution</code>).
 * Then to each node of each call graph a new set of outgoing arcs is generated (their number is chosen using <code>externalOutdegreeDistribution</code>): the target
 * call graph is generated using the indegree distribution of the dependency DAG; the target node is chosen according to the reverse indegree distribution within the revision call graph.
 *
 * @param np number of revision call graphs to be generated.
 * @param graphSizeDistribution the distribution of the graph sizes (number of functions per call graph).
 * @param initialGraphSizeDistribution the distribution of the initial graph sizes (the initial independent set from which the preferential attachment starts).
 * @param outdegreeDistribution the distribution of internal outdegrees (number of internal calls per function).
 * @param externalOutdegreeDistribution the distribution of external outdegrees (number of external calls per function).
 * @param depExponent exponent of the Zipf distribution used to establish the dependencies between call graphs.
 * @param random the random object used for the generation.
 */
public void generate(final int np, final IntegerDistribution graphSizeDistribution, final IntegerDistribution initialGraphSizeDistribution,
        final IntegerDistribution outdegreeDistribution, final IntegerDistribution externalOutdegreeDistribution,
        final IntegerDistribution dependencyOutdegreeDistribution, final RandomGenerator random) {
    // Per-revision state: call graphs, node permutations, preferential samplers,
    // dependency sets and external-call targets.
    rcgs = new ArrayListMutableGraph[np];
    nodePermutation = new int[np][];
    final FenwickTree[] td = new FenwickTree[np];
    deps = new IntOpenHashSet[np];
    source2Targets = new ObjectOpenCustomHashSet[np];
    // Generate rcg of the np revisions, and the corresponding reverse preferential distribution; cumsize[i] is the sum of all nodes in packages <i
    for ( int i = 0; i < np; i++) {
        deps[i] = new IntOpenHashSet();
        // Initial graph size is clamped to the total size so n0 <= n always holds.
        final int n = graphSizeDistribution.sample();
        final int n0 = Math.min(initialGraphSizeDistribution.sample(), n);
        rcgs[i] = preferentialAttachmentDAG(n, n0, outdegreeDistribution, random);
        // Reverse-indegree sampler for picking target nodes inside this revision.
        td[i] = getPreferentialDistribution(rcgs[i].immutableView(), true);
        // Random relabeling of the revision's nodes, seeded from the shared generator
        // so the whole run stays reproducible.
        nodePermutation[i] = Util.identity(n);
        Collections.shuffle(IntArrayList.wrap(nodePermutation[i]), new Random(random.nextLong()));
    }
    // Generate the dependency DAG between revisions using preferential attachment starting from 1 node
    final ArrayListMutableGraph depDAG = preferentialAttachmentDAG(np, 1, dependencyOutdegreeDistribution, random);
    // For each source package, generate function calls so to cover all dependencies
    for (int sourcePackage = 0; sourcePackage < np; sourcePackage++) {
        source2Targets[sourcePackage] = new ObjectOpenCustomHashSet<>(IntArrays.HASH_STRATEGY);
        final int outdegree = depDAG.outdegree(sourcePackage);
        if (outdegree == 0) continue; // No calls needed (I'm kinda busy)
        final int numFuncs = rcgs[sourcePackage].numNodes();
        final int[] externalArcs = new int[numFuncs];
        int allExternalArcs = 0;
        // We decide how many calls to dispatch from each function
        for (int sourceNode = 0; sourceNode < numFuncs; sourceNode++)
            allExternalArcs += (externalArcs[sourceNode] = externalOutdegreeDistribution.sample());
        // We create a global list of external successors by shuffling
        final int[] targetPackage = new int[allExternalArcs];
        final int[] succ = depDAG.successorArray(sourcePackage);
        for(int i = 0; i < outdegree; i++) deps[sourcePackage].add(succ[i]);
        // Round-robin over the dependency successors guarantees each dependency is
        // covered at least once (when allExternalArcs >= outdegree); the shuffle then
        // randomizes which function issues which call.
        for(int i = 0; i < allExternalArcs; i++) targetPackage[i] = succ[i % outdegree];
        MathArrays.shuffle(targetPackage, random);
        // allExternalArcs is deliberately reused here as a running offset into
        // targetPackage -- do not reorder these statements.
        for (int sourceNode = allExternalArcs = 0; sourceNode < numFuncs; sourceNode++) {
            final int externalOutdegree = externalArcs[sourceNode];
            for (int t = 0; t < externalOutdegree; t++) {
                // Sampler returns 1-based indices; convert to a 0-based node id.
                final int targetNode = td[targetPackage[allExternalArcs + t]].sample(random) - 1;
                source2Targets[sourcePackage].add(new int[] { sourceNode, targetPackage[allExternalArcs + t], targetNode });
            }
            allExternalArcs += externalOutdegree;
        }
    }
}
Example #6
Source File: ORDERLhsRhs.java From metanome-algorithms with Apache License 2.0 | 4 votes |
/**
 * Runs the ORDER lhs/rhs order-dependency detection: sets up the partition cache,
 * initializes the level-1 candidate lattice from single columns, then iterates
 * compute-dependencies / prune / generate-next-level until no candidates remain.
 *
 * @throws AlgorithmExecutionException when the table exceeds MAX_NUM_COLS or a
 *         lower layer fails
 */
@Override public void execute() throws AlgorithmExecutionException {
    super.execute();
    // Sorted partitions are expensive to build; cache them, loading lazily from
    // the singleton partitions of each key's columns.
    this.partitionsCache = CacheBuilder.newBuilder().maximumSize(this.partitionCacheSize)
        .build(new CacheLoader<ByteArray, SortedPartition>() {
            @Override public SortedPartition load(final ByteArray key) throws Exception {
                return ORDERLhsRhs.this.createPartitionFromSingletons(key.data);
            }
        });
    this.logger.info("Running order dependency detection algorithm (lhs and rhs) on table: " + this.tableName + " (" + this.numRows + " rows, " + this.columnIndices.length + " columns)");
    // Column permutations are encoded as byte[]; a byte per column caps the width.
    if (this.columnNames.size() > MAX_NUM_COLS) {
        throw new AlgorithmConfigurationException("You provided a table with " + this.columnNames.size() + " columns. This order dependency detection algorithm supports a maximum of " + MAX_NUM_COLS + " columns.");
    }
    // All candidate/result maps key on byte[] content, hence the custom strategy.
    this.levelCandidates = new ArrayList<>();
    this.currentRhsCandidateSet = new Object2ObjectOpenCustomHashMap<>(ByteArrays.HASH_STRATEGY);
    this.singleColumnPermutations = new ArrayList<>();
    this.validDependencies = new Object2ObjectOpenCustomHashMap<>(ByteArrays.HASH_STRATEGY);
    // initialize level 1
    for (final int lhsColumnIndex : this.columnIndices) {
        final byte[] singleLhsColumnPermutation = {(byte) lhsColumnIndex};
        // columns with only equal values mean that any OD with that lhs holds (under "<")
        // (but there exists no minimal n-ary OD that has lhs on its lhs or rhs,
        // i.e., they can be removed)
        if (this.permutationToPartition.get(singleLhsColumnPermutation).size() == 1) {
            // Emit every lhs -> rhs pair for this constant column immediately and
            // exclude it from further lattice exploration.
            for (final int rhsColumnIndex : this.columnIndices) {
                final byte[] rhs = new byte[] {(byte) rhsColumnIndex};
                if (Arrays.equals(singleLhsColumnPermutation, rhs)) {
                    continue;
                }
                this.statistics.setNumFoundDependencies(this.statistics.getNumFoundDependencies() + 1);
                this.statistics.setNumFoundDependenciesInPreCheck(this.statistics.getNumFoundDependenciesInPreCheck() + 1);
                this.signalFoundOrderDependency(singleLhsColumnPermutation, rhs, ComparisonOperator.STRICTLY_SMALLER, OrderType.LEXICOGRAPHICAL);
            }
            continue;
        }
        this.singleColumnPermutations.add(singleLhsColumnPermutation);
        this.levelCandidates.add(singleLhsColumnPermutation);
        this.currentRhsCandidateSet.put(singleLhsColumnPermutation, new ObjectOpenCustomHashSet<>(ByteArrays.HASH_STRATEGY));
    }
    // Seed each level-1 lhs candidate with every disjoint single-column rhs.
    for (final byte[] levelCandidate : this.levelCandidates) {
        for (final byte[] singleColumnRhsCandidate : this.singleColumnPermutations) {
            if (!ByteArrayPermutations.disjoint(levelCandidate, singleColumnRhsCandidate)) {
                continue;
            }
            this.currentRhsCandidateSet.get(levelCandidate).add(singleColumnRhsCandidate);
        }
    }
    // Apriori-style level-wise search until the candidate set empties out.
    this.level = 1;
    while (!this.levelCandidates.isEmpty()) {
        this.computeDependencies();
        this.prune();
        this.logger.debug("Candidate set: {}", this.prettyPrintCurrentRhsCandidates());
        this.generateNextLevel();
        this.logger.info("Statistics after generating level " + (this.level + 1) + " for table #" + this.tableName + "#: " + this.statistics.toString());
        this.level++;
    }
    this.logger.info("Statistics (FINAL) for table #" + this.tableName + "#: " + this.statistics.toString());
}
Example #7
Source File: ORDERLhsRhs.java From metanome-algorithms with Apache License 2.0 | 4 votes |
private void generateNextLevel() { long time = System.currentTimeMillis(); this.previousLevelCandidates = this.levelCandidates; this.levelCandidates = new ArrayList<>(); this.buildPrefixBlocks(); System.gc(); this.logger.debug("PREFIX_BLOCKS in level {}: {} ", Integer.valueOf(this.level), this.prettyPrintPrefixBlocks()); for (final List<byte[]> candidatesWithSamePrefix : this.prefixBlocks.values()) { if (candidatesWithSamePrefix.size() < 2) { break; } for (final byte[] candidate : candidatesWithSamePrefix) { for (final byte[] candidateForJoin : candidatesWithSamePrefix) { if (Arrays.equals(candidate, candidateForJoin)) { continue; } final byte[] joinedCandidate = ByteArrayPermutations.join(candidate, candidateForJoin); this.levelCandidates.add(joinedCandidate); } } } this.logger.debug("Generated level {}", Integer.valueOf(this.level + 1)); this.logger.debug("Level {} candidates: {}", Integer.valueOf(this.level + 1), ByteArrayPermutations.permutationListToIntegerString(this.levelCandidates)); if (this.level > 1 && !this.levelCandidates.isEmpty()) { for (final byte[] newLhs : this.previousLevelCandidates) { this.logger .debug( "After generating level {}. Adding {} as an lhs to the candidate set for the next level.", Integer.valueOf(this.level + 1), ByteArrayPermutations.permutationToIntegerString(newLhs)); this.currentRhsCandidateSet.put(newLhs, new ObjectOpenCustomHashSet<byte[]>( ByteArrays.HASH_STRATEGY)); } } time = System.currentTimeMillis() - time; this.statistics.setGenNextTime(this.statistics.getGenNextTime() + time); }