Java Code Examples for com.google.common.collect.Multiset#elementSet()
The following examples show how to use com.google.common.collect.Multiset#elementSet(). Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
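Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below; the class and variable names are illustrative) of what elementSet() provides: a view containing each distinct element of the multiset exactly once, with occurrence counts still available through count().

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class ElementSetDemo {
    public static void main(String[] args) {
        // A Multiset keeps duplicates; elementSet() exposes only the distinct elements
        Multiset<String> words = HashMultiset.create();
        words.add("apple");
        words.add("apple");
        words.add("banana");

        // Prints each distinct word once, together with its count
        for (String word : words.elementSet()) {
            System.out.println(word + " occurs " + words.count(word) + " time(s)");
        }
    }
}

Note that elementSet() returns a live view of the backing multiset: removing an element from the returned set removes all occurrences of that element from the multiset.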
Example 1
Source File: GroupingDocumentTransformer.java From datawave with Apache License 2.0
protected BaseQueryResponse createGroupedResponse(Multiset<Collection<FieldBase<?>>> multiset) {
    Map<String,String> markings = Maps.newHashMap();
    EventQueryResponseBase response = this.responseObjectFactory.getEventQueryResponse();
    List<EventBase> events = new ArrayList<>();
    for (Collection<FieldBase<?>> entry : multiset.elementSet()) {
        EventBase event = this.responseObjectFactory.getEvent();
        event.setMarkings(markings);
        List<FieldBase<?>> fields = new ArrayList<>(entry);
        FieldBase<?> counter = this.responseObjectFactory.getField();
        counter.setName("COUNT");
        counter.setMarkings(markings);
        counter.setValue(multiset.count(entry));
        counter.setTimestamp(0L);
        fields.add(counter);
        event.setFields(fields);
        events.add(event);
    }
    response.setEvents(events);
    response.setTotalEvents((long) events.size());
    response.setReturnedEvents((long) events.size());
    return response;
}
Example 2
Source File: Schema.java From registry with Apache License 2.0
private static Field parseField(String fieldName, Object fieldValue) throws ParserException {
    Field field = null;
    Type fieldType = fromJavaType(fieldValue);
    if (fieldType == Type.NESTED) {
        field = new NestedField(fieldName, parseFields((Map<String, Object>) fieldValue));
    } else if (fieldType == Type.ARRAY) {
        Multiset<Field> members = parseArray((List<Object>) fieldValue);
        Set<Field> fieldTypes = members.elementSet();
        if (fieldTypes.size() > 1) {
            field = new ArrayField(fieldName, new ArrayList<>(members));
        } else if (fieldTypes.size() == 1) {
            field = new ArrayField(fieldName, new ArrayList<>(members.elementSet()));
        } else {
            throw new IllegalArgumentException("Array should have at least one element");
        }
    } else {
        field = new Field(fieldName, fieldType);
    }
    return field;
}
Example 3
Source File: ObservableMultisetWrapper.java From gef with Eclipse Public License 2.0
@Override
public boolean retainAll(Collection<?> collection) {
    Multiset<E> previousContents = delegateCopy();
    boolean changed = super.retainAll(collection);
    if (changed) {
        List<ElementarySubChange<E>> elementaryChanges = new ArrayList<>();
        // collection may contain element multiple times; as we only want to
        // notify once per element, we have to iterate over the set of
        // unique elements
        for (E e : previousContents.elementSet()) {
            if (!collection.contains(e)) {
                elementaryChanges.add(new ElementarySubChange<>(e, previousContents.count(e), 0));
            }
        }
        helper.fireValueChangedEvent(
                new MultisetListenerHelper.AtomicChange<>(this, previousContents, elementaryChanges));
    }
    return changed;
}
Example 4
Source File: ObservableMultisetWrapper.java From gef with Eclipse Public License 2.0
@Override
public void clear() {
    Multiset<E> previousContents = delegateCopy();
    super.clear();
    if (!previousContents.isEmpty()) {
        List<ElementarySubChange<E>> elementaryChanges = new ArrayList<>();
        for (E e : previousContents.elementSet()) {
            elementaryChanges.add(new ElementarySubChange<>(e, previousContents.count(e), 0));
        }
        helper.fireValueChangedEvent(
                new MultisetListenerHelper.AtomicChange<>(this, previousContents, elementaryChanges));
    }
}
Example 5
Source File: SourceDirectoryCalculator.java From intellij with Apache License 2.0
@Nullable
private static <T> T pickMostFrequentlyOccurring(Multiset<T> set, String prefer) {
    T best = null;
    int bestCount = 0;
    for (T candidate : set.elementSet()) {
        int candidateCount = set.count(candidate);
        if (candidateCount > bestCount || (candidateCount == bestCount && candidate.equals(prefer))) {
            best = candidate;
            bestCount = candidateCount;
        }
    }
    return best;
}
Example 6
Source File: AbstractMemoryOccStore.java From termsuite-core with Apache License 2.0
@Override
public List<Form> getForms(Term term) {
    Multiset<String> texts = HashMultiset.create();
    for (TermOccurrence o : getOccurrences(term)) {
        if (o.getCoveredText() != null)
            texts.add(o.getCoveredText());
    }
    List<Form> forms = new ArrayList<>();
    for (String distinctText : texts.elementSet())
        forms.add(new Form(distinctText).setCount(texts.count(distinctText)));
    Collections.sort(forms);
    return forms;
}
Example 7
Source File: PartitionTest.java From sequence-mining with GNU General Public License v3.0
@Test
public void testInterleavingGenerator() {
    final Random random = new Random(1);
    final Random randomI = new Random(10);
    final RandomGenerator randomC = new JDKRandomGenerator();
    randomC.setSeed(100);

    final Multiset<Sequence> seqsI = HashMultiset.create();
    seqsI.add(new Sequence(1, 2, 3));
    seqsI.add(new Sequence(4, 5));
    seqsI.add(new Sequence(6));
    seqsI.add(new Sequence(7));

    final HashMap<Sequence, Double> seqsG = new HashMap<>();
    for (final Sequence seq : seqsI.elementSet()) {
        seqsG.put(seq, 1.0);
    }

    final Map<Sequence, EnumeratedIntegerDistribution> countDists = new HashMap<>();
    final EnumeratedIntegerDistribution oneRepeat = new EnumeratedIntegerDistribution(randomC,
            new int[] { 1 }, new double[] { 1.0 });
    countDists.put(new Sequence(1, 2, 3), oneRepeat);
    countDists.put(new Sequence(4, 5), oneRepeat);
    countDists.put(new Sequence(6), oneRepeat);
    countDists.put(new Sequence(7), oneRepeat);

    final HashSet<Transaction> transG = new HashSet<>();
    for (int i = 0; i < 700000; i++)
        transG.add(
                TransactionGenerator.sampleFromDistribution(random, seqsG, countDists, new HashMap<>(), randomI));

    // Note that upper bound is exact when there are no repetitions
    assertEquals(transG.size(), modP(seqsI.iterator()), EPS);
}
Example 8
Source File: TokenVector.java From tassal with BSD 3-Clause "New" or "Revised" License
/** Calculates tf-idf weights for snippet */
public TokenVector(final Multiset<String> termFreqs) {
    tf.addAll(termFreqs);
    for (final String term : termFreqs.elementSet()) {
        maxFreq = Math.max(maxFreq, tf.count(term));
    }
    if (tf.size() > 0) {
        avgFreq = ((double) tf.size()) / tf.elementSet().size();
    }
    calcWeights();
}
Example 9
Source File: NestedRandomTest.java From log-synth with Apache License 2.0
@Test
public void testUniqueAndRepeatable() {
    Multiset<Integer> samples = HashMultiset.create();
    for (int seed = 0; seed < 20; seed++) {
        NestedRandom origin = new NestedRandom(seed);
        addSamples(samples, origin.get("a"));
        addSamples(samples, origin.get("b"));
        addSamples(samples, origin.get("c"));
        addSamples(samples, origin.get("a"));
        addSamples(samples, origin.get("b"));
        addSamples(samples, origin.get("c"));
    }
    Multiset<Integer> counts = HashMultiset.create();
    for (Integer x : samples.elementSet()) {
        counts.add(samples.count(x));
    }
    Set<Integer> observedCounts = counts.elementSet();
    assertEquals(2, observedCounts.size());
    assertTrue(observedCounts.contains(2));
    assertTrue(observedCounts.contains(4));
    assertTrue(counts.count(4) < 1000);
    assertTrue(counts.count(2) > 2e6);
}
Example 10
Source File: WeakKeySet.java From businessworks with Apache License 2.0
public Set<Object> getSources(Key<?> key) {
    evictionCache.cleanUp();
    Multiset<Object> sources = (backingMap == null) ? null : backingMap.get(key);
    return (sources == null) ? null : sources.elementSet();
}
Example 11
Source File: MultisetExpression.java From gef with Eclipse Public License 2.0
@Override
public Set<E> elementSet() {
    final Multiset<E> multiset = get();
    return (multiset == null) ? EMPTY_MULTISET.elementSet() : multiset.elementSet();
}
Example 12
Source File: TreeBoundedEncoder.java From angelix with MIT License
/**
 * @return output variable, synthesis constraints and encoding information
 */
public Triple<Variable, Pair<List<Node>, List<Node>>, EncodingInfo> encode(Shape shape, Multiset<Node> components) {
    List<Node> uniqueComponents = new ArrayList<>(components.elementSet());
    ExpressionOutput root = new ExpressionOutput(shape.getOutputType());
    // top level -> current level
    Map<Expression, Expression> initialForbidden =
            shape.getForbidden().stream().collect(Collectors.toMap(Function.identity(), Function.identity()));

    Optional<EncodingInfo> result;
    if (shape instanceof RepairShape && ((RepairShape) shape).getLevel() == CONDITIONAL) {
        result = encodeConditional(root, ((RepairShape) shape).getOriginal(), uniqueComponents, initialForbidden);
    } else {
        result = encodeBranch(root, shape, uniqueComponents, initialForbidden);
    }

    if (!result.isPresent()) {
        throw new RuntimeException("wrong synthesis configuration");
    }

    List<Node> hard = new ArrayList<>();
    List<Node> soft = new ArrayList<>();

    // choice synthesis constraints:
    hard.addAll(result.get().clauses);

    // branch activation constraints:
    for (Map.Entry<Variable, List<Selector>> entry : result.get().nodeChoices.entrySet()) {
        if (!entry.getValue().isEmpty()) {
            Node precondition;
            if (result.get().branchDependencies.containsKey(entry.getKey())) {
                precondition = Node.disjunction(result.get().branchDependencies.get(entry.getKey()));
            } else {
                precondition = BoolConst.TRUE;
            }
            hard.add(new Impl(precondition, Node.disjunction(entry.getValue())));
        }
    }

    // forbidden constraints:
    for (List<List<Selector>> selectors : result.get().forbiddenSelectors.values()) {
        if (!selectors.isEmpty()) {
            hard.add(Node.disjunction(selectors.stream()
                    .map(l -> Node.conjunction(l.stream().map(Not::new).collect(Collectors.toList())))
                    .collect(Collectors.toList())));
        }
    }

    // uniqueness constraints:
    if (uniqueUsage) {
        for (Node component : components.elementSet()) {
            if (result.get().componentUsage.containsKey(component)) {
                hard.addAll(Cardinality.SortingNetwork.atMostK(components.count(component),
                        result.get().componentUsage.get(component)));
            }
        }
    }

    soft.addAll(result.get().originalSelectors);

    return new ImmutableTriple<>(root, new ImmutablePair<>(hard, soft), result.get());
}
Example 13
Source File: App1.java From pyramid with Apache License 2.0
static Set<Ngram> gather(Config config, ESIndex index, String[] ids, Logger logger) throws Exception {
    File metaDataFolder = new File(config.getString("output.folder"), "meta_data");
    metaDataFolder.mkdirs();

    Multiset<Ngram> allNgrams = ConcurrentHashMultiset.create();
    List<Integer> ns = config.getIntegers("train.feature.ngram.n");
    double minDf = config.getDouble("train.feature.ngram.minDf");
    int minDFrequency = (int) Math.floor(ids.length * minDf);
    List<String> fields = config.getStrings("train.feature.ngram.extractionFields");
    List<Integer> slops = config.getIntegers("train.feature.ngram.slop");
    boolean inorder = config.getBoolean("train.feature.ngram.inOrder");
    boolean allowDuplicates = config.getBoolean("train.feature.ngram.allowDuplicateWords");

    for (String field : fields) {
        for (int n : ns) {
            for (int slop : slops) {
                logger.info("gathering " + n + "-grams from field " + field + " with slop " + slop + " and minDf " + minDf
                        + ", (actual frequency threshold = " + minDFrequency + ")");
                NgramTemplate template = new NgramTemplate(field, n, slop);
                Multiset<Ngram> ngrams = NgramEnumerator.gatherNgram(index, ids, template, minDFrequency);
                logger.info("gathered " + ngrams.elementSet().size() + " ngrams");
                int newCounter = 0;
                for (Multiset.Entry<Ngram> entry : ngrams.entrySet()) {
                    Ngram ngram = entry.getElement();
                    ngram.setInOrder(inorder);
                    int count = entry.getCount();
                    if (interesting(allNgrams, ngram, count)) {
                        if (allowDuplicates) {
                            allNgrams.add(ngram, count);
                            newCounter += 1;
                        } else {
                            if (!ngram.hasDuplicate()) {
                                allNgrams.add(ngram, count);
                                newCounter += 1;
                            }
                        }
                    }
                }
                logger.info(newCounter + " are really new");
            }
        }
    }
    logger.info("there are " + allNgrams.elementSet().size() + " ngrams in total");

//        BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(new File(metaDataFolder,"all_ngrams.txt")));
//        for (Multiset.Entry<Ngram> ngramEntry: allNgrams.entrySet()){
//            bufferedWriter.write(ngramEntry.getElement().toString());
//            bufferedWriter.write("\t");
//            bufferedWriter.write(""+ngramEntry.getCount());
//            bufferedWriter.newLine();
//        }
//
//        bufferedWriter.close();
//
//        //for serialization
//        Set<Ngram> uniques = new HashSet<>();
//        uniques.addAll(allNgrams.elementSet());
//        Serialization.serialize(uniques, new File(metaDataFolder, "all_ngrams.ser"));

    return allNgrams.elementSet();
}
Example 14
Source File: SingularitySlaveAndRackManager.java From Singularity with Apache License 2.0
private boolean isRackOk(
    Multiset<String> countPerRack,
    String sanitizedRackId,
    int numDesiredInstances,
    String requestId,
    String slaveId,
    String host,
    double numCleaningOnSlave
) {
    int racksAccountedFor = countPerRack.elementSet().size();
    int activeRacksWithCapacityCount = getActiveRacksWithCapacityCount();
    double numPerRack = numDesiredInstances / (double) activeRacksWithCapacityCount;
    if (racksAccountedFor < activeRacksWithCapacityCount) {
        if (countPerRack.count(sanitizedRackId) < Math.max(numPerRack, 1)) {
            return true;
        }
    } else {
        Integer rackMin = null;
        for (String rackId : countPerRack.elementSet()) {
            if (rackMin == null || countPerRack.count(rackId) < rackMin) {
                rackMin = countPerRack.count(rackId);
            }
        }
        if (rackMin == null || rackMin < (int) numPerRack) {
            if (countPerRack.count(sanitizedRackId) < (int) numPerRack) {
                return true;
            }
        } else if (countPerRack.count(sanitizedRackId) < numPerRack) {
            return true;
        }
    }

    LOG.trace(
        "Rejecting RackSensitive task {} from slave {} ({}) due to numOnRack {} and cleaningOnSlave {}",
        requestId,
        slaveId,
        host,
        countPerRack.count(sanitizedRackId),
        numCleaningOnSlave
    );
    return false;
}