Java Code Examples for gnu.trove.map.hash.TIntObjectHashMap#containsKey()
The following examples show how to use gnu.trove.map.hash.TIntObjectHashMap#containsKey().
Each example is taken from an open-source project; the source file, originating project, and license are noted above each snippet.
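Before the project examples, a minimal self-contained sketch (the class name and values are made up for illustration) shows the basic call: containsKey takes a primitive int key, and get returns null for a key that is not present.

import gnu.trove.map.hash.TIntObjectHashMap;

public class ContainsKeyDemo {
    public static void main(String[] args) {
        // keys are primitive ints, so containsKey(int) involves no autoboxing
        TIntObjectHashMap<String> names = new TIntObjectHashMap<>();
        names.put(42, "answer");

        System.out.println(names.containsKey(42)); // true
        System.out.println(names.containsKey(7));  // false
        System.out.println(names.get(7));          // null for a missing key
    }
}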
Example 1
Source File: Cluster.java, from metanome-algorithms, Apache License 2.0
public TIntObjectHashMap<Cluster> refineBy(int column, int[][] values) {
    TIntObjectHashMap<Cluster> map = new TIntObjectHashMap<>();
    for (TIntIterator iter = array.iterator(); iter.hasNext();) {
        int line = iter.next();
        int c = values[line][column];
        // rows that share a value in 'column' end up in the same cluster
        if (map.containsKey(c)) {
            map.get(c).add(line);
        } else {
            Cluster p = new Cluster(line);
            map.put(c, p);
        }
    }
    return map;
}
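The containsKey/get/put sequence above does two hash lookups per row. A small variation on the same loop body (a sketch, not code from metanome-algorithms) gets away with one lookup, because TIntObjectHashMap.get returns null for an absent key:

Cluster p = map.get(c);
if (p == null) {
    map.put(c, new Cluster(line)); // the constructor seeds the cluster with this row, as above
} else {
    p.add(line);
}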
Example 2
Source File: DynamicGrid.java, from cineast, MIT License
@Override
public T get(int x, int y) {
    if (!grid.containsKey(x)) {
        return defaultElement;
    }
    TIntObjectHashMap<T> map = grid.get(x);
    if (!map.containsKey(y)) {
        return defaultElement;
    }
    return map.get(y);
}
Example 3
Source File: DynamicGrid.java, from cineast, MIT License
@Override
public T remove(int x, int y) {
    if (!grid.containsKey(x)) {
        return null;
    }
    TIntObjectHashMap<T> map = grid.get(x);
    if (!map.containsKey(y)) {
        return null;
    }
    T _return = map.remove(y);
    if (map.isEmpty()) {
        grid.remove(x);
    }
    return _return;
}
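Examples 2 and 3 read from and remove entries in a two-level grid, a TIntObjectHashMap whose values are themselves TIntObjectHashMap rows. A matching write path, shown here only as a sketch of the pattern rather than as code from the cineast DynamicGrid class, uses containsKey on the outer map to create the inner row lazily:

public void put(int x, int y, T element) {
    // hypothetical counterpart to get(x, y) and remove(x, y); 'grid' mirrors the field above
    if (!grid.containsKey(x)) {
        grid.put(x, new TIntObjectHashMap<T>());
    }
    grid.get(x).put(y, element);
}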
Example 4
Source File: DataAccessForTesting.java, from ambiverse-nlu, Apache License 2.0
@Override
public Keyphrases getEntityKeyphrases(Entities entities, Map<String, Double> keyphraseSourceWeights,
        double minKeyphraseWeight, int maxEntityKeyphraseCount) throws EntityLinkingDataAccessException {
    Keyphrases keyphrases = new Keyphrases();
    TIntObjectHashMap<int[]> eKps = new TIntObjectHashMap<int[]>();
    TIntObjectHashMap<int[]> kpTokens = new TIntObjectHashMap<int[]>();
    getEntityKeyphraseTokens(entities, eKps, kpTokens);
    keyphrases.setEntityKeyphrases(eKps);
    keyphrases.setKeyphraseTokens(kpTokens);
    TIntObjectHashMap<TIntDoubleHashMap> e2kw2mi = new TIntObjectHashMap<TIntDoubleHashMap>();
    keyphrases.setEntityKeywordWeights(e2kw2mi);
    TIntObjectHashMap<TIntDoubleHashMap> e2kp2mi = new TIntObjectHashMap<TIntDoubleHashMap>();
    keyphrases.setEntityKeyphraseWeights(e2kp2mi);
    for (Entity entity : entities) {
        int eId = entity.getId();
        Entities singleEntity = new Entities();
        singleEntity.add(entity);
        int entityCount = getEntitySuperdocSize(singleEntity).get(eId);
        TIntDoubleHashMap kp2mi = new TIntDoubleHashMap();
        e2kp2mi.put(entity.getId(), kp2mi);
        TIntDoubleHashMap kw2mi = new TIntDoubleHashMap();
        e2kw2mi.put(entity.getId(), kw2mi);
        // entities without keyphrases keep their empty weight maps and are skipped
        if (!eKps.containsKey(eId)) {
            continue;
        }
        for (int kp : eKps.get(eId)) {
            TIntHashSet singleKp = new TIntHashSet();
            singleKp.add(kp);
            int kpCount = getKeyphraseDocumentFrequencies(singleKp).get(kp);
            int eKpIcCount = getEntityKeyphraseIntersectionCount(singleEntity).get(eId).get(kp);
            kp2mi.put(kp, WeightComputation.computeNPMI(entityCount, kpCount, eKpIcCount, TOTAL_ENTITY_COUNT));
            for (int kw : kpTokens.get(kp)) {
                TIntHashSet singleKw = new TIntHashSet();
                singleKw.add(kw);
                int kwCount = getKeywordDocumentFrequencies(singleKw).get(kw);
                int eKwIcCount = getEntityKeywordIntersectionCount(singleEntity).get(eId).get(kw);
                kw2mi.put(kw, WeightComputation.computeMI(entityCount, kwCount, eKwIcCount, TOTAL_ENTITY_COUNT, false));
            }
        }
    }
    return keyphrases;
}
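In this test fixture the containsKey(eId) check is what lets entities without any keyphrases fall through with empty weight maps; without it, eKps.get(eId) would return null and the inner loop would throw a NullPointerException.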