Java Code Examples for com.carrotsearch.hppc.IntIntOpenHashMap#containsKey()
The following examples show how to use
com.carrotsearch.hppc.IntIntOpenHashMap#containsKey() .
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: SpanMergingEvaluatorDecorator.java From gerbil with GNU Affero General Public License v3.0 | 5 votes |
/**
 * Merges spans that are fully enclosed by other spans into their enclosing span.
 *
 * <p>The spans are sorted with {@code this} as the {@link java.util.Comparator}
 * (assumes the enclosing decorator implements {@code Comparator<Span>} — the sort
 * order is expected to put enclosing spans after the spans they enclose; TODO confirm).
 * For every span that is enclosed by a later span, the pair is recorded and the two
 * are merged; spans that are not enclosed by anything are kept as-is.
 *
 * @param spans the spans to merge (returned unchanged if no span encloses another)
 * @return the merged list, or the original {@code spans} list when nothing was merged
 */
@SuppressWarnings("unchecked") protected List<T> merge(List<T> spans) {
    Span spanArray[] = spans.toArray(new Span[spans.size()]);
    // Sort using this object as the comparator.
    Arrays.sort(spanArray, this);
    // Maps index of an enclosed span -> index of the (larger) span enclosing it.
    IntIntOpenHashMap enclosedByMap = new IntIntOpenHashMap();
    boolean isEnclosed;
    for (int i = 0; i < spanArray.length; ++i) {
        isEnclosed = false;
        // Scan from the end; stop as soon as one enclosing span has been found.
        for (int j = spanArray.length - 1; (j > i) && (!isEnclosed); --j) {
            // if spanArray[i] is enclosed by spanArray[j]
            if ((spanArray[i].getStartPosition() >= spanArray[j].getStartPosition())
                    && ((spanArray[i].getStartPosition() + spanArray[i].getLength()) <= (spanArray[j]
                            .getStartPosition() + spanArray[j].getLength()))) {
                enclosedByMap.put(i, j);
                isEnclosed = true;
            }
        }
    }
    // if no match could be found, nothing has to be merged
    if (enclosedByMap.size() == 0) {
        return spans;
    }
    List<T> mergedMarkings = new ArrayList<T>(spans.size());
    // starting with the smallest span, check if a span is enclosed by
    // another
    int largerSpanId;
    for (int i = 0; i < spanArray.length; ++i) {
        if (enclosedByMap.containsKey(i)) {
            // hppc idiom: lget() returns the value for the key passed to the
            // immediately preceding containsKey() call — do not reorder these two calls.
            largerSpanId = enclosedByMap.lget();
            spanArray[largerSpanId] = merge(spanArray[i], spanArray[largerSpanId]);
        } else {
            mergedMarkings.add((T) spanArray[i]);
        }
    }
    return mergedMarkings;
}
Example 2
Source File: GeneralizationHierarchy.java From arx with Apache License 2.0 | 5 votes |
/** * Throws an exception, if the hierarchy is not monotonic. * * @param manager */ public void checkMonotonicity(DataManager manager) { // Obtain dictionary String[] dictionary = null; String[] header = manager.getDataGeneralized().getHeader(); for (int i=0; i<header.length; i++) { if (header[i].equals(attribute)) { dictionary = manager.getDataGeneralized().getDictionary().getMapping()[i]; } } // Check if (dictionary==null) { throw new IllegalStateException("Cannot obtain dictionary for attribute ("+attribute+")"); } // Level value -> level+1 value final IntIntOpenHashMap hMap = new IntIntOpenHashMap(); // Input->level->output. for (int level = 0; level < (map[0].length - 1); level++) { hMap.clear(); for (int i = 0; i < map.length; i++) { final int outputCurrentLevel = map[i][level]; final int outputNextLevel = map[i][level + 1]; if (hMap.containsKey(outputCurrentLevel)) { final int compare = hMap.get(outputCurrentLevel); if (compare != outputNextLevel) { String in = dictionary[outputCurrentLevel]; String out1 = dictionary[compare]; String out2 = dictionary[outputNextLevel]; throw new IllegalArgumentException("The transformation rule for the attribute '" + attribute + "' is not a hierarchy. ("+in+") can either be transformed to ("+out1+") or to ("+out2+")"); } } else { hMap.put(outputCurrentLevel, outputNextLevel); } } } }
Example 3
Source File: WindowSupportingLuceneCorpusAdapter.java From Palmetto with GNU Affero General Public License v3.0 | 4 votes |
/**
 * Collects, for the given word, the token positions in every document that
 * contains it, and records the length of each such document.
 *
 * <p>Results are accumulated into the shared maps: {@code positionsInDocs} maps a
 * global document id to an array (one slot per query word) of position lists, and
 * {@code docLengths} maps a global document id to the document's length read from
 * the field {@code docLengthFieldName}.
 *
 * @param word the surface form to look up in the index field {@code fieldName}
 * @param positionsInDocs global doc id -> per-word position lists (updated in place)
 * @param docLengths global doc id -> document length (updated in place)
 * @param wordId the slot of this word inside the per-document positions array
 * @param numberOfWords total number of query words (size of the positions array)
 */
protected void requestDocumentsWithWord(String word,
        IntObjectOpenHashMap<IntArrayList[]> positionsInDocs, IntIntOpenHashMap docLengths,
        int wordId, int numberOfWords) {
    DocsAndPositionsEnum docPosEnum = null;
    Term term = new Term(fieldName, word);
    int localDocId, globalDocId, baseDocId;
    IntArrayList positions[];
    try {
        // One pass over every index segment/reader
        for (int i = 0; i < reader.length; i++) {
            docPosEnum = reader[i].termPositionsEnum(term);
            // Segment-local doc ids are offset by the segment's base to get global ids
            baseDocId = contexts[i].docBase;
            if (docPosEnum != null) {
                while (docPosEnum.nextDoc() != DocsEnum.NO_MORE_DOCS) {
                    localDocId = docPosEnum.docID();
                    globalDocId = localDocId + baseDocId;
                    // if this is the first word and we found a new document
                    if (!positionsInDocs.containsKey(globalDocId)) {
                        positions = new IntArrayList[numberOfWords];
                        positionsInDocs.put(globalDocId, positions);
                    } else {
                        positions = positionsInDocs.get(globalDocId);
                    }
                    if (positions[wordId] == null) {
                        positions[wordId] = new IntArrayList();
                    }
                    // Go through the positions inside this document.
                    // NOTE(review): freq() is re-evaluated on every iteration; Lucene
                    // requires exactly freq() calls to nextPosition() per document —
                    // presumably freq() is stable within one document, so this is safe.
                    for (int p = 0; p < docPosEnum.freq(); ++p) {
                        positions[wordId].add(docPosEnum.nextPosition());
                    }
                    if (!docLengths.containsKey(globalDocId)) {
                        // Get the length of the document from its stored length field
                        docLengths.put(globalDocId,
                                reader[i].document(localDocId).getField(docLengthFieldName)
                                        .numericValue().intValue());
                    }
                }
            }
        }
    } catch (IOException e) {
        // Best-effort: log and return with whatever has been collected so far
        LOGGER.error("Error while requesting documents for word \"" + word + "\".", e);
    }
}