Java Code Examples for gnu.trove.THashMap#put()
The following examples show how to use gnu.trove.THashMap#put().
You can go to the original project or source file by following the links above each example.
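Before the project examples below, here is a minimal, self-contained sketch of the call itself (the class name THashMapPutDemo is illustrative, not taken from any of the projects). gnu.trove.THashMap implements java.util.Map, so put() stores a key/value pair and returns the value previously associated with that key, or null if there was none.

import gnu.trove.THashMap;

public class THashMapPutDemo {
  public static void main(String[] args) {
    THashMap<String, Integer> counts = new THashMap<String, Integer>();
    Integer previous = counts.put("apple", 1); // no prior mapping, so put() returns null
    Integer replaced = counts.put("apple", 2); // overwrites the mapping, so put() returns 1
    System.out.println(previous + " " + replaced + " " + counts.get("apple")); // prints: null 1 2
  }
}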
Example 1
Source File: Win32FsCache.java From consulo with Apache License 2.0 | 6 votes |
@Nullable
FileAttributes getAttributes(@Nonnull VirtualFile file) {
  VirtualFile parent = file.getParent();
  int parentId = parent instanceof VirtualFileWithId ? ((VirtualFileWithId)parent).getId() : -((VirtualFileWithId)file).getId();
  TIntObjectHashMap<THashMap<String, FileAttributes>> map = getMap();
  THashMap<String, FileAttributes> nestedMap = map.get(parentId);
  String name = file.getName();
  FileAttributes attributes = nestedMap != null ? nestedMap.get(name) : null;

  if (attributes == null) {
    if (nestedMap != null && !(nestedMap instanceof IncompleteChildrenMap)) {
      return null;  // our info from parent doesn't mention the child in this refresh session
    }
    FileInfo info = myKernel.getInfo(file.getPath());
    if (info == null) {
      return null;
    }

    attributes = info.toFileAttributes();
    if (nestedMap == null) {
      nestedMap = new IncompleteChildrenMap<>(FileUtil.PATH_HASHING_STRATEGY);
      map.put(parentId, nestedMap);
    }
    nestedMap.put(name, attributes);
  }

  return attributes;
}
Example 2
Source File: Win32FsCache.java From consulo with Apache License 2.0 | 6 votes |
@Nonnull
String[] list(@Nonnull VirtualFile file) {
  String path = file.getPath();
  FileInfo[] fileInfo = myKernel.listChildren(path);
  if (fileInfo == null || fileInfo.length == 0) {
    return ArrayUtil.EMPTY_STRING_ARRAY;
  }

  String[] names = new String[fileInfo.length];
  TIntObjectHashMap<THashMap<String, FileAttributes>> map = getMap();
  int parentId = ((VirtualFileWithId)file).getId();
  THashMap<String, FileAttributes> nestedMap = map.get(parentId);
  if (nestedMap == null) {
    nestedMap = new THashMap<String, FileAttributes>(fileInfo.length, FileUtil.PATH_HASHING_STRATEGY);
    map.put(parentId, nestedMap);
  }
  for (int i = 0, length = fileInfo.length; i < length; i++) {
    FileInfo info = fileInfo[i];
    String name = info.getName();
    nestedMap.put(name, info.toFileAttributes());
    names[i] = name;
  }
  return names;
}
Example 3
Source File: FeatureExtractor.java From semafor-semantic-parser with GNU General Public License v3.0 | 6 votes |
public Set<String> getWNRelations(THashMap<String, THashSet<String>> wnCacheMap, String sWord, String tWord, WordNetRelations wnr) {
  String pair = sWord.toLowerCase() + "\t" + tWord.toLowerCase();
  if (wnCacheMap == null) {
    return wnr.getRelations(sWord.toLowerCase(), tWord.toLowerCase());
  }
  else if (!wnCacheMap.contains(pair)) {
    Set<String> relations = wnr.getRelations(sWord.toLowerCase(), tWord.toLowerCase());
    if (relations.contains(WordNetRelations.NO_RELATION))
      return relations;
    else {
      THashSet<String> nR = new THashSet<String>();
      for (String string : relations)
        nR.add(string);
      wnCacheMap.put(pair, nR);
      return relations;
    }
  }
  else {
    return wnCacheMap.get(pair);
  }
}
Example 4
Source File: ProduceLargerFrameDistribution.java From semafor-semantic-parser with GNU General Public License v3.0 | 6 votes |
public static THashMap<String, THashMap<String, Double>> readTrainDistFile(String trainDistFile) {
  THashMap<String, THashMap<String, Double>> result = new THashMap<String, THashMap<String, Double>>();
  ArrayList<String> sents = ParsePreparation.readSentencesFromFile(trainDistFile);
  for (String sent : sents) {
    sent = sent.trim();
    String[] toks = sent.split("\t");
    String pred = toks[0];
    String[] toks1 = toks[1].trim().split(" ");
    THashMap<String, Double> map = new THashMap<String, Double>();
    for (int i = 0; i < toks1.length; i = i + 2) {
      String frame = toks1[i];
      double prob = new Double(toks1[i + 1]);
      map.put(frame, prob);
    }
    result.put(pred, map);
  }
  return result;
}
Example 5
Source File: FeatureExtractor.java From semafor-semantic-parser with GNU General Public License v3.0 | 6 votes |
public String getLowerCaseLemma(THashMap<String, String> lemmaCache, String word, String POS, WordNetRelations wnr) {
  String pair = word + "_" + POS;
  if (lemmaCache == null) {
    return wnr.getLemmaForWord(word, POS).toLowerCase();
  }
  else if (!lemmaCache.contains(pair)) {
    lemmaCache.put(pair, wnr.getLemmaForWord(word, POS).toLowerCase());
    return wnr.getLemmaForWord(word, POS).toLowerCase();
  }
  else {
    return lemmaCache.get(pair);
  }
}
Example 6
Source File: StubForwardIndexExternalizer.java From consulo with Apache License 2.0 | 5 votes |
<K> Map<StubIndexKey, Map<Object, StubIdList>> doRead(@Nonnull DataInput in, @Nullable StubIndexKey<K, ?> requestedIndex, @Nullable K requestedKey) throws IOException {
  if (!myEnsuredStubElementTypesLoaded) {
    ProgressManager.getInstance().executeNonCancelableSection(() -> {
      SerializationManager.getInstance().initSerializers();
      StubIndexImpl.initExtensions();
    });
    myEnsuredStubElementTypesLoaded = true;
  }
  int stubIndicesValueMapSize = DataInputOutputUtil.readINT(in);
  if (stubIndicesValueMapSize > 0) {
    THashMap<StubIndexKey, Map<Object, StubIdList>> stubIndicesValueMap = requestedIndex != null ? null : new THashMap<>(stubIndicesValueMapSize);
    StubIndexImpl stubIndex = (StubIndexImpl)StubIndex.getInstance();
    StubKeySerializationState stubKeySerializationState = createStubIndexKeySerializationState(in, stubIndicesValueMapSize);
    for (int i = 0; i < stubIndicesValueMapSize; ++i) {
      ID<Object, ?> indexKey = (ID<Object, ?>)readStubIndexKey(in, stubKeySerializationState);
      if (indexKey instanceof StubIndexKey) { // indexKey can be ID in case of removed index
        StubIndexKey<Object, ?> stubIndexKey = (StubIndexKey<Object, ?>)indexKey;
        boolean deserialize = requestedIndex == null || requestedIndex.equals(stubIndexKey);
        if (deserialize) {
          Map<Object, StubIdList> value = stubIndex.deserializeIndexValue(in, stubIndexKey, requestedKey);
          if (requestedIndex != null) {
            return Collections.singletonMap(requestedIndex, value);
          }
          stubIndicesValueMap.put(stubIndexKey, value);
        }
        else {
          stubIndex.skipIndexValue(in);
        }
      }
    }
    return stubIndicesValueMap;
  }
  return Collections.emptyMap();
}
Example 7
Source File: ScrapTest.java From semafor-semantic-parser with GNU General Public License v3.0 | 5 votes |
public static THashMap<String, Integer> getSpansWithHeads(String file) {
  THashMap<String, Integer> spans = new THashMap<String, Integer>();
  THashMap<String, Integer> countMap = new THashMap<String, Integer>();
  try {
    String line = null;
    BufferedReader bReader = new BufferedReader(new FileReader(file));
    while ((line = bReader.readLine()) != null) {
      String[] toks = line.trim().split("\t");
      char first = toks[0].charAt(0);
      if ((first >= 'a' && first <= 'z') || (first >= 'A' && first <= 'Z') || (first >= '0' && first <= '9')) {
        String word = toks[0].trim();
        int count = new Integer(toks[2].trim());
        if (countMap.contains(word)) {
          if (countMap.get(word) < count) {
            countMap.put(word, count);
            spans.put(word, new Integer(toks[1].trim()));
          }
        }
        else {
          countMap.put(word, count);
          spans.put(word, new Integer(toks[1].trim()));
        }
      }
    }
    bReader.close();
  } catch (Exception e) {
    e.printStackTrace();
  }
  System.out.println("Size of spans:" + spans.size());
  return spans;
}
Example 8
Source File: TIntObjectObjectHashMap.java From semafor-semantic-parser with GNU General Public License v3.0 | 5 votes |
// Two-level put: look up (or lazily create) the nested THashMap for the int key,
// then delegate to THashMap#put, which returns the previous value for that key.
public K put(int one, V two, K val) {
  THashMap<V, K> firstVal = map.get(one);
  if (firstVal == null) {
    firstVal = new THashMap<V, K>();
    map.put(one, firstVal);
  }
  return firstVal.put(two, val);
}
Example 9
Source File: Decoding.java From semafor-semantic-parser with GNU General Public License v3.0 | 5 votes |
public static THashMap<String, String> getOnlyOverlappingFes(THashMap<String, String> map) {
  Set<String> keys = map.keySet();
  THashMap<String, String> tempMap = new THashMap<String, String>();
  for (String fe : keys) {
    String span = map.get(fe);
    if (!span.equals("-1_-1")) {
      tempMap.put(fe, span);
    }
  }
  return tempMap;
}
Example 10
Source File: WordNetRelationsCache.java From semafor-semantic-parser with GNU General Public License v3.0 | 5 votes |
public static void createMap() {
  String textFile = "/mal2/dipanjan/experiments/FramenetParsing/framenet_1.3/ddData/hierWnCache";
  String wnRelationsMapFile = "/mal2/dipanjan/experiments/FramenetParsing/framenet_1.3/ddData/hierWnCacheRelations.ser";
  THashMap<String, THashSet<String>> map = new THashMap<String, THashSet<String>>();
  try {
    BufferedReader bReader = new BufferedReader(new FileReader(textFile));
    String line = null;
    int count = 0;
    while ((line = bReader.readLine()) != null) {
      String[] toks = line.split("\t");
      String hiddenWord = toks[0].trim();
      String actualWord = toks[1].trim();
      String relations = toks[2].trim();
      String key = hiddenWord + "\t" + actualWord;
      String[] relationToks = relations.split(" ");
      THashSet<String> relSet = new THashSet<String>();
      for (int i = 0; i < relationToks.length; i++) {
        relSet.add(relationToks[i]);
      }
      map.put(key, relSet);
      if (count % 1000 == 0)
        System.out.println(count);
      count++;
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
  SerializedObjects.writeSerializedObject(map, wnRelationsMapFile);
}
Example 11
Source File: OptimizeMapReduce.java From semafor-semantic-parser with GNU General Public License v3.0 | 5 votes |
public void optimize() {
  THashMap<String, Double> paramList = new THashMap<String, Double>();
  try {
    String localInitParamFile = mTmpDir + "/initParamFile.txt";
    BufferedReader bReader = new BufferedReader(new FileReader(localInitParamFile));
    String pattern = null;
    while ((pattern = bReader.readLine()) != null) {
      StringTokenizer st = new StringTokenizer(pattern.trim(), "\t");
      String paramName = st.nextToken().trim();
      String rest = st.nextToken().trim();
      String[] arr = rest.split(",");
      double value = new Double(arr[0].trim());
      boolean sign = new Boolean(arr[1].trim());
      LDouble val = new LDouble(value, sign);
      paramList.put(paramName, val.exponentiate());
    }
    bReader.close();
    runCustomLBFGS(paramList);
  } catch (Exception e) {
    e.printStackTrace();
  }
}
Example 12
Source File: LRIdentificationModelHadoop.java From semafor-semantic-parser with GNU General Public License v3.0 | 5 votes |
public THashMap<String, LDouble> getAllGradients(TObjectDoubleHashMap<String> paramMap) {
  THashMap<String, LDouble> gradientMap = new THashMap<String, LDouble>();
  String[] keys = new String[paramMap.size()];
  paramMap.keys(keys);
  int len = keys.length;
  for (int i = 0; i < len; i++) {
    int paramIndex = localA.get(keys[i]);
    LDouble gradient = G[paramIndex];
    gradientMap.put(keys[i], gradient);
  }
  return gradientMap;
}
Example 13
Source File: LRIdentificationModelSingleNode.java From semafor-semantic-parser with GNU General Public License v3.0 | 5 votes |
public THashMap<String, LDouble> getAllGradients(TObjectDoubleHashMap<String> paramMap) {
  THashMap<String, LDouble> gradientMap = new THashMap<String, LDouble>();
  String[] keys = new String[paramMap.size()];
  paramMap.keys(keys);
  int len = keys.length;
  for (int i = 0; i < len; i++) {
    int paramIndex = localA.get(keys[i]);
    LDouble gradient = G[paramIndex];
    gradientMap.put(keys[i], gradient);
  }
  return gradientMap;
}
Example 14
Source File: FixTokenization.java From semafor-semantic-parser with GNU General Public License v3.0 | 5 votes |
public static THashMap<String, String> readConversions(String file) {
  ArrayList<String> list = ParsePreparation.readSentencesFromFile(file);
  int size = list.size();
  THashMap<String, String> map = new THashMap<String, String>();
  for (int i = 0; i < size; i++) {
    StringTokenizer st = new StringTokenizer(list.get(i), "\t");
    String american = st.nextToken();
    String british = st.nextToken();
    map.put(british, american);
  }
  return map;
}
Example 15
Source File: LexicalUnitsFrameExtraction.java From semafor-semantic-parser with GNU General Public License v3.0 | 5 votes |
/**
 * From flat-text files with frame element occurrence information,
 * creates a map from frame names to a set of names of frame element
 * (argument) roles observed for that frame with overt fillers
 * and stores that map as a serialized object
 * @author Nathan Schneider (nschneid)
 */
public static void writeMapOfFrameElements() {
  String[] filePrefixes = {"lrdata/framenet.original", "lrdata/semeval.fulltrain", "lrdata/semeval.fulldev", "lrdata/semeval.fulltest"};

  for (int j = 0; j < filePrefixes.length; j++) { // treat framenet.original, semeval.fulltrain, and semeval.fulldev separately
    String filePrefix = filePrefixes[j];
    String trainFrameElementsFile = filePrefix + ".sentences.frame.elements";
    String trainParseFile = filePrefix + ".sentences.all.tags";

    ArrayList<String> frameElementLines = ParsePreparation.readSentencesFromFile(trainFrameElementsFile);
    ArrayList<String> frameParseLines = ParsePreparation.readSentencesFromFile(trainParseFile);

    THashMap<String, THashSet<String>> framesToFEs = new THashMap<String, THashSet<String>>(/*new TObjectIdentityHashingStrategy<String>()*/);

    for (int l = 0; l < frameElementLines.size() - 1; l++) {
      String frameElementsLine = frameElementLines.get(l);
      int sentenceNum = Integer.parseInt(frameElementsLine.split("\t")[5]);
      // TODO: above should be = DataPointWithElements.parseFrameNameAndSentenceNum(frameElementsLine).getSecond();
      String frameParseLine = frameParseLines.get(sentenceNum);

      DataPointWithElements dp = new DataPointWithElements(frameParseLine, frameElementsLine);

      if (!framesToFEs.containsKey(dp.getFrameName())) {
        framesToFEs.put(dp.getFrameName(), new THashSet<String>(/*new TObjectIdentityHashingStrategy<String>()*/));
      }
      String[] feNames = dp.getOvertFilledFrameElementNames();
      for (String feName : feNames)
        framesToFEs.get(dp.getFrameName()).add(feName);
    }

    System.out.println(framesToFEs.size());

    String mapFile = filePrefix + ".frame.elements.map";
    SerializedObjects.writeSerializedObject(framesToFEs, mapFile);
  }
}
Example 16
Source File: LexicalUnitsFrameExtraction.java From semafor-semantic-parser with GNU General Public License v3.0 | 5 votes |
/**
 * Populates the given map object with frames (as keys) and sets of target words that evoke
 * those frames in the given corresponding sentences (as values)
 * @param map
 * @param frames
 * @param sentences
 * @author dipanjan
 */
public static void fillMap(THashMap<String, THashSet<String>> map, ArrayList<String> frames, ArrayList<String> sentences) {
  int size = frames.size();
  for (int i = 0; i < size; i++) {
    String line = frames.get(i);
    String[] toks = line.split("\t");
    int sentenceNum = new Integer(toks[toks.length - 1]);
    String storedSentence = sentences.get(sentenceNum);
    String frame = toks[0];
    String tokenNums = toks[2];
    String[] nums = tokenNums.split("_");
    String lexicalUnits = getTokens(storedSentence, nums);
    THashSet<String> set = map.get(frame);
    if (set == null) {
      set = new THashSet<String>();
      set.add(lexicalUnits);
      map.put(frame, set);
    }
    else {
      if (!set.contains(lexicalUnits)) {
        set.add(lexicalUnits);
      }
    }
  }
}
Example 17
Source File: RequiredDataCreation.java From semafor-semantic-parser with GNU General Public License v3.0 | 4 votes |
public static THashMap<String, THashSet<String>> getHVCorrespondence(FNModelOptions options) {
  String fmFile = options.frameNetMapFile.get();
  String wnConfigFile = options.wnConfigFile.get();
  String stopFile = options.stopWordsFile.get();
  String hvCorrespondenceFile = options.hvCorrespondenceFile.get();
  THashMap<String, THashSet<String>> frameMap = (THashMap<String, THashSet<String>>) SerializedObjects.readSerializedObject(fmFile);
  WordNetRelations wnr = new WordNetRelations(stopFile, wnConfigFile);
  THashMap<String, THashSet<String>> cMap = new THashMap<String, THashSet<String>>();
  Set<String> keySet = frameMap.keySet();
  for (String frame : keySet) {
    THashSet<String> hus = frameMap.get(frame);
    for (String hUnit : hus) {
      String[] hiddenToks = hUnit.split(" ");
      String hiddenUnitTokens = "";
      String hiddenUnitLemmas = "";
      for (int i = 0; i < hiddenToks.length; i++) {
        String[] arr = hiddenToks[i].split("_");
        hiddenUnitTokens += arr[0] + " ";
        hiddenUnitLemmas += wnr.getLemmaForWord(arr[0], arr[1]).toLowerCase() + " ";
      }
      hiddenUnitTokens = hiddenUnitTokens.trim();
      hiddenUnitLemmas = hiddenUnitLemmas.trim();
      THashSet<String> frames = cMap.get(hiddenUnitLemmas);
      if (frames == null) {
        frames = new THashSet<String>();
        frames.add(frame);
        cMap.put(hiddenUnitLemmas, frames);
      }
      else {
        frames.add(frame);
      }
      System.out.println("Processed:" + hiddenUnitLemmas);
    }
  }
  SerializedObjects.writeSerializedObject(cMap, hvCorrespondenceFile);
  return cMap;
}
Example 18
Source File: WordNetRelations.java From semafor-semantic-parser with GNU General Public License v3.0 | 4 votes |
private THashMap<String, Set<String>> collapseFinerRelations(Map<RelationType, Set<String>> rel) {
  THashMap<String, Set<String>> result = new THashMap<String, Set<String>>();
  THashSet<String> identity = new THashSet<String>();
  THashSet<String> synonym = new THashSet<String>();
  THashSet<String> antonym = new THashSet<String>();
  THashSet<String> hypernym = new THashSet<String>();
  THashSet<String> hyponym = new THashSet<String>();
  THashSet<String> derivedForm = new THashSet<String>();
  THashSet<String> morphSet = new THashSet<String>();
  THashSet<String> verbGroup = new THashSet<String>();
  THashSet<String> entailment = new THashSet<String>();
  THashSet<String> entailedBy = new THashSet<String>();
  THashSet<String> seeAlso = new THashSet<String>();
  THashSet<String> causalRelation = new THashSet<String>();
  THashSet<String> sameNumber = new THashSet<String>();

  identity.addAll(rel.get(RelationType.idty));
  synonym.addAll(rel.get(RelationType.synm));
  synonym.addAll(rel.get(RelationType.syn2));
  antonym.addAll(rel.get(RelationType.antm));
  antonym.addAll(rel.get(RelationType.extd));
  antonym.addAll(rel.get(RelationType.indi));
  hypernym.addAll(rel.get(RelationType.hype));
  hyponym.addAll(rel.get(RelationType.hypo));
  derivedForm.addAll(rel.get(RelationType.derv));
  morphSet.addAll(rel.get(RelationType.morph));
  verbGroup.addAll(rel.get(RelationType.vgrp));
  entailment.addAll(rel.get(RelationType.entl));
  entailedBy.addAll(rel.get(RelationType.entlby));
  seeAlso.addAll(rel.get(RelationType.alsoc));
  causalRelation.addAll(rel.get(RelationType.cause));

  if (sourceWord == null) {
    System.out.println("Problem. Source Word Null. Exiting");
    System.exit(0);
  }
  if (sourceWord.charAt(0) >= '0' && sourceWord.charAt(0) <= '9')
    sameNumber.add(sourceWord);

  result.put("identity", identity);
  result.put("synonym", synonym);
  result.put("antonym", antonym);
  result.put("hypernym", hypernym);
  result.put("hyponym", hyponym);
  result.put("derived-form", derivedForm);
  result.put("morph", morphSet);
  result.put("verb-group", verbGroup);
  result.put("entailment", entailment);
  result.put("entailed-by", entailedBy);
  result.put("see-also", seeAlso);
  result.put("causal-relation", causalRelation);
  result.put("same-number", sameNumber);

  return result;
}