gnu.trove.map.hash.TObjectIntHashMap Java Examples
The following examples show how to use gnu.trove.map.hash.TObjectIntHashMap. Each example is taken from an open-source project; the source file, project, and license are noted above the code.
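Before the project examples, a minimal usage sketch may help orient the reader: it shows the constructor that takes an initial capacity, load factor, and custom no-entry value, the adjustOrPutValue counting idiom, and the advance()-based iterator that the snippets below rely on. The class name WordCountSketch and its sample data are illustrative assumptions, not taken from any of the listed projects.

    import gnu.trove.iterator.TObjectIntIterator;
    import gnu.trove.map.hash.TObjectIntHashMap;

    public class WordCountSketch {

        public static void main(String[] args) {
            // Initial capacity 10, load factor 0.5f, and -1 as the "no entry" value
            // that get() returns when a key is absent.
            TObjectIntHashMap<String> counts = new TObjectIntHashMap<>(10, 0.5f, -1);

            // adjustOrPutValue(key, adjustAmount, putAmount): add 1 if the key exists,
            // otherwise insert it with the value 1 -- the usual counting idiom.
            for (String word : new String[]{"a", "b", "a"}) {
                counts.adjustOrPutValue(word, 1, 1);
            }

            System.out.println(counts.get("a"));       // 2
            System.out.println(counts.get("missing")); // -1, the no-entry value

            // Trove maps are iterated with advance()/key()/value(), not Map.Entry.
            for (TObjectIntIterator<String> it = counts.iterator(); it.hasNext(); ) {
                it.advance();
                System.out.println(it.key() + " -> " + it.value());
            }
        }
    }

If no no-entry value is specified, int-valued Trove maps default to 0, which is why some examples below (e.g. Example #13) treat a returned id of 0 as "key not found".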
Example #1
Source File: MetaQTL4ExecutionTask.java From systemsgenetics with GNU General Public License v3.0

    public MetaQTL4ExecutionTask(int nrThreads, long[] randomizationSeeds, ArrayList<MetaQTL4MetaTrait> availableTraits,
            TObjectIntHashMap<MetaQTL4MetaTrait> availableTraitsHash, MetaQTL4Dataset[] datasets,
            GeneticVariant[][] geneticVariantIndex, MetaQTL4Settings m_settings, MetaQTL4TraitAnnotation traitAnnotation,
            Integer[][] traitIndex, Set<MetaQTL4MetaTrait> traitsToInclude, Set<String> variantsToInclude,
            int start, int stop, int bufferSize, CompletionService resultPool) {
        this.randomizationSeeds = randomizationSeeds;
        this.availableTraits = availableTraits;
        this.availableTraitsHash = availableTraitsHash;
        this.datasets = datasets;
        this.geneticVariantIndex = geneticVariantIndex;
        this.m_settings = m_settings;
        this.traitAnnotation = traitAnnotation;
        this.traitIndex = traitIndex;
        this.traitsToInclude = traitsToInclude;
        this.variantsToInclude = variantsToInclude;
        this.start = start;
        this.stop = stop;
        this.bufferSize = bufferSize;
        this.resultPool = resultPool;
    }
Example #2
Source File: RailNetwork.java From Signals with GNU General Public License v3.0

    /**
     * Build the network from the stored rail objects, if it wasn't loaded already
     */
    public RailNetwork<TPos> build(){
        if(railPosToRailSections == null) {
            synchronized(this) {
                if(railPosToRailSections == null) {
                    railPosToRailSections = new HashMap<>();
                    allEdges = new HashSet<>();
                    allSections = new HashSet<>();
                    railLinkPosToDelays = new TObjectIntHashMap<TPos>();
                    railPosToRailEdges = new HashMap<>();
                    positionsToEdgesBackward = ArrayListMultimap.create();
                    buildRailSections(); //TODO rail section and edge building can be done in parallel? No MC dependences or interdependencies.
                    Set<RailEdge<TPos>> allEdges = buildRoughRailEdges();
                    mergeCrossingEdges(allEdges).forEach(edge -> addEdge(edge));
                    buildStationNames();
                    buildRailLinkToDelayMap();
                    onAfterBuild();
                }
            }
        }
        return this;
    }
Example #3
Source File: Graph.java From ambiverse-nlu with Apache License 2.0

    public Graph(String name, int nodesCount, double alpha) {
        this.name = name;
        this.nodesCount = nodesCount;
        this.alpha = alpha;
        nodes = new GraphNode[nodesCount];
        mentionNodesIds = new TObjectIntHashMap<Mention>();
        entityNodesIds = new TIntIntHashMap();
        localSimilarities = new HashMap<Mention, TIntDoubleHashMap>();
        mentionPriorSimL1 = new TIntDoubleHashMap(50, 0.5f, -1, -1.0);
        isRemoved = new boolean[nodesCount];
        Arrays.fill(isRemoved, false);
        nodesOutdegrees = new int[nodesCount];
        Arrays.fill(nodesOutdegrees, 0);
        nodesWeightedDegrees = new double[nodesCount];
        Arrays.fill(nodesWeightedDegrees, 0);
        removalSteps = new TIntArrayList();
    }
Example #4
Source File: DataAccessKeyValueStore.java From ambiverse-nlu with Apache License 2.0

    public TObjectIntHashMap<String> getAllWordIds() throws EntityLinkingDataAccessException {
        TObjectIntHashMap<String> wordIds;
        try {
            wordIds = new TObjectIntHashMap<>();
            DatabaseKeyValueStore db = DatabaseKeyValueStore.WORD_IDS_WORD;
            if(db.getPartitions() != 1) {
                throw new IllegalArgumentException("Multiple partitions not supported for this key-value store");
            }
            Codec codec = DataAccessKeyValueStoreHandler.singleton().getCodec(db);
            KeyValueStore<byte[], byte[]> keyValueStore = DataAccessKeyValueStoreHandler.singleton().getKeyValueStore(db);
            KeyValueStore.EntryIterator it = keyValueStore.entryIterator();
            while (it.hasNext()) {
                KeyValueStore.Entry<byte[], byte[]> entry = it.next();
                String key = (String) codec.decodeKey(entry.getKey());
                Integer value = (Integer) codec.decodeValue(entry.getValue());
                wordIds.put(key, value);
            }
        } catch (Exception e) {
            throw new EntityLinkingDataAccessException(e);
        }
        return wordIds;
    }
Example #5
Source File: PileupEntry.java From systemsgenetics with GNU General Public License v3.0

    public PileupEntry(String chr, int pos, Allele refAllele, int readDepth, String basesString,
            String basesQualityString, int minimumBaseQuality) throws PileupParseException {
        this.chr = chr;
        this.pos = pos;
        this.refAllele = refAllele;
        this.readDepth = readDepth;
        this.alleleCounts = new TObjectIntHashMap<Allele>();
        this.alleleAverageQualities = new TObjectDoubleHashMap<Allele>();
        this.minimumBaseQuality = minimumBaseQuality;
        alleleCounts.put(Allele.A, 0);
        alleleCounts.put(Allele.C, 0);
        alleleCounts.put(Allele.G, 0);
        alleleCounts.put(Allele.T, 0);
        alleleAverageQualities.put(Allele.A, 0);
        alleleAverageQualities.put(Allele.C, 0);
        alleleAverageQualities.put(Allele.G, 0);
        alleleAverageQualities.put(Allele.T, 0);
        if (!alleleCounts.containsKey(refAllele)) {
            throw new PileupParseException("Error parsing pipeup entry");
        }
        int[] basesQuality = basesQualityString == null ? null : parseBasesQualityString(basesQualityString);
        parseBasesString(basesString, basesQuality);
    }
Example #6
Source File: AbstractAttributeClustering.java From JedAIToolkit with Apache License 2.0

    private void buildAttributeModels(int datasetId, List<EntityProfile> profiles) {
        attrNameIndex = new TObjectIntHashMap<>();
        profiles.forEach((profile) -> {
            profile.getAttributes().forEach((attribute) -> {
                attrNameIndex.putIfAbsent(attribute.getName(), attrNameIndex.size() + 1);
            });
        });
        int currentAttributes = attrNameIndex.size();
        attributeModels[datasetId] = new ITextModel[currentAttributes];
        final TObjectIntIterator<String> it = attrNameIndex.iterator();
        while (it.hasNext()) {
            it.advance();
            attributeModels[datasetId][it.value() - 1] = RepresentationModel.getModel(datasetId, repModel, simMetric, it.key());
        }
        profiles.forEach((profile) -> {
            profile.getAttributes().forEach((attribute) -> {
                updateModel(datasetId, attribute);
            });
        });
        for (int i = 0; i < currentAttributes; i++) {
            attributeModels[datasetId][i].finalizeModel();
        }
    }
Example #7
Source File: AbstractAttributeClustering.java From JedAIToolkit with Apache License 2.0

    protected AttributeClusters clusterAttributes(int datasetId, ConnectedComponents cc) {
        int firstId = datasetId == DATASET_1 ? 0 : attributesDelimiter;
        int lastId = 0 < attributesDelimiter && datasetId == DATASET_1 ? attributesDelimiter : noOfAttributes;
        int glueClusterId = cc.count() + 1;
        int[] clusterFrequency = new int[glueClusterId + 1];
        float[] clusterEntropy = new float[glueClusterId + 1];
        final TObjectIntMap<String> clusters = new TObjectIntHashMap<>();
        for (int i = firstId; i < lastId; i++) {
            int ccId = cc.id(i);
            if (cc.size(i) == 1) { // singleton attribute
                ccId = glueClusterId;
            }
            clusterFrequency[ccId]++;
            clusterEntropy[ccId] += attributeModels[datasetId][i].getEntropy(true);
            clusters.put(attributeModels[datasetId][i].getInstanceName(), ccId);
        }
        for (int i = 0; i < glueClusterId + 1; i++) {
            clusterEntropy[i] /= clusterFrequency[i];
        }
        return new AttributeClusters(clusterEntropy, clusters);
    }
Example #8
Source File: DataAccessForTesting.java From ambiverse-nlu with Apache License 2.0

    @Override
    public TObjectIntHashMap<KBIdentifiedEntity> getInternalIdsForKBEntities(Collection<KBIdentifiedEntity> entities) {
        TObjectIntHashMap<KBIdentifiedEntity> ids = new TObjectIntHashMap<KBIdentifiedEntity>(entities.size());
        for (KBIdentifiedEntity entity : entities) {
            if (entity2id.containsKey(entity.getIdentifier())) {
                ids.put(entity, entity2id.get(entity.getIdentifier()));
            }
        }
        return ids;
    }
Example #9
Source File: DataAccessForTesting.java From ambiverse-nlu with Apache License 2.0

    @Override
    public TObjectIntHashMap<String> getIdsForWords(Collection<String> words) {
        TObjectIntHashMap<String> ids = new TObjectIntHashMap<String>(words.size());
        for (String word : words) {
            int id = ids.getNoEntryValue();
            if (word2id.containsKey(word)) {
                id = word2id.get(word);
            }
            ids.put(word, id);
        }
        return ids;
    }
Example #10
Source File: TroveObjectIntMapTest.java From hashmapTest with The Unlicense

    @Override
    public int test() {
        final TObjectIntMap<Integer> m_map = new TObjectIntHashMap<>( m_keys.length, m_fillFactor );
        for ( int i = 0; i < m_keys.length; ++i )
            m_map.put( m_keys[ i ], i );
        for ( int i = 0; i < m_keys2.length; ++i )
            m_map.put( m_keys2[ i ], i );
        return m_map.size();
    }
Example #11
Source File: BinaryMetaAnalysis.java From systemsgenetics with GNU General Public License v3.0

    protected void loadSNPAnnotation() throws IOException {
        snpChr = new String[snpList.length];
        snpPositions = new int[snpList.length];
        for (int s = 0; s < snpList.length; s++) {
            snpChr[s] = "-10".intern();
            snpPositions[s] = -10;
        }
        TObjectIntHashMap<String> snpMap = new TObjectIntHashMap<String>(snpList.length);
        for (int s = 0; s < snpList.length; s++) {
            snpMap.put(snpList[s], s);
        }
        // loads only annotation for snps that are in the datasets..
        TextFile tf = new TextFile(settings.getSNPAnnotationFile(), TextFile.R, 10 * 1048576);
        String[] elems = tf.readLineElems(TextFile.tab);
        while (elems != null) {
            if (elems.length > 2) {
                String snp = elems[2];
                if (snpMap.contains(snp)) {
                    int id = snpMap.get(snp);
                    snpChr[id] = new String(elems[0].getBytes("UTF-8")).intern();
                    snpPositions[id] = Integer.parseInt(elems[1]);
                }
            }
            elems = tf.readLineElems(TextFile.tab);
        }
        tf.close();
    }
Example #12
Source File: KeytermEntityEntityMeasureTracer.java From ambiverse-nlu with Apache License 2.0

    public static TIntObjectHashMap<String> getAllWordIds() throws EntityLinkingDataAccessException {
        TObjectIntHashMap<String> wordIds = DataAccess.getAllWordIds();
        TIntObjectHashMap<String> idWords = new TIntObjectHashMap<String>(wordIds.size());
        for (TObjectIntIterator<String> itr = wordIds.iterator(); itr.hasNext(); ) {
            itr.advance();
            idWords.put(itr.value(), itr.key());
        }
        return idWords;
    }
Example #13
Source File: InputTextWrapper.java From ambiverse-nlu with Apache License 2.0

    public InputTextWrapper(Context context, UnitType unitType, boolean removeStopwords) throws EntityLinkingDataAccessException {
        logger.debug("Wrapping input text.");
        mentionToIgnore = null;
        this.unitType = unitType;
        int unitLength = unitType.getUnitSize();
        if (context.getTokenCount() < unitLength) return;
        List<String> unitStrings = new ArrayList<>(context.getTokenCount());
        Queue<String> curTokens = new ArrayDeque<>(unitLength);
        String[] curTokensArray = new String[unitLength];
        for (String token : context.getTokens()) {
            curTokens.add(token);
            if (curTokens.size() == unitLength || (!curTokens.isEmpty() && curTokens.size() - 1 == unitLength)) {
                unitStrings.add(UnitBuilder.buildUnit(curTokens.toArray(curTokensArray)));
                curTokens.remove();
            }
        }
        logger.debug("Get ids for words.");
        TObjectIntHashMap<String> wordIds = DataAccess.getIdsForWords(unitStrings);
        units = new int[unitStrings.size()];
        unitCounts = new TIntIntHashMap((int) (wordIds.size() / Constants.DEFAULT_LOAD_FACTOR), Constants.DEFAULT_LOAD_FACTOR);
        numOfUnits = 0;
        for (int i = 0; i < unitStrings.size(); i++) {
            int unitId = wordIds.get(unitStrings.get(i));
            if (unitId == 0) continue;
            logger.debug("Get contract term for unit id {}.", unitId);
            int contractedUnitId = DataAccess.contractTerm(unitId);
            if (contractedUnitId != 0) unitId = contractedUnitId;
            if (removeStopwords && StopWord.isStopwordOrSymbol(unitId, Language.getLanguageForString("en"))) continue;
            units[i] = unitId;
            unitCounts.adjustOrPutValue(unitId, 1, 1);
            numOfUnits++;
        }
    }
Example #14
Source File: AbstractGtReader.java From JedAIToolkit with Apache License 2.0

    public AbstractGtReader(String filePath) {
        super(filePath);
        idDuplicates = new HashSet<>();
        duplicatesGraph = new SimpleGraph(DefaultEdge.class);
        urlToEntityId1 = new TObjectIntHashMap();
        urlToEntityId2 = new TObjectIntHashMap();
    }
Example #15
Source File: TroveObjectIntMapTest.java From hashmapTest with The Unlicense

    @Override
    public int test() {
        final TObjectIntMap<Integer> m_map = new TObjectIntHashMap<>( m_keys.length / 2 + 1, m_fillFactor );
        int add = 0, remove = 0;
        while ( add < m_keys.length ) {
            m_map.put( m_keys[ add ], add );
            ++add;
            m_map.put( m_keys[ add ], add );
            ++add;
            m_map.remove( m_keys[ remove++ ] );
        }
        return m_map.size();
    }
Example #16
Source File: ConllEntitiesSource.java From ambiverse-nlu with Apache License 2.0

    @Override
    protected LinkedHashMap<Integer, String> getNamedEntitiesMap(int begin, int amount) throws EntityLinkingDataAccessException {
        if (begin > originalNamedEntities.size() - 1) {
            return new LinkedHashMap<>();
        }
        List<KBIdentifiedEntity> kbIdentifiedEntities = originalNamedEntities.subList(begin, Math.min(begin + amount, originalNamedEntities.size()));
        TObjectIntHashMap<KBIdentifiedEntity> articleEntityIDRepresentationMap = DataAccess.getInternalIdsForKBEntities(kbIdentifiedEntities);
        return kbIdentifiedEntities.stream()
                .collect(Collectors.toMap(articleEntityIDRepresentationMap::get, KBIdentifiedEntity::getIdentifier,
                        (u, v) -> {
                            throw new IllegalStateException(String.format("Duplicate key %s", u));
                        }, LinkedHashMap::new));
    }
Example #17
Source File: DictionaryBuilder.java From ambiverse-nlu with Apache License 2.0

    private void computeEntityIds(Map<String, TObjectIntHashMap<DictionaryEntity>> dictionaryEntries,
            TObjectIntHashMap<KBIdentifiedEntity> entityIds) {
        int entityId = entityIds.size() + 1;
        // Get int ids for entities and mentions, starting with 1.
        for (Entry<String, TObjectIntHashMap<DictionaryEntity>> e : dictionaryEntries.entrySet()) {
            // All entities are added as-is.
            for (DictionaryEntity entity : e.getValue().keySet()) {
                if (!entityIds.containsKey(KBIdentifiedEntity.getKBIdentifiedEntity(entity.entity, entity.knowledgebase))) {
                    entityIds.put(KBIdentifiedEntity.getKBIdentifiedEntity(entity.entity, entity.knowledgebase), entityId++);
                }
            }
        }
    }
Example #18
Source File: InvestigateCovariate.java From systemsgenetics with GNU General Public License v3.0

    private static void writeCounts(LinkedHashSet<String> elements, TObjectIntHashMap<String> counts, File file) throws IOException {
        CSVWriter writer = new CSVWriter(new BufferedWriter(new FileWriter(file)), '\t', '\0', '\0');
        String[] row = new String[2];
        for (String elementName : elements) {
            row[0] = elementName;
            row[1] = String.valueOf(counts.get(elementName));
            writer.writeNext(row);
        }
        writer.close();
    }
Example #19
Source File: DexFile.java From apkfile with Apache License 2.0

    DexFile(InputStream dexStream) {
        this.dexStream = dexStream;
        classPathToClass = new HashMap<>();
        classAccessorCounts = new TObjectIntHashMap<>();
        methodDescriptorToMethod = new HashMap<>();
        LOCAL_CLASS_PATHS = new HashSet<>();
    }
Example #20
Source File: StrippedPartition.java From metanome-algorithms with Apache License 2.0

    public StrippedPartition(String[] columnContent) {
        TObjectIntHashMap<String> valueToIndex = new TObjectIntHashMap<>();
        LinkedHashMap<Integer, TEquivalence> helpMap = new LinkedHashMap<>();
        for (int rowIndex = 0; rowIndex < columnContent.length; rowIndex++) {
            String value = columnContent[rowIndex];
            // if the value wasn't there yet, the row index becomes the representative
            // for that equivalence class
            if (!valueToIndex.containsKey(value)) {
                valueToIndex.put(value, rowIndex);
                TEquivalence equivalenceGroup = new EquivalenceGroupTIntHashSet();
                equivalenceGroup.add(rowIndex);
                helpMap.put(Integer.valueOf(rowIndex), equivalenceGroup);
            }
            // otherwise find the right equivalence class and add the current element index
            else {
                int equivalenceGroupIndex = valueToIndex.get(value);
                TEquivalence equivalenceClass = helpMap.get(Integer.valueOf(equivalenceGroupIndex));
                equivalenceClass.add(rowIndex);
            }
        }
        // remove equivalence classes with only one element
        for (Iterator<Map.Entry<Integer, TEquivalence>> it = helpMap.entrySet().iterator(); it.hasNext(); ) {
            Map.Entry<Integer, TEquivalence> entry = it.next();
            if (entry.getValue().size() <= 1) {
                it.remove();
            }
        }
        // sort the stripped partition by equivalence group sizes
        this.addAll(helpMap.values());
    }
Example #21
Source File: MemoryManagedJoinedPartitions.java From metanome-algorithms with Apache License 2.0

    public MemoryManagedJoinedPartitions(int numberOfColumns) {
        this.numberOfColumns = numberOfColumns;
        this.key = new ColumnCollection(numberOfColumns);
        if (USE_MEMORY_MANAGEMENT) {
            this.usageCounter = new TObjectIntHashMap<>();
            this.leastRecentlyUsedPartitions = new LinkedList<ColumnCollection>();
            this.totalCount = new TObjectIntHashMap<>();
        }
        for (long cardinality = 1; cardinality <= this.numberOfColumns; cardinality++) {
            this.put(cardinality, new HashMap<ColumnCollection, Partition>());
        }
    }
Example #22
Source File: IntCoding.java From morpheus-core with Apache License 2.0

    /**
     * Constructor
     */
    public OfCurrency() {
        super(Currency.class);
        this.currencies = Currency.getAvailableCurrencies().stream().toArray(Currency[]::new);
        this.codeMap = new TObjectIntHashMap<>(currencies.length, 0.5f, -1);
        Arrays.sort(currencies, (c1, c2) -> c1.getCurrencyCode().compareTo(c2.getCurrencyCode()));
        for (int i = 0; i < currencies.length; ++i) {
            this.codeMap.put(currencies[i], i);
        }
    }
Example #23
Source File: ShareableResource.java From scheduler with GNU Lesser General Public License v3.0

    /**
     * Make a new resource.
     *
     * @param id the resource identifier
     * @param defCapacity the nodes default capacity
     * @param defConsumption the VM default consumption
     */
    public ShareableResource(String id, int defCapacity, int defConsumption) {
        this.rcId = id;
        vmsConsumption = new TObjectIntHashMap<>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, defConsumption);
        nodesCapacity = new TObjectIntHashMap<>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, defCapacity);
        if (defCapacity < 0) {
            throw new IllegalArgumentException(String.format("The %s default capacity must be >= 0", rcId));
        }
        if (defConsumption < 0) {
            throw new IllegalArgumentException(String.format("The %s default consumption must be >= 0", rcId));
        }
        this.viewId = VIEW_ID_BASE + rcId;
    }
Example #24
Source File: CShareableResource.java From scheduler with GNU Lesser General Public License v3.0

    /**
     * Make a new mapping.
     *
     * @param r the resource to consider
     */
    public CShareableResource(ShareableResource r) throws SchedulerException {
        this.rc = r;
        this.id = r.getIdentifier();
        wantedCapacity = new TObjectIntHashMap<>();
        wantedAmount = new TObjectIntHashMap<>();
        wantedRatios = new TObjectDoubleHashMap<>();
    }
Example #25
Source File: MetaQTL4CorrelationTask.java From systemsgenetics with GNU General Public License v3.0

    MetaQTL4CorrelationTask(int nrThreads, int distributionSize, long[] randomizationSeeds,
            ArrayList<MetaQTL4MetaTrait> availableTraits, TObjectIntHashMap<MetaQTL4MetaTrait> availableTraitsHash,
            MetaQTL4Dataset[] datasets, GeneticVariant[][] geneticVariantIndex, MetaQTL4Settings m_settings,
            MetaQTL4TraitAnnotation traitAnnotation, Integer[][] traitIndex, Set<MetaQTL4MetaTrait> traitsToInclude,
            Set<String> variantsToInclude, int threadIndex) {
        this.distributionSize = distributionSize;
        this.randomizationSeeds = randomizationSeeds;
        this.availableTraits = availableTraits;
        this.availableTraitsHash = availableTraitsHash;
        this.datasets = datasets;
        this.geneticVariantIndex = geneticVariantIndex;
        this.m_settings = m_settings;
        this.traitAnnotation = traitAnnotation;
        this.traitIndex = traitIndex;
        this.traitsToInclude = traitsToInclude;
        this.variantsToInclude = variantsToInclude;
        this.threadIndex = threadIndex;
        this.realDistribution = new int[distributionSize];
        this.permutedDistribution = new int[distributionSize];
    }
Example #26
Source File: IndexOfObjects.java From morpheus-core with Apache License 2.0

    /**
     * Constructor
     * @param iterable the keys for index
     * @param parent the parent index to initialize from
     */
    private IndexOfObjects(Iterable<K> iterable, IndexOfObjects<K> parent) {
        super(iterable, parent);
        this.indexMap = new TObjectIntHashMap<>(keyArray().length(), 0.75f, -1);
        this.keyArray().sequential().forEachValue(v -> {
            final K key = v.getValue();
            final int index = parent.indexMap.get(key);
            if (index < 0) throw new IndexException("No match for key: " + v.getValue());
            final int existing = indexMap.put(key, index);
            if (existing >= 0) {
                throw new IndexException("Cannot have duplicate keys in index: " + v.getValue());
            }
        });
    }
Example #27
Source File: IndexOfObjects.java From morpheus-core with Apache License 2.0

    @Override
    @SuppressWarnings("unchecked")
    public final Index<K> copy() {
        try {
            final IndexOfObjects<K> clone = (IndexOfObjects<K>) super.copy();
            clone.indexMap = new TObjectIntHashMap<>(indexMap);
            return clone;
        } catch (Exception ex) {
            throw new IndexException("Failed to clone index", ex);
        }
    }
Example #28
Source File: UpgradeKilling.java From BetterChests with GNU Lesser General Public License v3.0

    @Override
    public void update(IUpgradableBlock chest, ItemStack stack) {
        if (UpgradeHelper.INSTANCE.getFrequencyTick(chest, stack, 100) != 0) {
            return;
        }
        AxisAlignedBB bb = new AxisAlignedBB(chest.getPosition()).grow(RADIUS);
        TObjectIntMap<Class<? extends EntityLiving>> map = new TObjectIntHashMap<>();
        for (EntityLiving entity : chest.getWorldObj().getEntitiesWithinAABB(EntityLiving.class, bb)) {
            if (entity.isDead) {
                continue;
            }
            if (entity instanceof EntityAnimal) {
                EntityAnimal animal = (EntityAnimal) entity;
                if (entity.isChild()) {
                    continue;
                }
                int currentAnimals = map.get(animal.getClass());
                if (currentAnimals < ANIMALS_TO_KEEP_ALIVE) {
                    map.put(animal.getClass(), currentAnimals + 1);
                    continue;
                }
            }
            if (hasUpgradeOperationCost(chest)) {
                EntityPlayer source = null;
                if (chest.isUpgradeInstalled(DummyUpgradeType.AI.getStack())) {
                    source = chest.getFakePlayer();
                }
                entity.attackEntityFrom(getDamageSource(source), 10);
                drawUpgradeOperationCode(chest);
            }
        }
    }
Example #29
Source File: CreateTests.java From morpheus-core with Apache License 2.0

    @Test()
    public void testMapCreateTest() {
        final long t1 = System.nanoTime();
        final TObjectIntMap<String> map1 = new TObjectIntHashMap<>(5000000, 0.8f, -1);
        final long t2 = System.nanoTime();
        final TLongIntMap map2 = new TLongIntHashMap();
        final long t3 = System.nanoTime();
        System.out.println("Map1:" + ((t2-t1)/1000000d) + " Map2:" + ((t3-t2)/100000d));
    }
Example #30
Source File: InitialAlgebra.java From CQL with GNU Affero General Public License v3.0

    private boolean add(En en, Term<Void, En, Void, Fk, Void, Gen, Void> term) {
        int x = nf0(en, term);
        if (x != -1) {
            return false;
        }
        x = fresh++;
        nfs.put(term, x);
        ens.get(en).add(x);
        reprs.put(x, term);
        TObjectIntHashMap<Fk> map = new TObjectIntHashMap<>(16, .75f, -1);
        for (Fk fk : schema().fksFrom(en)) {
            En e = schema().fks.get(fk).second;
            Term<Void, En, Void, Fk, Void, Gen, Void> z = Term.Fk(fk, term);
            add(e, z);
            map.put(fk, nf0(e, z));
        }
        fks.put(x, map);
        if (fresh % 10000 == 0) {
            if (Thread.currentThread().isInterrupted()) {
                throw new IgnoreException();
            }
        }
        return true;
    }