gnu.trove.map.hash.TObjectDoubleHashMap Java Examples

The following examples show how to use gnu.trove.map.hash.TObjectDoubleHashMap, a Trove hash map with object keys and primitive double values. The examples are taken from open source projects; the source file, project, and license are noted above each example.
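Before the project-specific examples below, here is a minimal, self-contained sketch of the core TObjectDoubleHashMap API that these examples rely on: construction with an explicit no-entry value, put/get, adjustOrPutValue, and iteration. The class name and keys in the sketch are illustrative only.

import gnu.trove.iterator.TObjectDoubleIterator;
import gnu.trove.map.TObjectDoubleMap;
import gnu.trove.map.hash.TObjectDoubleHashMap;

public class TObjectDoubleHashMapBasics {

	public static void main(String[] args) {
		// Initial capacity 10, load factor 0.5f, and -1.0 as the no-entry value
		// that get() returns for missing keys (Trove's default is 0.0).
		TObjectDoubleMap<String> scores = new TObjectDoubleHashMap<>(10, 0.5f, -1.0);

		scores.put("alpha", 0.25);
		scores.put("beta", 0.75);
		// Add 0.5 to "alpha" if it is present, otherwise insert it with value 0.5.
		scores.adjustOrPutValue("alpha", 0.5, 0.5);

		System.out.println(scores.get("alpha"));   // 0.75
		System.out.println(scores.get("missing")); // -1.0 (no-entry value)

		// Trove maps are iterated with advance()/key()/value() instead of Map.Entry.
		for (TObjectDoubleIterator<String> it = scores.iterator(); it.hasNext(); ) {
			it.advance();
			System.out.println(it.key() + " -> " + it.value());
		}
	}
}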
Example #1
Source File: PileupEntry.java    From systemsgenetics with GNU General Public License v3.0
public PileupEntry(String chr, int pos, Allele refAllele, int readDepth, String basesString, String basesQualityString, int minimumBaseQuality) throws PileupParseException {
	this.chr = chr;
	this.pos = pos;
	this.refAllele = refAllele;
	this.readDepth = readDepth;
	this.alleleCounts = new TObjectIntHashMap<Allele>();
	this.alleleAverageQualities = new TObjectDoubleHashMap<Allele>();
	this.minimumBaseQuality = minimumBaseQuality;

	alleleCounts.put(Allele.A, 0);
	alleleCounts.put(Allele.C, 0);
	alleleCounts.put(Allele.G, 0);
	alleleCounts.put(Allele.T, 0);

	alleleAverageQualities.put(Allele.A, 0);
	alleleAverageQualities.put(Allele.C, 0);
	alleleAverageQualities.put(Allele.G, 0);
	alleleAverageQualities.put(Allele.T, 0);

	if (!alleleCounts.containsKey(refAllele)) {
		throw new PileupParseException("Error parsing pileup entry");
	}

	int[] basesQuality = basesQualityString == null ? null : parseBasesQualityString(basesQualityString);
	parseBasesString(basesString, basesQuality);
}
 
Example #2
Source File: LexemeCooccurrenceScorer.java    From spf with GNU General Public License v2.0
protected static TObjectDoubleHashMap<String> readStats(
		BufferedReader reader) throws IOException {
	final TObjectDoubleHashMap<String> pmis = new TObjectDoubleHashMap<String>();

	String line = reader.readLine();
	while (line != null) { // for each line in the file
		line = line.trim();
		if (!line.equals("")) {
			// Lines have the form "<w1>  ::  <w2>  ::  <score>"; the dots in the
			// pattern consume the spaces around the "::" separators.
			final String[] tokens = line.split("..\\:\\:..");
			final String id = tokens[0] + "  ::  " + tokens[1];
			final double score = Double.parseDouble(tokens[2]);
			pmis.put(id, score);
		}
		line = reader.readLine();
	}

	return pmis;
}
 
Example #3
Source File: CollectionUtils.java    From ambiverse-nlu with Apache License 2.0
/**
 * Normalizes the values in the given map so that they sum up to 1.
 *
 * @param values the scores to normalize
 * @param <T>    the type of the map keys
 * @return a new map with the normalized values, or the input map if the values sum to 0
 */
public static <T> TObjectDoubleHashMap<T> normalizeValuesToSum(TObjectDoubleHashMap<T> values) {
  TObjectDoubleHashMap<T> normalizedScores = new TObjectDoubleHashMap<T>();
  double total = 0;
  for (TObjectDoubleIterator<T> itr = values.iterator(); itr.hasNext(); ) {
    itr.advance();
    total += itr.value();
  }
  if (total == 0) {
    return values;
  }
  for (TObjectDoubleIterator<T> itr = values.iterator(); itr.hasNext(); ) {
    itr.advance();
    double normalizedScore = itr.value() / total;
    normalizedScores.put(itr.key(), normalizedScore);
  }
  return normalizedScores;
}
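A quick usage sketch for the helper above; the keys and values are made up, and the import for CollectionUtils is omitted because it depends on the project layout.

TObjectDoubleHashMap<String> raw = new TObjectDoubleHashMap<>();
raw.put("entityA", 2.0);
raw.put("entityB", 6.0);

// Returns a fresh map with entityA=0.25 and entityB=0.75; if the values
// summed to 0, the original map would be returned unchanged.
TObjectDoubleHashMap<String> normalized = CollectionUtils.normalizeValuesToSum(raw);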
 
Example #4
Source File: RetrieverConfig.java    From cineast with MIT License
public TObjectDoubleHashMap<Retriever> getRetrieversByCategory(String category){
	List<DoublePair<Class<? extends Retriever>>> list = this.retrieverCategories.get(category);
	if(list == null){
		return new TObjectDoubleHashMap<>(1);
	}

	TObjectDoubleHashMap<Retriever> _return = new TObjectDoubleHashMap<>(list.size());
	for(DoublePair<Class<? extends Retriever>> pair : list){
		Retriever rev = ReflectionHelper.instanciate(pair.key);
		if(rev != null){
			_return.put(rev, pair.value);
		}
	}

	return _return;
}
 
Example #5
Source File: SimpleEstimationContext.java    From rheem with Apache License 2.0
@Override
public SimpleEstimationContext deserialize(JSONObject json, Class<? extends SimpleEstimationContext> cls) {
    final List<CardinalityEstimate> inCards = JsonSerializables.deserializeAllAsList(
            json.getJSONArray("inCards"),
            CardinalityEstimate.class
    );
    final List<CardinalityEstimate> outCards = JsonSerializables.deserializeAllAsList(
            json.getJSONArray("outCards"),
            CardinalityEstimate.class
    );

    final TObjectDoubleHashMap<String> doubleProperties = new TObjectDoubleHashMap<>();
    final JSONObject doublePropertiesJson = json.optJSONObject("properties");
    if (doublePropertiesJson != null) {
        for (String key : doublePropertiesJson.keySet()) {
            doubleProperties.put(key, doublePropertiesJson.getDouble(key));
        }
    }

    final int numExecutions = json.getInt("executions");

    return new SimpleEstimationContext(
            inCards.toArray(new CardinalityEstimate[inCards.size()]),
            outCards.toArray(new CardinalityEstimate[outCards.size()]),
            doubleProperties,
            numExecutions
    );
}
 
Example #6
Source File: CShareableResource.java    From scheduler with GNU Lesser General Public License v3.0
/**
 * Make a new mapping.
 *
 * @param r the resource to consider
 */
public CShareableResource(ShareableResource r) throws SchedulerException {
    this.rc = r;
    this.id = r.getIdentifier();
    wantedCapacity = new TObjectIntHashMap<>();
    wantedAmount = new TObjectIntHashMap<>();
    wantedRatios = new TObjectDoubleHashMap<>();
}
 
Example #7
Source File: AnomalyLikelihoodTest.java    From htm.java with GNU Affero General Public License v3.0
/**
 * Test that sampleDistribution from a generated distribution returns roughly
 * the same parameters.
 */
@Test
public void testSampleDistribution() {
    TObjectDoubleMap<String> p = new TObjectDoubleHashMap<>();
    p.put(KEY_MEAN, 0.5);
    p.put(KEY_STDEV, Math.sqrt(0.1));
    p.put(KEY_VARIANCE, 0.1);
    
    double[] samples = sampleDistribution(new MersenneTwister(), 0.5, 0.1, 1000);
    
    Statistic np = an.estimateNormal(samples, true);
    assertTrue(assertWithinEpsilon(p.get(KEY_MEAN), np.mean, 0.1));
    assertTrue(assertWithinEpsilon(p.get(KEY_VARIANCE), np.variance, 0.1));
    assertTrue(assertWithinEpsilon(p.get(KEY_STDEV), np.stdev, 0.1));
}
 
Example #8
Source File: AnomalyLikelihoodTest.java    From htm.java with GNU Affero General Public License v3.0
@Test
public void testNormalProbability() {
    TObjectDoubleMap<String> p = new TObjectDoubleHashMap<>();
    p.put(KEY_MEAN, 0.0);
    p.put(KEY_VARIANCE, 1.0);
    p.put(KEY_STDEV, 1.0);
    
    // Test a standard normal distribution
    // Values taken from http://en.wikipedia.org/wiki/Standard_normal_table
    assertWithinEpsilon(an.normalProbability(0.0, p), 0.5);
    assertWithinEpsilon(an.normalProbability(0.3, p), 0.3820885780);
    assertWithinEpsilon(an.normalProbability(1.0, p), 0.1587);
    assertWithinEpsilon(1.0 - an.normalProbability(1.0, p), an.normalProbability(-1.0, p));
    assertWithinEpsilon(an.normalProbability(-0.3, p), 1.0 - an.normalProbability(0.3, p));
    
    // Non standard normal distribution
    // p = {"name": "normal", "mean": 1.0, "variance": 4.0, "stdev": 2.0}
    p.put(KEY_MEAN, 1.0);
    p.put(KEY_VARIANCE, 4.0);
    p.put(KEY_STDEV, 2.0);
    assertWithinEpsilon(an.normalProbability(1.0, p), 0.5);
    assertWithinEpsilon(an.normalProbability(2.0, p), 0.3085);
    assertWithinEpsilon(an.normalProbability(3.0, p), 0.1587);
    assertWithinEpsilon(an.normalProbability(3.0, p), 1.0 - an.normalProbability(-1.0, p));
    assertWithinEpsilon(an.normalProbability(0.0, p), 1.0 - an.normalProbability(2.0, p));
    
    // Non standard normal distribution
    // p = {"name": "normal", "mean": -2.0, "variance": 0.5, "stdev": math.sqrt(0.5)}
    p.put(KEY_MEAN, -2.0);
    p.put(KEY_VARIANCE, 0.5);
    p.put(KEY_STDEV, Math.sqrt(0.5));
    assertWithinEpsilon(an.normalProbability(-2.0, p), 0.5);
    assertWithinEpsilon(an.normalProbability(-1.5, p), 0.241963652);
    assertWithinEpsilon(an.normalProbability(-2.5, p), 1.0 - an.normalProbability(-1.5, p));
}
 
Example #9
Source File: TroveHashVector.java    From spf with GNU General Public License v2.0
TroveHashVector(IHashVectorImmutable other) {
	if (other instanceof TroveHashVector) {
		this.values = new TObjectDoubleHashMap<KeyArgs>(
				((TroveHashVector) other).values);
	} else {
		this.values = new TObjectDoubleHashMap<KeyArgs>(10, 0.5f,
				ZERO_VALUE);
		for (final Pair<KeyArgs, Double> o : other) {
			values.put(o.first(), o.second());
		}
	}
}
 
Example #10
Source File: LexemeCooccurrenceScorer.java    From spf with GNU General Public License v2.0
protected static TObjectDoubleHashMap<String> readStatsFile(File f)
		throws IOException {
	try (final BufferedReader reader = new BufferedReader(
			new FileReader(f))) {
		return readStats(reader);
	}
}
 
Example #11
Source File: ContinuousRetrievalLogic.java    From cineast with MIT License
public static List<SegmentScoreElement> retrieveByRetriever(QueryContainer qc,
    Retriever retriever,
    ReadableQueryConfig config) {
  TObjectDoubleHashMap<Retriever> map = new TObjectDoubleHashMap<>();
  map.put(retriever, 1d);
  return ContinuousQueryDispatcher.retrieve(qc, map, API.getInitializer(), config);
}
 
Example #12
Source File: ContinuousRetrievalLogic.java    From cineast with MIT License
public static List<SegmentScoreElement> retrieve(String id, String category, ReadableQueryConfig config) {
  TObjectDoubleHashMap<Retriever> retrievers = Config.sharedConfig().getRetriever()
      .getRetrieversByCategory(category);
  if (retrievers.isEmpty()) {
    LOGGER.warn("Empty retriever list for id {}, category {} and config {}, returning no results", id, category, config);
    return new ArrayList<>(0);
  }
  return ContinuousQueryDispatcher.retrieve(id, retrievers, API.getInitializer(), config);
}
 
Example #13
Source File: ContinuousRetrievalLogic.java    From cineast with MIT License
public static List<SegmentScoreElement> retrieve(QueryContainer qc, String category,
    ReadableQueryConfig config) {
  TObjectDoubleHashMap<Retriever> retrievers = Config.sharedConfig().getRetriever()
      .getRetrieversByCategory(category);
  if (retrievers.isEmpty()) {
    LOGGER.warn("Empty retriever list for query {}, category {} and config {}, returning no results", qc, category, config);
    return new ArrayList<>(0);
  }
  return ContinuousQueryDispatcher.retrieve(qc, retrievers, API.getInitializer(), config);
}
 
Example #14
Source File: ContinuousQueryDispatcher.java    From cineast with MIT License
public static List<SegmentScoreElement> retrieve(String segmentId,
    TObjectDoubleHashMap<Retriever> retrievers,
    RetrieverInitializer initializer,
    ReadableQueryConfig config) {
  return new ContinuousQueryDispatcher(r -> new RetrievalTask(r, segmentId, config), retrievers,
      initializer).doRetrieve();
}
 
Example #15
Source File: ContinuousQueryDispatcher.java    From cineast with MIT License
public static List<SegmentScoreElement> retrieve(QueryContainer query,
    TObjectDoubleHashMap<Retriever> retrievers,
    RetrieverInitializer initializer,
    ReadableQueryConfig config) {
  return new ContinuousQueryDispatcher(r -> new RetrievalTask(r, query, config), retrievers,
      initializer).doRetrieve();
}
 
Example #16
Source File: ScoreElement.java    From cineast with MIT License
/**
 * Merges the entries of a collection of ScoreElements into the provided TObjectDoubleHashMap, using each ScoreElement's ID as key
 * and its score as value. If an entry already exists, its value is adjusted. Every score is multiplied by the weight before being merged into the map.
 * Scores that are infinite or NaN are skipped.
 *
 * @param collection The collection of ScoreElements to merge.
 * @param map The score-map to merge the elements into.
 * @param weight The weight that is applied to each score.
 * @return The provided score-map with the merged entries.
 */
static <T extends ScoreElement> TObjectDoubleHashMap<String> mergeWithScoreMap(Collection<T> collection, TObjectDoubleHashMap<String> map, double weight) {
    for (T element : collection) {
        double score = element.getScore();
        if (Double.isInfinite(score) || Double.isNaN(score)) {
            continue;
        }
        double weightedScore = score * weight;
        map.adjustOrPutValue(element.getId(), weightedScore, weightedScore);
    }
    return map;
}
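The merge above hinges on Trove's adjustOrPutValue, which either increments an existing entry or inserts a new one in a single call. A standalone illustration of that primitive follows; the segment ID, score, and weight are made-up values.

TObjectDoubleHashMap<String> scoreMap = new TObjectDoubleHashMap<>();

double weightedScore = 0.8 * 0.5; // score * weight, as in mergeWithScoreMap
// If "segment_1" is already present, add weightedScore to its value;
// otherwise insert it with weightedScore as the initial value.
scoreMap.adjustOrPutValue("segment_1", weightedScore, weightedScore);
scoreMap.adjustOrPutValue("segment_1", weightedScore, weightedScore);

System.out.println(scoreMap.get("segment_1")); // 0.8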
 
Example #17
Source File: SimilarityQueryMessageHandler.java    From cineast with MIT License
/**
 * Executes a {@link SimilarityQuery}. Performs the similarity query based on the {@link QueryContainer}
 * objects provided in the {@link SimilarityQuery}.
 *
 * @param session WebSocket session the invocation is associated with.
 * @param qconf The {@link QueryConfig} that contains additional specifications.
 * @param message Instance of {@link SimilarityQuery}
 */
@Override
public void execute(Session session, QueryConfig qconf, SimilarityQuery message) throws Exception {
    /* Prepare QueryConfig (so as to obtain a QueryId). */
    final String uuid = qconf.getQueryId().toString();

    /*  Prepare map that maps category  to QueryTerm components. */
    final HashMap<String, ArrayList<QueryContainer>> categoryMap = QueryComponent.toCategoryMap(message.getComponents());

    /*  Execute similarity queries for all Category -> QueryContainer combinations in the map. */
    for (String category : categoryMap.keySet()) {
        final TObjectDoubleHashMap<String> map = new TObjectDoubleHashMap<>();
        for (QueryContainer qc : categoryMap.get(category)) {
            /* Merge partial results with score-map. */
            float weight = qc.getWeight() > 0f ? 1f : -1f; //TODO better normalisation
            ScoreElement.mergeWithScoreMap(ContinuousRetrievalLogic.retrieve(qc, category, qconf), map, weight);
        }
        /* Transform raw results into list of StringDoublePair's (segmentId -> score). */
        final int max = Config.sharedConfig().getRetriever().getMaxResults();
        final List<StringDoublePair> results = map.keySet().stream()
            .map(key -> new StringDoublePair(key, map.get(key)))
            .filter(p -> p.value > 0.0)
            .sorted(StringDoublePair.COMPARATOR)
            .limit(max)
            .collect(Collectors.toList());

        /* Finalize and submit per-category results. */
        this.finalizeAndSubmitResults(session, uuid, category, results);
    }
}
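The tail of execute() turns the per-category score map into a sorted, truncated result list. The same pattern can be reproduced with plain Trove and the Streams API; the sketch below uses AbstractMap.SimpleEntry in place of cineast's StringDoublePair so that it stays self-contained, and the segment IDs, scores, and result limit are invented.

import gnu.trove.map.hash.TObjectDoubleHashMap;

import java.util.AbstractMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ScoreMapToResults {

    public static void main(String[] args) {
        TObjectDoubleHashMap<String> map = new TObjectDoubleHashMap<>();
        map.put("segment_1", 0.9);
        map.put("segment_2", 0.4);
        map.put("segment_3", 0.0); // dropped below, like non-positive scores in the handler

        int max = 2; // stands in for the configured maximum number of results

        List<Map.Entry<String, Double>> results = map.keySet().stream()
            .map(key -> new AbstractMap.SimpleEntry<>(key, map.get(key)))
            .filter(e -> e.getValue() > 0.0)
            .sorted((a, b) -> Double.compare(b.getValue(), a.getValue())) // highest score first
            .limit(max)
            .collect(Collectors.toList());

        results.forEach(e -> System.out.println(e.getKey() + " -> " + e.getValue()));
    }
}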
 
Example #18
Source File: NestableLoadProfileEstimatorTest.java    From rheem with Apache License 2.0
@Test
public void testMathExFromSpecificationWithImport() {
    String specification = "{" +
            "\"type\":\"mathex\"," +
            "\"in\":2," +
            "\"out\":1," +
            "\"import\":[\"numIterations\"]," +
            "\"p\":0.8," +
            "\"cpu\":\"(3*in0 + 2*in1 + 7*out0) * numIterations\"," +
            "\"ram\":\"6*in0 + 4*in1 + 14*out0\"," +
            "\"overhead\":143," +
            "\"ru\":\"logGrowth(0.1, 0.1, 10000, in0+in1)\"" +
            "}";
    final NestableLoadProfileEstimator estimator =
            LoadProfileEstimators.createFromSpecification(null, specification);
    SomeExecutionOperator execOp = new SomeExecutionOperator();
    TObjectDoubleMap<String> properties = new TObjectDoubleHashMap<>();
    properties.put("numIterations", 2d);
    final LoadProfile estimate = estimator.estimate(new SimpleEstimationContext(
            new CardinalityEstimate[]{
                    new CardinalityEstimate(10, 10, 1d), new CardinalityEstimate(100, 100, 1d)
            },
            new CardinalityEstimate[]{new CardinalityEstimate(200, 300, 1d)},
            properties,
            1
    ));

    Assert.assertEquals((3 * 10 + 2 * 100 + 7 * 200)  * execOp.getNumIterations(), estimate.getCpuUsage().getLowerEstimate(), 0.01);
    Assert.assertEquals((3 * 10 + 2 * 100 + 7 * 300)  * execOp.getNumIterations(), estimate.getCpuUsage().getUpperEstimate(), 0.01);
    Assert.assertEquals(
            OptimizationUtils.logisticGrowth(0.1, 0.1, 10000, 100 + 10),
            estimate.getResourceUtilization(),
            0.000000001
    );
    Assert.assertEquals(143, estimate.getOverheadMillis());
}
 
Example #19
Source File: NestableLoadProfileEstimatorTest.java    From rheem with Apache License 2.0
@Test
public void testFromJuelSpecificationWithImport() {
    String specification = "{" +
            "\"in\":2," +
            "\"out\":1," +
            "\"import\":[\"numIterations\"]," +
            "\"p\":0.8," +
            "\"cpu\":\"${(3*in0 + 2*in1 + 7*out0) * numIterations}\"," +
            "\"ram\":\"${6*in0 + 4*in1 + 14*out0}\"," +
            "\"overhead\":143," +
            "\"ru\":\"${rheem:logGrowth(0.1, 0.1, 10000, in0+in1)}\"" +
            "}";
    final NestableLoadProfileEstimator estimator =
            LoadProfileEstimators.createFromSpecification(null, specification);
    SomeExecutionOperator execOp = new SomeExecutionOperator();
    TObjectDoubleMap<String> properties = new TObjectDoubleHashMap<>();
    properties.put("numIterations", 2d);
    final LoadProfile estimate = estimator.estimate(new SimpleEstimationContext(
            new CardinalityEstimate[]{
                    new CardinalityEstimate(10, 10, 1d), new CardinalityEstimate(100, 100, 1d)
            },
            new CardinalityEstimate[]{new CardinalityEstimate(200, 300, 1d)},
            properties,
            1
    ));

    Assert.assertEquals((3 * 10 + 2 * 100 + 7 * 200)  * execOp.getNumIterations(), estimate.getCpuUsage().getLowerEstimate(), 0.01);
    Assert.assertEquals((3 * 10 + 2 * 100 + 7 * 300)  * execOp.getNumIterations(), estimate.getCpuUsage().getUpperEstimate(), 0.01);
    Assert.assertEquals(
            OptimizationUtils.logisticGrowth(0.1, 0.1, 10000, 100 + 10),
            estimate.getResourceUtilization(),
            0.000000001
    );
    Assert.assertEquals(143, estimate.getOverheadMillis());
}
 
Example #20
Source File: NestableLoadProfileEstimatorTest.java    From rheem with Apache License 2.0
@Test
public void testFromMathExSpecification() {
    String specification = "{" +
            "\"type\":\"mathex\"," +
            "\"in\":2," +
            "\"out\":1," +
            "\"p\":0.8," +
            "\"cpu\":\"3*in0 + 2*in1 + 7*out0\"," +
            "\"ram\":\"6*in0 + 4*in1 + 14*out0\"," +
            "\"overhead\":143," +
            "\"ru\":\"logGrowth(0.1, 0.1, 10000, in0+in1)\"" +
            "}";
    final NestableLoadProfileEstimator estimator =
            LoadProfileEstimators.createFromSpecification(null, specification);
    final LoadProfile estimate = estimator.estimate(new SimpleEstimationContext(
            new CardinalityEstimate[]{
                    new CardinalityEstimate(10, 10, 1d), new CardinalityEstimate(100, 100, 1d)
            },
            new CardinalityEstimate[]{new CardinalityEstimate(200, 300, 1d)},
            new TObjectDoubleHashMap<>(),
            1
    ));

    Assert.assertEquals(3 * 10 + 2 * 100 + 7 * 200, estimate.getCpuUsage().getLowerEstimate(), 0.01);
    Assert.assertEquals(3 * 10 + 2 * 100 + 7 * 300, estimate.getCpuUsage().getUpperEstimate(), 0.01);
    Assert.assertEquals(
            OptimizationUtils.logisticGrowth(0.1, 0.1, 10000, 100 + 10),
            estimate.getResourceUtilization(),
            0.000000001
    );
    Assert.assertEquals(143, estimate.getOverheadMillis());
}
 
Example #21
Source File: NestableLoadProfileEstimatorTest.java    From rheem with Apache License 2.0
@Test
public void testFromJuelSpecification() {
    String specification = "{" +
            "\"type\":\"juel\"," +
            "\"in\":2," +
            "\"out\":1," +
            "\"p\":0.8," +
            "\"cpu\":\"${3*in0 + 2*in1 + 7*out0}\"," +
            "\"ram\":\"${6*in0 + 4*in1 + 14*out0}\"," +
            "\"overhead\":143," +
            "\"ru\":\"${rheem:logGrowth(0.1, 0.1, 10000, in0+in1)}\"" +
            "}";
    final NestableLoadProfileEstimator estimator =
            LoadProfileEstimators.createFromSpecification(null, specification);
    final LoadProfile estimate = estimator.estimate(new SimpleEstimationContext(
            new CardinalityEstimate[]{
                    new CardinalityEstimate(10, 10, 1d), new CardinalityEstimate(100, 100, 1d)
            },
            new CardinalityEstimate[]{new CardinalityEstimate(200, 300, 1d)},
            new TObjectDoubleHashMap<>(),
            1
    ));

    Assert.assertEquals(3 * 10 + 2 * 100 + 7 * 200, estimate.getCpuUsage().getLowerEstimate(), 0.01);
    Assert.assertEquals(3 * 10 + 2 * 100 + 7 * 300, estimate.getCpuUsage().getUpperEstimate(), 0.01);
    Assert.assertEquals(
            OptimizationUtils.logisticGrowth(0.1, 0.1, 10000, 100 + 10),
            estimate.getResourceUtilization(),
            0.000000001
    );
    Assert.assertEquals(143, estimate.getOverheadMillis());
}
 
Example #22
Source File: SimpleGeneticRiskScoreCalculator.java    From systemsgenetics with GNU General Public License v3.0
@Override
public TObjectDoubleHashMap<String> calculateRiskScores(RandomAccessGenotypeData genotypeData, PhenotypeData phenotypeData
) {
    throw new UnsupportedOperationException("Not supported yet.");
}
 
Example #23
Source File: InvestigateAucChildParent.java    From systemsgenetics with GNU General Public License v3.0
public static TObjectDoubleMap<String> readSignificantPredictedHpoTermFile(File predictedHpoTermFile) throws FileNotFoundException, IOException {

		final CSVParser parser = new CSVParserBuilder().withSeparator('\t').withIgnoreQuotations(true).build();
		final CSVReader reader = new CSVReaderBuilder(new BufferedReader(new FileReader(predictedHpoTermFile))).withSkipLines(1).withCSVParser(parser).build();

		TObjectDoubleMap<String> hpos = new TObjectDoubleHashMap<>();

		String[] nextLine;
		while ((nextLine = reader.readNext()) != null) {

			if(Double.parseDouble(nextLine[4]) <= 0.05){
				hpos.put(nextLine[0], Double.parseDouble(nextLine[3]));
			}
			

		}

		reader.close();

		return hpos;

	}
 
Example #24
Source File: LexemeCooccurrenceScorer.java    From spf with GNU General Public License v2.0
public LexemeCooccurrenceScorer(TObjectDoubleHashMap<String> scores,
		boolean lowercase) {
	this.pMIS = scores;
	this.lowercase = lowercase;
}
 
Example #25
Source File: EffectOfRandom10Percent.java    From systemsgenetics with GNU General Public License v3.0
public static TObjectDoubleMap<String> readPredictedHpoTermFile(File predictedHpoTermFile) throws FileNotFoundException, IOException {

		final CSVParser parser = new CSVParserBuilder().withSeparator('\t').withIgnoreQuotations(true).build();
		final CSVReader reader = new CSVReaderBuilder(new BufferedReader(new FileReader(predictedHpoTermFile))).withSkipLines(1).withCSVParser(parser).build();

		TObjectDoubleMap<String> hpos = new TObjectDoubleHashMap<>();

		String[] nextLine;
		while ((nextLine = reader.readNext()) != null) {

			hpos.put(nextLine[0], Double.parseDouble(nextLine[3]));

		}

		reader.close();

		return hpos;

	}
 
Example #26
Source File: PileupEntry.java    From systemsgenetics with GNU General Public License v3.0
public TObjectDoubleHashMap<Allele> getAlleleAverageQualities() {
	return alleleAverageQualities;
}
 
Example #27
Source File: TroveHashVector.java    From spf with GNU General Public License v2.0
TroveHashVector() {
	this.values = new TObjectDoubleHashMap<KeyArgs>(10, 0.5f, ZERO_VALUE);
}
 
Example #28
Source File: ContinuousRetrievalLogic.java    From cineast with MIT License
public static List<SegmentScoreElement> retrieveByRetriever(String id, Retriever retriever,
    ReadableQueryConfig config) {
  TObjectDoubleHashMap<Retriever> map = new TObjectDoubleHashMap<>();
  map.put(retriever, 1d);
  return ContinuousQueryDispatcher.retrieve(id, map, API.getInitializer(), config);
}
 
Example #29
Source File: GeneticRiskScoreCalculator.java    From systemsgenetics with GNU General Public License v3.0
TObjectDoubleHashMap<String> calculateRiskScores(RandomAccessGenotypeData genotypeData, String onlyCount, String harmonizedData, double inclusionThreshold); 
Example #30
Source File: GeneticRiskScoreCalculator.java    From systemsgenetics with GNU General Public License v3.0
TObjectDoubleHashMap<String> calculateRiskScores(RandomAccessGenotypeData genotypeData, PhenotypeData phenotypeData);