gnu.trove.map.TObjectDoubleMap Java Examples

The following examples show how to use gnu.trove.map.TObjectDoubleMap. You can vote up the examples you find useful or vote down those you don't, and you can open the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: ScoreFusion.java    From cineast with MIT License 6 votes vote down vote up
/**
 * Adds {@code objectScore} to every entry of {@code scoreBySegmentId} whose key is the id of one
 * of the given segments. If none of the segments is present in the map, the score is stored
 * under the id of the first segment instead.
 */
private static void fuseObjectScoreIntoSegments(TObjectDoubleMap<String> scoreBySegmentId,
    double objectScore, List<MediaSegmentDescriptor> segments) {
  boolean anySegmentAdjusted = false;
  for (MediaSegmentDescriptor segment : segments) {
    // adjustValue only modifies existing keys and reports whether the key was present.
    anySegmentAdjusted |= scoreBySegmentId.adjustValue(segment.getSegmentId(), objectScore);
  }

  if (!anySegmentAdjusted) {
    // None of the object's segments appeared in the results; fall back to the first segment.
    scoreBySegmentId.put(segments.get(0).getSegmentId(), objectScore);
  }
}
 
Example #2
Source File: ScoreFusion.java    From cineast with MIT License 6 votes vote down vote up
/**
 * Fuses the object scores into the segment scores by adding every object score to the scores of
 * its segments. If an object without any of its segments was found, the first segment gets added
 * and used instead.
 * Note that this method <i>modifies {@code scoreBySegmentId} in place without changing
 * {@code scoreByObjectId}</i>.
 *
 * @param scoreBySegmentId segment ids with their respective score
 * @param scoreByObjectId object ids with their respective score
 */
public static void fuseObjectsIntoSegments(TObjectDoubleMap<String> scoreBySegmentId,
    TObjectDoubleMap<String> scoreByObjectId) {
  MediaSegmentReader mediaSegmentReader = new MediaSegmentReader();
  try {
    Set<String> objectIds = scoreByObjectId.keySet();
    ListMultimap<String, MediaSegmentDescriptor> segmentsByObjectId =
        mediaSegmentReader.lookUpSegmentsOfObjects(objectIds);
    for (String objectId : segmentsByObjectId.keySet()) {
      assert scoreByObjectId.containsKey(objectId);
      double objectScore = scoreByObjectId.get(objectId);
      List<MediaSegmentDescriptor> segments = segmentsByObjectId.get(objectId);
      if (segments.isEmpty()) {
        logger.error("Object {} has no segments", objectId);
        continue;
      }
      fuseObjectScoreIntoSegments(scoreBySegmentId, objectScore, segments);
    }
  } finally {
    // Close the reader even when the lookup or fusion throws; the original leaked it on failure.
    mediaSegmentReader.close();
  }
}
 
Example #3
Source File: NestableLoadProfileEstimatorTest.java    From rheem with Apache License 2.0 5 votes vote down vote up
@Test
public void testFromJuelSpecificationWithImport() {
    // JUEL-based specification that imports the "numIterations" property into the CPU formula.
    String juelSpecification = "{" +
            "\"in\":2," +
            "\"out\":1," +
            "\"import\":[\"numIterations\"]," +
            "\"p\":0.8," +
            "\"cpu\":\"${(3*in0 + 2*in1 + 7*out0) * numIterations}\"," +
            "\"ram\":\"${6*in0 + 4*in1 + 14*out0}\"," +
            "\"overhead\":143," +
            "\"ru\":\"${rheem:logGrowth(0.1, 0.1, 10000, in0+in1)}\"" +
            "}";
    final NestableLoadProfileEstimator loadProfileEstimator =
            LoadProfileEstimators.createFromSpecification(null, juelSpecification);
    SomeExecutionOperator someOperator = new SomeExecutionOperator();
    TObjectDoubleMap<String> importedProperties = new TObjectDoubleHashMap<>();
    importedProperties.put("numIterations", 2d);
    // Two inputs (exact cardinalities 10 and 100) and one output (between 200 and 300).
    final LoadProfile loadProfile = loadProfileEstimator.estimate(new SimpleEstimationContext(
            new CardinalityEstimate[]{
                    new CardinalityEstimate(10, 10, 1d), new CardinalityEstimate(100, 100, 1d)
            },
            new CardinalityEstimate[]{new CardinalityEstimate(200, 300, 1d)},
            importedProperties,
            1
    ));

    // CPU bounds follow the "cpu" formula evaluated at the lower/upper output cardinality.
    Assert.assertEquals((3 * 10 + 2 * 100 + 7 * 200) * someOperator.getNumIterations(), loadProfile.getCpuUsage().getLowerEstimate(), 0.01);
    Assert.assertEquals((3 * 10 + 2 * 100 + 7 * 300) * someOperator.getNumIterations(), loadProfile.getCpuUsage().getUpperEstimate(), 0.01);
    Assert.assertEquals(
            OptimizationUtils.logisticGrowth(0.1, 0.1, 10000, 100 + 10),
            loadProfile.getResourceUtilization(),
            0.000000001
    );
    Assert.assertEquals(143, loadProfile.getOverheadMillis());
}
 
Example #4
Source File: AnomalyLikelihoodTest.java    From htm.java with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Test that sampleDistribution from a generated distribution returns roughly
 * the same parameters.
 */
@Test
public void testSampleDistribution() {
    // Expected distribution parameters: mean 0.5, variance 0.1.
    TObjectDoubleMap<String> expectedParams = new TObjectDoubleHashMap<>();
    expectedParams.put(KEY_MEAN, 0.5);
    expectedParams.put(KEY_STDEV, Math.sqrt(0.1));
    expectedParams.put(KEY_VARIANCE, 0.1);
    
    double[] drawnSamples = sampleDistribution(new MersenneTwister(), 0.5, 0.1, 1000);
    
    // Re-estimate the parameters from the drawn samples and compare within a loose tolerance.
    Statistic estimated = an.estimateNormal(drawnSamples, true);
    assertTrue(assertWithinEpsilon(expectedParams.get(KEY_MEAN), estimated.mean, 0.1));
    assertTrue(assertWithinEpsilon(expectedParams.get(KEY_VARIANCE), estimated.variance, 0.1));
    assertTrue(assertWithinEpsilon(expectedParams.get(KEY_STDEV), estimated.stdev, 0.1));
}
 
Example #5
Source File: AnomalyLikelihoodTest.java    From htm.java with GNU Affero General Public License v3.0 5 votes vote down vote up
@Test
public void testNormalProbability() {
    TObjectDoubleMap<String> distribution = new TObjectDoubleHashMap<>();

    // Standard normal distribution: mean 0, variance 1, stdev 1.
    distribution.put(KEY_MEAN, 0.0);
    distribution.put(KEY_VARIANCE, 1.0);
    distribution.put(KEY_STDEV, 1.0);
    
    // Values taken from http://en.wikipedia.org/wiki/Standard_normal_table
    assertWithinEpsilon(an.normalProbability(0.0, distribution), 0.5);
    assertWithinEpsilon(an.normalProbability(0.3, distribution), 0.3820885780);
    assertWithinEpsilon(an.normalProbability(1.0, distribution), 0.1587);
    // Symmetry around the mean: P(X > x) == 1 - P(X > -x).
    assertWithinEpsilon(1.0 - an.normalProbability(1.0, distribution), an.normalProbability(-1.0, distribution));
    assertWithinEpsilon(an.normalProbability(-0.3, distribution), 1.0 - an.normalProbability(0.3, distribution));
    
    // Non standard normal distribution
    // p = {"name": "normal", "mean": 1.0, "variance": 4.0, "stdev": 2.0}
    distribution.put(KEY_MEAN, 1.0);
    distribution.put(KEY_VARIANCE, 4.0);
    distribution.put(KEY_STDEV, 2.0);
    assertWithinEpsilon(an.normalProbability(1.0, distribution), 0.5);
    assertWithinEpsilon(an.normalProbability(2.0, distribution), 0.3085);
    assertWithinEpsilon(an.normalProbability(3.0, distribution), 0.1587);
    assertWithinEpsilon(an.normalProbability(3.0, distribution), 1.0 - an.normalProbability(-1.0, distribution));
    assertWithinEpsilon(an.normalProbability(0.0, distribution), 1.0 - an.normalProbability(2.0, distribution));
    
    // Non standard normal distribution
    // p = {"name": "normal", "mean": -2.0, "variance": 0.5, "stdev": math.sqrt(0.5)}
    distribution.put(KEY_MEAN, -2.0);
    distribution.put(KEY_VARIANCE, 0.5);
    distribution.put(KEY_STDEV, Math.sqrt(0.5));
    assertWithinEpsilon(an.normalProbability(-2.0, distribution), 0.5);
    assertWithinEpsilon(an.normalProbability(-1.5, distribution), 0.241963652);
    assertWithinEpsilon(an.normalProbability(-2.5, distribution), 1.0 - an.normalProbability(-1.5, distribution));
}
 
Example #6
Source File: ContinuousQueryDispatcher.java    From cineast with MIT License 5 votes vote down vote up
/**
 * Normalizes every segment score by the total retriever weight, clamps it to [0,1], sorts the
 * elements by descending score and truncates the result to at most {@code MAX_RESULTS} entries.
 */
private List<SegmentScoreElement> normalizeSortTruncate(
    TObjectDoubleMap<String> scoreBySegmentId) {
  List<SegmentScoreElement> elements = new ArrayList<>(scoreBySegmentId.size());
  scoreBySegmentId.forEachEntry((segmentId, rawScore) -> {
    double normalized = MathHelper.limit(rawScore / this.retrieverWeightSum, 0d, 1d);
    elements.add(new SegmentScoreElement(segmentId, normalized));
    return true; // keep iterating over all entries
  });

  elements.sort(ScoreElement.SCORE_COMPARATOR.reversed());
  // subList returns a view, matching the original behavior (no copy is made).
  return elements.size() > MAX_RESULTS ? elements.subList(0, MAX_RESULTS) : elements;
}
 
Example #7
Source File: ContinuousQueryDispatcher.java    From cineast with MIT License 5 votes vote down vote up
/**
 * Adds the weighted score of {@code next} into {@code scoreById}; scores outside [0,1] are
 * logged and ignored.
 */
private void addScoreElement(TObjectDoubleMap<String> scoreById, ScoreElement next,
    double weight) {
  String elementId = next.getId();
  double rawScore = next.getScore();
  if (rawScore < 0 || rawScore > 1) {
    LOGGER.warn("Score of retrieval task should be between [0,1], but was: {}, ignoring {}...",
        rawScore, next);
    return;
  }

  double weighted = rawScore * weight;
  // Accumulate into an existing entry, or create a new one holding the same value.
  scoreById.adjustOrPutValue(elementId, weighted, weighted);
}
 
Example #8
Source File: ContinuousQueryDispatcher.java    From cineast with MIT License 5 votes vote down vote up
/**
 * Notifies all listeners of the task's results and fuses each score element, weighted by the
 * task's retriever weight, into either the object or the segment score map depending on the
 * element's concrete type.
 */
private void addRetrievalResult(TObjectDoubleMap<String> scoreByObjectId,
    TObjectDoubleMap<String> scoreBySegmentId, RetrievalTask task,
    List<ScoreElement> scoreElements) {
  if (scoreElements == null) {
    LOGGER.warn("Retrieval task {} returned 'null' results.", task);
    return;
  }

  // Inform all registered listeners before fusing the scores.
  for (RetrievalResultListener listener : resultListeners) {
    listener.notify(scoreElements, task);
  }

  double retrieverWeight = this.retrieverWeights.get(task.getRetriever());
  for (ScoreElement element : scoreElements) {
    // Route the element into the map matching its concrete type.
    TObjectDoubleMap<String> targetMap;
    if (element instanceof ObjectScoreElement) {
      targetMap = scoreByObjectId;
    } else if (element instanceof SegmentScoreElement
        || element instanceof BooleanSegmentScoreElement) {
      targetMap = scoreBySegmentId; //TODO: Cleanup?
    } else {
      LOGGER.error(
          "Unknown subclass {} of ScoreElement in ContinuousQueryDispatcher.addRetrievalResult.",
          element.getClass().getSimpleName());
      continue;
    }
    this.addScoreElement(targetMap, element, retrieverWeight);
  }
}
 
Example #9
Source File: ContinuousQueryDispatcher.java    From cineast with MIT License 5 votes vote down vote up
/**
 * Creates a dispatcher and pre-computes the sum of all retriever weights, which is later used
 * for score normalization.
 */
private ContinuousQueryDispatcher(Function<Retriever, RetrievalTask> taskFactory,
    TObjectDoubleMap<Retriever> retrieverWeights,
    RetrieverInitializer initializer) {
  this.taskFactory = taskFactory;
  this.initializer = initializer;
  this.retrieverWeights = retrieverWeights;

  // Trove's primitive collections are not Iterable, so iterate explicitly.
  double totalWeight = 0d;
  for (TDoubleIterator it = retrieverWeights.valueCollection().iterator(); it.hasNext(); ) {
    totalWeight += it.next();
  }
  this.retrieverWeightSum = totalWeight;
  LOGGER.trace("Initialized continuous query dispatcher with retrievers {}", retrieverWeights);
}
 
Example #10
Source File: ScoreElement.java    From cineast with MIT License 5 votes vote down vote up
/**
 * Converts a distance-by-segment-id map into segment score elements, mapping each distance to a
 * score through the given correspondence function.
 */
static List<SegmentScoreElement> segmentsFromSegmentsDistanceMap(
        TObjectDoubleMap<String> distanceBySegmentId, CorrespondenceFunction correspondence) {
    ImmutableList.Builder<SegmentScoreElement> elements = ImmutableList.builder();
    distanceBySegmentId.forEachEntry((segmentId, distance) -> {
        elements.add(new SegmentScoreElement(segmentId, correspondence.applyAsDouble(distance)));
        return true; // keep iterating over all entries
    });
    return elements.build();
}
 
Example #11
Source File: ScoreElement.java    From cineast with MIT License 5 votes vote down vote up
/**
 * Like {@code segmentsFromSegmentsDistanceMap}, but widens the element type to
 * {@code ScoreElement}.
 */
static List<ScoreElement> scoresFromSegmentsDistanceMap(
        TObjectDoubleMap<String> distanceBySegmentId, CorrespondenceFunction correspondence) {
    // copyOf returns the same instance when given an ImmutableList, so no copy is made here.
    return ImmutableList.copyOf(
            segmentsFromSegmentsDistanceMap(distanceBySegmentId, correspondence));
}
 
Example #12
Source File: NestableLoadProfileEstimatorTest.java    From rheem with Apache License 2.0 5 votes vote down vote up
@Test
public void testMathExFromSpecificationWithImport() {
    // mathex-based specification that imports the "numIterations" property into the CPU formula.
    String mathExSpecification = "{" +
            "\"type\":\"mathex\"," +
            "\"in\":2," +
            "\"out\":1," +
            "\"import\":[\"numIterations\"]," +
            "\"p\":0.8," +
            "\"cpu\":\"(3*in0 + 2*in1 + 7*out0) * numIterations\"," +
            "\"ram\":\"6*in0 + 4*in1 + 14*out0\"," +
            "\"overhead\":143," +
            "\"ru\":\"logGrowth(0.1, 0.1, 10000, in0+in1)\"" +
            "}";
    final NestableLoadProfileEstimator loadProfileEstimator =
            LoadProfileEstimators.createFromSpecification(null, mathExSpecification);
    SomeExecutionOperator someOperator = new SomeExecutionOperator();
    TObjectDoubleMap<String> importedProperties = new TObjectDoubleHashMap<>();
    importedProperties.put("numIterations", 2d);
    // Two inputs (exact cardinalities 10 and 100) and one output (between 200 and 300).
    final LoadProfile loadProfile = loadProfileEstimator.estimate(new SimpleEstimationContext(
            new CardinalityEstimate[]{
                    new CardinalityEstimate(10, 10, 1d), new CardinalityEstimate(100, 100, 1d)
            },
            new CardinalityEstimate[]{new CardinalityEstimate(200, 300, 1d)},
            importedProperties,
            1
    ));

    // CPU bounds follow the "cpu" formula evaluated at the lower/upper output cardinality.
    Assert.assertEquals((3 * 10 + 2 * 100 + 7 * 200) * someOperator.getNumIterations(), loadProfile.getCpuUsage().getLowerEstimate(), 0.01);
    Assert.assertEquals((3 * 10 + 2 * 100 + 7 * 300) * someOperator.getNumIterations(), loadProfile.getCpuUsage().getUpperEstimate(), 0.01);
    Assert.assertEquals(
            OptimizationUtils.logisticGrowth(0.1, 0.1, 10000, 100 + 10),
            loadProfile.getResourceUtilization(),
            0.000000001
    );
    Assert.assertEquals(143, loadProfile.getOverheadMillis());
}
 
Example #13
Source File: SimpleEstimationContext.java    From rheem with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a new instance.
 *
 * @param inputCardinalities cardinality estimates, one per input
 * @param outputCardinalities cardinality estimates, one per output
 * @param doubleProperties named double-valued properties for this context
 * @param numExecutions number of executions this context accounts for
 */
public SimpleEstimationContext(CardinalityEstimate[] inputCardinalities,
                               CardinalityEstimate[] outputCardinalities,
                               TObjectDoubleMap<String> doubleProperties,
                               int numExecutions) {
    // Plain field assignments; the arrays and map are stored as-is (not defensively copied),
    // so callers must not mutate them afterwards — NOTE(review): confirm this is intended.
    this.inputCardinalities = inputCardinalities;
    this.outputCardinalities = outputCardinalities;
    this.doubleProperties = doubleProperties;
    this.numExecutions = numExecutions;
}
 
Example #14
Source File: ScoreElement.java    From cineast with MIT License 4 votes vote down vote up
/**
 * Interprets the map values directly as scores, i.e. applies the identity correspondence.
 */
static List<SegmentScoreElement> segmentsFromSegmentsMap(
        TObjectDoubleMap<String> scoreBySegmentId) {
    CorrespondenceFunction identity = CorrespondenceFunction.identity();
    return segmentsFromSegmentsDistanceMap(scoreBySegmentId, identity);
}
 
Example #15
Source File: EffectOfRandom10Percent.java    From systemsgenetics with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Reads a predicted HPO term file (tab-separated, first line skipped as a header) and maps the
 * term id in column 0 to the double value in column 3.
 *
 * @param predictedHpoTermFile tab-separated prediction file
 * @return map from HPO term id (column 0) to its predicted value (column 3)
 * @throws FileNotFoundException if the file does not exist
 * @throws IOException if reading or parsing the file fails
 */
public static TObjectDoubleMap<String> readPredictedHpoTermFile(File predictedHpoTermFile) throws FileNotFoundException, IOException {

		final CSVParser parser = new CSVParserBuilder().withSeparator('\t').withIgnoreQuotations(true).build();
		final TObjectDoubleMap<String> hpos = new TObjectDoubleHashMap<>();

		// try-with-resources closes the reader (and underlying file handle) even when readNext
		// or Double.parseDouble throws; the original leaked the handle on failure.
		try (CSVReader reader = new CSVReaderBuilder(new BufferedReader(new FileReader(predictedHpoTermFile))).withSkipLines(1).withCSVParser(parser).build()) {
			String[] nextLine;
			while ((nextLine = reader.readNext()) != null) {
				hpos.put(nextLine[0], Double.parseDouble(nextLine[3]));
			}
		}

		return hpos;

	}
 
Example #16
Source File: InvestigateAucChildParent.java    From systemsgenetics with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Reads a predicted HPO term file (tab-separated, first line skipped as a header), keeping only
 * rows whose column 4 value is &lt;= 0.05, and maps the term id in column 0 to the double value
 * in column 3.
 *
 * @param predictedHpoTermFile tab-separated prediction file
 * @return map from significant HPO term id (column 0) to its predicted value (column 3)
 * @throws FileNotFoundException if the file does not exist
 * @throws IOException if reading or parsing the file fails
 */
public static TObjectDoubleMap<String> readSignificantPredictedHpoTermFile(File predictedHpoTermFile) throws FileNotFoundException, IOException {

		final CSVParser parser = new CSVParserBuilder().withSeparator('\t').withIgnoreQuotations(true).build();
		final TObjectDoubleMap<String> hpos = new TObjectDoubleHashMap<>();

		// try-with-resources closes the reader (and underlying file handle) even when readNext
		// or Double.parseDouble throws; the original leaked the handle on failure.
		try (CSVReader reader = new CSVReaderBuilder(new BufferedReader(new FileReader(predictedHpoTermFile))).withSkipLines(1).withCSVParser(parser).build()) {
			String[] nextLine;
			while ((nextLine = reader.readNext()) != null) {
				// Keep only significant rows (column 4 <= 0.05; presumably a p-value — TODO confirm).
				if (Double.parseDouble(nextLine[4]) <= 0.05) {
					hpos.put(nextLine[0], Double.parseDouble(nextLine[3]));
				}
			}
		}

		return hpos;

	}
 
Example #17
Source File: EffectOfRandom10Percent.java    From systemsgenetics with GNU General Public License v3.0 3 votes vote down vote up
/**
 * Loads the bonferroni-corrected HPO prediction AUC file.
 *
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, FileNotFoundException, ParseException {

	// Hard-coded location of the prediction AUC file.
	final File predictionsFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_auc_bonferroni.txt");

	// The returned map is currently unused; the call still validates that the file parses.
	TObjectDoubleMap<String> hpoAuc = readPredictedHpoTermFile(predictionsFile);

}
 
Example #18
Source File: AnomalyLikelihood.java    From htm.java with GNU Affero General Public License v3.0 2 votes vote down vote up
/**
 * Given the normal distribution specified in {@code named}, returns the probability of getting
 * samples &gt; x. This is essentially the Q-function.
 *
 * @param x value whose upper-tail probability is computed
 * @param named map holding the KEY_MEAN, KEY_VARIANCE and KEY_STDEV entries
 * @return probability of drawing a sample greater than {@code x}
 */
public double normalProbability(double x, TObjectDoubleMap<String> named) {
    // Unpack the map into a Statistic and delegate to the Statistic-based overload.
    Statistic distribution =
        new Statistic(named.get(KEY_MEAN), named.get(KEY_VARIANCE), named.get(KEY_STDEV));
    return normalProbability(x, distribution);
}
 
Example #19
Source File: InvestigateAucChildParent.java    From systemsgenetics with GNU General Public License v3.0 2 votes vote down vote up
/**
 * For every significantly predicted HPO term, looks up its direct "is_a" parents that are also
 * significantly predicted and prints a tab-separated line with the term, its parents (joined by
 * ';'), the term's AUC and the mean parent AUC.
 *
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, FileNotFoundException, ParseException {

	final File predictedHpoTermFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_auc_bonferroni.txt");

	TObjectDoubleMap<String> hpoAuc = readSignificantPredictedHpoTermFile(predictedHpoTermFile);

	final File hpoOboFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\HPO\\135\\hp.obo");

	Ontology hpoOntology = HpoFinder.loadHpoOntology(hpoOboFile);
	// Removed stray empty statement (";;") from the original.
	final Term is_a = hpoOntology.getTerm("is_a");

	for (String hpo : hpoAuc.keySet()) {
		final Term hpoTerm = hpoOntology.getTerm(hpo);

		// Collect the direct "is_a" parents that were themselves significantly predicted.
		final HashSet<String> hpoParents = new HashSet<>();
		for (Triple t : hpoOntology.getTriples(hpoTerm, null, is_a)) {
			String parent = t.getObject().getName();
			if (hpoAuc.containsKey(parent)) {
				hpoParents.add(parent);
			}
		}

		if (!hpoParents.isEmpty()) {
			// Mean AUC over all significant parents of this term.
			double meanParents = 0;
			for (String parentHpo : hpoParents) {
				meanParents += hpoAuc.get(parentHpo);
			}
			meanParents /= hpoParents.size();

			System.out.println(hpo + "\t" + String.join(";", hpoParents) + "\t" + hpoAuc.get(hpo) + "\t" + meanParents);
		}
	}

}