org.apache.commons.math3.distribution.AbstractIntegerDistribution Java Examples
The following examples show how to use
org.apache.commons.math3.distribution.AbstractIntegerDistribution.
Each example notes its source file, originating project, and license.
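As background, here is a minimal, self-contained sketch (not taken from any of the projects below) showing how concrete subclasses such as BinomialDistribution and PoissonDistribution can be driven through the AbstractIntegerDistribution base type; the parameter values are arbitrary.

import org.apache.commons.math3.distribution.AbstractIntegerDistribution;
import org.apache.commons.math3.distribution.BinomialDistribution;
import org.apache.commons.math3.distribution.PoissonDistribution;

public class IntegerDistributionDemo {
    public static void main(String[] args) {
        // Any concrete integer distribution can be handled through the abstract base type.
        AbstractIntegerDistribution binomial = new BinomialDistribution(10, 0.3);
        AbstractIntegerDistribution poisson = new PoissonDistribution(4.0);

        // Fix the seed so the sampled values are reproducible.
        binomial.reseedRandomGenerator(42L);

        System.out.println("P(X = 3)          = " + binomial.probability(3));
        System.out.println("P(X <= 3)         = " + binomial.cumulativeProbability(3));
        System.out.println("95th percentile   = " + poisson.inverseCumulativeProbability(0.95));
        System.out.println("one random sample = " + binomial.sample());
    }
}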
Example #1
Source File: QualityProbability.java From jstarcraft-ai with Apache License 2.0
public QualityProbability(Class<? extends RandomGenerator> randomClazz, int randomSeed,
        Class<? extends AbstractIntegerDistribution> distributionClazz, Object... distributionParameters) {
    this.randomSeed = randomSeed;
    this.random = ReflectionUtility.getInstance(randomClazz, randomSeed);
    this.distributionParameters = distributionParameters;
    // Prepend the random generator so the distribution constructor receives it as its first argument.
    distributionParameters = ArrayUtility.insert(0, distributionParameters, random);
    this.distribution = ReflectionUtility.getInstance(distributionClazz, distributionParameters);
}
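ReflectionUtility and ArrayUtility above are jstarcraft helpers that instantiate the distribution with the RandomGenerator prepended to its constructor arguments. A rough equivalent using only JDK reflection might look like the sketch below; the create helper and its pick-by-parameter-count rule are assumptions for illustration, not the library's actual logic.

import java.lang.reflect.Constructor;

import org.apache.commons.math3.distribution.AbstractIntegerDistribution;
import org.apache.commons.math3.distribution.BinomialDistribution;
import org.apache.commons.math3.random.RandomGenerator;
import org.apache.commons.math3.random.Well19937c;

public class ReflectiveDistributionFactory {

    // Hypothetical helper: prepends the RandomGenerator to the supplied parameters
    // (mirroring ArrayUtility.insert(0, ...)) and invokes the first public constructor
    // whose parameter count matches.
    static AbstractIntegerDistribution create(Class<? extends AbstractIntegerDistribution> clazz,
                                              RandomGenerator random,
                                              Object... parameters) throws Exception {
        Object[] arguments = new Object[parameters.length + 1];
        arguments[0] = random;
        System.arraycopy(parameters, 0, arguments, 1, parameters.length);
        for (Constructor<?> constructor : clazz.getConstructors()) {
            if (constructor.getParameterTypes().length == arguments.length) {
                return (AbstractIntegerDistribution) constructor.newInstance(arguments);
            }
        }
        throw new IllegalArgumentException("no matching constructor on " + clazz.getName());
    }

    public static void main(String[] args) throws Exception {
        RandomGenerator random = new Well19937c(17);
        // Matches BinomialDistribution(RandomGenerator rng, int trials, double p).
        AbstractIntegerDistribution distribution = create(BinomialDistribution.class, random, 20, 0.25);
        System.out.println("sample = " + distribution.sample());
    }
}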
Example #2
Source File: QualityProbability.java From jstarcraft-ai with Apache License 2.0
@Override
public void afterLoad() {
    try {
        random = (RandomGenerator) ReflectionUtility.getInstance(Class.forName(randomClass), randomSeed);
        Object[] parameters = ArrayUtility.insert(0, distributionParameters, random);
        distribution = (AbstractIntegerDistribution) ReflectionUtility.getInstance(Class.forName(distributionClass), parameters);
    } catch (Exception exception) {
    }
}
Example #3
Source File: FisherExactTest.java From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Computes the 2-sided p-value of Fisher's exact test on a normalized table,
 * i.e. one whose four entries sum to less than 2 * 200.
 */
public static double twoSidedPValue(final int[][] normalizedTable) {
    Utils.nonNull(normalizedTable);
    Utils.validateArg(normalizedTable.length == 2, () -> "input must be 2x2 " + Arrays.deepToString(normalizedTable));
    Utils.validateArg(normalizedTable[0] != null && normalizedTable[0].length == 2, () -> "input must be 2x2 " + Arrays.deepToString(normalizedTable));
    Utils.validateArg(normalizedTable[1] != null && normalizedTable[1].length == 2, () -> "input must be 2x2 " + Arrays.deepToString(normalizedTable));

    // Note: this implementation follows the one in the R base package
    final int[][] x = normalizedTable;
    final int m = x[0][0] + x[0][1];
    final int n = x[1][0] + x[1][1];
    final int k = x[0][0] + x[1][0];
    final int lo = Math.max(0, k - n);
    final int hi = Math.min(k, m);
    final IndexRange support = new IndexRange(lo, hi + 1);

    if (support.size() <= 1) {
        // special case, support has only one value
        return 1.0;
    }

    final AbstractIntegerDistribution dist = new HypergeometricDistribution(null, m + n, m, k);
    final double[] logds = support.mapToDouble(dist::logProbability);
    final double threshold = logds[x[0][0] - lo] * REL_ERR;
    final double[] log10ds = DoubleStream.of(logds).filter(d -> d <= threshold).map(MathUtils::logToLog10).toArray();
    final double pValue = MathUtils.sumLog10(log10ds);

    // min is necessary as numerical precision can result in pValue being slightly greater than 1.0
    return Math.min(pValue, 1.0);
}
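The snippet above depends on GATK utilities (Utils, IndexRange, MathUtils, REL_ERR). A stand-alone sketch of the same two-sided computation using only commons-math could look like the following; the round-off tolerance and the example table are illustrative choices, not GATK's.

import org.apache.commons.math3.distribution.AbstractIntegerDistribution;
import org.apache.commons.math3.distribution.HypergeometricDistribution;

public class FisherExactSketch {

    /** Two-sided Fisher's exact test p-value for a 2x2 contingency table. */
    static double twoSidedPValue(int[][] t) {
        final int m = t[0][0] + t[0][1];          // row 1 total
        final int n = t[1][0] + t[1][1];          // row 2 total
        final int k = t[0][0] + t[1][0];          // column 1 total
        final int lo = Math.max(0, k - n);
        final int hi = Math.min(k, m);
        if (lo == hi) {
            return 1.0;                            // only one attainable table
        }
        final AbstractIntegerDistribution dist = new HypergeometricDistribution(m + n, m, k);
        final double observed = dist.probability(t[0][0]);
        final double threshold = observed * (1 + 1e-7);  // tolerance against round-off
        double pValue = 0.0;
        for (int x = lo; x <= hi; x++) {
            final double p = dist.probability(x);
            if (p <= threshold) {                  // tables at least as extreme as the observed one
                pValue += p;
            }
        }
        return Math.min(pValue, 1.0);
    }

    public static void main(String[] args) {
        int[][] table = { { 3, 1 }, { 1, 3 } };
        System.out.println("two-sided p = " + twoSidedPValue(table)); // ~0.486
    }
}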
Example #4
Source File: RedisHashLoadGenerator.java From yb-sample-apps with Apache License 2.0
public AbstractIntegerDistribution getSubkeyDistribution() {
    return subkeyFreqDist;
}
Example #5
Source File: RedisHashPipelined.java From yb-sample-apps with Apache License 2.0
public RedisHashPipelined() {
    int kMinValueSize = 10; // Give enough room for the checksum.
    int kMaxValueSize = appConfig.maxValueSize;
    if (appConfig.valueSizeZipfExponent > 0) {
        int minBits = log2ceil(kMinValueSize);
        int maxBits = log2ceil(kMaxValueSize);
        AbstractIntegerDistribution valueSizeDist = new ZipfDistribution(
                maxBits - minBits + 1, appConfig.valueSizeZipfExponent);

        // Get (1 + numSubKey) value-sizes from the above distribution.
        // Scale up/down the values such that the expected-mean value is appConfig.valueSize.
        // Adjust values to make sure they are within [kMinValueSize, kMaxValueSize].
        subkeyValueSize = valueSizeDist.sample(appConfig.numSubkeysPerKey + 1);
        Arrays.sort(subkeyValueSize);

        // Estimate the expected size of the subkey value size.
        AbstractIntegerDistribution freqDist = getRedisHashLoadGenerator().getSubkeyDistribution();
        double expected_size = 0;
        for (int i = 0; i < subkeyValueSize.length; i++) {
            subkeyValueSize[i] = (1 << (subkeyValueSize[i] + minBits - 1));
            expected_size += freqDist.probability(i) * subkeyValueSize[i];
        }
        LOG.debug("Expected size for the distribution is " + valueSizeDist.getNumericalMean());

        // Update the sizes so that the expected is appConfig.valueSize.
        for (int i = 0; i < subkeyValueSize.length; i++) {
            subkeyValueSize[i] = (int) Math.round(
                    subkeyValueSize[i] * appConfig.valueSize / expected_size);
            // Set the min value size to be at least kMinValueSize.
            if (subkeyValueSize[i] < kMinValueSize) {
                LOG.debug("Updating value size for subkey[ " + i + "] from "
                        + subkeyValueSize[i] + " to " + kMinValueSize);
                subkeyValueSize[i] = kMinValueSize;
            }
            if (subkeyValueSize[i] > kMaxValueSize) {
                LOG.debug("Updating value size for subkey[ " + i + "] from "
                        + subkeyValueSize[i] + " to " + kMaxValueSize);
                subkeyValueSize[i] = kMaxValueSize;
            }
            LOG.info("Value size for subkey[ " + i + "] is " + subkeyValueSize[i]);
        }
    } else {
        subkeyValueSize = new int[appConfig.numSubkeysPerKey + 1];
        Arrays.fill(subkeyValueSize, appConfig.valueSize);
    }
    subkeyValueBuffers = new byte[subkeyValueSize.length][];
    for (int i = 0; i < subkeyValueSize.length; i++) {
        subkeyValueBuffers[i] = new byte[subkeyValueSize[i]];
    }
}
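Stripped of the yb-sample-apps configuration and logging, the size-scaling idea above reduces to: sample power-of-two size buckets from a Zipf distribution, weight them by the subkey access-frequency distribution to get an expected size, then rescale and clamp so the expected size hits a target mean. The sketch below uses made-up parameters, and a uniform access-frequency distribution stands in for getSubkeyDistribution().

import java.util.Arrays;

import org.apache.commons.math3.distribution.AbstractIntegerDistribution;
import org.apache.commons.math3.distribution.UniformIntegerDistribution;
import org.apache.commons.math3.distribution.ZipfDistribution;

public class ZipfValueSizes {
    public static void main(String[] args) {
        // Illustrative stand-ins for the appConfig values.
        final int numSubkeys = 8;
        final int targetMeanSize = 256;   // bytes
        final int minSize = 10;
        final int maxSize = 4096;

        final int minBits = 32 - Integer.numberOfLeadingZeros(minSize - 1);  // ceil(log2)
        final int maxBits = 32 - Integer.numberOfLeadingZeros(maxSize - 1);

        // Zipf over the number of distinct power-of-two buckets; samples fall in [1, n].
        AbstractIntegerDistribution sizeDist = new ZipfDistribution(maxBits - minBits + 1, 1.1);
        int[] sizes = sizeDist.sample(numSubkeys + 1);
        Arrays.sort(sizes);

        // Assume every subkey is accessed with equal probability.
        AbstractIntegerDistribution freqDist = new UniformIntegerDistribution(0, sizes.length - 1);

        double expected = 0;
        for (int i = 0; i < sizes.length; i++) {
            sizes[i] = 1 << (sizes[i] + minBits - 1);          // bucket index -> bytes
            expected += freqDist.probability(i) * sizes[i];
        }

        // Rescale so the expected size matches the target, then clamp to [minSize, maxSize].
        for (int i = 0; i < sizes.length; i++) {
            int scaled = (int) Math.round(sizes[i] * (double) targetMeanSize / expected);
            sizes[i] = Math.min(maxSize, Math.max(minSize, scaled));
        }
        System.out.println("subkey value sizes: " + Arrays.toString(sizes));
    }
}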
Example #6
Source File: InsertSizeDistribution.java From gatk with BSD 3-Clause "New" or "Revised" License
private AbstractIntegerDistribution dist() {
    initializeDistribution();
    return dist;
}