Java Code Examples for htsjdk.variant.variantcontext.VariantContext#isMonomorphicInSamples()
The following examples show how to use htsjdk.variant.variantcontext.VariantContext#isMonomorphicInSamples(). Each example notes its original project, source file, and license above the code.
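All of the examples below come from the GATK codebase, where isMonomorphicInSamples() is typically used to skip sites at which no called genotype carries a non-reference allele (it is the logical complement of isPolymorphicInSamples(), and the GATK code usually pairs it with a hasGenotypes() check). As a minimal sketch of calling it directly with htsjdk, assuming only a placeholder VCF path of your own (example.vcf is hypothetical and not taken from the examples below):

import java.io.File;

import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFFileReader;

public class MonomorphicCheckExample {
    public static void main(String[] args) {
        // Hypothetical input file: substitute a real VCF of your own.
        final File vcf = new File("example.vcf");

        // requireIndex = false so a plain, non-indexed VCF can be streamed.
        try (final VCFFileReader reader = new VCFFileReader(vcf, false)) {
            for (final VariantContext vc : reader) {
                // isMonomorphicInSamples() is only informative when genotypes are present.
                if (vc.hasGenotypes() && vc.isMonomorphicInSamples()) {
                    System.out.println(vc.getContig() + ":" + vc.getStart()
                            + " is monomorphic in the samples");
                }
            }
        }
    }
}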
Example 1
Source File: SampleList.java From gatk with BSD 3-Clause "New" or "Revised" License
@Override
public Map<String, Object> annotate(final ReferenceContext ref,
                                    final VariantContext vc,
                                    final AlleleLikelihoods<GATKRead, Allele> likelihoods) {
    Utils.nonNull(vc);
    if ( vc.isMonomorphicInSamples() || !vc.hasGenotypes() ) {
        return Collections.emptyMap();
    }

    final StringBuilder samples = new StringBuilder();
    for ( final Genotype genotype : vc.getGenotypesOrderedByName() ) {
        if ( genotype.isCalled() && !genotype.isHomRef() ) {
            if ( samples.length() > 0 ) {
                samples.append(",");
            }
            samples.append(genotype.getSampleName());
        }
    }

    if ( samples.length() == 0 ) {
        return Collections.emptyMap();
    }

    return Collections.singletonMap(getKeyNames().get(0), samples.toString());
}
Example 2
Source File: ValidationReport.java From gatk with BSD 3-Clause "New" or "Revised" License
private SiteStatus calcSiteStatus(VariantContext vc) {
    if ( vc == null ) return SiteStatus.NO_CALL;
    if ( vc.isFiltered() ) return SiteStatus.FILTERED;
    if ( vc.isMonomorphicInSamples() ) return SiteStatus.MONO;
    if ( vc.hasGenotypes() ) return SiteStatus.POLY; // must be polymorphic if isMonomorphicInSamples was false and there are genotypes

    if ( vc.hasAttribute(VCFConstants.ALLELE_COUNT_KEY) ) {
        int ac = 0;
        if ( vc.getNAlleles() > 2 ) {
            return SiteStatus.POLY;
        } else {
            ac = vc.getAttributeAsInt(VCFConstants.ALLELE_COUNT_KEY, 0);
        }
        return ac > 0 ? SiteStatus.POLY : SiteStatus.MONO;
    } else {
        return TREAT_ALL_SITES_IN_EVAL_VCF_AS_CALLED ? SiteStatus.POLY : SiteStatus.NO_CALL; // we can't figure out what to do
    }
}
Example 3
Source File: VariantSummary.java From gatk with BSD 3-Clause "New" or "Revised" License
public void update2(VariantContext eval, VariantContext comp, ReferenceContext referenceContext, ReadsContext readsContext, FeatureContext featureContext) {
    if ( eval == null || (getWalker().ignoreAC0Sites() && eval.isMonomorphicInSamples()) )
        return;

    final Type type = getType(eval);
    if ( type == null )
        return;

    TypeSampleMap titvTable = null;

    // update DP, if possible
    if ( eval.hasAttribute(VCFConstants.DEPTH_KEY) )
        depthPerSample.inc(type, ALL);

    // update counts
    allVariantCounts.inc(type, ALL);

    // type specific calculations
    if ( type == Type.SNP && eval.isBiallelic() ) {
        titvTable = GATKVariantContextUtils.isTransition(eval) ? transitionsPerSample : transversionsPerSample;
        titvTable.inc(type, ALL);
    }

    // novelty calculation
    if ( comp != null || (type == Type.CNV && overlapsKnownCNV(eval, featureContext)) )
        knownVariantCounts.inc(type, ALL);

    // per sample metrics
    for ( final Genotype g : eval.getGenotypes() ) {
        if ( ! g.isNoCall() && ! g.isHomRef() ) {
            countsPerSample.inc(type, g.getSampleName());

            // update transition / transversion ratio
            if ( titvTable != null ) titvTable.inc(type, g.getSampleName());

            if ( g.hasDP() ) depthPerSample.inc(type, g.getSampleName());
        }
    }
}
Example 4
Source File: VariantAFEvaluator.java From gatk with BSD 3-Clause "New" or "Revised" License
public void update1(VariantContext vc, final ReferenceContext referenceContext, final ReadsContext readsContext, final FeatureContext featureContext) {
    vc.getStart();

    if (vc == null || !vc.isSNP() || (getWalker().ignoreAC0Sites() && vc.isMonomorphicInSamples())) {
        return;
    }

    for (final Genotype genotype : vc.getGenotypes()) {
        // eval array
        if (!genotype.isNoCall()) {
            if (genotype.getPloidy() != PLOIDY) {
                throw new UserException.BadInput("This tool only works with ploidy 2");
            }
            // add AF at this site
            this.totalCalledSites += 1;
            int numReferenceAlleles = genotype.countAllele(vc.getReference());
            double varAFHere = (PLOIDY - numReferenceAlleles) / PLOIDY;
            this.sumVariantAFs += varAFHere;

            totalHetSites += numReferenceAlleles == 1 ? 1 : 0;
            totalHomVarSites += numReferenceAlleles == 0 ? 1 : 0;
            totalHomRefSites += numReferenceAlleles == 2 ? 1 : 0;
        }
    }

    if (!vc.hasGenotypes()) {
        // comp (sites only thousand genomes)
        this.totalCalledSites += 1;
        this.sumVariantAFs += vc.getAttributeAsDouble("AF", 0.0);
    }
}
Example 5
Source File: MultiallelicSummary.java From gatk with BSD 3-Clause "New" or "Revised" License
public void update2(VariantContext eval, VariantContext comp, final ReferenceContext referenceContext, final ReadsContext readsContext, final FeatureContext featureContext) {
    if ( eval == null || (getWalker().ignoreAC0Sites() && eval.isMonomorphicInSamples()) )
        return;

    // update counts
    switch ( eval.getType() ) {
        case SNP:
            nSNPs++;
            if ( !eval.isBiallelic() ) {
                nMultiSNPs++;
                calculatePairwiseTiTv(eval);
                calculateSNPPairwiseNovelty(eval, comp);
            }
            break;
        case INDEL:
            nIndels++;
            if ( !eval.isBiallelic() ) {
                nMultiIndels++;
                calculateIndelPairwiseNovelty(eval, comp);
            }
            break;
        default:
            //throw new UserException.BadInput("Unexpected variant context type: " + eval);
            break;
    }

    return;
}
Example 6
Source File: IndelLengthHistogram.java From gatk with BSD 3-Clause "New" or "Revised" License
@Override
public void update1(final VariantContext eval, final ReferenceContext referenceContext, final ReadsContext readsContext, final FeatureContext featureContext) {
    if ( eval.isIndel() && ! eval.isComplexIndel() ) {
        if ( ! ( getWalker().ignoreAC0Sites() && eval.isMonomorphicInSamples() ) ) {
            // only if we are actually polymorphic in the subsetted samples should we count the allele
            for ( Allele alt : eval.getAlternateAlleles() ) {
                final int alleleSize = alt.length() - eval.getReference().length();
                if ( alleleSize == 0 )
                    throw new GATKException("Allele size not expected to be zero for indel: alt = " + alt + " ref = " + eval.getReference());
                updateLengthHistogram(eval.getReference(), alt);
            }
        }
    }
}
Example 7
Source File: ThetaVariantEvaluator.java From gatk with BSD 3-Clause "New" or "Revised" License
public void update1(VariantContext vc, final ReferenceContext referenceContext, final ReadsContext readsContext, final FeatureContext featureContext) {
    if (vc == null || !vc.isSNP() || (getWalker().ignoreAC0Sites() && vc.isMonomorphicInSamples())) {
        return;
    }

    // this maps allele to a count
    ConcurrentMap<String, Integer> alleleCounts = new ConcurrentHashMap<String, Integer>();

    int numHetsHere = 0;
    int numGenosHere = 0;
    int numIndsHere = 0;

    for (final Genotype genotype : vc.getGenotypes()) {
        numIndsHere++;
        if (!genotype.isNoCall()) {
            // increment stats for heterozygosity
            if (genotype.isHet()) {
                numHetsHere++;
            }

            numGenosHere++;
            // increment stats for pairwise mismatches
            for (Allele allele : genotype.getAlleles()) {
                if (allele.isCalled()) {
                    String alleleString = allele.toString();
                    alleleCounts.putIfAbsent(alleleString, 0);
                    alleleCounts.put(alleleString, alleleCounts.get(alleleString) + 1);
                }
            }
        }
    }

    if (numGenosHere > 0) {
        // only if we have at least one called genotype
        this.numSites++;

        this.totalHet += numHetsHere / (double) numGenosHere;

        // compute based on num sites
        float harmonicFactor = 0;
        for (int i = 1; i <= numIndsHere; i++) {
            harmonicFactor += 1.0 / i;
        }
        this.thetaRegionNumSites += 1.0 / harmonicFactor;

        // now compute pairwise mismatches
        float numPairwise = 0;
        int numDiffs = 0;

        for (String allele1 : alleleCounts.keySet()) {
            int allele1Count = alleleCounts.get(allele1);

            for (String allele2 : alleleCounts.keySet()) {
                if (allele1.compareTo(allele2) < 0) {
                    continue;
                }
                if (allele1.compareTo(allele2) == 0) {
                    numPairwise += allele1Count * (allele1Count - 1) * .5;
                } else {
                    int allele2Count = alleleCounts.get(allele2);
                    numPairwise += allele1Count * allele2Count;
                    numDiffs += allele1Count * allele2Count;
                }
            }
        }

        if (numPairwise > 0) {
            this.totalAvgDiffs += numDiffs / numPairwise;
        }
    }
}