Java Code Examples for htsjdk.variant.variantcontext.GenotypeBuilder#DP
The following examples show how to use
htsjdk.variant.variantcontext.GenotypeBuilder#DP.
You can vote up the examples you find useful or vote down those you don't,
and follow the links above each example to visit the original project or source file. Related API usage can be found on the sidebar.
Example 1
Source File: HomRefBlock.java From gatk with BSD 3-Clause "New" or "Revised" License | 6 votes |
@Override Genotype createHomRefGenotype(final String sampleName, final boolean floorBlocks) { final GenotypeBuilder gb = new GenotypeBuilder(sampleName, Collections.nCopies(getPloidy(), getRef())); gb.noAD().noPL().noAttributes(); // clear all attributes final int[] minPLs = getMinPLs(); final int[] minPPs = getMinPPs(); if (!floorBlocks) { gb.PL(minPLs); gb.GQ(GATKVariantContextUtils.calculateGQFromPLs(minPPs != null ? minPPs : minPLs)); gb.attribute(GATKVCFConstants.MIN_DP_FORMAT_KEY, getMinDP()); } else { gb.GQ(getGQLowerBound()); } gb.DP(getMedianDP()); if (minPPs != null) { gb.attribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY, Utils.listFromPrimitives(minPPs)); } return gb.make(); }
Example 2
Source File: VariantEnricher.java From hmftools with GNU General Public License v3.0 | 5 votes |
/**
 * Builds an htsjdk VariantContext (plus the derived SomaticVariant) for each
 * germline record and stores both back on the record.
 */
private void buildVariants(final String sampleId, List<BachelorGermlineVariant> bachRecords)
{
    for (final BachelorGermlineVariant record : bachRecords)
    {
        final VariantContextBuilder contextBuilder = new VariantContextBuilder();
        contextBuilder.id(record.VariantId);
        // End position spans the full reference allele.
        contextBuilder.loc(record.Chromosome, record.Position, record.Position + record.Ref.length() - 1);

        final List<Allele> alleleList = Lists.newArrayList();
        alleleList.add(Allele.create(record.Ref, true));
        alleleList.add(Allele.create(record.Alts, false));
        contextBuilder.alleles(alleleList);

        // Single sample genotype carrying tumor allele depths and germline read depth.
        final GenotypeBuilder genotypeBuilder = new GenotypeBuilder(sampleId, contextBuilder.getAlleles());
        genotypeBuilder.AD(new int[] { record.getTumorRefCount(), record.getTumorAltCount() });
        genotypeBuilder.DP(record.getGermlineReadDepth());

        final List<Genotype> genotypeList = Lists.newArrayList();
        genotypeList.add(genotypeBuilder.make());
        contextBuilder.genotypes(genotypeList);

        final VariantContext variantContext = contextBuilder.make();
        variantContext.getCommonInfo().addFilter("PASS");
        variantContext.getCommonInfo().putAttribute(SNPEFF_IDENTIFIER, record.Annotations);
        record.setVariantContext(variantContext);

        final SomaticVariant somaticVariant =
                SomaticVariantFactory.unfilteredInstance().createSomaticVariant(sampleId, variantContext);
        record.setSomaticVariant(somaticVariant);
    }
}
Example 3
Source File: TLODBlock.java From gatk with BSD 3-Clause "New" or "Revised" License | 5 votes |
@Override Genotype createHomRefGenotype(final String sampleName, final boolean floorBlock) { final GenotypeBuilder gb = new GenotypeBuilder(sampleName, Collections.nCopies(2, getRef())); //FIXME: for somatic stuff we output the genotype as diploid because that's familiar for human gb.noAD().noPL().noAttributes(); // clear all attributes gb.attribute(GATKVCFConstants.TUMOR_LOG_10_ODDS_KEY, minBlockLOD); gb.DP(getMedianDP()); gb.attribute(GATKVCFConstants.MIN_DP_FORMAT_KEY, getMinDP()); return gb.make(); }
Example 4
Source File: DepthPerSampleHC.java From gatk with BSD 3-Clause "New" or "Revised" License | 5 votes |
@Override public void annotate( final ReferenceContext ref, final VariantContext vc, final Genotype g, final GenotypeBuilder gb, final AlleleLikelihoods<GATKRead, Allele> likelihoods ) { Utils.nonNull(vc); Utils.nonNull(g); Utils.nonNull(gb); if ( likelihoods == null || !g.isCalled() ) { logger.warn("Annotation will not be calculated, genotype is not called or alleleLikelihoodMap is null"); return; } // check that there are reads final String sample = g.getSampleName(); if (likelihoods.sampleEvidenceCount(likelihoods.indexOfSample(sample)) == 0) { gb.DP(0); return; } final Set<Allele> alleles = new LinkedHashSet<>(vc.getAlleles()); // make sure that there's a meaningful relationship between the alleles in the likelihoods and our VariantContext if ( !likelihoods.alleles().containsAll(alleles) ) { logger.warn("VC alleles " + alleles + " not a strict subset of AlleleLikelihoods alleles " + likelihoods.alleles()); return; } // the depth for the HC is the sum of the informative alleles at this site. It's not perfect (as we cannot // differentiate between reads that align over the event but aren't informative vs. those that aren't even // close) but it's a pretty good proxy and it matches with the AD field (i.e., sum(AD) = DP). final Map<Allele, List<Allele>> alleleSubset = alleles.stream().collect(Collectors.toMap(a -> a, a -> Arrays.asList(a))); final AlleleLikelihoods<GATKRead, Allele> subsettedLikelihoods = likelihoods.marginalize(alleleSubset); final int depth = (int) subsettedLikelihoods.bestAllelesBreakingTies(sample).stream().filter(ba -> ba.isInformative()).count(); gb.DP(depth); }