Java Code Examples for htsjdk.variant.variantcontext.VariantContext#isBiallelic()
The following examples show how to use htsjdk.variant.variantcontext.VariantContext#isBiallelic().
Each example is taken from the project and source file named in its header.
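Before the project examples, here is a minimal, self-contained sketch of the basic pattern they share: reading VariantContext records with htsjdk's VCFFileReader and branching on isBiallelic(). It is not taken from any of the projects below; the input path "input.vcf.gz" and the class name BiallelicFilterExample are placeholders.

import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFFileReader;

import java.io.File;

public class BiallelicFilterExample {
    public static void main(final String[] args) {
        // requireIndex = false, so a plain (or bgzipped) VCF without an index also works
        try (final VCFFileReader reader = new VCFFileReader(new File("input.vcf.gz"), false)) {
            for (final VariantContext vc : reader) {
                // isBiallelic() is true only when the site has exactly one alternate allele
                if (vc.isBiallelic() && vc.isSNP()) {
                    System.out.println(vc.getContig() + ":" + vc.getStart() + " " + vc.getAlleles());
                }
            }
        }
    }
}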
Example 1
Source File: VcfFuncotationFactory.java From gatk with BSD 3-Clause "New" or "Revised" License
private void populateAnnotationMap(final VariantContext funcotationFactoryVariant, final VariantContext queryVariant, final int funcotationFactoryAltAlleleIndex, final LinkedHashMap<String, String> annotations, final Map.Entry<String, Object> attributeEntry) {
    final String valueString;
    final String attributeName = attributeEntry.getKey();

    // Handle collections a little differently:
    if (attributeEntry.getValue() instanceof Collection<?>) {
        @SuppressWarnings("unchecked")
        final Collection<Object> objectList = ((Collection<Object>) attributeEntry.getValue());
        final VCFHeaderLineCount countType = supportedFieldMetadata.retrieveHeaderInfo(createFinalFieldName(this.name, attributeName)).getCountType();
        if (funcotationFactoryVariant.isBiallelic() && queryVariant.isBiallelic()) {
            valueString = objectList.stream().map(Object::toString).collect(Collectors.joining(String.valueOf(VCFConstants.INFO_FIELD_ARRAY_SEPARATOR_CHAR)));
        } else {
            valueString = determineValueStringFromMultiallelicAttributeList(funcotationFactoryAltAlleleIndex, objectList, countType);
        }
    } else {
        valueString = attributeEntry.getValue().toString();
    }

    annotations.put(createFinalFieldName(name, attributeName), valueString);
}
Example 2
Source File: TiTvVariantEvaluator.java From gatk with BSD 3-Clause "New" or "Revised" License
public void updateTiTv(VariantContext vc, boolean updateStandard) {
    if (vc != null && vc.isSNP() && vc.isBiallelic() && vc.isPolymorphicInSamples()) {
        if ( GATKVariantContextUtils.isTransition(vc)) {
            if (updateStandard) nTiInComp++;
            else nTi++;
        } else {
            if (updateStandard) nTvInComp++;
            else nTv++;
        }

        if (vc.hasAttribute("ANCESTRALALLELE")) {
            final String aaStr = vc.getAttributeAsString("ANCESTRALALLELE", "null").toUpperCase();
            if ( ! aaStr.equals(".") ) {
                switch ( BaseUtils.SNPSubstitutionType(aaStr.getBytes()[0], vc.getAlternateAllele(0).getBases()[0] ) ) {
                    case TRANSITION:
                        nTiDerived++;
                        break;
                    case TRANSVERSION:
                        nTvDerived++;
                        break;
                    default:
                        break;
                }
            }
        }
    }
}
Example 3
Source File: IndelSize.java From gatk with BSD 3-Clause "New" or "Revised" License
public List<Object> getRelevantStates(ReferenceContext referenceContext, ReadsContext readsContext, FeatureContext featureContext, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String FamilyName) {
    if (eval != null && eval.isIndel() && eval.isBiallelic()) {
        try {
            int eventLength = 0;
            if ( eval.isSimpleInsertion() ) {
                eventLength = eval.getAlternateAllele(0).length();
            } else if ( eval.isSimpleDeletion() ) {
                eventLength = -eval.getReference().length();
            }

            if (eventLength > MAX_INDEL_SIZE)
                eventLength = MAX_INDEL_SIZE;
            else if (eventLength < -MAX_INDEL_SIZE)
                eventLength = -MAX_INDEL_SIZE;

            return Collections.singletonList((Object)eventLength);
        } catch (Exception e) {
            return Collections.emptyList();
        }
    }

    return Collections.emptyList();
}
Example 4
Source File: AS_InbreedingCoeff.java From gatk with BSD 3-Clause "New" or "Revised" License
private double calculateIC(final VariantContext vc, final Allele altAllele, final HeterozygosityCalculator heterozygosityUtils) {
    final int AN = vc.getCalledChrCount();
    final double altAF;

    final double hetCount = heterozygosityUtils.getHetCount(altAllele);

    final double F;
    //shortcut to get a value closer to the non-alleleSpecific value for bialleleics
    if (vc.isBiallelic()) {
        double refAC = heterozygosityUtils.getAlleleCount(vc.getReference());
        double altAC = heterozygosityUtils.getAlleleCount(altAllele);
        double refAF = refAC/(altAC+refAC);
        altAF = 1 - refAF;
        F = 1.0 - (hetCount / (2.0 * refAF * altAF * (double) heterozygosityUtils.getSampleCount())); // inbreeding coefficient
    } else {
        //compare number of hets for this allele (and any other second allele) with the expectation based on AFs
        //derive the altAF from the likelihoods to account for any accumulation of fractional counts from non-primary likelihoods,
        //e.g. for a GQ10 variant, the probability of the call will be ~0.9 and the second best call will be ~0.1 so adding up
        //those 0.1s for het counts can dramatically change the AF compared with integer counts
        altAF = heterozygosityUtils.getAlleleCount(altAllele)/ (double) AN;
        F = 1.0 - (hetCount / (2.0 * (1 - altAF) * altAF * (double) heterozygosityUtils.getSampleCount())); // inbreeding coefficient
    }

    return F;
}
Example 5
Source File: CalculateGenotypePosteriors.java From gatk-protected with BSD 3-Clause "New" or "Revised" License
@Override
public void apply(final VariantContext variant, final ReadsContext readsContext, final ReferenceContext referenceContext, final FeatureContext featureContext) {
    final Collection<VariantContext> vcs = featureContext.getValues(getDrivingVariantsFeatureInput());

    final Collection<VariantContext> otherVCs = featureContext.getValues(supportVariants);

    final int missing = supportVariants.size() - otherVCs.size();

    for ( final VariantContext vc : vcs ) {
        final VariantContext vc_familyPriors;
        final VariantContext vc_bothPriors;

        //do family priors first (if applicable)
        final VariantContextBuilder builder = new VariantContextBuilder(vc);
        //only compute family priors for biallelelic sites
        if (!skipFamilyPriors && vc.isBiallelic()){
            final GenotypesContext gc = famUtils.calculatePosteriorGLs(vc);
            builder.genotypes(gc);
        }
        VariantContextUtils.calculateChromosomeCounts(builder, false);
        vc_familyPriors = builder.make();

        if (!skipPopulationPriors) {
            vc_bothPriors = PosteriorProbabilitiesUtils.calculatePosteriorProbs(vc_familyPriors, otherVCs, missing * numRefIfMissing, globalPrior, !ignoreInputSamples, defaultToAC, useACoff);
        } else {
            final VariantContextBuilder builder2 = new VariantContextBuilder(vc_familyPriors);
            VariantContextUtils.calculateChromosomeCounts(builder, false);
            vc_bothPriors = builder2.make();
        }

        vcfWriter.add(vc_bothPriors);
    }
}
Example 6
Source File: MultiallelicSummary.java From gatk with BSD 3-Clause "New" or "Revised" License
public void update2(VariantContext eval, VariantContext comp, final ReferenceContext referenceContext, final ReadsContext readsContext, final FeatureContext featureContext) {
    if ( eval == null || (getWalker().ignoreAC0Sites() && eval.isMonomorphicInSamples()) )
        return;

    // update counts
    switch ( eval.getType() ) {
        case SNP:
            nSNPs++;
            if ( !eval.isBiallelic() ) {
                nMultiSNPs++;
                calculatePairwiseTiTv(eval);
                calculateSNPPairwiseNovelty(eval, comp);
            }
            break;
        case INDEL:
            nIndels++;
            if ( !eval.isBiallelic() ) {
                nMultiIndels++;
                calculateIndelPairwiseNovelty(eval, comp);
            }
            break;
        default:
            //throw new UserException.BadInput("Unexpected variant context type: " + eval);
            break;
    }

    return;
}
Example 7
Source File: VariantSummary.java From gatk with BSD 3-Clause "New" or "Revised" License
public void update2(VariantContext eval, VariantContext comp, ReferenceContext referenceContext, ReadsContext readsContext, FeatureContext featureContext) {
    if ( eval == null || (getWalker().ignoreAC0Sites() && eval.isMonomorphicInSamples()) )
        return;

    final Type type = getType(eval);
    if ( type == null )
        return;

    TypeSampleMap titvTable = null;

    // update DP, if possible
    if ( eval.hasAttribute(VCFConstants.DEPTH_KEY) )
        depthPerSample.inc(type, ALL);

    // update counts
    allVariantCounts.inc(type, ALL);

    // type specific calculations
    if ( type == Type.SNP && eval.isBiallelic() ) {
        titvTable = GATKVariantContextUtils.isTransition(eval) ? transitionsPerSample : transversionsPerSample;
        titvTable.inc(type, ALL);
    }

    // novelty calculation
    if ( comp != null || (type == Type.CNV && overlapsKnownCNV(eval, featureContext)))
        knownVariantCounts.inc(type, ALL);

    // per sample metrics
    for (final Genotype g : eval.getGenotypes()) {
        if ( ! g.isNoCall() && ! g.isHomRef() ) {
            countsPerSample.inc(type, g.getSampleName());

            // update transition / transversion ratio
            if ( titvTable != null ) titvTable.inc(type, g.getSampleName());

            if ( g.hasDP() )
                depthPerSample.inc(type, g.getSampleName());
        }
    }
}
Example 8
Source File: AlleleCount.java From gatk with BSD 3-Clause "New" or "Revised" License
public List<Object> getRelevantStates(ReferenceContext referenceContext, ReadsContext readsContext, FeatureContext featureContext, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
    if (eval != null) {
        int AC = 0; // by default, the site is considered monomorphic

        try {
            if ( eval.isBiallelic() ) {
                if ( eval.hasAttribute(GATKVCFConstants.MLE_ALLELE_COUNT_KEY) ) {
                    // the MLEAC is allowed to be larger than the AN (e.g. in the case of all PLs being 0, the GT is ./. but the exact model may arbitrarily choose an AC>1)
                    AC = Math.min(eval.getAttributeAsInt(GATKVCFConstants.MLE_ALLELE_COUNT_KEY, 0), nchrom);
                } else if ( eval.hasAttribute(VCFConstants.ALLELE_COUNT_KEY) ) {
                    AC = eval.getAttributeAsInt(VCFConstants.ALLELE_COUNT_KEY, 0);
                }
            }
        } catch ( ClassCastException e ) {
            // protect ourselves from bad inputs
            // TODO -- fully decode VC
        }

        if ( AC == 0 && eval.isVariant() ) {
            // fall back to the direct calculation
            for (Allele allele : eval.getAlternateAlleles())
                AC = Math.max(AC, eval.getCalledChrCount(allele));
        }

        // make sure that the AC isn't invalid
        if ( AC > nchrom )
            throw new UserException(String.format("The AC value (%d) at position %s:%d " +
                    "is larger than the number of chromosomes over all samples (%d)",
                    AC, eval.getContig(), eval.getStart(), nchrom));

        return Collections.singletonList((Object) AC);
    } else {
        return Collections.emptyList();
    }
}
Example 9
Source File: GetPileupSummaries.java From gatk with BSD 3-Clause "New" or "Revised" License
@Override
public void apply(AlignmentContext alignmentContext, ReferenceContext referenceContext, FeatureContext featureContext) {
    final List<VariantContext> vcs = featureContext.getValues(variants);
    if (vcs.isEmpty()) {
        return;
    }
    final VariantContext vc = vcs.get(0);

    if ( vc.isBiallelic() && vc.isSNP() && alleleFrequencyInRange(vc) ) {
        final ReadPileup pileup = alignmentContext.getBasePileup()
                .makeFilteredPileup(pe -> pe.getRead().getMappingQuality() >= minMappingQuality);
        pileupSummaries.add(new PileupSummary(vc, pileup));
    }
}
Example 10
Source File: CalculateGenotypePosteriors.java From gatk with BSD 3-Clause "New" or "Revised" License
@Override
public void apply(final VariantContext variant, final ReadsContext readsContext, final ReferenceContext referenceContext, final FeatureContext featureContext) {
    final Collection<VariantContext> otherVCs = featureContext.getValues(supportVariants);

    //If no resource contains a matching variant, then add numRefIfMissing as a pseudocount to the priors
    List<VariantContext> resourcesWithMatchingStarts = otherVCs.stream()
            .filter(vc -> variant.getStart() == vc.getStart()).collect(Collectors.toList());

    //do family priors first (if applicable)
    final VariantContextBuilder builder = new VariantContextBuilder(variant);
    //only compute family priors for biallelelic sites
    if (!skipFamilyPriors && variant.isBiallelic()){
        final GenotypesContext gc = famUtils.calculatePosteriorGLs(variant);
        builder.genotypes(gc);
    }
    VariantContextUtils.calculateChromosomeCounts(builder, false);
    final VariantContext vc_familyPriors = builder.make();

    final VariantContext vc_bothPriors;
    if (!skipPopulationPriors) {
        vc_bothPriors = PosteriorProbabilitiesUtils.calculatePosteriorProbs(vc_familyPriors, resourcesWithMatchingStarts,
                resourcesWithMatchingStarts.isEmpty() ? numRefIfMissing : 0, options);
    } else {
        vc_bothPriors = vc_familyPriors;
    }

    vcfWriter.add(vc_bothPriors);
}
Example 11
Source File: VariantRecalibrator.java From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * add a datum representing a variant site (or allele) to the data in {@code variants},
 * which represents the callset to be recalibrated
 * @param variants is modified by having a new VariantDatum added to it
 */
private void addDatum(
        final ArrayList<VariantDatum> variants,
        final boolean isInput,
        final FeatureContext featureContext,
        final VariantContext vc,
        final Allele refAllele,
        final Allele altAllele) {
    final VariantDatum datum = new VariantDatum();

    // Populate the datum with lots of fields from the VariantContext, unfortunately the VC is too big so we just
    // pull in only the things we absolutely need.
    datum.referenceAllele = refAllele;
    datum.alternateAllele = altAllele;
    dataManager.decodeAnnotations(datum, vc, true);

    // non-deterministic because order of calls depends on load of machine
    datum.loc = (isInput ? new SimpleInterval(vc) : null);

    datum.originalQual = vc.getPhredScaledQual();
    datum.isSNP = vc.isSNP() && vc.isBiallelic();
    datum.isTransition = datum.isSNP && GATKVariantContextUtils.isTransition(vc);
    datum.isAggregate = !isInput;

    // Loop through the training data sets and if they overlap this locus (and allele, if applicable) then update
    // the prior and training status appropriately. The locus used to find training set variants is retrieved
    // by parseTrainingSets from the FeatureContext argument.
    dataManager.parseTrainingSets(featureContext, vc, datum, TRUST_ALL_POLYMORPHIC);

    final double priorFactor = QualityUtils.qualToProb(datum.prior);
    datum.prior = Math.log10(priorFactor) - Math.log10(1.0 - priorFactor);

    variants.add(datum);
}
Example 12
Source File: GVCFBlockCombiner.java From gatk with BSD 3-Clause "New" or "Revised" License
@Override
public void submit(VariantContext vc) {
    Utils.nonNull(vc);
    Utils.validateArg(vc.hasGenotypes(), "GVCF assumes that the VariantContext has genotypes");
    Utils.validateArg(vc.getGenotypes().size() == 1, () -> "GVCF assumes that the VariantContext has exactly one genotype but saw " + vc.getGenotypes().size());

    if (sampleName == null) {
        sampleName = vc.getGenotype(0).getSampleName();
    }

    if (currentBlock != null && !currentBlock.isContiguous(vc)) {
        // we've made a non-contiguous step (across interval, onto another chr), so finalize
        emitCurrentBlock();
    }

    final Genotype g = vc.getGenotype(0);
    if (g.isHomRef() && vc.hasAlternateAllele(Allele.NON_REF_ALLELE) && vc.isBiallelic()) {
        // create bands
        final VariantContext maybeCompletedBand = addHomRefSite(vc, g);
        if (maybeCompletedBand != null) {
            toOutput.add(maybeCompletedBand);
        }
    } else {
        // g is variant, so flush the bands and emit vc
        emitCurrentBlock();
        nextAvailableStart = vc.getEnd();
        contigOfNextAvailableStart = vc.getContig();
        toOutput.add(vc);
    }
}
Example 13
Source File: LiftoverVcf.java From picard with MIT License
/**
 * utility function to attempt to add a variant. Checks that the reference allele still matches the reference (which may have changed)
 *
 * @param vc     new {@link VariantContext}
 * @param refSeq {@link ReferenceSequence} of new reference
 * @param source the original {@link VariantContext} to use for putting the original location information into vc
 */
private void tryToAddVariant(final VariantContext vc, final ReferenceSequence refSeq, final VariantContext source) {
    if (!refSeq.getName().equals(vc.getContig())) {
        throw new IllegalStateException("The contig of the VariantContext, " + vc.getContig() + ", doesnt match the ReferenceSequence: " + refSeq.getName());
    }

    // Check that the reference allele still agrees with the reference sequence
    final boolean mismatchesReference;
    final Allele allele = vc.getReference();
    final byte[] ref = refSeq.getBases();
    final String refString = StringUtil.bytesToString(ref, vc.getStart() - 1, vc.getEnd() - vc.getStart() + 1);

    if (!refString.equalsIgnoreCase(allele.getBaseString())) {
        // consider that the ref and the alt may have been swapped in a simple biallelic SNP
        if (vc.isBiallelic() && vc.isSNP() && refString.equalsIgnoreCase(vc.getAlternateAllele(0).getBaseString())) {
            totalTrackedAsSwapRefAlt++;
            if (RECOVER_SWAPPED_REF_ALT) {
                addAndTrack(LiftoverUtils.swapRefAlt(vc, TAGS_TO_REVERSE, TAGS_TO_DROP), source);
                return;
            }
        }
        mismatchesReference = true;
    } else {
        mismatchesReference = false;
    }

    if (mismatchesReference) {
        rejectedRecords.add(new VariantContextBuilder(source)
                .filter(FILTER_MISMATCHING_REF_ALLELE)
                .attribute(ATTEMPTED_LOCUS, String.format("%s:%d-%d", vc.getContig(), vc.getStart(), vc.getEnd()))
                .attribute(ATTEMPTED_ALLELES, vc.getReference().toString() + "->" + String.join(",", vc.getAlternateAlleles().stream().map(Allele::toString).collect(Collectors.toList())))
                .make());
        failedAlleleCheck++;
        trackLiftedVariantContig(rejectsByContig, source.getContig());
    } else {
        addAndTrack(vc, source);
    }
}
Example 14
Source File: GetPileupSummaries.java From gatk-protected with BSD 3-Clause "New" or "Revised" License
@Override
public void apply(final VariantContext vc, final ReadsContext readsContext, final ReferenceContext referenceContext, final FeatureContext featureContext ) {
    // if we input multiple sources of variants, ignore repeats
    if (lastVariant != null && vc.getStart() == lastVariant.getStart()) {
        return;
    } else if ( vc.isBiallelic() && vc.isSNP() && alleleFrequencyInRange(vc) ) {
        final ReadPileup pileup = GATKProtectedVariantContextUtils.getPileup(vc, readsContext);
        pileupSummaries.add(new PileupSummary(vc, pileup));
    }
    lastVariant = vc;
}
Example 15
Source File: PossibleDeNovo.java From gatk with BSD 3-Clause "New" or "Revised" License
@Override
public Map<String, Object> annotate(final ReferenceContext ref,
                                    final VariantContext vc,
                                    final AlleleLikelihoods<GATKRead, Allele> likelihoods) {
    Utils.nonNull(vc);
    Set<Trio> trioSet = initializeAndGetTrios();
    if (trioSet.isEmpty()){
        return Collections.emptyMap();
    }
    final List<String> highConfDeNovoChildren = new ArrayList<>();
    final List<String> lowConfDeNovoChildren = new ArrayList<>();
    for (final Trio trio : trioSet) {
        if (vc.isBiallelic() &&
            PossibleDeNovo.contextHasTrioLikelihoods(vc, trio) &&
            mendelianViolation.isViolation(trio.getMother(), trio.getFather(), trio.getChild(), vc) &&
            mendelianViolation.getParentsRefRefChildHet() > 0) {
            final int childGQ = vc.getGenotype(trio.getChildID()).getGQ();
            final int momGQ   = vc.getGenotype(trio.getMaternalID()).getGQ();
            final int dadGQ   = vc.getGenotype(trio.getPaternalID()).getGQ();

            if (childGQ >= hi_GQ_threshold && momGQ >= hi_GQ_threshold && dadGQ >= hi_GQ_threshold) {
                highConfDeNovoChildren.add(trio.getChildID());
            } else if (childGQ >= lo_GQ_threshold && momGQ > 0 && dadGQ > 0) {
                lowConfDeNovoChildren.add(trio.getChildID());
            }
        }
    }

    final double percentNumberOfSamplesCutoff = vc.getNSamples()*percentOfSamplesCutoff;
    final double AFcutoff = Math.max(flatNumberOfSamplesCutoff, percentNumberOfSamplesCutoff);
    final int deNovoAlleleCount = vc.getCalledChrCount(vc.getAlternateAllele(0)); //we assume we're biallelic above so use the first alt

    final Map<String,Object> attributeMap = new LinkedHashMap<>(2);
    if ( !highConfDeNovoChildren.isEmpty() && deNovoAlleleCount < AFcutoff ) {
        attributeMap.put(GATKVCFConstants.HI_CONF_DENOVO_KEY, highConfDeNovoChildren);
    }
    if ( !lowConfDeNovoChildren.isEmpty() && deNovoAlleleCount < AFcutoff ) {
        attributeMap.put(GATKVCFConstants.LO_CONF_DENOVO_KEY, lowConfDeNovoChildren);
    }
    return attributeMap;
}
Example 16
Source File: ASEReadCounter.java From gatk with BSD 3-Clause "New" or "Revised" License
@Override
public void apply(AlignmentContext alignmentContext, ReferenceContext referenceContext, FeatureContext featureContext) {
    final String contig = alignmentContext.getContig();
    final long position = alignmentContext.getPosition();

    final char refAllele = (char) referenceContext.getBase();

    final List<VariantContext> VCs = featureContext.getValues(variants);
    if (VCs != null && VCs.size() > 1) {
        throw new UserException("More then one variant context at position: " + contig + ":" + position);
    }
    if (VCs == null || VCs.isEmpty()) {
        return;
    }
    final VariantContext vc = VCs.get(0);
    if (!vc.isBiallelic()) {
        logger.warn("Ignoring site: cannot run ASE on non-biallelic sites: " + vc.toString());
        return;
    }

    if (vc.getHetCount() < 1) {
        logger.warn("Ignoring site: variant is not het at postion: " + contig + ":" + position);
        return;
    }

    if (vc.getNAlleles() == 1 || vc.getAlternateAllele(0).getBases().length == 0) {
        throw new UserException("The file of variant sites must contain heterozygous sites and cannot be a GVCF file containing <NON_REF> alleles.");
    }

    final char altAllele = (char) vc.getAlternateAllele(0).getBases()[0];

    final String siteID = vc.getID();
    final ReadPileup pileup = filterPileup(alignmentContext.getBasePileup(), countType);

    // count up the depths of all and QC+ bases
    final String line = calculateLineForSite(pileup, siteID, refAllele, altAllele);
    if (line != null) {
        outputStream.println(line);
    }
}
Example 17
Source File: CalculateMixingFractions.java From gatk with BSD 3-Clause "New" or "Revised" License
private boolean isBiallelicSingletonHetSnp(final VariantContext vc) {
    return vc.isBiallelic() && vc.isSNP()
            && ( (vc.hasAttribute(VCFConstants.ALLELE_COUNT_KEY)
                    && getArrayAttribute(vc, VCFConstants.ALLELE_COUNT_KEY)[0] == 1)
                || vc.getGenotypes().stream().filter(genotype -> genotype.isHet()).count() == 1);
}
Example 18
Source File: LiftoverUtils.java From picard with MIT License
/**
 * method to swap the reference and alt alleles of a bi-allelic, SNP
 *
 * @param vc                   the {@link VariantContext} (bi-allelic SNP) that needs to have it's REF and ALT alleles swapped.
 * @param annotationsToReverse INFO field annotations (of double value) that will be reversed (x->1-x)
 * @param annotationsToDrop    INFO field annotations that will be dropped from the result since they are invalid when REF and ALT are swapped
 * @return a new {@link VariantContext} with alleles swapped, INFO fields modified and in the genotypes, GT, AD and PL corrected appropriately
 */
public static VariantContext swapRefAlt(final VariantContext vc, final Collection<String> annotationsToReverse, final Collection<String> annotationsToDrop) {

    if (!vc.isBiallelic() || !vc.isSNP()) {
        throw new IllegalArgumentException("swapRefAlt can only process biallelic, SNPS, found " + vc.toString());
    }

    final VariantContextBuilder swappedBuilder = new VariantContextBuilder(vc);

    swappedBuilder.attribute(SWAPPED_ALLELES, true);

    // Use getBaseString() (rather than the Allele itself) in order to create new Alleles with swapped
    // reference and non-variant attributes
    swappedBuilder.alleles(Arrays.asList(vc.getAlleles().get(1).getBaseString(), vc.getAlleles().get(0).getBaseString()));

    final Map<Allele, Allele> alleleMap = new HashMap<>();

    // A mapping from the old allele to the new allele, to be used when fixing the genotypes
    alleleMap.put(vc.getAlleles().get(0), swappedBuilder.getAlleles().get(1));
    alleleMap.put(vc.getAlleles().get(1), swappedBuilder.getAlleles().get(0));

    final GenotypesContext swappedGenotypes = GenotypesContext.create(vc.getGenotypes().size());
    for (final Genotype genotype : vc.getGenotypes()) {
        final List<Allele> swappedAlleles = new ArrayList<>();
        for (final Allele allele : genotype.getAlleles()) {
            if (allele.isNoCall()) {
                swappedAlleles.add(allele);
            } else {
                swappedAlleles.add(alleleMap.get(allele));
            }
        }
        // Flip AD
        final GenotypeBuilder builder = new GenotypeBuilder(genotype).alleles(swappedAlleles);
        if (genotype.hasAD() && genotype.getAD().length == 2) {
            final int[] ad = ArrayUtils.clone(genotype.getAD());
            ArrayUtils.reverse(ad);
            builder.AD(ad);
        } else {
            builder.noAD();
        }

        //Flip PL
        if (genotype.hasPL() && genotype.getPL().length == 3) {
            final int[] pl = ArrayUtils.clone(genotype.getPL());
            ArrayUtils.reverse(pl);
            builder.PL(pl);
        } else {
            builder.noPL();
        }
        swappedGenotypes.add(builder.make());
    }
    swappedBuilder.genotypes(swappedGenotypes);

    for (final String key : vc.getAttributes().keySet()) {
        if (annotationsToDrop.contains(key)) {
            swappedBuilder.rmAttribute(key);
        } else if (annotationsToReverse.contains(key) && !vc.getAttributeAsString(key, "").equals(VCFConstants.MISSING_VALUE_v4)) {
            final double attributeToReverse = vc.getAttributeAsDouble(key, -1);

            if (attributeToReverse < 0 || attributeToReverse > 1) {
                log.warn("Trying to reverse attribute " + key +
                        " but found value that isn't between 0 and 1: (" + attributeToReverse + ") in variant " + vc + ". Results might be wrong.");
            }
            swappedBuilder.attribute(key, 1 - attributeToReverse);
        }
    }
    return swappedBuilder.make();
}
Example 19
Source File: MendelianViolationEvaluator.java From gatk with BSD 3-Clause "New" or "Revised" License
public void update1(VariantContext vc, final ReferenceContext referenceContext, final ReadsContext readsContext, final FeatureContext featureContext) {
    if (vc.isBiallelic() && vc.hasGenotypes()) { // todo -- currently limited to biallelic loci
        if (mv.countFamilyViolations(sampleDB, null, vc) > 0){
            nLociViolations++;
            nViolations += mv.getViolationsCount();
            mvRefRef_Var += mv.getParentsRefRefChildVar();
            mvRefRef_Het += mv.getParentsRefRefChildHet();
            mvRefHet_Var += mv.getParentsRefHetChildVar();
            mvRefVar_Var += mv.getParentsRefVarChildVar();
            mvRefVar_Ref += mv.getParentsRefVarChildRef();
            mvVarHet_Ref += mv.getParentsVarHetChildRef();
            mvVarVar_Ref += mv.getParentsVarVarChildRef();
            mvVarVar_Het += mv.getParentsVarVarChildHet();
        }
        HomRefHomRef_HomRef += mv.getRefRefRef();
        HetHet_Het += mv.getHetHetHet();
        HetHet_HomRef += mv.getHetHetHomRef();
        HetHet_HomVar += mv.getHetHetHomVar();
        HomVarHomVar_HomVar += mv.getVarVarVar();
        HomRefHomVAR_Het += mv.getRefVarHet();
        HetHet_inheritedRef += mv.getParentsHetHetInheritedRef();
        HetHet_inheritedVar += mv.getParentsHetHetInheritedVar();
        HomRefHet_inheritedRef += mv.getParentsRefHetInheritedRef();
        HomRefHet_inheritedVar += mv.getParentsRefHetInheritedVar();
        HomVarHet_inheritedRef += mv.getParentsVarHetInheritedRef();
        HomVarHet_inheritedVar += mv.getParentsVarHetInheritedVar();

        if(mv.getFamilyCalledCount()>0 || mv.getFamilyLowQualsCount()>0 || mv.getFamilyCalledCount()>0){
            nVariants++;
            nFamCalled += mv.getFamilyCalledCount();
            nLowQual += mv.getFamilyLowQualsCount();
            nNoCall += mv.getFamilyNoCallCount();
            nVarFamCalled += mv.getVarFamilyCalledCount();
        } else{
            nSkipped++;
        }
    }
}
Example 20
Source File: CalculateMixingFractions.java From gatk-protected with BSD 3-Clause "New" or "Revised" License
private boolean isBiallelicSingletonHetSnp(final VariantContext vc) {
    return vc.isBiallelic() && vc.isSNP()
            && ( (vc.hasAttribute(VCFConstants.ALLELE_COUNT_KEY)
                    && getArrayAttribute(vc, VCFConstants.ALLELE_COUNT_KEY)[0] == 1)
                || vc.getGenotypes().stream().filter(genotype -> genotype.isHet()).count() == 1);
}