Java Code Examples for htsjdk.variant.variantcontext.VariantContext#toString()
The following examples show how to use htsjdk.variant.variantcontext.VariantContext#toString().
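VariantContext#toString() renders a variant record as a compact, human-readable summary, which is why the examples below lean on it for assertion and exception messages. As a minimal sketch of the basic pattern (the path example.vcf is a placeholder; the second constructor argument disables the index requirement):

import java.io.File;

import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFFileReader;

public class PrintVariants {
    public static void main(final String[] args) {
        // false = do not require a companion index file
        try (final VCFFileReader reader = new VCFFileReader(new File("example.vcf"), false)) {
            for (final VariantContext vc : reader) {
                // toString() produces a one-line summary of the record
                System.out.println(vc.toString());
            }
        }
    }
}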
Example 1
Source File: VcfTestUtils.java From picard with MIT License
public static void assertEquals(final VariantContext actual, final VariantContext expected) {
    if (expected == null) {
        Assert.assertNull(actual);
        return;
    }
    final String expectedString = expected.toString();
    Assert.assertNotNull(actual, "null status");
    Assert.assertEquals(actual.getContig(), expected.getContig(), expectedString + " Different contigs: ");
    Assert.assertEquals(actual.getStart(), expected.getStart(), expectedString + " Different starts: ");
    Assert.assertEquals(actual.getEnd(), expected.getEnd(), expectedString + " Different ends: ");
    Assert.assertTrue(actual.hasSameAllelesAs(expected), "Alleles differ between " + actual + " and " + expected + ": ");
    assertEquals(actual.getGenotypes(), expected.getGenotypes());
    Assert.assertEquals(actual.getID(), expected.getID(), "IDs differ for " + expectedString);
    Assert.assertEquals(actual.getFilters(), expected.getFilters(), "Filters differ for " + expectedString);
    Assert.assertEquals(actual.getAttributes().keySet(), expected.getAttributes().keySet(), "Attributes keys differ for " + expectedString);
    actual.getAttributes().keySet().forEach(key -> {
        Assert.assertEquals(actual.getAttribute(key), expected.getAttribute(key), "Attribute values differ for key " + key + " for " + expectedString);
    });
}
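Here toString() supplies the message prefix for every assertion, so a failing comparison names the exact record. A hypothetical call site, assuming actualVcf and expectedVcf are File handles to the two VCFs being compared:

try (final VCFFileReader actualReader = new VCFFileReader(actualVcf, false);
     final VCFFileReader expectedReader = new VCFFileReader(expectedVcf, false)) {
    final Iterator<VariantContext> actualIt = actualReader.iterator();
    final Iterator<VariantContext> expectedIt = expectedReader.iterator();
    while (expectedIt.hasNext() && actualIt.hasNext()) {
        VcfTestUtils.assertEquals(actualIt.next(), expectedIt.next());
    }
    // Both iterators should be exhausted if the files hold the same records
    Assert.assertEquals(actualIt.hasNext(), expectedIt.hasNext(), "record counts differ");
}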
Example 2
Source File: TestVCFInputFormat.java From Hadoop-BAM with MIT License
@Test
public void countEntries() throws Exception {
    VCFFileReader vcfFileReader = new VCFFileReader(new File("src/test/resources/" + filename), false);
    Iterator<VariantContext> variantIterator;
    if (interval == null) {
        variantIterator = vcfFileReader.iterator();
    } else {
        variantIterator = vcfFileReader.query(interval.getContig(), interval.getStart(), interval.getEnd());
    }
    int expectedCount = Iterators.size(variantIterator);

    int counter = 0;
    for (RecordReader<LongWritable, VariantContextWritable> reader : readers) {
        while (reader.nextKeyValue()) {
            writable = reader.getCurrentValue();
            assertNotNull(writable);
            VariantContext vc = writable.get();
            assertNotNull(vc);
            String value = vc.toString();
            assertNotNull(value);
            counter++;
        }
    }
    assertEquals(expectedCount, counter);
}
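The baseline count comes straight from htsjdk, and the same pattern is handy outside Hadoop. A small sketch, assuming an indexed VCF (query() requires an index, hence the true flag; the path and interval are placeholders):

import java.io.File;

import com.google.common.collect.Iterators;
import htsjdk.variant.vcf.VCFFileReader;

public class CountInterval {
    public static void main(final String[] args) {
        // true = require an index, which query() needs
        try (final VCFFileReader reader = new VCFFileReader(new File("example.vcf"), true)) {
            // Coordinates are 1-based and inclusive
            final int n = Iterators.size(reader.query("chr1", 1, 1_000_000));
            System.out.println(n + " records overlap chr1:1-1000000");
        }
    }
}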
Example 3
Source File: TestVCFInputFormatStringency.java From Hadoop-BAM with MIT License
public void checkReading(ValidationStringency validationStringency) throws Exception {
    String filename = "invalid_info_field.vcf";
    Configuration conf = new Configuration();
    String input_file = ClassLoader.getSystemClassLoader().getResource(filename).getFile();
    conf.set("mapred.input.dir", "file://" + input_file);

    if (validationStringency != null) {
        VCFRecordReader.setValidationStringency(conf, validationStringency);
    }

    TaskAttemptContext taskAttemptContext = new TaskAttemptContextImpl(conf, mock(TaskAttemptID.class));
    JobContext ctx = new JobContextImpl(conf, taskAttemptContext.getJobID());

    VCFInputFormat inputFormat = new VCFInputFormat(conf);
    List<InputSplit> splits = inputFormat.getSplits(ctx);
    assertEquals(1, splits.size());
    RecordReader<LongWritable, VariantContextWritable> reader = inputFormat.createRecordReader(splits.get(0), taskAttemptContext);

    int counter = 0;
    while (reader.nextKeyValue()) {
        VariantContextWritable writable = reader.getCurrentValue();
        assertNotNull(writable);
        VariantContext vc = writable.get();
        assertNotNull(vc);
        String value = vc.toString();
        assertNotNull(value);
        counter++;
    }
    assertEquals(4, counter);
}
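checkReading is parameterized on htsjdk's ValidationStringency, whose three levels are STRICT (fail on a malformed record), LENIENT (log each problem and continue) and SILENT (continue quietly); the assertion of 4 records holds when parsing does not abort on the deliberately invalid INFO field. A sketch of how a driver might configure the reader the same way:

import htsjdk.samtools.ValidationStringency;
import org.apache.hadoop.conf.Configuration;
import org.seqdoop.hadoop_bam.VCFRecordReader;

public class ConfigureStringency {
    public static Configuration lenientConf() {
        final Configuration conf = new Configuration();
        // LENIENT logs each validation problem and keeps going;
        // SILENT keeps going without the warnings; STRICT fails the read.
        VCFRecordReader.setValidationStringency(conf, ValidationStringency.LENIENT);
        return conf;
    }
}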
Example 4
Source File: HaplotypeMap.java From picard with MIT License
/** Constructs a HaplotypeMap from the provided file. */
private void fromVcf(final File file) {
    try (final VCFFileReader reader = new VCFFileReader(file, false)) {

        final SAMSequenceDictionary dict = reader.getFileHeader().getSequenceDictionary();

        if (dict == null || dict.getSequences().isEmpty()) {
            throw new IllegalStateException("Haplotype map VCF file must contain header: " + file.getAbsolutePath());
        }

        initialize(new SAMFileHeader(dict));

        final Map<String, HaplotypeBlock> anchorToHaplotype = new HashMap<>();

        for (final VariantContext vc : reader) {

            if (vc.getNSamples() > 1) {
                throw new IllegalStateException("Haplotype map VCF file must contain at most one sample: " + file.getAbsolutePath());
            }

            final Genotype gc = vc.getGenotype(0); // may be null
            final boolean hasGc = gc != null;

            if (vc.getAlternateAlleles().size() != 1) {
                throw new IllegalStateException("Haplotype map VCF file must contain exactly one alternate allele per site: " + vc.toString());
            }

            if (!vc.isSNP()) {
                throw new IllegalStateException("Haplotype map VCF file must contain only SNPs: " + vc.toString());
            }

            if (!vc.hasAttribute(VCFConstants.ALLELE_FREQUENCY_KEY)) {
                throw new IllegalStateException("Haplotype map VCF Variants must have an '" + VCFConstants.ALLELE_FREQUENCY_KEY + "' INFO field: " + vc.toString());
            }

            if (hasGc && gc.isPhased() && !gc.hasExtendedAttribute(VCFConstants.PHASE_SET_KEY)) {
                throw new IllegalStateException("Haplotype map VCF Variants' genotypes that are phased must have a PhaseSet (" + VCFConstants.PHASE_SET_KEY + ")" + vc.toString());
            }

            if (hasGc && gc.isPhased() && !gc.isHet()) {
                throw new IllegalStateException("Haplotype map VCF Variants' genotypes that are phased must be HET" + vc.toString());
            }

            // Then parse them out
            final String chrom = vc.getContig();
            final int pos = vc.getStart();
            final String name = vc.getID();

            final byte ref = vc.getReference().getBases()[0];
            final byte var = vc.getAlternateAllele(0).getBases()[0];

            final double temp_maf = vc.getAttributeAsDouble(VCFConstants.ALLELE_FREQUENCY_KEY, 0D);
            final boolean swapped = hasGc && !gc.getAllele(0).equals(vc.getReference());

            final byte major, minor;
            final double maf;

            if (swapped) {
                major = var;
                minor = ref;
                maf = 1 - temp_maf;
            } else {
                major = ref;
                minor = var;
                maf = temp_maf;
            }

            final String anchor = anchorFromVc(vc);

            // If it's the anchor snp, start the haplotype
            if (!anchorToHaplotype.containsKey(anchor)) {
                final HaplotypeBlock newBlock = new HaplotypeBlock(maf);
                anchorToHaplotype.put(anchor, newBlock);
            }
            final HaplotypeBlock block = anchorToHaplotype.get(anchor);

            block.addSnp(new Snp(name, chrom, pos, major, minor, maf, null));
        }

        // And add them all now that they are all ready.
        fromHaplotypes(anchorToHaplotype.values());
    }
}
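Every validation branch above appends vc.toString() to the exception message, so a malformed record is identifiable by contig, position and alleles. A hypothetical caller, assuming HaplotypeMap's public File constructor routes VCF input to this fromVcf method (the path is a placeholder):

import java.io.File;

import picard.fingerprint.HaplotypeMap;

public class LoadHaplotypeMap {
    public static void main(final String[] args) {
        try {
            final HaplotypeMap map = new HaplotypeMap(new File("haplotype_map.vcf"));
            System.out.println("Haplotype map loaded");
        } catch (final IllegalStateException e) {
            // The message ends with vc.toString(), naming the offending record
            System.err.println("Malformed haplotype map: " + e.getMessage());
        }
    }
}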
Example 5
Source File: LiftoverUtils.java From picard with MIT License
/**
 * Method to swap the reference and alt alleles of a bi-allelic SNP.
 *
 * @param vc                   the {@link VariantContext} (bi-allelic SNP) that needs to have its REF and ALT alleles swapped.
 * @param annotationsToReverse INFO field annotations (of double value) that will be reversed (x -> 1-x)
 * @param annotationsToDrop    INFO field annotations that will be dropped from the result since they are invalid when REF and ALT are swapped
 * @return a new {@link VariantContext} with alleles swapped, INFO fields modified, and GT, AD and PL corrected appropriately in the genotypes
 */
public static VariantContext swapRefAlt(final VariantContext vc, final Collection<String> annotationsToReverse, final Collection<String> annotationsToDrop) {

    if (!vc.isBiallelic() || !vc.isSNP()) {
        throw new IllegalArgumentException("swapRefAlt can only process biallelic SNPs, found " + vc.toString());
    }

    final VariantContextBuilder swappedBuilder = new VariantContextBuilder(vc);

    swappedBuilder.attribute(SWAPPED_ALLELES, true);

    // Use getBaseString() (rather than the Allele itself) in order to create new Alleles with swapped
    // reference and non-variant attributes
    swappedBuilder.alleles(Arrays.asList(vc.getAlleles().get(1).getBaseString(), vc.getAlleles().get(0).getBaseString()));

    final Map<Allele, Allele> alleleMap = new HashMap<>();

    // A mapping from the old allele to the new allele, to be used when fixing the genotypes
    alleleMap.put(vc.getAlleles().get(0), swappedBuilder.getAlleles().get(1));
    alleleMap.put(vc.getAlleles().get(1), swappedBuilder.getAlleles().get(0));

    final GenotypesContext swappedGenotypes = GenotypesContext.create(vc.getGenotypes().size());
    for (final Genotype genotype : vc.getGenotypes()) {
        final List<Allele> swappedAlleles = new ArrayList<>();
        for (final Allele allele : genotype.getAlleles()) {
            if (allele.isNoCall()) {
                swappedAlleles.add(allele);
            } else {
                swappedAlleles.add(alleleMap.get(allele));
            }
        }

        // Flip AD
        final GenotypeBuilder builder = new GenotypeBuilder(genotype).alleles(swappedAlleles);
        if (genotype.hasAD() && genotype.getAD().length == 2) {
            final int[] ad = ArrayUtils.clone(genotype.getAD());
            ArrayUtils.reverse(ad);
            builder.AD(ad);
        } else {
            builder.noAD();
        }

        // Flip PL
        if (genotype.hasPL() && genotype.getPL().length == 3) {
            final int[] pl = ArrayUtils.clone(genotype.getPL());
            ArrayUtils.reverse(pl);
            builder.PL(pl);
        } else {
            builder.noPL();
        }
        swappedGenotypes.add(builder.make());
    }
    swappedBuilder.genotypes(swappedGenotypes);

    for (final String key : vc.getAttributes().keySet()) {
        if (annotationsToDrop.contains(key)) {
            swappedBuilder.rmAttribute(key);
        } else if (annotationsToReverse.contains(key) && !vc.getAttributeAsString(key, "").equals(VCFConstants.MISSING_VALUE_v4)) {
            final double attributeToReverse = vc.getAttributeAsDouble(key, -1);
            if (attributeToReverse < 0 || attributeToReverse > 1) {
                log.warn("Trying to reverse attribute " + key +
                        " but found value that isn't between 0 and 1: (" + attributeToReverse + ") in variant " + vc + ". Results might be wrong.");
            }
            swappedBuilder.attribute(key, 1 - attributeToReverse);
        }
    }
    return swappedBuilder.make();
}
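The array reversals are correct only for the bi-allelic case the method enforces: AD holds (REF depth, ALT depth), and the VCF spec orders a bi-allelic PL as (0/0, 0/1, 1/1), so swapping REF and ALT exchanges the two homozygous entries while the het entry stays in the middle. A small illustration with made-up values:

import org.apache.commons.lang3.ArrayUtils;

public class SwapIllustration {
    public static void main(final String[] args) {
        final int[] ad = {30, 5};        // depths for REF, ALT
        final int[] pl = {0, 60, 900};   // likelihoods for 0/0, 0/1, 1/1

        ArrayUtils.reverse(ad);          // {5, 30}: the old ALT depth is now first
        ArrayUtils.reverse(pl);          // {900, 60, 0}: old 1/1 becomes new 0/0

        System.out.println(java.util.Arrays.toString(ad) + " " + java.util.Arrays.toString(pl));
    }
}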