Java Code Examples for htsjdk.variant.variantcontext.writer.VariantContextWriter#close()
The following examples show how to use htsjdk.variant.variantcontext.writer.VariantContextWriter#close().
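All of the examples follow the same lifecycle: build a VariantContextWriter, write a VCFHeader, add VariantContext records, and finally call close() so that buffered records and any side files are flushed to disk. The sketch below shows a minimal version of that pattern; the class name and output file are hypothetical placeholders, the empty header stands in for a real header built from input metadata, and since VariantContextWriter is Closeable the explicit close() in the finally block could equally be replaced by try-with-resources.

import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
import htsjdk.variant.vcf.VCFHeader;

import java.io.File;

public class VariantContextWriterCloseSketch {
    public static void main(final String[] args) {
        // Hypothetical output path, used only for illustration.
        final File output = new File("example.vcf");

        // No sequence dictionary is supplied here, so on-the-fly indexing is disabled.
        final VariantContextWriter writer = new VariantContextWriterBuilder()
                .setOutputFile(output)
                .unsetOption(Options.INDEX_ON_THE_FLY)
                .build();
        try {
            // A real tool would build this header from its input metadata;
            // an empty header keeps the sketch self-contained.
            writer.writeHeader(new VCFHeader());
            // writer.add(variantContext) would be called here once per record.
        } finally {
            // close() flushes buffered records and releases the underlying stream.
            writer.close();
        }
    }
}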
Example 1
Source File: AmberVCF.java From hmftools with GNU General Public License v3.0
public void writeBAF(@NotNull final String filename, @NotNull final Collection<TumorBAF> tumorEvidence,
        @NotNull final AmberHetNormalEvidence hetNormalEvidence) {
    final List<TumorBAF> list = Lists.newArrayList(tumorEvidence);
    Collections.sort(list);

    final VariantContextWriter writer =
            new VariantContextWriterBuilder().setOutputFile(filename).modifyOption(Options.INDEX_ON_THE_FLY, true).build();
    final VCFHeader header = header(config.tumorOnly() ? Collections.singletonList(config.tumor()) : config.allSamples());

    writer.setHeader(header);
    writer.writeHeader(header);

    final ListMultimap<AmberSite, Genotype> genotypeMap = ArrayListMultimap.create();
    for (final String sample : hetNormalEvidence.samples()) {
        for (BaseDepth baseDepth : hetNormalEvidence.evidence(sample)) {
            genotypeMap.put(AmberSiteFactory.asSite(baseDepth), createGenotype(sample, baseDepth));
        }
    }

    for (final TumorBAF tumorBAF : list) {
        AmberSite tumorSite = AmberSiteFactory.tumorSite(tumorBAF);
        genotypeMap.put(tumorSite, createGenotype(tumorBAF));
        writer.add(create(tumorBAF, genotypeMap.get(tumorSite)));
    }

    writer.close();
}
Example 2
Source File: PostprocessGermlineCNVCalls.java From gatk with BSD 3-Clause "New" or "Revised" License
private void generateIntervalsVCFFileFromAllShards() {
    logger.info("Generating intervals VCF file...");
    final VariantContextWriter intervalsVCFWriter = createVCFWriter(outputIntervalsVCFFile);

    final GermlineCNVIntervalVariantComposer germlineCNVIntervalVariantComposer =
            new GermlineCNVIntervalVariantComposer(intervalsVCFWriter, sampleName,
                    refAutosomalIntegerCopyNumberState, allosomalContigSet);
    germlineCNVIntervalVariantComposer.composeVariantContextHeader(sequenceDictionary, getDefaultToolVCFHeaderLines());

    logger.info(String.format("Writing intervals VCF file to %s...", outputIntervalsVCFFile.getAbsolutePath()));
    for (int shardIndex = 0; shardIndex < numShards; shardIndex++) {
        logger.info(String.format("Analyzing shard %d / %d...", shardIndex + 1, numShards));
        germlineCNVIntervalVariantComposer.writeAll(getShardIntervalCopyNumberPosteriorData(shardIndex));
    }
    intervalsVCFWriter.close();
}
Example 3
Source File: GATKToolUnitTest.java From gatk with BSD 3-Clause "New" or "Revised" License
@Test(dataProvider = "createVCFWriterData")
public void testCreateVCFWriterWithOptions(
        final File inputFile,
        final String outputExtension,
        final String indexExtension,
        final boolean createIndex,
        final boolean createMD5) throws IOException {

    // create a writer and make sure the requested index/md5 params are honored
    final TestGATKToolWithVariants tool = new TestGATKToolWithVariants();

    final File outputFile = setupVCFWriter(inputFile, outputExtension, tool, createIndex, createMD5, false);

    final VariantContextWriter writer = tool.createVCFWriter(outputFile);
    writer.close();

    final File outFileIndex = new File(outputFile.getAbsolutePath() + indexExtension);
    final File outFileMD5 = new File(outputFile.getAbsolutePath() + ".md5");

    Assert.assertTrue(outputFile.exists(), "No output file was not created");
    Assert.assertEquals(outFileIndex.exists(), createIndex, "The createIndex argument was not honored");
    Assert.assertEquals(outFileMD5.exists(), createMD5, "The createMD5 argument was not honored");
}
Example 4
Source File: MNVValidatorApplication.java From hmftools with GNU General Public License v3.0
private static void processVariants(boolean strelka, @NotNull final String filePath, @NotNull final String outputVcf,
        @NotNull final String tumorBam) {
    final VCFFileReader vcfReader = new VCFFileReader(new File(filePath), false);
    final VCFHeader outputHeader = generateOutputHeader(vcfReader.getFileHeader(), "TUMOR");
    final VariantContextWriter vcfWriter = new VariantContextWriterBuilder().setOutputFile(outputVcf)
            .setReferenceDictionary(vcfReader.getFileHeader().getSequenceDictionary())
            .build();
    vcfWriter.writeHeader(outputHeader);

    final MNVValidator validator = ImmutableMNVValidator.of(tumorBam);
    final MNVMerger merger = ImmutableMNVMerger.of(outputHeader);

    Pair<PotentialMNVRegion, Optional<PotentialMNVRegion>> outputPair = ImmutablePair.of(PotentialMNVRegion.empty(), Optional.empty());
    for (final VariantContext rawVariant : vcfReader) {
        final VariantContext simplifiedVariant =
                strelka ? StrelkaPostProcess.simplifyVariant(rawVariant, StrelkaPostProcess.TUMOR_GENOTYPE) : rawVariant;

        final PotentialMNVRegion potentialMNV = outputPair.getLeft();
        outputPair = MNVDetector.addMnvToRegion(potentialMNV, simplifiedVariant);
        outputPair.getRight().ifPresent(mnvRegion -> validator.mergeVariants(mnvRegion, merger).forEach(vcfWriter::add));
    }
    validator.mergeVariants(outputPair.getLeft(), merger).forEach(vcfWriter::add);

    vcfWriter.close();
    vcfReader.close();
    LOGGER.info("Written output variants to " + outputVcf);
}
Example 5
Source File: SVVCFWriter.java From gatk with BSD 3-Clause "New" or "Revised" License
private static void writeVariants(final String fileName, final List<VariantContext> variantsArrayList,
        final SAMSequenceDictionary referenceSequenceDictionary, final Set<VCFHeaderLine> defaultToolVCFHeaderLines) {
    try (final OutputStream outputStream = new BufferedOutputStream(BucketUtils.createFile(fileName))) {
        final VariantContextWriter vcfWriter = getVariantContextWriter(outputStream, referenceSequenceDictionary);
        final VCFHeader vcfHeader = getVcfHeader(referenceSequenceDictionary);
        defaultToolVCFHeaderLines.forEach(vcfHeader::addMetaDataLine);
        vcfWriter.writeHeader(vcfHeader);
        variantsArrayList.forEach(vcfWriter::add);
        vcfWriter.close();
    } catch (final IOException e) {
        throw new GATKException("Could not create output file", e);
    }
}
Example 6
Source File: GATKToolUnitTest.java From gatk with BSD 3-Clause "New" or "Revised" License
@Test(dataProvider = "createVCFWriterData")
public void testCreateVCFWriterWithNoSequenceDictionary(
        final File inputFile,
        final String outputExtension,
        final String indexExtension,
        final boolean createIndex,
        final boolean createMD5) throws IOException {

    // verify that a null sequence dictionary still results in a file, but with no index
    final TestGATKVariantToolWithNoSequenceDictionary tool = new TestGATKVariantToolWithNoSequenceDictionary();

    final File outputFile = setupVCFWriter(inputFile, outputExtension, tool, createIndex, createMD5, false);

    final VariantContextWriter writer = tool.createVCFWriter(outputFile);
    writer.close();

    final File outFileIndex = new File(outputFile.getAbsolutePath() + indexExtension);
    final File outFileMD5 = new File(outputFile.getAbsolutePath() + ".md5");

    Assert.assertTrue(outputFile.exists(), "No output file was not created");
    Assert.assertEquals(outFileIndex.exists(), false, "An index file should not have been created"); // always false with no seq dictionary
    Assert.assertEquals(outFileMD5.exists(), createMD5, "The createMD5 argument was not honored");
}
Example 7
Source File: PostprocessGermlineCNVCalls.java From gatk with BSD 3-Clause "New" or "Revised" License
private void generateSegmentsVCFFileFromAllShards() {
    logger.info("Generating segments VCF file...");

    /* perform segmentation */
    final File pythonScriptOutputPath = IOUtils.createTempDir("gcnv-segmented-calls");
    final boolean pythonScriptSucceeded = executeSegmentGermlineCNVCallsPythonScript(
            sampleIndex, inputContigPloidyCallsPath, sortedCallsShardPaths, sortedModelShardPaths,
            pythonScriptOutputPath);
    if (!pythonScriptSucceeded) {
        throw new UserException("Python return code was non-zero.");
    }

    /* parse segments */
    final File copyNumberSegmentsFile = getCopyNumberSegmentsFile(pythonScriptOutputPath, sampleIndex);
    final IntegerCopyNumberSegmentCollection integerCopyNumberSegmentCollection =
            new IntegerCopyNumberSegmentCollection(copyNumberSegmentsFile);
    final String sampleNameFromSegmentCollection = integerCopyNumberSegmentCollection
            .getMetadata().getSampleName();
    Utils.validate(sampleNameFromSegmentCollection.equals(sampleName),
            String.format("Sample name found in the header of copy-number segments file is " +
                            "different from the expected sample name (found: %s, expected: %s).",
                    sampleNameFromSegmentCollection, sampleName));

    /* write variants */
    logger.info(String.format("Writing segments VCF file to %s...", outputSegmentsVCFFile.getAbsolutePath()));
    final VariantContextWriter segmentsVCFWriter = createVCFWriter(outputSegmentsVCFFile);
    final GermlineCNVSegmentVariantComposer germlineCNVSegmentVariantComposer =
            new GermlineCNVSegmentVariantComposer(segmentsVCFWriter, sampleName,
                    refAutosomalIntegerCopyNumberState, allosomalContigSet);
    germlineCNVSegmentVariantComposer.composeVariantContextHeader(sequenceDictionary, getDefaultToolVCFHeaderLines());
    germlineCNVSegmentVariantComposer.writeAll(integerCopyNumberSegmentCollection.getRecords());
    segmentsVCFWriter.close();
}
Example 8
Source File: HotspotEvidenceVCF.java From hmftools with GNU General Public License v3.0
public void write(@NotNull final String filename, @NotNull final List<HotspotEvidence> evidenceList) {
    final VariantContextWriter writer =
            new VariantContextWriterBuilder().setOutputFile(filename).modifyOption(Options.INDEX_ON_THE_FLY, false).build();
    writer.setHeader(header);
    writer.writeHeader(header);

    final ListMultimap<GenomePosition, HotspotEvidence> evidenceMap = Multimaps.index(evidenceList, GenomePositions::create);
    for (GenomePosition site : evidenceMap.keySet()) {
        final List<HotspotEvidence> evidence = evidenceMap.get(site);
        final VariantContext context = create(evidence);
        writer.add(context);
    }

    writer.close();
}
Example 9
Source File: UpdateVcfSequenceDictionary.java From picard with MIT License
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsReadable(SEQUENCE_DICTIONARY);
    IOUtil.assertFileIsWritable(OUTPUT);

    final SAMSequenceDictionary samSequenceDictionary =
            SAMSequenceDictionaryExtractor.extractDictionary(SEQUENCE_DICTIONARY.toPath());

    final VCFFileReader fileReader = new VCFFileReader(INPUT, false);
    final VCFHeader fileHeader = fileReader.getFileHeader();

    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setReferenceDictionary(samSequenceDictionary)
            .clearOptions();
    if (CREATE_INDEX)
        builder.setOption(Options.INDEX_ON_THE_FLY);

    final VariantContextWriter vcfWriter = builder.setOutputFile(OUTPUT).build();
    fileHeader.setSequenceDictionary(samSequenceDictionary);
    vcfWriter.writeHeader(fileHeader);

    final ProgressLogger progress = new ProgressLogger(log, 10000);
    final CloseableIterator<VariantContext> iterator = fileReader.iterator();
    while (iterator.hasNext()) {
        final VariantContext context = iterator.next();
        vcfWriter.add(context);
        progress.record(context.getContig(), context.getStart());
    }

    CloserUtil.close(iterator);
    CloserUtil.close(fileReader);
    vcfWriter.close();

    return 0;
}
Example 10
Source File: RenameSampleInVcf.java From picard with MIT License
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);

    final VCFFileReader in = new VCFFileReader(INPUT, false);
    final VCFHeader header = in.getFileHeader();

    if (header.getGenotypeSamples().size() > 1) {
        throw new IllegalArgumentException("Input VCF must be single-sample.");
    }
    if (OLD_SAMPLE_NAME != null && !OLD_SAMPLE_NAME.equals(header.getGenotypeSamples().get(0))) {
        throw new IllegalArgumentException("Input VCF did not contain expected sample. Contained: " + header.getGenotypeSamples().get(0));
    }

    final EnumSet<Options> options = EnumSet.copyOf(VariantContextWriterBuilder.DEFAULT_OPTIONS);
    if (CREATE_INDEX) options.add(Options.INDEX_ON_THE_FLY);
    else options.remove(Options.INDEX_ON_THE_FLY);

    final VCFHeader outHeader = new VCFHeader(header.getMetaDataInInputOrder(), CollectionUtil.makeList(NEW_SAMPLE_NAME));
    final VariantContextWriter out = new VariantContextWriterBuilder()
            .setOptions(options)
            .setOutputFile(OUTPUT).setReferenceDictionary(outHeader.getSequenceDictionary()).build();
    out.writeHeader(outHeader);

    for (final VariantContext ctx : in) {
        out.add(ctx);
    }

    out.close();
    in.close();

    return 0;
}
Example 11
Source File: GATKVariantContextUtilsUnitTest.java From gatk with BSD 3-Clause "New" or "Revised" License
@Test
public void testCreateVariantContextWriterNoOptions() {
    final File tmpDir = createTempDir("createVCFTest");
    final File outFile = new File(tmpDir.getAbsolutePath(), "createVCFTest" + ".vcf");

    final VariantContextWriter vcw = GATKVariantContextUtils.createVCFWriter(outFile.toPath(), null, false);
    vcw.close();

    final File outFileIndex = new File(outFile.getAbsolutePath() + ".idx");
    final File outFileMD5 = new File(outFile.getAbsolutePath() + ".md5");

    Assert.assertTrue(outFile.exists(), "No output file was not created");
    Assert.assertFalse(outFileIndex.exists(), "The createIndex argument was not honored");
    Assert.assertFalse(outFileMD5.exists(), "The createMD5 argument was not honored");
}
Example 12
Source File: FindMendelianViolations.java From picard with MIT License
private void writeAllViolations(final MendelianViolationDetector.Result result) {
    if (VCF_DIR != null) {
        LOG.info(String.format("Writing family violation VCFs to %s/", VCF_DIR.getAbsolutePath()));

        final VariantContextComparator vcComparator = new VariantContextComparator(inputHeader.get().getContigLines());
        final Set<VCFHeaderLine> headerLines = new LinkedHashSet<>(inputHeader.get().getMetaDataInInputOrder());

        headerLines.add(new VCFInfoHeaderLine(MendelianViolationDetector.MENDELIAN_VIOLATION_KEY, 1, VCFHeaderLineType.String, "Type of mendelian violation."));
        headerLines.add(new VCFInfoHeaderLine(MendelianViolationDetector.ORIGINAL_AC, VCFHeaderLineCount.A, VCFHeaderLineType.Integer, "Original AC"));
        headerLines.add(new VCFInfoHeaderLine(MendelianViolationDetector.ORIGINAL_AF, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Original AF"));
        headerLines.add(new VCFInfoHeaderLine(MendelianViolationDetector.ORIGINAL_AN, 1, VCFHeaderLineType.Integer, "Original AN"));

        for (final PedFile.PedTrio trio : pedFile.get().values()) {
            final File outputFile = new File(VCF_DIR, IOUtil.makeFileNameSafe(trio.getFamilyId() + IOUtil.VCF_FILE_EXTENSION));
            LOG.info(String.format("Writing %s violation VCF to %s", trio.getFamilyId(), outputFile.getAbsolutePath()));

            final VariantContextWriter out = new VariantContextWriterBuilder()
                    .setOutputFile(outputFile)
                    .unsetOption(INDEX_ON_THE_FLY)
                    .build();

            final VCFHeader newHeader = new VCFHeader(headerLines, CollectionUtil.makeList(trio.getMaternalId(), trio.getPaternalId(), trio.getIndividualId()));
            final TreeSet<VariantContext> orderedViolations = new TreeSet<>(vcComparator);

            orderedViolations.addAll(result.violations().get(trio.getFamilyId()));
            out.writeHeader(newHeader);
            orderedViolations.forEach(out::add);

            out.close();
        }
    }
}
Example 13
Source File: VcfFormatConverter.java From picard with MIT License
@Override
protected int doWork() {
    final ProgressLogger progress = new ProgressLogger(LOG, 10000);

    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);

    final VCFFileReader reader = new VCFFileReader(INPUT, REQUIRE_INDEX);
    final VCFHeader header = new VCFHeader(reader.getFileHeader());
    final SAMSequenceDictionary sequenceDictionary = header.getSequenceDictionary();
    if (CREATE_INDEX && sequenceDictionary == null) {
        throw new PicardException("A sequence dictionary must be available in the input file when creating indexed output.");
    }

    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setOutputFile(OUTPUT)
            .setReferenceDictionary(sequenceDictionary);
    if (CREATE_INDEX)
        builder.setOption(Options.INDEX_ON_THE_FLY);
    else
        builder.unsetOption(Options.INDEX_ON_THE_FLY);
    final VariantContextWriter writer = builder.build();

    writer.writeHeader(header);

    final CloseableIterator<VariantContext> iterator = reader.iterator();
    while (iterator.hasNext()) {
        final VariantContext context = iterator.next();
        writer.add(context);
        progress.record(context.getContig(), context.getStart());
    }

    CloserUtil.close(iterator);
    CloserUtil.close(reader);
    writer.close();

    return 0;
}
Example 14
Source File: CreateSomaticPanelOfNormals.java From gatk-protected with BSD 3-Clause "New" or "Revised" License
public Object doWork() {
    final List<File> inputVcfs = new ArrayList<>(vcfs);
    final Collection<CloseableIterator<VariantContext>> iterators = new ArrayList<>(inputVcfs.size());
    final Collection<VCFHeader> headers = new HashSet<>(inputVcfs.size());
    final VCFHeader headerOfFirstVcf = new VCFFileReader(inputVcfs.get(0), false).getFileHeader();
    final SAMSequenceDictionary sequenceDictionary = headerOfFirstVcf.getSequenceDictionary();
    final VariantContextComparator comparator = headerOfFirstVcf.getVCFRecordComparator();

    for (final File vcf : inputVcfs) {
        final VCFFileReader reader = new VCFFileReader(vcf, false);
        iterators.add(reader.iterator());
        final VCFHeader header = reader.getFileHeader();
        Utils.validateArg(comparator.isCompatible(header.getContigLines()), () -> vcf.getAbsolutePath() + " has incompatible contigs.");
        headers.add(header);
    }

    final VariantContextWriter writer = GATKVariantContextUtils.createVCFWriter(outputVcf, sequenceDictionary, false, Options.INDEX_ON_THE_FLY);
    writer.writeHeader(new VCFHeader(VCFUtils.smartMergeHeaders(headers, false)));

    final MergingIterator<VariantContext> mergingIterator = new MergingIterator<>(comparator, iterators);
    SimpleInterval currentPosition = new SimpleInterval("FAKE", 1, 1);
    final List<VariantContext> variantsAtThisPosition = new ArrayList<>(20);
    while (mergingIterator.hasNext()) {
        final VariantContext vc = mergingIterator.next();
        if (!currentPosition.overlaps(vc)) {
            processVariantsAtSamePosition(variantsAtThisPosition, writer);
            variantsAtThisPosition.clear();
            currentPosition = new SimpleInterval(vc.getContig(), vc.getStart(), vc.getStart());
        }
        variantsAtThisPosition.add(vc);
    }
    mergingIterator.close();
    writer.close();

    return "SUCCESS";
}
Example 15
Source File: StrelkaPostProcessApplication.java From hmftools with GNU General Public License v3.0
private static void processVariants(@NotNull final String filePath, @NotNull final Slicer highConfidenceSlicer,
        @NotNull final String outputVcf, @NotNull final String sampleName, @NotNull final String tumorBam) {
    final VCFFileReader vcfReader = new VCFFileReader(new File(filePath), false);
    final VCFHeader outputHeader = generateOutputHeader(vcfReader.getFileHeader(), sampleName);
    final VariantContextWriter writer = new VariantContextWriterBuilder().setOutputFile(outputVcf)
            .setReferenceDictionary(outputHeader.getSequenceDictionary())
            .build();
    writer.writeHeader(outputHeader);

    final MNVValidator validator = ImmutableMNVValidator.of(tumorBam);
    final MNVMerger merger = ImmutableMNVMerger.of(outputHeader);

    Pair<PotentialMNVRegion, Optional<PotentialMNVRegion>> outputPair = ImmutablePair.of(PotentialMNVRegion.empty(), Optional.empty());
    final VariantContextFilter filter = new StrelkaPostProcess(highConfidenceSlicer);

    for (final VariantContext variantContext : vcfReader) {
        if (filter.test(variantContext)) {
            final VariantContext simplifiedVariant = StrelkaPostProcess.simplifyVariant(variantContext, sampleName);
            final PotentialMNVRegion potentialMNV = outputPair.getLeft();
            outputPair = MNVDetector.addMnvToRegion(potentialMNV, simplifiedVariant);
            outputPair.getRight().ifPresent(mnvRegion -> validator.mergeVariants(mnvRegion, merger).forEach(writer::add));
        }
    }
    validator.mergeVariants(outputPair.getLeft(), merger).forEach(writer::add);

    writer.close();
    vcfReader.close();
    LOGGER.info("Written output variants to " + outputVcf);
}
Example 16
Source File: GATKVariantContextUtilsUnitTest.java From gatk with BSD 3-Clause "New" or "Revised" License
@Test(expectedExceptions = IllegalArgumentException.class)
public void testCreateVariantContextWriterNoReference() {
    // should throw due to lack of reference
    final File outFile = createTempFile("testVCFWriter", ".vcf");
    final VariantContextWriter vcw = GATKVariantContextUtils.createVCFWriter(
            outFile.toPath(),
            null,
            true,
            Options.INDEX_ON_THE_FLY);
    vcw.close();
}
Example 17
Source File: AmberVCF.java From hmftools with GNU General Public License v3.0
void writeSNPCheck(@NotNull final String filename, @NotNull final List<BaseDepth> baseDepths) {
    final List<BaseDepth> list = Lists.newArrayList(baseDepths);
    Collections.sort(list);

    final VariantContextWriter writer =
            new VariantContextWriterBuilder().setOutputFile(filename).modifyOption(Options.INDEX_ON_THE_FLY, true).build();
    final VCFHeader header = header(Lists.newArrayList(config.primaryReference()));

    writer.setHeader(header);
    writer.writeHeader(header);
    list.forEach(x -> writer.add(create(x)));
    writer.close();
}
Example 18
Source File: AmberVCF.java From hmftools with GNU General Public License v3.0
void writeContamination(@NotNull final String filename, @NotNull final Collection<TumorContamination> evidence) {
    final List<TumorContamination> list = Lists.newArrayList(evidence);
    Collections.sort(list);

    final VariantContextWriter writer =
            new VariantContextWriterBuilder().setOutputFile(filename).modifyOption(Options.INDEX_ON_THE_FLY, true).build();
    final VCFHeader header = header(Lists.newArrayList(config.primaryReference(), config.tumor()));

    writer.setHeader(header);
    writer.writeHeader(header);
    list.forEach(x -> writer.add(create(x)));
    writer.close();
}
Example 19
Source File: MakeSitesOnlyVcf.java From picard with MIT License
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);

    final VCFFileReader reader = new VCFFileReader(INPUT, false);
    final VCFHeader inputVcfHeader = new VCFHeader(reader.getFileHeader().getMetaDataInInputOrder());
    final SAMSequenceDictionary sequenceDictionary = inputVcfHeader.getSequenceDictionary();

    if (CREATE_INDEX && sequenceDictionary == null) {
        throw new PicardException("A sequence dictionary must be available (either through the input file or by setting it explicitly) when creating indexed output.");
    }

    final ProgressLogger progress = new ProgressLogger(Log.getInstance(MakeSitesOnlyVcf.class), 10000);

    // Setup the site-only file writer
    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setOutputFile(OUTPUT)
            .setReferenceDictionary(sequenceDictionary);
    if (CREATE_INDEX)
        builder.setOption(Options.INDEX_ON_THE_FLY);
    else
        builder.unsetOption(Options.INDEX_ON_THE_FLY);
    final VariantContextWriter writer = builder.build();

    final VCFHeader header = new VCFHeader(inputVcfHeader.getMetaDataInInputOrder(), SAMPLE);
    writer.writeHeader(header);

    // Go through the input, strip the records and write them to the output
    final CloseableIterator<VariantContext> iterator = reader.iterator();
    while (iterator.hasNext()) {
        final VariantContext full = iterator.next();
        final VariantContext site = subsetToSamplesWithOriginalAnnotations(full, SAMPLE);
        writer.add(site);
        progress.record(site.getContig(), site.getStart());
    }

    CloserUtil.close(iterator);
    CloserUtil.close(reader);
    writer.close();

    return 0;
}
Example 20
Source File: SplitVcfs.java From picard with MIT License
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    final ProgressLogger progress = new ProgressLogger(log, 10000);

    final VCFFileReader fileReader = new VCFFileReader(INPUT, false);
    final VCFHeader fileHeader = fileReader.getFileHeader();

    final SAMSequenceDictionary sequenceDictionary = SEQUENCE_DICTIONARY != null
            ? SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(SEQUENCE_DICTIONARY).getSequenceDictionary()
            : fileHeader.getSequenceDictionary();
    if (CREATE_INDEX && sequenceDictionary == null) {
        throw new PicardException("A sequence dictionary must be available (either through the input file or by setting it explicitly) when creating indexed output.");
    }

    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setReferenceDictionary(sequenceDictionary)
            .clearOptions();
    if (CREATE_INDEX)
        builder.setOption(Options.INDEX_ON_THE_FLY);

    final VariantContextWriter snpWriter = builder.setOutputFile(SNP_OUTPUT).build();
    final VariantContextWriter indelWriter = builder.setOutputFile(INDEL_OUTPUT).build();
    snpWriter.writeHeader(fileHeader);
    indelWriter.writeHeader(fileHeader);

    int incorrectVariantCount = 0;

    final CloseableIterator<VariantContext> iterator = fileReader.iterator();
    while (iterator.hasNext()) {
        final VariantContext context = iterator.next();
        if (context.isIndel()) indelWriter.add(context);
        else if (context.isSNP()) snpWriter.add(context);
        else {
            if (STRICT) throw new IllegalStateException("Found a record with type " + context.getType().name());
            else incorrectVariantCount++;
        }
        progress.record(context.getContig(), context.getStart());
    }

    if (incorrectVariantCount > 0) {
        log.debug("Found " + incorrectVariantCount + " records that didn't match SNP or INDEL");
    }

    CloserUtil.close(iterator);
    CloserUtil.close(fileReader);
    snpWriter.close();
    indelWriter.close();

    return 0;
}