Java Code Examples for htsjdk.variant.variantcontext.writer.VariantContextWriter#add()
The following examples show how to use
htsjdk.variant.variantcontext.writer.VariantContextWriter#add().
You can vote up the examples you find helpful or vote down those you don't,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: AmberVCF.java From hmftools with GNU General Public License v3.0 | 6 votes |
/**
 * Writes the supplied tumor BAF evidence to {@code filename} as an indexed VCF.
 *
 * <p>Normal-sample genotypes from {@code hetNormalEvidence} are collected per amber
 * site first; each tumor record then contributes its own genotype before the
 * combined variant context is written in sorted order.</p>
 *
 * @param filename          output VCF path
 * @param tumorEvidence     tumor BAF records to write
 * @param hetNormalEvidence heterozygous evidence per normal sample
 */
public void writeBAF(@NotNull final String filename, @NotNull final Collection<TumorBAF> tumorEvidence, @NotNull final AmberHetNormalEvidence hetNormalEvidence) {
    // VCF records must be emitted in coordinate order.
    final List<TumorBAF> sortedEvidence = Lists.newArrayList(tumorEvidence);
    Collections.sort(sortedEvidence);

    final VariantContextWriter vcfWriter = new VariantContextWriterBuilder()
            .setOutputFile(filename)
            .modifyOption(Options.INDEX_ON_THE_FLY, true)
            .build();
    final VCFHeader vcfHeader =
            header(config.tumorOnly() ? Collections.singletonList(config.tumor()) : config.allSamples());
    vcfWriter.setHeader(vcfHeader);
    vcfWriter.writeHeader(vcfHeader);

    // Gather one genotype per normal sample at every het-normal site.
    final ListMultimap<AmberSite, Genotype> genotypesBySite = ArrayListMultimap.create();
    for (final String sample : hetNormalEvidence.samples()) {
        for (final BaseDepth baseDepth : hetNormalEvidence.evidence(sample)) {
            genotypesBySite.put(AmberSiteFactory.asSite(baseDepth), createGenotype(sample, baseDepth));
        }
    }

    // Append the tumor genotype at each site and write the combined record.
    for (final TumorBAF tumorBAF : sortedEvidence) {
        final AmberSite site = AmberSiteFactory.tumorSite(tumorBAF);
        genotypesBySite.put(site, createGenotype(tumorBAF));
        vcfWriter.add(create(tumorBAF, genotypesBySite.get(site)));
    }

    vcfWriter.close();
}
Example 2
Source File: HMMPostProcessor.java From gatk-protected with BSD 3-Clause "New" or "Revised" License | 6 votes |
/**
 * For each segment in genotypingSegments, compose the variant context and write to outputWriter.
 *
 * @param genotypingSegments a list of genotyping segments
 * @param outputWriter a VCF writer
 * @param variantPrefix a prefix for composing variant IDs
 * @param commandLine (optional) command line used to generate the data
 */
private void composeVariantContextAndWrite(@Nonnull final List<GenotypingSegment> genotypingSegments, @Nonnull final VariantContextWriter outputWriter, @Nonnull final String variantPrefix, @Nullable final String commandLine) {
    // Header must be written before any records.
    outputWriter.writeHeader(composeHeader(commandLine));
    int counter = 0;
    int prevReportedDonePercentage = -1;
    for (final GenotypingSegment segment : genotypingSegments) {
        // Percentage of segments processed so far; counter is incremented after the
        // variant is composed, so the first iteration reports 0%.
        final int donePercentage = (int)(100 * counter / (double)genotypingSegments.size());
        // Log only at multiples of 10%, and only once per distinct value.
        if (donePercentage % 10 == 0 && prevReportedDonePercentage != donePercentage) {
            logger.info(String.format("%d%% done...", donePercentage));
            prevReportedDonePercentage = donePercentage;
        }
        final VariantContext variant = composeVariantContext(segment, variantPrefix);
        counter++;
        outputWriter.add(variant);
    }
    // Completion is logged unconditionally (the loop's own logging never reaches 100%).
    logger.info("100% done.");
}
Example 3
Source File: VariantDataManager.java From gatk with BSD 3-Clause "New" or "Revised" License | 6 votes |
/**
 * Writes every {@code VariantDatum} in {@code data} to {@code recalWriter} as a VCF
 * record carrying the VQSLOD, culprit annotation, and training-site labels.
 *
 * @param recalWriter   destination writer (header assumed already written by the caller —
 *                      TODO confirm; this method only emits records)
 * @param seqDictionary dictionary used to sort the data into coordinate order
 */
public void writeOutRecalibrationTable(final VariantContextWriter recalWriter, final SAMSequenceDictionary seqDictionary) {
    // we need to sort in coordinate order in order to produce a valid VCF
    Collections.sort( data, VariantDatum.getComparator(seqDictionary) );

    // create dummy alleles to be used
    List<Allele> alleles = Arrays.asList(Allele.create("N", true), Allele.create("<VQSR>", false));

    for( final VariantDatum datum : data ) {
        // In allele-specific mode the dummy alleles are replaced per datum; since
        // VRAC.useASannotations is fixed for the whole loop, either every record or
        // no record uses real alleles.
        if (VRAC.useASannotations)
            alleles = Arrays.asList(datum.referenceAllele, datum.alternateAllele); //use the alleles to distinguish between multiallelics in AS mode
        VariantContextBuilder builder = new VariantContextBuilder("VQSR", datum.loc.getContig(), datum.loc.getStart(), datum.loc.getEnd(), alleles);
        builder.attribute(VCFConstants.END_KEY, datum.loc.getEnd());
        builder.attribute(GATKVCFConstants.VQS_LOD_KEY, String.format("%.4f", datum.lod));
        // -1 means no worst annotation was identified for this datum.
        builder.attribute(GATKVCFConstants.CULPRIT_KEY, (datum.worstAnnotation != -1 ? annotationKeys.get(datum.worstAnnotation) : "NULL"));

        // Training-site labels are only emitted when set.
        if ( datum.atTrainingSite ) builder.attribute(GATKVCFConstants.POSITIVE_LABEL_KEY, true);
        if ( datum.atAntiTrainingSite ) builder.attribute(GATKVCFConstants.NEGATIVE_LABEL_KEY, true);

        recalWriter.add(builder.make());
    }
}
Example 4
Source File: GATKVariantContextUtilsUnitTest.java From gatk with BSD 3-Clause "New" or "Revised" License | 5 votes |
private void writeBadVariant(final VariantContextWriter writer) { //write a variant with a (bad) attribute that doesn't appear in the header to the output final VariantContextBuilder vcBuilder = new VariantContextBuilder("","chr1", 1, 1, Arrays.asList(Aref)); vcBuilder.attribute("fake", new Object()); final VariantContext vc = vcBuilder.make(); writer.add(vc); }
Example 5
Source File: HotspotEvidenceVCF.java From hmftools with GNU General Public License v3.0 | 5 votes |
/**
 * Writes the hotspot evidence to {@code filename} as an un-indexed VCF, producing
 * one record per genome position.
 *
 * @param filename     output VCF path
 * @param evidenceList hotspot evidence entries to group by position and write
 */
public void write(@NotNull final String filename, @NotNull final List<HotspotEvidence> evidenceList) {
    final VariantContextWriter vcfWriter = new VariantContextWriterBuilder()
            .setOutputFile(filename)
            .modifyOption(Options.INDEX_ON_THE_FLY, false)
            .build();
    vcfWriter.setHeader(header);
    vcfWriter.writeHeader(header);

    // Group the evidence by position so each site yields exactly one record.
    final ListMultimap<GenomePosition, HotspotEvidence> evidenceByPosition =
            Multimaps.index(evidenceList, GenomePositions::create);
    for (final GenomePosition position : evidenceByPosition.keySet()) {
        final List<HotspotEvidence> evidenceAtPosition = evidenceByPosition.get(position);
        vcfWriter.add(create(evidenceAtPosition));
    }

    vcfWriter.close();
}
Example 6
Source File: GATKToolUnitTest.java From gatk with BSD 3-Clause "New" or "Revised" License | 5 votes |
// Writes a minimal (empty) header followed by a deliberately malformed variant whose
// "fake" attribute is not declared in that header.
// NOTE(review): the builder receives source "chr1" and contig "1" — presumably
// intentional for this negative test, but confirm against the
// VariantContextBuilder(source, contig, start, stop, alleles) parameter order.
private void writeHeaderAndBadVariant(final VariantContextWriter writer) {
    final VariantContext badVariant =
            new VariantContextBuilder("chr1", "1", 1, 1, Arrays.asList(Allele.create("A", true)))
                    .attribute("fake", new Object())
                    .make();
    writer.writeHeader(new VCFHeader());
    writer.add(badVariant);
}
Example 7
Source File: UpdateVcfSequenceDictionary.java From picard with MIT License | 5 votes |
/**
 * Replaces the sequence dictionary in the INPUT VCF's header with the one extracted
 * from SEQUENCE_DICTIONARY and streams all records unchanged to OUTPUT.
 *
 * <p>Fix: the original leaked the reader, writer, and iterator when an exception was
 * thrown mid-copy; all three are now closed via try-with-resources (close order —
 * iterator, writer, reader — matches the original's explicit closes).</p>
 *
 * @return 0 on success
 */
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsReadable(SEQUENCE_DICTIONARY);
    IOUtil.assertFileIsWritable(OUTPUT);

    final SAMSequenceDictionary samSequenceDictionary =
            SAMSequenceDictionaryExtractor.extractDictionary(SEQUENCE_DICTIONARY.toPath());

    try (final VCFFileReader fileReader = new VCFFileReader(INPUT, false)) {
        final VCFHeader fileHeader = fileReader.getFileHeader();

        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
                .setReferenceDictionary(samSequenceDictionary)
                .clearOptions();
        if (CREATE_INDEX) {
            builder.setOption(Options.INDEX_ON_THE_FLY);
        }

        try (final VariantContextWriter vcfWriter = builder.setOutputFile(OUTPUT).build();
             final CloseableIterator<VariantContext> iterator = fileReader.iterator()) {
            // Swap in the new dictionary before the header is written.
            fileHeader.setSequenceDictionary(samSequenceDictionary);
            vcfWriter.writeHeader(fileHeader);

            final ProgressLogger progress = new ProgressLogger(log, 10000);
            while (iterator.hasNext()) {
                final VariantContext context = iterator.next();
                vcfWriter.add(context);
                progress.record(context.getContig(), context.getStart());
            }
        }
    }
    return 0;
}
Example 8
Source File: RenameSampleInVcf.java From picard with MIT License | 5 votes |
/**
 * Rewrites the single-sample INPUT VCF to OUTPUT with the sample renamed to
 * NEW_SAMPLE_NAME, optionally verifying the existing name against OLD_SAMPLE_NAME.
 *
 * <p>Fix: the original leaked the reader and writer when validation or copying threw;
 * both are now closed via try-with-resources. The nesting preserves the original
 * close order (writer first, then reader).</p>
 *
 * @return 0 on success
 * @throws IllegalArgumentException if the input is multi-sample or the existing
 *         sample name does not match OLD_SAMPLE_NAME
 */
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);

    try (final VCFFileReader in = new VCFFileReader(INPUT, false)) {
        final VCFHeader header = in.getFileHeader();

        if (header.getGenotypeSamples().size() > 1) {
            throw new IllegalArgumentException("Input VCF must be single-sample.");
        }
        if (OLD_SAMPLE_NAME != null && !OLD_SAMPLE_NAME.equals(header.getGenotypeSamples().get(0))) {
            throw new IllegalArgumentException("Input VCF did not contain expected sample. Contained: " + header.getGenotypeSamples().get(0));
        }

        // Start from the default writer options and toggle on-the-fly indexing.
        final EnumSet<Options> options = EnumSet.copyOf(VariantContextWriterBuilder.DEFAULT_OPTIONS);
        if (CREATE_INDEX) options.add(Options.INDEX_ON_THE_FLY); else options.remove(Options.INDEX_ON_THE_FLY);

        // Same metadata, but the single sample is replaced by NEW_SAMPLE_NAME.
        final VCFHeader outHeader = new VCFHeader(header.getMetaDataInInputOrder(), CollectionUtil.makeList(NEW_SAMPLE_NAME));
        try (final VariantContextWriter out = new VariantContextWriterBuilder()
                .setOptions(options)
                .setOutputFile(OUTPUT)
                .setReferenceDictionary(outHeader.getSequenceDictionary())
                .build()) {
            out.writeHeader(outHeader);
            // Records are copied unchanged; only the header's sample name differs.
            for (final VariantContext ctx : in) {
                out.add(ctx);
            }
        }
    }
    return 0;
}
Example 9
Source File: SortVcf.java From picard with MIT License | 5 votes |
/**
 * Streams the coordinate-sorted records to OUTPUT under the given header.
 *
 * @param outputHeader header to write ahead of the records
 * @param sortedOutput records already in coordinate order
 */
private void writeSortedOutput(final VCFHeader outputHeader, final SortingCollection<VariantContext> sortedOutput) {
    final ProgressLogger progress = new ProgressLogger(log, 25000, "wrote", "records");

    // Index on the fly only when requested.
    final EnumSet<Options> writerOptions;
    if (CREATE_INDEX) {
        writerOptions = EnumSet.of(Options.INDEX_ON_THE_FLY);
    } else {
        writerOptions = EnumSet.noneOf(Options.class);
    }

    final VariantContextWriter writer = new VariantContextWriterBuilder()
            .setReferenceDictionary(outputHeader.getSequenceDictionary())
            .setOptions(writerOptions)
            .setOutputFile(OUTPUT)
            .build();
    writer.writeHeader(outputHeader);
    for (final VariantContext record : sortedOutput) {
        writer.add(record);
        progress.record(record.getContig(), record.getStart());
    }
    writer.close();
}
Example 10
Source File: VcfFormatConverter.java From picard with MIT License | 5 votes |
/**
 * Copies INPUT to OUTPUT through htsjdk, converting between formats as implied by
 * the output file's extension while preserving the header.
 *
 * @return 0 on success; throws when indexed output is requested but the input
 *         header carries no sequence dictionary
 */
@Override
protected int doWork() {
    final ProgressLogger progress = new ProgressLogger(LOG, 10000);
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);

    final VCFFileReader reader = new VCFFileReader(INPUT, REQUIRE_INDEX);
    final VCFHeader header = new VCFHeader(reader.getFileHeader());
    final SAMSequenceDictionary sequenceDictionary = header.getSequenceDictionary();
    // On-the-fly indexing requires contig info, which only the dictionary provides.
    if (CREATE_INDEX && sequenceDictionary == null) {
        throw new PicardException("A sequence dictionary must be available in the input file when creating indexed output.");
    }

    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setOutputFile(OUTPUT)
            .setReferenceDictionary(sequenceDictionary);
    if (CREATE_INDEX)
        builder.setOption(Options.INDEX_ON_THE_FLY);
    else
        builder.unsetOption(Options.INDEX_ON_THE_FLY);
    final VariantContextWriter writer = builder.build();
    writer.writeHeader(header);

    // Records pass through unchanged; the writer's output type determines the format.
    final CloseableIterator<VariantContext> iterator = reader.iterator();
    while (iterator.hasNext()) {
        final VariantContext context = iterator.next();
        writer.add(context);
        progress.record(context.getContig(), context.getStart());
    }

    // NOTE(review): reader/iterator/writer leak if an exception is thrown above;
    // consider try-with-resources.
    CloserUtil.close(iterator);
    CloserUtil.close(reader);
    writer.close();
    return 0;
}
Example 11
Source File: CreateSomaticPanelOfNormals.java From gatk-protected with BSD 3-Clause "New" or "Revised" License | 5 votes |
/**
 * Merges multiple variants sharing a position into a single site-only record
 * (source, location, and alleles only) and writes it; positions with at most one
 * variant are skipped entirely.
 */
private static void processVariantsAtSamePosition(final List<VariantContext> variants, final VariantContextWriter writer) {
    if (variants.size() <= 1) {
        return; // nothing to merge at this position
    }
    final VariantContext merged = AssemblyBasedCallerUtils.makeMergedVariantContext(variants);
    writer.add(new VariantContextBuilder()
            .source(merged.getSource())
            .loc(merged.getContig(), merged.getStart(), merged.getEnd())
            .alleles(merged.getAlleles())
            .make());
}
Example 12
Source File: EvaluateCopyNumberTriStateCalls.java From gatk-protected with BSD 3-Clause "New" or "Revised" License | 5 votes |
/**
 * Compares truth and called copy-number tri-state VCFs interval by interval, writing
 * per-variant evaluation records, per-case detail output, and a per-sample summary.
 *
 * @return the string "SUCCESS" when evaluation completes
 */
@Override
protected Object doWork() {
    final TargetCollection<Target> targets = targetArguments.readTargetCollection(false);
    final VCFFileReader truthReader = openVCFReader(truthFile);
    final VCFFileReader callsReader = openVCFReader(callsFile);
    final GenotypeEvaluationRecordWriter caseWriter = openGenotypeEvaluationOutputWriter(caseDetailOutputFile);
    // Default to evaluating every sample present in the calls VCF.
    if (samples.isEmpty()) {
        samples = composeSetOfSamplesToEvaluate(callsReader);
    }
    final VariantContextWriter outputWriter = openVCFWriter(outputFile, samples);
    // One running summary record per evaluated sample.
    final Map<String, EvaluationSampleSummaryRecord> sampleStats = samples.stream()
            .collect(Collectors.toMap(s -> s, EvaluationSampleSummaryRecord::new));
    final List<SimpleInterval> intervals = composeListOfProcessingIntervalsFromInputs(truthReader, callsReader);
    for (final SimpleInterval interval : intervals) {
        for (final VariantEvaluationContext vc : processInterval(truthReader, callsReader, interval, targets)) {
            outputWriter.add(vc);
            outputCases(caseWriter, vc, targets);
            updateSampleStats(sampleStats, vc);
        }
    }
    // NOTE(review): these closes are skipped if an exception is thrown above;
    // consider try-with-resources.
    truthReader.close();
    callsReader.close();
    outputWriter.close();
    closeCaseRecordWriter(caseWriter);
    writeSampleSummaryFile(sampleSummaryOutputFile, sampleStats);
    return "SUCCESS";
}
Example 13
Source File: ViccExtractorTestApplication.java From hmftools with GNU General Public License v3.0 | 5 votes |
/**
 * Writes every extracted hotspot in {@code resultsPerEntry} to {@code hotspotVcf},
 * un-indexed and without genotypes, tolerating attributes that are missing from
 * the (intentionally empty) header.
 */
private static void writeHotspots(@NotNull String hotspotVcf, @NotNull Map<ViccEntry, ViccExtractionResult> resultsPerEntry) {
    VariantContextWriter vcfWriter = new VariantContextWriterBuilder().setOutputFile(hotspotVcf)
            .setOutputFileType(VariantContextWriterBuilder.OutputType.VCF)
            .setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER)
            .modifyOption(Options.INDEX_ON_THE_FLY, false)
            .build();

    // Bare header: attribute definitions are deliberately omitted —
    // ALLOW_MISSING_FIELDS_IN_HEADER above makes that legal.
    vcfWriter.writeHeader(new VCFHeader(Sets.newHashSet(), Lists.newArrayList()));

    for (Map.Entry<VariantHotspot, HotspotAnnotation> hotspotEntry : convertAndSort(resultsPerEntry).entrySet()) {
        VariantHotspot hotspot = hotspotEntry.getKey();
        HotspotAnnotation annotation = hotspotEntry.getValue();
        List<Allele> alleles = buildAlleles(hotspot);

        VariantContext record = new VariantContextBuilder().noGenotypes()
                .source("VICC")
                .chr(hotspot.chromosome())
                .start(hotspot.position())
                .alleles(alleles)
                .computeEndFromAlleles(alleles, (int) hotspot.position())
                .attribute("sources", annotation.sources())
                .attribute("feature",
                        ProteinKeyFormatter.toProteinKey(annotation.gene(), annotation.transcript(), annotation.proteinAnnotation()))
                .make();
        LOGGER.debug("Writing {}", record);
        vcfWriter.add(record);
    }

    vcfWriter.close();
}
Example 14
Source File: MergeVcfs.java From picard with MIT License | 4 votes |
/**
 * Merges the INPUT VCFs into a single sorted OUTPUT, validating that every input
 * shares compatible contig lines and an identical sample list.
 *
 * @return 0 on success
 * @throws IllegalArgumentException if an input lacks contig lines and no dictionary
 *         was supplied, if contigs are incompatible, or if sample lists differ
 */
@Override
protected int doWork() {
    final ProgressLogger progress = new ProgressLogger(log, 10000);
    final List<String> sampleList = new ArrayList<String>();
    // Expand directories / list files into the concrete set of VCF inputs.
    INPUT = IOUtil.unrollFiles(INPUT, IOUtil.VCF_EXTENSIONS);
    final Collection<CloseableIterator<VariantContext>> iteratorCollection = new ArrayList<CloseableIterator<VariantContext>>(INPUT.size());
    final Collection<VCFHeader> headers = new HashSet<VCFHeader>(INPUT.size());
    VariantContextComparator variantContextComparator = null;
    SAMSequenceDictionary sequenceDictionary = null;

    if (SEQUENCE_DICTIONARY != null) {
        // NOTE(review): the SamReader opened here is never closed; consider
        // try-with-resources around the open(...) call.
        sequenceDictionary = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(SEQUENCE_DICTIONARY).getFileHeader().getSequenceDictionary();
    }

    for (final File file : INPUT) {
        IOUtil.assertFileIsReadable(file);
        final VCFFileReader fileReader = new VCFFileReader(file, false);
        final VCFHeader fileHeader = fileReader.getFileHeader();
        // Inputs without contig lines inherit the explicit dictionary, if given.
        if (fileHeader.getContigLines().isEmpty()) {
            if (sequenceDictionary == null) {
                throw new IllegalArgumentException(SEQ_DICT_REQUIRED);
            } else {
                fileHeader.setSequenceDictionary(sequenceDictionary);
            }
        }

        // The first input's comparator becomes the reference; later inputs must be
        // compatible with it.
        if (variantContextComparator == null) {
            variantContextComparator = fileHeader.getVCFRecordComparator();
        } else {
            if (!variantContextComparator.isCompatible(fileHeader.getContigLines())) {
                throw new IllegalArgumentException(
                        "The contig entries in input file " + file.getAbsolutePath() + " are not compatible with the others.");
            }
        }

        if (sequenceDictionary == null) sequenceDictionary = fileHeader.getSequenceDictionary();

        // All inputs must carry the same samples in the same order.
        if (sampleList.isEmpty()) {
            sampleList.addAll(fileHeader.getSampleNamesInOrder());
        } else {
            if (!sampleList.equals(fileHeader.getSampleNamesInOrder())) {
                throw new IllegalArgumentException("Input file " + file.getAbsolutePath() + " has sample entries that don't match the other files.");
            }
        }

        // add comments in the first header
        if (headers.isEmpty()) {
            COMMENT.stream().forEach(C -> fileHeader.addMetaDataLine(new VCFHeaderLine("MergeVcfs.comment", C)));
        }

        headers.add(fileHeader);
        iteratorCollection.add(fileReader.iterator());
    }

    if (CREATE_INDEX && sequenceDictionary == null) {
        throw new PicardException(String.format("Index creation failed. %s", SEQ_DICT_REQUIRED));
    }

    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setOutputFile(OUTPUT)
            .setReferenceDictionary(sequenceDictionary);

    if (CREATE_INDEX) {
        builder.setOption(Options.INDEX_ON_THE_FLY);
    } else {
        builder.unsetOption(Options.INDEX_ON_THE_FLY);
    }
    final VariantContextWriter writer = builder.build();

    writer.writeHeader(new VCFHeader(VCFUtils.smartMergeHeaders(headers, false), sampleList));

    // Merge all inputs in sorted order via a k-way merging iterator.
    final MergingIterator<VariantContext> mergingIterator = new MergingIterator<VariantContext>(variantContextComparator, iteratorCollection);
    while (mergingIterator.hasNext()) {
        final VariantContext context = mergingIterator.next();
        writer.add(context);
        progress.record(context.getContig(), context.getStart());
    }

    CloserUtil.close(mergingIterator);
    writer.close();
    return 0;
}
Example 15
Source File: SplitVcfs.java From picard with MIT License | 4 votes |
/**
 * Splits INPUT into SNP_OUTPUT and INDEL_OUTPUT, routing each record by type.
 * Records that are neither SNP nor indel either abort (STRICT) or are counted
 * and dropped.
 *
 * @return 0 on success
 */
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    final ProgressLogger progress = new ProgressLogger(log, 10000);

    final VCFFileReader fileReader = new VCFFileReader(INPUT, false);
    final VCFHeader fileHeader = fileReader.getFileHeader();

    // Prefer an explicitly supplied dictionary; fall back to the input header's.
    final SAMSequenceDictionary sequenceDictionary =
            SEQUENCE_DICTIONARY != null
                    ? SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(SEQUENCE_DICTIONARY).getSequenceDictionary()
                    : fileHeader.getSequenceDictionary();
    if (CREATE_INDEX && sequenceDictionary == null) {
        throw new PicardException("A sequence dictionary must be available (either through the input file or by setting it explicitly) when creating indexed output.");
    }

    // One shared builder; setOutputFile is re-pointed for each of the two outputs.
    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setReferenceDictionary(sequenceDictionary)
            .clearOptions();
    if (CREATE_INDEX)
        builder.setOption(Options.INDEX_ON_THE_FLY);

    final VariantContextWriter snpWriter = builder.setOutputFile(SNP_OUTPUT).build();
    final VariantContextWriter indelWriter = builder.setOutputFile(INDEL_OUTPUT).build();
    snpWriter.writeHeader(fileHeader);
    indelWriter.writeHeader(fileHeader);

    int incorrectVariantCount = 0;

    final CloseableIterator<VariantContext> iterator = fileReader.iterator();
    while (iterator.hasNext()) {
        final VariantContext context = iterator.next();
        if (context.isIndel()) indelWriter.add(context);
        else if (context.isSNP()) snpWriter.add(context);
        else {
            // Neither SNP nor indel: fail fast in STRICT mode, otherwise count and drop.
            if (STRICT) throw new IllegalStateException("Found a record with type " + context.getType().name());
            else incorrectVariantCount++;
        }
        progress.record(context.getContig(), context.getStart());
    }

    if (incorrectVariantCount > 0) {
        log.debug("Found " + incorrectVariantCount + " records that didn't match SNP or INDEL");
    }

    CloserUtil.close(iterator);
    CloserUtil.close(fileReader);
    snpWriter.close();
    indelWriter.close();
    return 0;
}
Example 16
Source File: MakeSitesOnlyVcf.java From picard with MIT License | 4 votes |
/**
 * Copies INPUT to OUTPUT keeping only site-level information, restricting genotypes
 * to the samples listed in SAMPLE (empty SAMPLE yields a true sites-only file).
 *
 * @return 0 on success
 */
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);

    final VCFFileReader reader = new VCFFileReader(INPUT, false);
    // Metadata-only copy of the input header (no genotype columns).
    final VCFHeader inputVcfHeader = new VCFHeader(reader.getFileHeader().getMetaDataInInputOrder());
    final SAMSequenceDictionary sequenceDictionary = inputVcfHeader.getSequenceDictionary();

    if (CREATE_INDEX && sequenceDictionary == null) {
        throw new PicardException("A sequence dictionary must be available (either through the input file or by setting it explicitly) when creating indexed output.");
    }

    final ProgressLogger progress = new ProgressLogger(Log.getInstance(MakeSitesOnlyVcf.class), 10000);

    // Setup the site-only file writer
    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setOutputFile(OUTPUT)
            .setReferenceDictionary(sequenceDictionary);
    if (CREATE_INDEX)
        builder.setOption(Options.INDEX_ON_THE_FLY);
    else
        builder.unsetOption(Options.INDEX_ON_THE_FLY);
    final VariantContextWriter writer = builder.build();

    final VCFHeader header = new VCFHeader(inputVcfHeader.getMetaDataInInputOrder(), SAMPLE);
    writer.writeHeader(header);

    // Go through the input, strip the records and write them to the output
    final CloseableIterator<VariantContext> iterator = reader.iterator();
    while (iterator.hasNext()) {
        final VariantContext full = iterator.next();
        final VariantContext site = subsetToSamplesWithOriginalAnnotations(full, SAMPLE);
        writer.add(site);
        progress.record(site.getContig(), site.getStart());
    }

    // NOTE(review): closes are skipped on exception; consider try-with-resources.
    CloserUtil.close(iterator);
    CloserUtil.close(reader);
    writer.close();
    return 0;
}
Example 17
Source File: FilterVcf.java From picard with MIT License | 4 votes |
/**
 * Filters the INPUT VCF through site-level filters (allele balance, Fisher strand,
 * QD, optional JavaScript) and genotype-level filters (GQ, DP), writing annotated
 * records to OUTPUT.
 *
 * @return 0 on success
 */
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);
    VCFFileReader in = null;
    VariantContextWriter out = null;
    try {// try/finally used to close 'in' and 'out'
        in = new VCFFileReader(INPUT, false);

        // Site-level filters; the JavaScript filter is only added when a script was supplied.
        final List<VariantFilter> variantFilters = new ArrayList<>(4);
        variantFilters.add(new AlleleBalanceFilter(MIN_AB));
        variantFilters.add(new FisherStrandFilter(MAX_FS));
        variantFilters.add(new QdFilter(MIN_QD));

        if (JAVASCRIPT_FILE != null) {
            try {
                variantFilters.add(new VariantContextJavascriptFilter(JAVASCRIPT_FILE, in.getFileHeader()));
            } catch (final IOException error) {
                throw new PicardException("javascript-related error", error);
            }
        }

        // Genotype-level filters applied per sample.
        final List<GenotypeFilter> genotypeFilters = CollectionUtil.makeList(new GenotypeQualityFilter(MIN_GQ), new DepthFilter(MIN_DP));
        final FilterApplyingVariantIterator iterator = new FilterApplyingVariantIterator(in.iterator(), variantFilters, genotypeFilters);

        final VCFHeader header = in.getFileHeader();
        // If the user is writing to a .bcf or .vcf, VariantContextBuilderWriter requires a Sequence Dictionary. Make sure that the
        // Input VCF has one.
        final VariantContextWriterBuilder variantContextWriterBuilder = new VariantContextWriterBuilder();

        if (isVcfOrBcf(OUTPUT)) {
            final SAMSequenceDictionary sequenceDictionary = header.getSequenceDictionary();
            if (sequenceDictionary == null) {
                throw new PicardException("The input vcf must have a sequence dictionary in order to create indexed vcf or bcfs.");
            }
            variantContextWriterBuilder.setReferenceDictionary(sequenceDictionary);
        }

        out = variantContextWriterBuilder.setOutputFile(OUTPUT).build();

        // Declare the filter/format lines the filters may emit.
        header.addMetaDataLine(new VCFFilterHeaderLine("AllGtsFiltered", "Site filtered out because all genotypes are filtered out."));
        header.addMetaDataLine(new VCFFormatHeaderLine("FT", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Genotype filters."));

        for (final VariantFilter filter : variantFilters) {
            filter.headerLines().forEach(header::addMetaDataLine);
        }

        // NOTE(review): this presumably writes the same header object mutated above
        // (in.getFileHeader() == header) — confirm; writing `header` directly would
        // make that intent explicit.
        out.writeHeader(in.getFileHeader());

        while (iterator.hasNext()) {
            final VariantContext vc = iterator.next();
            progress.record(vc.getContig(), vc.getStart());
            out.add(vc);
        }

        return 0;
    } finally {
        CloserUtil.close(out);
        CloserUtil.close(in);
    }
}
Example 18
Source File: Concordance.java From gatk with BSD 3-Clause "New" or "Revised" License | 4 votes |
/** Writes {@code vc} to {@code writer}, doing nothing when no writer was supplied. */
private static void tryToWrite(final VariantContextWriter writer, final VariantContext vc) {
    if (writer == null) {
        return; // output is optional
    }
    writer.add(vc);
}
Example 19
Source File: Concordance.java From gatk-protected with BSD 3-Clause "New" or "Revised" License | 4 votes |
/** Adds {@code vc} to {@code writer} if and only if a writer is present. */
private static void tryToWrite(final VariantContextWriter writer, final VariantContext vc) {
    final boolean haveWriter = writer != null;
    if (haveWriter) {
        writer.add(vc);
    }
}