Java Code Examples for net.lingala.zip4j.ZipFile#extractAll()
The following examples show how to use net.lingala.zip4j.ZipFile#extractAll(). Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
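As a starting point, here is a minimal sketch of the typical call pattern with the zip4j 2.x API. The archive names, output paths, and password are placeholder values, not code taken from the projects below.

import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException;

public class ExtractAllSketch {

    public static void main(String[] args) throws ZipException {
        // Extract every entry of an unencrypted archive into a target directory;
        // extractAll() creates the destination directory structure as needed.
        ZipFile zipFile = new ZipFile("archive.zip");
        zipFile.extractAll("/tmp/extracted");

        // For a password-protected archive, pass the password to the constructor;
        // the same extractAll() call then decrypts entries while extracting.
        ZipFile encryptedZip = new ZipFile("secure.zip", "placeholder-password".toCharArray());
        encryptedZip.extractAll("/tmp/extracted-secure");
    }
}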
Example 1
Source File: S3DataManagerTest.java From aws-codebuild-jenkins-plugin with Apache License 2.0
@Test
public void testZipSourceBuildSpec() throws Exception {
    String buildSpecName = "Buildspec.yml";
    File buildSpec = new File("/tmp/source/" + buildSpecName);
    String buildSpecContents = "yo\n";
    FileUtils.write(buildSpec, buildSpecContents);

    ZipOutputStream out = new ZipOutputStream(new FileOutputStream("/tmp/source.zip"));
    ZipSourceCallable.zipSource(testZipSourceWorkspace, "/tmp/source/", out, "/tmp/source/");
    out.close();

    File zip = new File("/tmp/source.zip");
    assertTrue(zip.exists());

    File unzipFolder = new File("/tmp/folder/");
    unzipFolder.mkdir();
    ZipFile z = new ZipFile(zip.getPath());
    z.extractAll(unzipFolder.getPath());

    assertTrue(unzipFolder.list().length == 1);
    assertEquals(buildSpecName, unzipFolder.list()[0]);
    File extractedBuildSpec = new File(unzipFolder.getPath() + "/" + buildSpecName);
    assertTrue(FileUtils.readFileToString(extractedBuildSpec).equals(buildSpecContents));
}
Example 2
Source File: ZipFileVerifier.java From zip4j with Apache License 2.0
public static void verifyZipFileByExtractingAllFiles(File zipFileToExtract, char[] password, File outputFolder,
        int expectedNumberOfEntries, boolean verifyFileContents, Charset charset) throws IOException {
    assertThat(zipFileToExtract).isNotNull();
    assertThat(zipFileToExtract).exists();

    ZipFile zipFile = new ZipFile(zipFileToExtract, password);
    if (charset != null) {
        zipFile.setCharset(charset);
    }
    zipFile.extractAll(outputFolder.getPath());
    assertThat(zipFile.getFileHeaders().size()).as("Number of file headers").isEqualTo(expectedNumberOfEntries);

    List<File> extractedFiles = FileUtils.getFilesInDirectoryRecursive(outputFolder, true, true);
    assertThat(extractedFiles).hasSize(expectedNumberOfEntries);

    if (verifyFileContents) {
        verifyFolderContentsSameAsSourceFiles(outputFolder);
    }
}
Example 3
Source File: S3DataManagerTest.java From aws-codebuild-jenkins-plugin with Apache License 2.0
@Test
public void testZipSourceOneDirEmpty() throws Exception {
    String buildSpecName = "Buildspec.yml";
    File buildSpec = new File("/tmp/source/" + buildSpecName);
    String buildSpecContents = "yo\n";
    FileUtils.write(buildSpec, buildSpecContents);

    File sourceDir = new File("/tmp/source/src");
    sourceDir.mkdir();

    ZipOutputStream out = new ZipOutputStream(new FileOutputStream("/tmp/source.zip"));
    ZipSourceCallable.zipSource(testZipSourceWorkspace, "/tmp/source/", out, "/tmp/source/");
    out.close();

    File zip = new File("/tmp/source.zip");
    assertTrue(zip.exists());

    File unzipFolder = new File("/tmp/folder/");
    unzipFolder.mkdir();
    ZipFile z = new ZipFile(zip.getPath());
    z.extractAll(unzipFolder.getPath());

    assertEquals(2, unzipFolder.list().length);
    assertTrue(Arrays.asList(unzipFolder.list()).contains(buildSpecName));
    assertTrue(Arrays.asList(unzipFolder.list()).contains("src"));

    File extractedBuildSpec = new File(unzipFolder.getPath() + "/" + buildSpecName);
    assertEquals(buildSpecContents, FileUtils.readFileToString(extractedBuildSpec));

    File srcDir = new File(unzipFolder.getPath() + "/src");
    assertTrue(srcDir.isDirectory());
    assertEquals(0, srcDir.list().length);
}
Example 4
Source File: S3DataManagerTest.java From aws-codebuild-jenkins-plugin with Apache License 2.0
@Test
public void testZipSourceOneDir() throws Exception {
    String buildSpecName = "Buildspec.yml";
    File buildSpec = new File("/tmp/source/" + buildSpecName);
    String buildSpecContents = "yo\n";
    FileUtils.write(buildSpec, buildSpecContents);

    File sourceDir = new File("/tmp/source/src");
    sourceDir.mkdir();
    String srcFileName = "/tmp/source/src/file.java";
    File srcFile = new File(srcFileName);
    String srcFileContents = "int i = 1;";
    FileUtils.write(srcFile, srcFileContents);

    ZipOutputStream out = new ZipOutputStream(new FileOutputStream("/tmp/source.zip"));
    ZipSourceCallable.zipSource(testZipSourceWorkspace, "/tmp/source/", out, "/tmp/source/");
    out.close();

    File zip = new File("/tmp/source.zip");
    assertTrue(zip.exists());

    File unzipFolder = new File("/tmp/folder/");
    unzipFolder.mkdir();
    ZipFile z = new ZipFile(zip.getPath());
    z.extractAll(unzipFolder.getPath());

    String[] fileList = unzipFolder.list();
    assertNotNull(fileList);
    Arrays.sort(fileList);

    assertTrue(fileList.length == 2);
    assertEquals(fileList[0], buildSpecName);
    File extractedBuildSpec = new File(unzipFolder.getPath() + "/" + buildSpecName);
    assertEquals(FileUtils.readFileToString(extractedBuildSpec), buildSpecContents);

    assertEquals(fileList[1], "src");
    File extractedSrcFile = new File(unzipFolder.getPath() + "/src/file.java");
    assertTrue(extractedSrcFile.exists());
    assertEquals(FileUtils.readFileToString(extractedSrcFile), srcFileContents);
}
Example 5
Source File: EarthProjectsService.java From collect-earth with MIT License
private File unzipContents(File projectZipFile, String projectName) throws ZipException {
    File projectFolder = new File(getProjectsFolder() + File.separator + projectName);

    if (projectFolder.exists() || projectFolder.mkdirs()) {
        ZipFile zipFile = new ZipFile(projectZipFile);
        zipFile.extractAll(projectFolder.getAbsolutePath());
    }

    return projectFolder;
}
Example 6
Source File: ImputationTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testPipelineWithPhased() throws IOException, ZipException {

    String configFolder = "test-data/configs/hapmap-chr20";
    String inputFolder = "test-data/data/chr20-phased";

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);

    assertTrue(result);
    assertTrue(context.hasInMemory("Remaining sites in total: 7,735"));

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_20.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile file = VcfFileUtil.load("test-data/tmp/chr20.dose.vcf.gz", 100000000, false);

    assertEquals("20", file.getChromosome());
    assertEquals(51, file.getNoSamples());
    assertEquals(true, file.isPhased());
    assertEquals(TOTAL_REFPANEL_CHR20_B37 + ONLY_IN_INPUT, file.getNoSnps());

    // FileUtil.deleteDirectory("test-data/tmp");
}
Example 7
Source File: ImputationTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testCompareInfoAndDosageSize() throws IOException, ZipException {

    String configFolder = "test-data/configs/hapmap-chr20";
    String inputFolder = "test-data/data/chr20-unphased";

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);

    assertTrue(result);
    assertTrue(context.hasInMemory("Remaining sites in total: 7,735"));

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_20.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile file = VcfFileUtil.load("test-data/tmp/chr20.dose.vcf.gz", 100000000, false);

    assertEquals(true, checkAmountOfColumns("test-data/tmp/chr20.info.gz", 13));
    assertEquals(true, checkSortPositionInfo("test-data/tmp/chr20.info.gz"));

    assertEquals("20", file.getChromosome());
    assertEquals(51, file.getNoSamples());
    assertEquals(true, file.isPhased());
    assertEquals(TOTAL_REFPANEL_CHR20_B37, file.getNoSnps());

    // subtract header
    int infoCount = getLineCount("test-data/tmp/chr20.info.gz");
    assertEquals(infoCount - 1, file.getNoSnps());

    FileUtil.deleteDirectory("test-data/tmp");
}
Example 8
Source File: ImputationChrXTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testChrXPipelineWithEagleHg38() throws IOException, ZipException {

    String configFolder = "test-data/configs/hapmap-chrX-hg38";
    String inputFolder = "test-data/data/chrX-unphased";

    // maybe git large files?
    if (!new File(
            "test-data/configs/hapmap-chrX-hg38/ref-panels/ALL.X.nonPAR.phase1_v3.snps_indels_svs.genotypes.all.noSingleton.recode.hg38.bcf")
                    .exists()) {
        System.out.println("chrX bcf nonPAR file not available");
        return;
    }

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);
    assertTrue(result);

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_X.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile vcfFile = VcfFileUtil.load("test-data/tmp/chrX.dose.vcf.gz", 100000000, false);

    assertEquals("X", vcfFile.getChromosome());
    assertEquals(26, vcfFile.getNoSamples());
    assertEquals(true, vcfFile.isPhased());
    assertEquals(TOTAL_REFPANEL_CHRX_B38, vcfFile.getNoSnps());

    FileUtil.deleteDirectory("test-data/tmp");
}
Example 9
Source File: ImputationChrXTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testPipelineChrXWithEaglePhasingOnly() throws IOException, ZipException {

    if (!new File(
            "test-data/configs/hapmap-chrX-hg38/ref-panels/ALL.X.nonPAR.phase1_v3.snps_indels_svs.genotypes.all.noSingleton.recode.hg38.bcf")
                    .exists()) {
        System.out.println("chrX bcf nonPAR file not available");
        return;
    }

    String configFolder = "test-data/configs/hapmap-chrX";
    String inputFolder = "test-data/data/chrX-unphased";

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "phase1");
    context.setInput("mode", "phasing");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);
    assertTrue(result);

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_X.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile vcfFile = VcfFileUtil.load("test-data/tmp/chrX.phased.vcf.gz", 100000000, false);
    assertEquals(true, vcfFile.isPhased());

    VCFFileReader vcfReader = new VCFFileReader(new File(vcfFile.getVcfFilename()), false);
    CloseableIterator<VariantContext> it = vcfReader.iterator();

    while (it.hasNext()) {
        VariantContext line = it.next();

        if (line.getStart() == 44322058) {
            assertEquals("A", line.getGenotype("HG00096").getGenotypeString());
            System.out.println(line.getGenotype("HG00097").getGenotypeString());
            assertEquals("A|A", line.getGenotype("HG00097").getGenotypeString());
        }
    }

    vcfReader.close();

    FileUtil.deleteDirectory("test-data/tmp");
}
Example 10
Source File: ImputationTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testPipelineWithBeaglePhasingOnly() throws IOException, ZipException {

    String configFolder = "test-data/configs/beagle";
    String inputFolder = "test-data/data/chr20-unphased";

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");
    context.setInput("mode", "phasing");
    context.setInput("phasing", "beagle");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);

    assertTrue(result);
    assertTrue(context.hasInMemory("Remaining sites in total: 7,735"));

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_20.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile file = VcfFileUtil.load("test-data/tmp/chr20.phased.vcf.gz", 100000000, false);

    assertEquals("20", file.getChromosome());
    assertEquals(51, file.getNoSamples());
    assertEquals(true, file.isPhased());
    assertEquals(TOTAL_SNPS_INPUT - SNPS_MONOMORPHIC - ONLY_IN_INPUT, file.getNoSnps());

    FileUtil.deleteDirectory("test-data/tmp");
}
Example 11
Source File: ImputationTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testPipelineWithEaglePhasingOnly() throws IOException, ZipException {

    String configFolder = "test-data/configs/hapmap-chr20";
    String inputFolder = "test-data/data/chr20-unphased";

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");
    context.setInput("mode", "phasing");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);

    assertTrue(result);
    assertTrue(context.hasInMemory("Remaining sites in total: 7,735"));

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_20.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile file = VcfFileUtil.load("test-data/tmp/chr20.phased.vcf.gz", 100000000, false);

    assertEquals("20", file.getChromosome());
    assertEquals(51, file.getNoSamples());
    assertEquals(true, file.isPhased());
    assertEquals(TOTAL_SNPS_INPUT - SNPS_MONOMORPHIC - ONLY_IN_INPUT, file.getNoSnps());

    FileUtil.deleteDirectory("test-data/tmp");
}
Example 12
Source File: ImputationTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testPipelineWithEaglePhasingOnlyWithPhasedData() throws IOException, ZipException {

    String configFolder = "test-data/configs/hapmap-chr20";
    String inputFolder = "test-data/data/chr20-phased";

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");
    context.setInput("mode", "phasing");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);

    assertTrue(result);
    assertTrue(context.hasInMemory("Remaining sites in total: 7,735"));

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_20.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile file = VcfFileUtil.load("test-data/tmp/chr20.phased.vcf.gz", 100000000, false);

    assertEquals("20", file.getChromosome());
    assertEquals(51, file.getNoSamples());
    assertEquals(true, file.isPhased());
    assertEquals(TOTAL_SNPS_INPUT - SNPS_MONOMORPHIC, file.getNoSnps());

    FileUtil.deleteDirectory("test-data/tmp");
}
Example 13
Source File: ImputationTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testPipelineWithEagle() throws IOException, ZipException {

    String configFolder = "test-data/configs/hapmap-chr20";
    String inputFolder = "test-data/data/chr20-unphased";

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);

    assertTrue(result);
    assertTrue(context.hasInMemory("Remaining sites in total: 7,735"));

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_20.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile file = VcfFileUtil.load("test-data/tmp/chr20.dose.vcf.gz", 100000000, false);

    assertEquals("20", file.getChromosome());
    assertEquals(51, file.getNoSamples());
    assertEquals(true, file.isPhased());
    assertEquals(TOTAL_REFPANEL_CHR20_B37, file.getNoSnps());

    int snpInInfo = getLineCount("test-data/tmp/chr20.info.gz") - 1;
    assertEquals(snpInInfo, file.getNoSnps());

    FileUtil.deleteDirectory("test-data/tmp");
}
Example 14
Source File: ImputationTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testPipelineWithHttpUrl() throws IOException, ZipException {

    String configFolder = "test-data/configs/hapmap-chr1";
    String inputFolder = "https://imputationserver.sph.umich.edu/static/downloads/hapmap300.chr1.recode.vcf.gz";

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");

    // create step instance
    InputValidation inputValidation = new InputValidationMock(configFolder);

    // run and test
    boolean result = run(context, inputValidation);

    // check if step is failed
    assertEquals(true, result);

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    result = run(context, qcStats);

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_1.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile file = VcfFileUtil.load("test-data/tmp/chr1.dose.vcf.gz", 100000000, false);

    assertEquals("1", file.getChromosome());
    assertEquals(60, file.getNoSamples());
    assertEquals(true, file.isPhased());

    FileUtil.deleteDirectory("test-data/tmp");
}
Example 15
Source File: ImputationChrXTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testChrXPipelinePhased() throws IOException, ZipException {

    String configFolder = "test-data/configs/hapmap-chrX";
    String inputFolder = "test-data/data/chrX-phased";

    File file = new File("test-data/tmp");
    if (file.exists()) {
        FileUtil.deleteDirectory(file);
    }

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "phase1");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);
    assertTrue(result);

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_X.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile vcfFile = VcfFileUtil.load("test-data/tmp/chrX.dose.vcf.gz", 100000000, false);

    assertEquals("X", vcfFile.getChromosome());
    assertEquals(26, vcfFile.getNoSamples());
    assertEquals(true, vcfFile.isPhased());
    assertEquals(TOTAL_REFPANEL_CHRX_B37, vcfFile.getNoSnps());

    FileUtil.deleteDirectory(file);
}
Example 16
Source File: ImputationTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testPipelineWithPhasedHg38ToHg19() throws IOException, ZipException {

    String configFolder = "test-data/configs/hapmap-chr20";
    String inputFolder = "test-data/data/chr20-phased-hg38";

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");
    context.setInput("build", "hg38");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);

    assertTrue(result);
    assertTrue(context.hasInMemory("Remaining sites in total: 7,735"));

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_20.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile file = VcfFileUtil.load("test-data/tmp/chr20.dose.vcf.gz", 100000000, false);

    assertEquals("20", file.getChromosome());
    assertEquals(51, file.getNoSamples());
    assertEquals(true, file.isPhased());
    assertEquals(TOTAL_REFPANEL_CHR20_B37 + ONLY_IN_INPUT, file.getNoSnps());

    FileUtil.deleteDirectory("test-data/tmp");
}
Example 17
Source File: ImputationChrXTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testChrXPipelineWithEagle() throws IOException, ZipException {

    // maybe git large files?
    if (!new File(
            "test-data/configs/hapmap-chrX/ref-panels/ALL.chrX.nonPAR.phase1_v3.snps_indels_svs.genotypes.all.noSingleton.recode.bcf")
                    .exists()) {
        System.out.println("chrX bcf nonPAR file not available");
        return;
    }

    String configFolder = "test-data/configs/hapmap-chrX";
    String inputFolder = "test-data/data/chrX-unphased";

    File file = new File("test-data/tmp");
    if (file.exists()) {
        FileUtil.deleteDirectory(file);
    }

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "phase1");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);
    assertTrue(result);

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_X.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile vcfFile = VcfFileUtil.load("test-data/tmp/chrX.dose.vcf.gz", 100000000, false);

    assertEquals("X", vcfFile.getChromosome());

    FileUtil.deleteDirectory(file);
}
Example 18
Source File: S3DataManagerTest.java From aws-codebuild-jenkins-plugin with Apache License 2.0
@Test
public void testZipSourceOneDirMultipleFiles() throws Exception {
    String buildSpecName = "buildspec.yml";
    String rootFileName = "pom.xml";
    String sourceDirName = "src";
    String srcFileName = "file.java";
    String srcFile2Name = "util.java";

    File buildSpec = new File("/tmp/source/" + buildSpecName);
    File rootFile = new File("/tmp/source/" + rootFileName);
    File sourceDir = new File("/tmp/source/" + sourceDirName);
    sourceDir.mkdir();
    File srcFile = new File("/tmp/source/src/" + srcFileName);
    File srcFile2 = new File("/tmp/source/src/" + srcFile2Name);

    String rootFileContents = "<plugin>codebuild</plugin>";
    String buildSpecContents = "Hello!!!!!";
    String srcFileContents = "int i = 1;";
    String srcFile2Contents = "util() { ; }";

    FileUtils.write(buildSpec, buildSpecContents);
    FileUtils.write(rootFile, rootFileContents);
    FileUtils.write(srcFile, srcFileContents);
    FileUtils.write(srcFile2, srcFile2Contents);

    ZipOutputStream out = new ZipOutputStream(new FileOutputStream("/tmp/source.zip"));
    ZipSourceCallable.zipSource(testZipSourceWorkspace, "/tmp/source/", out, "/tmp/source/");
    out.close();

    File zip = new File("/tmp/source.zip");
    assertTrue(zip.exists());

    File unzipFolder = new File("/tmp/folder/");
    unzipFolder.mkdir();
    ZipFile z = new ZipFile(zip.getPath());
    z.extractAll(unzipFolder.getPath());

    assertTrue(unzipFolder.list().length == 3);
    File srcFolder = new File("/tmp/folder/src/");
    assertTrue(srcFolder.list().length == 2);

    List<String> files = Arrays.asList(unzipFolder.list());
    assertTrue(files.contains(buildSpecName));
    assertTrue(files.contains(rootFileName));
    assertTrue(files.contains(sourceDirName));

    File extractedBuildSpec = new File(unzipFolder.getPath() + "/" + buildSpecName);
    File extractedRootFile = new File(unzipFolder.getPath() + "/" + rootFileName);
    File extractedSrcFile = new File(unzipFolder.getPath() + "/src/" + srcFileName);
    File extractedSrcFile2 = new File(unzipFolder.getPath() + "/src/" + srcFile2Name);

    assertTrue(FileUtils.readFileToString(extractedBuildSpec).equals(buildSpecContents));
    assertTrue(FileUtils.readFileToString(extractedRootFile).equals(rootFileContents));
    assertTrue(FileUtils.readFileToString(extractedSrcFile).equals(srcFileContents));
    assertTrue(FileUtils.readFileToString(extractedSrcFile2).equals(srcFile2Contents));
}
Example 19
Source File: ImputationChrMT.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testChrMTPipeline() throws IOException, ZipException {

    String configFolder = "test-data/configs/phylotree-chrMT";
    String inputFolder = "test-data/data/chrMT";

    File file = new File("test-data/tmp");
    if (file.exists()) {
        FileUtil.deleteDirectory(file);
    }

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "phylotree");

    // create step instance
    InputValidation inputValidation = new InputValidationMock(configFolder);

    // run and test
    boolean result = run(context, inputValidation);
    assertEquals(true, result);

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    result = run(context, qcStats);
    assertTrue(result);

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_MT.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile vcfFile = VcfFileUtil.load("test-data/tmp/chrMT.dose.vcf.gz", 100000000, false);

    assertEquals("MT", vcfFile.getChromosome());
    assertEquals(5435, vcfFile.getNoSamples());

    FileUtil.deleteDirectory(file);
}
Example 20
Source File: ImputationTest.java From imputationserver with GNU Affero General Public License v3.0
@Test
public void testPipelineWithPhasedHg19ToHg38() throws IOException, ZipException {

    String configFolder = "test-data/configs/hapmap-chr20-hg38";
    String inputFolder = "test-data/data/chr20-phased";

    // create workflow context
    WorkflowTestContext context = buildContext(inputFolder, "hapmap2");

    // run qc to create chunkfile
    QcStatisticsMock qcStats = new QcStatisticsMock(configFolder);
    boolean result = run(context, qcStats);

    assertTrue(result);
    assertTrue(context.hasInMemory("Remaining sites in total: 7,735"));

    // add panel to hdfs
    importRefPanel(FileUtil.path(configFolder, "ref-panels"));
    // importMinimacMap("test-data/B38_MAP_FILE.map");
    importBinaries("files/bin");

    // run imputation
    ImputationMinimac3Mock imputation = new ImputationMinimac3Mock(configFolder);
    result = run(context, imputation);
    assertTrue(result);

    // run export
    CompressionEncryptionMock export = new CompressionEncryptionMock("files");
    result = run(context, export);
    assertTrue(result);

    ZipFile zipFile = new ZipFile("test-data/tmp/local/chr_20.zip", PASSWORD.toCharArray());
    zipFile.extractAll("test-data/tmp");

    VcfFile file = VcfFileUtil.load("test-data/tmp/chr20.dose.vcf.gz", 100000000, false);

    assertEquals("20", file.getChromosome());
    assertEquals(51, file.getNoSamples());
    assertEquals(true, file.isPhased());
    assertEquals(TOTAL_REFPANEL_CHR20_B38 + ONLY_IN_INPUT, file.getNoSnps());

    FileUtil.deleteDirectory("test-data/tmp");
}