org.apache.commons.compress.archivers.ArchiveOutputStream Java Examples
The following examples show how to use
org.apache.commons.compress.archivers.ArchiveOutputStream.
Each example notes the project and source file it was taken from.
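All of the examples share the same basic lifecycle: obtain an ArchiveOutputStream (directly or through ArchiveStreamFactory), create and put an entry, copy the entry's payload, close the entry, and finish the archive before closing the stream. The minimal sketch below illustrates that pattern for a single file written into a zip archive; it is not taken from any of the projects that follow, and the class name and file arguments are placeholders.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.utils.IOUtils;

public class ArchiveOutputStreamSketch {

    // Minimal sketch: add a single file to a new zip archive.
    public static void zipSingleFile(File input, File zipFile) throws IOException, ArchiveException {
        try (OutputStream out = new FileOutputStream(zipFile);
             ArchiveOutputStream archive = new ArchiveStreamFactory()
                     .createArchiveOutputStream(ArchiveStreamFactory.ZIP, out)) {
            // Entry header information (name, size, ...) derived from the input file.
            ArchiveEntry entry = archive.createArchiveEntry(input, input.getName());
            archive.putArchiveEntry(entry);
            // Copy the entry's payload into the archive.
            try (InputStream in = new FileInputStream(input)) {
                IOUtils.copy(in, archive);
            }
            archive.closeArchiveEntry();
            // Write the archive trailer before the stream is closed.
            archive.finish();
        }
    }
}

A tar.gz archive follows the same pattern, with a TarArchiveOutputStream wrapped around a GzipCompressorOutputStream, as several of the examples below show.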
Example #1
Source File: DefaultArchiveExtractorTest.java From flow with Apache License 2.0
@Test(expected = ArchiveExtractionException.class)
public void extractTarAsZip_ArchiveExtractionExceptionIsThrown()
        throws IOException, ArchiveExtractionException {
    File archiveFile = new File(baseDir, "archive.zip");
    archiveFile.createNewFile();
    Path tempArchive = archiveFile.toPath();

    try (OutputStream fo = Files.newOutputStream(tempArchive);
            OutputStream gzo = new GzipCompressorOutputStream(fo);
            ArchiveOutputStream o = new TarArchiveOutputStream(gzo)) {
        o.putArchiveEntry(o.createArchiveEntry(new File(ROOT_FILE), ROOT_FILE));
        o.closeArchiveEntry();
    }

    new DefaultArchiveExtractor().extract(archiveFile, targetDir);
}
Example #2
Source File: ZipEdgeArchiveBuilder.java From datacollector with Apache License 2.0
protected void addArchiveEntry(
        ArchiveOutputStream archiveOutput,
        Object fileContent,
        String pipelineId,
        String fileName
) throws IOException {
    File pipelineFile = File.createTempFile(pipelineId, fileName);
    FileOutputStream pipelineOutputStream = new FileOutputStream(pipelineFile);
    ObjectMapperFactory.get().writeValue(pipelineOutputStream, fileContent);
    pipelineOutputStream.flush();
    pipelineOutputStream.close();
    ZipArchiveEntry archiveEntry = new ZipArchiveEntry(
        pipelineFile,
        DATA_PIPELINES_FOLDER + pipelineId + "/" + fileName
    );
    archiveEntry.setSize(pipelineFile.length());
    archiveOutput.putArchiveEntry(archiveEntry);
    IOUtils.copy(new FileInputStream(pipelineFile), archiveOutput);
    archiveOutput.closeArchiveEntry();
}
Example #3
Source File: TarEdgeArchiveBuilder.java From datacollector with Apache License 2.0
protected void addArchiveEntry(
        ArchiveOutputStream archiveOutput,
        Object fileContent,
        String pipelineId,
        String fileName
) throws IOException {
    File pipelineFile = File.createTempFile(pipelineId, fileName);
    FileOutputStream pipelineOutputStream = new FileOutputStream(pipelineFile);
    ObjectMapperFactory.get().writeValue(pipelineOutputStream, fileContent);
    pipelineOutputStream.flush();
    pipelineOutputStream.close();
    TarArchiveEntry archiveEntry = new TarArchiveEntry(
        pipelineFile,
        DATA_PIPELINES_FOLDER + pipelineId + "/" + fileName
    );
    archiveEntry.setSize(pipelineFile.length());
    archiveOutput.putArchiveEntry(archiveEntry);
    IOUtils.copy(new FileInputStream(pipelineFile), archiveOutput);
    archiveOutput.closeArchiveEntry();
}
Example #4
Source File: GeneratorService.java From vertx-starter with Apache License 2.0
private void addFile(Path rootPath, Path filePath, ArchiveOutputStream stream) throws IOException {
    String relativePath = rootPath.relativize(filePath).toString();
    if (relativePath.length() == 0) return;
    String entryName = jarFileWorkAround(leadingDot(relativePath));
    ArchiveEntry entry = stream.createArchiveEntry(filePath.toFile(), entryName);
    if (EXECUTABLES.contains(entryName)) {
        if (entry instanceof ZipArchiveEntry) {
            ZipArchiveEntry zipArchiveEntry = (ZipArchiveEntry) entry;
            zipArchiveEntry.setUnixMode(0744);
        } else if (entry instanceof TarArchiveEntry) {
            TarArchiveEntry tarArchiveEntry = (TarArchiveEntry) entry;
            tarArchiveEntry.setMode(0100744);
        }
    }
    stream.putArchiveEntry(entry);
    if (filePath.toFile().isFile()) {
        try (InputStream i = Files.newInputStream(filePath)) {
            IOUtils.copy(i, stream);
        }
    }
    stream.closeArchiveEntry();
}
Example #5
Source File: AzkabanJobHelper.java From incubator-gobblin with Apache License 2.0
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
        value = "OBL_UNSATISFIED_OBLIGATION",
        justification = "Lombok construct of @Cleanup is handling this, but not detected by FindBugs")
private static void addFilesToZip(File zipFile, List<File> filesToAdd) throws IOException {
    try {
        @Cleanup
        OutputStream archiveStream = new FileOutputStream(zipFile);
        @Cleanup
        ArchiveOutputStream archive = new ArchiveStreamFactory()
                .createArchiveOutputStream(ArchiveStreamFactory.ZIP, archiveStream);

        for (File fileToAdd : filesToAdd) {
            ZipArchiveEntry entry = new ZipArchiveEntry(fileToAdd.getName());
            archive.putArchiveEntry(entry);

            @Cleanup
            BufferedInputStream input = new BufferedInputStream(new FileInputStream(fileToAdd));
            IOUtils.copy(input, archive);
            archive.closeArchiveEntry();
        }

        archive.finish();
    } catch (ArchiveException e) {
        throw new IOException("Issue with creating archive", e);
    }
}
Example #6
Source File: GeneratorService.java From vertx-starter with Apache License 2.0
public Buffer onProjectRequested(VertxProject project) throws Exception {
    ArchiveOutputStreamFactory factory;
    ArchiveFormat archiveFormat = project.getArchiveFormat();
    if (archiveFormat == ArchiveFormat.TGZ) {
        factory = baos -> new TarArchiveOutputStream(new GzipCompressorOutputStream(baos));
    } else if (archiveFormat == ArchiveFormat.ZIP) {
        factory = baos -> new ZipArchiveOutputStream(baos);
    } else {
        throw new IllegalArgumentException("Unsupported archive format: " + archiveFormat.getFileExtension());
    }
    try (TempDir tempDir = TempDir.create();
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            ArchiveOutputStream out = factory.create(baos)) {
        createProject(project, tempDir);
        generateArchive(tempDir, out);
        out.finish();
        out.close();
        return Buffer.buffer(baos.toByteArray());
    }
}
Example #7
Source File: RemoteApiBasedDockerProvider.java From docker-maven-plugin with Apache License 2.0
private static void addToTar(ArchiveOutputStream tar, File file, String fileNameAndPath) throws IOException {
    if (!file.exists() || !file.canRead()) {
        throw new FileNotFoundException(
                String.format("Cannot read file %s. Are you sure it exists?", file.getAbsolutePath()));
    }
    if (file.isDirectory()) {
        for (File fileInDirectory : file.listFiles()) {
            if (!fileNameAndPath.endsWith("/")) {
                fileNameAndPath = fileNameAndPath + "/";
            }
            addToTar(tar, fileInDirectory, fileNameAndPath + fileInDirectory.getName());
        }
    } else {
        ArchiveEntry entry = tar.createArchiveEntry(file, fileNameAndPath);
        tar.putArchiveEntry(entry);
        try (FileInputStream fis = new FileInputStream(file)) {
            IOUtils.copy(fis, tar);
        }
        tar.closeArchiveEntry();
    }
}
Example #8
Source File: CompressExample.java From spring-boot with Apache License 2.0
public static void makeOnlyZip() throws IOException, ArchiveException {
    File f1 = new File("D:/compresstest.txt");
    File f2 = new File("D:/test1.xml");

    final OutputStream out = new FileOutputStream("D:/中文名字.zip");
    ArchiveOutputStream os = new ArchiveStreamFactory()
            .createArchiveOutputStream(ArchiveStreamFactory.ZIP, out);

    os.putArchiveEntry(new ZipArchiveEntry(f1.getName()));
    IOUtils.copy(new FileInputStream(f1), os);
    os.closeArchiveEntry();

    os.putArchiveEntry(new ZipArchiveEntry(f2.getName()));
    IOUtils.copy(new FileInputStream(f2), os);
    os.closeArchiveEntry();

    os.close();
}
Example #9
Source File: LogUtils.java From konduit-serving with Apache License 2.0
public static File getZippedLogs() throws ArchiveException, IOException {
    File zippedFile = new File(DirectoryFetcher.getEndpointLogsDir(), "logs.zip");
    try (BufferedOutputStream archiveStream = new BufferedOutputStream(new FileOutputStream(zippedFile))) {
        try (ArchiveOutputStream archive = new ArchiveStreamFactory()
                .createArchiveOutputStream(ArchiveStreamFactory.ZIP, archiveStream)) {
            File logsFile = getEndpointLogsFile();
            if (logsFile != null) {
                ZipArchiveEntry entry = new ZipArchiveEntry(logsFile.getName());
                archive.putArchiveEntry(entry);
                try (BufferedInputStream input = new BufferedInputStream(new FileInputStream(logsFile))) {
                    IOUtils.copy(input, archive);
                    archive.closeArchiveEntry();
                    archive.finish();
                }
            } else {
                throw new FileNotFoundException("No logs file found!");
            }
        }
    }
    return zippedFile;
}
Example #10
Source File: ZipTest.java From document-management-system with GNU General Public License v2.0
public void testApache() throws IOException, ArchiveException {
    log.debug("testApache()");
    File zip = File.createTempFile("apache_", ".zip");

    // Create zip
    FileOutputStream fos = new FileOutputStream(zip);
    ArchiveOutputStream aos = new ArchiveStreamFactory().createArchiveOutputStream("zip", fos);
    aos.putArchiveEntry(new ZipArchiveEntry("coñeta"));
    aos.closeArchiveEntry();
    aos.close();

    // Read zip
    FileInputStream fis = new FileInputStream(zip);
    ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream("zip", fis);
    ZipArchiveEntry zae = (ZipArchiveEntry) ais.getNextEntry();
    assertEquals(zae.getName(), "coñeta");
    ais.close();
}
Example #11
Source File: SetsArchiver.java From cantor with BSD 3-Clause "New" or "Revised" License
public static void archive(final Sets sets, final String namespace, final Path destination, final int chunkSize)
        throws IOException {
    checkArchiveArguments(sets, namespace, destination);
    checkArgument(chunkSize <= MAX_CHUNK_SIZE, "chunk size must be <=" + MAX_CHUNK_SIZE);
    // get all sets for the namespace, any sets added after won't be archived
    final Collection<String> setNames = sets.sets(namespace);
    try (final ArchiveOutputStream archive = getArchiveOutputStream(destination)) {
        // archive each set one at a time
        for (final String set : setNames) {
            logger.info("archiving set {}.{}", namespace, set);
            int start = 0;
            Map<String, Long> entries = sets.get(namespace, set, start, chunkSize);
            while (!entries.isEmpty()) {
                final int end = start + entries.size();
                final String name = String.format("sets-%s-%s-%s-%s", namespace, set, start, end);
                // store chunks as tar archives so we can restore them in chunks too
                final SetsChunk chunk = SetsChunk.newBuilder().setSet(set).putAllEntries(entries).build();
                writeArchiveEntry(archive, name, chunk.toByteArray());
                logger.info("archived {} entries ({}-{}) into chunk '{}' for set {}.{}",
                        entries.size(), start, end, name, namespace, set);
                start = end;
                entries = sets.get(namespace, set, start, chunkSize);
            }
        }
    }
}
Example #12
Source File: ObjectsArchiver.java From cantor with BSD 3-Clause "New" or "Revised" License
public static void archive(final Objects objects, final String namespace, final Path destination, int chunkSize)
        throws IOException {
    checkArchiveArguments(objects, namespace, destination);
    checkArgument(chunkSize <= MAX_CHUNK_SIZE, "chunk size must be <=" + MAX_CHUNK_SIZE);
    try (final ArchiveOutputStream archive = getArchiveOutputStream(destination)) {
        // get objects to archive in chunks in case of large namespaces
        int start = 0;
        Collection<String> keys = objects.keys(namespace, start, chunkSize);
        while (!keys.isEmpty()) {
            final Map<String, byte[]> chunk = objects.get(namespace, keys);
            final int end = start + chunk.size();
            final String name = String.format("objects-%s-%s-%s", namespace, start, end);
            // store chunks as tar archives so we can restore them in chunks too
            writeArchiveEntry(archive, name, getBytes(chunk));
            logger.info("archived {} objects ({}-{}) into chunk '{}'", chunk.size(), start, end, name);
            start = end;
            keys = objects.keys(namespace, start, chunkSize);
        }
    }
}
Example #13
Source File: ProjectGenerationController.java From initializr with Apache License 2.0
private <T extends ArchiveEntry> Path createArchive(ProjectGenerationResult result, String fileExtension,
        Function<OutputStream, ? extends ArchiveOutputStream> archiveOutputStream,
        BiFunction<File, String, T> archiveEntry, BiConsumer<T, Integer> setMode) throws IOException {
    Path archive = this.projectGenerationInvoker.createDistributionFile(result.getRootDirectory(),
            "." + fileExtension);
    String wrapperScript = getWrapperScript(result.getProjectDescription());
    try (ArchiveOutputStream output = archiveOutputStream.apply(Files.newOutputStream(archive))) {
        Files.walk(result.getRootDirectory()).filter((path) -> !result.getRootDirectory().equals(path))
                .forEach((path) -> {
                    try {
                        String entryName = getEntryName(result.getRootDirectory(), path);
                        T entry = archiveEntry.apply(path.toFile(), entryName);
                        setMode.accept(entry, getUnixMode(wrapperScript, entryName, path));
                        output.putArchiveEntry(entry);
                        if (!Files.isDirectory(path)) {
                            Files.copy(path, output);
                        }
                        output.closeArchiveEntry();
                    }
                    catch (IOException ex) {
                        throw new IllegalStateException(ex);
                    }
                });
    }
    return archive;
}
Example #14
Source File: DefaultArchiveExtractorTest.java From flow with Apache License 2.0
@Test
public void extractTarGz_contentsAreExtracted() throws IOException, ArchiveExtractionException {
    File archiveFile = new File(baseDir, "archive.tar.gz");
    archiveFile.createNewFile();
    Path tempArchive = archiveFile.toPath();

    try (OutputStream fo = Files.newOutputStream(tempArchive);
            OutputStream gzo = new GzipCompressorOutputStream(fo);
            ArchiveOutputStream o = new TarArchiveOutputStream(gzo)) {
        o.putArchiveEntry(o.createArchiveEntry(new File(ROOT_FILE), ROOT_FILE));
        o.closeArchiveEntry();
        o.putArchiveEntry(o.createArchiveEntry(new File(SUBFOLDER_FILE), SUBFOLDER_FILE));
        o.closeArchiveEntry();
    }

    new DefaultArchiveExtractor().extract(archiveFile, targetDir);

    Assert.assertTrue("Archive root.file was not extracted",
            new File(targetDir, ROOT_FILE).exists());
    Assert.assertTrue("Archive subfolder/folder.file was not extracted",
            new File(targetDir, SUBFOLDER_FILE).exists());
}
Example #15
Source File: CommonsArchiver.java From jarchivelib with Apache License 2.0
@Override
public File create(String archive, File destination, File... sources) throws IOException {
    IOUtils.requireDirectory(destination);

    File archiveFile = createNewArchiveFile(archive, getFilenameExtension(), destination);

    ArchiveOutputStream outputStream = null;
    try {
        outputStream = createArchiveOutputStream(archiveFile);
        writeToArchive(sources, outputStream);
        outputStream.flush();
    } finally {
        IOUtils.closeQuietly(outputStream);
    }

    return archiveFile;
}
Example #16
Source File: CompressionTools.java From aws-codepipeline-plugin-for-jenkins with Apache License 2.0
private static void compressArchive(
        final Path pathToCompress,
        final ArchiveOutputStream archiveOutputStream,
        final ArchiveEntryFactory archiveEntryFactory,
        final CompressionType compressionType,
        final BuildListener listener) throws IOException {
    final List<File> files = addFilesToCompress(pathToCompress, listener);

    LoggingHelper.log(listener, "Compressing directory '%s' as a '%s' archive",
            pathToCompress.toString(), compressionType.name());

    for (final File file : files) {
        final String newTarFileName = pathToCompress.relativize(file.toPath()).toString();
        final ArchiveEntry archiveEntry = archiveEntryFactory.create(file, newTarFileName);
        archiveOutputStream.putArchiveEntry(archiveEntry);

        try (final FileInputStream fileInputStream = new FileInputStream(file)) {
            IOUtils.copy(fileInputStream, archiveOutputStream);
        }

        archiveOutputStream.closeArchiveEntry();
    }
}
Example #17
Source File: CompressExtension.java From jphp with Apache License 2.0
@Override
public void onRegister(CompileScope scope) {
    // register classes ...
    registerWrapperClass(scope, ArchiveEntry.class, PArchiveEntry.class);
    registerWrapperClass(scope, TarArchiveEntry.class, PTarArchiveEntry.class);
    registerWrapperClass(scope, ZipArchiveEntry.class, PZipArchiveEntry.class);

    registerWrapperClass(scope, ArchiveInputStream.class, PArchiveInput.class);
    registerWrapperClass(scope, TarArchiveInputStream.class, PTarArchiveInput.class);
    registerWrapperClass(scope, ZipArchiveInputStream.class, PZipArchiveInput.class);

    registerWrapperClass(scope, ArchiveOutputStream.class, PArchiveOutput.class);
    registerWrapperClass(scope, TarArchiveOutputStream.class, PTarArchiveOutput.class);
    registerWrapperClass(scope, ZipArchiveOutputStream.class, PZipArchiveOutput.class);

    registerClass(scope, PGzipOutputStream.class);
    registerClass(scope, PGzipInputStream.class);
    registerClass(scope, PBzip2OutputStream.class);
    registerClass(scope, PBZip2InputStream.class);
    registerClass(scope, PLz4OutputStream.class);
    registerClass(scope, PLz4InputStream.class);

    registerClass(scope, PArchive.class);
    registerClass(scope, PTarArchive.class);
    registerClass(scope, PZipArchive.class);
}
Example #18
Source File: TarContainerPacker.java From hadoop-ozone with Apache License 2.0
/**
 * Given a containerData include all the required container data/metadata
 * in a tar file.
 *
 * @param container Container to archive (data + metadata).
 * @param output Destination tar file/stream.
 */
@Override
public void pack(Container<KeyValueContainerData> container, OutputStream output) throws IOException {
    KeyValueContainerData containerData = container.getContainerData();

    try (OutputStream compressed = compress(output);
            ArchiveOutputStream archiveOutput = tar(compressed)) {

        includePath(containerData.getDbFile().toPath(), DB_DIR_NAME, archiveOutput);

        includePath(Paths.get(containerData.getChunksPath()), CHUNKS_DIR_NAME, archiveOutput);

        includeFile(container.getContainerFile(), CONTAINER_FILE_NAME, archiveOutput);
    } catch (CompressorException e) {
        throw new IOException(
                "Can't compress the container: " + containerData.getContainerID(), e);
    }
}
Example #19
Source File: TraceImportOperationTest.java From tracecompass with Eclipse Public License 2.0
private static void createArchive(String sourcePath, File archive)
        throws FileNotFoundException, IOException, ArchiveException {
    try (OutputStream out = new FileOutputStream(archive);
            ArchiveOutputStream archiveOutputStream = new ArchiveStreamFactory()
                    .createArchiveOutputStream(ArchiveStreamFactory.ZIP, out)) {
        for (File file : FileUtils.listFiles(new File(sourcePath), null, true)) {
            String name = file.getAbsolutePath().substring(sourcePath.length());
            archiveOutputStream.putArchiveEntry(new ZipArchiveEntry(name));
            try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(file))) {
                IOUtils.copy(in, archiveOutputStream);
            }
            archiveOutputStream.closeArchiveEntry();
        }
        archiveOutputStream.finish();
    }
}
Example #20
Source File: TarContainerPacker.java From hadoop-ozone with Apache License 2.0
private void includePath(Path dir, String subdir, ArchiveOutputStream archiveOutput) throws IOException {
    try (Stream<Path> dirEntries = Files.list(dir)) {
        for (Path path : dirEntries.collect(toList())) {
            String entryName = subdir + "/" + path.getFileName();
            includeFile(path.toFile(), entryName, archiveOutput);
        }
    }
}
Example #21
Source File: ZipUtils.java From allure-teamcity with Apache License 2.0
public static void zip(Path archive, Path base, List<Path> files) throws IOException {
    try (ArchiveOutputStream output = new ZipArchiveOutputStream(new FileOutputStream(archive.toFile()))) {
        for (Path file : files) {
            String entryName = base.toAbsolutePath().relativize(file).toString();
            ArchiveEntry entry = output.createArchiveEntry(file.toFile(), entryName);
            output.putArchiveEntry(entry);
            try (InputStream i = Files.newInputStream(file)) {
                IOUtils.copy(i, output);
            }
            output.closeArchiveEntry();
        }
        output.finish();
    }
}
Example #22
Source File: SimpleTest.java From allure-teamcity with Apache License 2.0
private static void zipViaApacheCompress(Path archive, Path base, List<Path> files) throws IOException {
    try (ArchiveOutputStream output = new ZipArchiveOutputStream(new FileOutputStream(archive.toFile()))) {
        for (Path file : files) {
            String entryName = base.toAbsolutePath().relativize(file).toString();
            ArchiveEntry entry = output.createArchiveEntry(file.toFile(), entryName);
            output.putArchiveEntry(entry);
            try (InputStream i = Files.newInputStream(file)) {
                IOUtils.copy(i, output);
            }
            output.closeArchiveEntry();
        }
        output.finish();
    }
}
Example #23
Source File: ZipFiles.java From ankush with GNU Lesser General Public License v3.0
/**
 * Zip file.
 *
 * @param filePath the file path
 * @return the string
 */
public String zipFile(String filePath) {
    try {
        /* Create Output Stream that will have final zip files */
        OutputStream zipOutput = new FileOutputStream(new File(filePath + ".zip"));
        /*
         * Create Archive Output Stream that attaches File Output Stream and
         * specifies type of compression
         */
        ArchiveOutputStream logicalZip = new ArchiveStreamFactory()
                .createArchiveOutputStream(ArchiveStreamFactory.ZIP, zipOutput);
        /* Create Archive entry - write header information */
        logicalZip.putArchiveEntry(new ZipArchiveEntry(FilenameUtils.getName(filePath)));
        /* Copy input file */
        IOUtils.copy(new FileInputStream(new File(filePath)), logicalZip);
        /* Close Archive entry, write trailer information */
        logicalZip.closeArchiveEntry();
        /* Finish addition of entries to the file */
        logicalZip.finish();
        /* Close output stream, our files are zipped */
        zipOutput.close();
    } catch (Exception e) {
        System.err.println(e.getMessage());
        return null;
    }
    return filePath + ".zip";
}
Example #24
Source File: ExecMojo.java From tomee with Apache License 2.0
private void addClasses(final List<String> classes, final ArchiveOutputStream os) throws IOException {
    if (classes != null) { // user classes
        for (final String className : classes) {
            addToJar(os, load(className));
        }
    }
}
Example #25
Source File: CommonsArchiver.java From jarchivelib with Apache License 2.0
/**
 * Recursion entry point for {@link #writeToArchive(File, File[], ArchiveOutputStream)}.
 * <br>
 * Recursively writes all given source {@link File}s into the given {@link ArchiveOutputStream}.
 *
 * @param sources the files to write in to the archive
 * @param archive the archive to write into
 * @throws IOException when an I/O error occurs
 */
protected void writeToArchive(File[] sources, ArchiveOutputStream archive) throws IOException {
    for (File source : sources) {
        if (!source.exists()) {
            throw new FileNotFoundException(source.getPath());
        } else if (!source.canRead()) {
            throw new FileNotFoundException(source.getPath() + " (Permission denied)");
        }
        writeToArchive(source.getParentFile(), new File[]{ source }, archive);
    }
}
Example #26
Source File: CommonsArchiver.java From jarchivelib with Apache License 2.0
/**
 * Recursively writes all given source {@link File}s into the given {@link ArchiveOutputStream}. The paths of the
 * sources in the archive will be relative to the given parent {@code File}.
 *
 * @param parent the parent file node for computing a relative path (see {@link IOUtils#relativePath(File, File)})
 * @param sources the files to write in to the archive
 * @param archive the archive to write into
 * @throws IOException when an I/O error occurs
 */
protected void writeToArchive(File parent, File[] sources, ArchiveOutputStream archive) throws IOException {
    for (File source : sources) {
        String relativePath = IOUtils.relativePath(parent, source);

        createArchiveEntry(source, relativePath, archive);

        if (source.isDirectory()) {
            writeToArchive(parent, source.listFiles(), archive);
        }
    }
}
Example #27
Source File: AbstractBaseArchiver.java From cantor with BSD 3-Clause "New" or "Revised" License
static void writeArchiveEntry(final ArchiveOutputStream archive,
                              final String name,
                              final byte[] bytes) throws IOException {
    final TarArchiveEntry entry = new TarArchiveEntry(name);
    entry.setSize(bytes.length);
    archive.putArchiveEntry(entry);
    archive.write(bytes);
    archive.closeArchiveEntry();
}
Example #28
Source File: TarContainerPacker.java From hadoop-ozone with Apache License 2.0
static void includeFile(File file, String entryName, ArchiveOutputStream archiveOutput) throws IOException {
    ArchiveEntry entry = archiveOutput.createArchiveEntry(file, entryName);
    archiveOutput.putArchiveEntry(entry);
    try (InputStream input = new FileInputStream(file)) {
        IOUtils.copy(input, archiveOutput);
    }
    archiveOutput.closeArchiveEntry();
}
Example #29
Source File: TestTarContainerPacker.java From hadoop-ozone with Apache License 2.0
private File packContainerWithSingleFile(File file, String entryName) throws Exception {
    File targetFile = TEMP_DIR.resolve("container.tar.gz").toFile();
    try (FileOutputStream output = new FileOutputStream(targetFile);
            CompressorOutputStream gzipped = new CompressorStreamFactory()
                    .createCompressorOutputStream(GZIP, output);
            ArchiveOutputStream archive = new TarArchiveOutputStream(gzipped)) {
        TarContainerPacker.includeFile(file, entryName, archive);
    }
    return targetFile;
}
Example #30
Source File: OMDBCheckpointServlet.java From hadoop-ozone with Apache License 2.0
/**
 * Write OM DB Checkpoint to an output stream as a compressed file (tgz).
 *
 * @param checkpoint checkpoint file
 * @param destination destination output stream.
 * @throws IOException
 */
public static void writeOmDBCheckpointToStream(DBCheckpoint checkpoint, OutputStream destination)
        throws IOException {
    try (CompressorOutputStream gzippedOut = new CompressorStreamFactory()
            .createCompressorOutputStream(CompressorStreamFactory.GZIP, destination)) {

        try (ArchiveOutputStream archiveOutputStream = new TarArchiveOutputStream(gzippedOut)) {

            Path checkpointPath = checkpoint.getCheckpointLocation();
            try (Stream<Path> files = Files.list(checkpointPath)) {
                for (Path path : files.collect(Collectors.toList())) {
                    if (path != null) {
                        Path fileName = path.getFileName();
                        if (fileName != null) {
                            includeFile(path.toFile(), fileName.toString(), archiveOutputStream);
                        }
                    }
                }
            }
        }
    } catch (CompressorException e) {
        throw new IOException(
                "Can't compress the checkpoint: " + checkpoint.getCheckpointLocation(), e);
    }
}