org.apache.commons.compress.compressors.CompressorStreamFactory Java Examples
The following examples show how to use
org.apache.commons.compress.compressors.CompressorStreamFactory.
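Before the project-specific examples, here is a minimal, self-contained sketch of the two core calls: createCompressorOutputStream to wrap an output stream with a named compressor, and the auto-detecting createCompressorInputStream overload to read the data back. The class name RoundTrip and the sample payload are illustrative only.

import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorStreamFactory;

public class RoundTrip {
    public static void main(String[] args) throws IOException, CompressorException {
        CompressorStreamFactory factory = new CompressorStreamFactory();

        // Compress: wrap any OutputStream with a compressor identified by name (here GZIP).
        ByteArrayOutputStream compressed = new ByteArrayOutputStream();
        try (OutputStream out = factory.createCompressorOutputStream(
                CompressorStreamFactory.GZIP, compressed)) {
            out.write("hello".getBytes(StandardCharsets.UTF_8));
        }

        // Decompress: the no-argument-name overload auto-detects the format, but it
        // requires a mark-supported stream such as BufferedInputStream.
        InputStream in = new BufferedInputStream(
                new ByteArrayInputStream(compressed.toByteArray()));
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                factory.createCompressorInputStream(in), StandardCharsets.UTF_8))) {
            System.out.println(reader.readLine()); // prints "hello"
        }
    }
}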
Example #1
Source File: TestCompressionInputBuilder.java From datacollector with Apache License 2.0
private void testCompressedFile(String compressionType) throws Exception {
    //write data into the stream using the specified compression
    ByteArrayOutputStream bOut = new ByteArrayOutputStream();
    CompressorOutputStream cOut = new CompressorStreamFactory().createCompressorOutputStream(compressionType, bOut);
    cOut.write("StreamSets".getBytes());
    cOut.close();

    //create compression input
    CompressionDataParser.CompressionInputBuilder compressionInputBuilder =
        new CompressionDataParser.CompressionInputBuilder(Compression.COMPRESSED_FILE, null,
            new ByteArrayInputStream(bOut.toByteArray()), "0");
    CompressionDataParser.CompressionInput input = compressionInputBuilder.build();

    //verify
    Assert.assertNotNull(input);
    Assert.assertEquals("myFile::4567", input.wrapOffset("myFile::4567"));
    Assert.assertEquals("myFile::4567", input.wrapRecordId("myFile::4567"));

    InputStream myFile = input.getNextInputStream();
    BufferedReader reader = new BufferedReader(new InputStreamReader(myFile));
    Assert.assertEquals("StreamSets", reader.readLine());
}
Example #2
Source File: CompressingContentStoreTest.java From alfresco-simple-content-stores with Apache License 2.0
@Test
public void simpleUnconfiguredDefaultCompression() throws Exception {
    final DictionaryService dictionaryService = EasyMock.mock(DictionaryService.class);

    final CompressingContentStore compressingContentStore = new CompressingContentStore();
    compressingContentStore.setNamespaceService(PREFIX_RESOLVER);
    compressingContentStore.setDictionaryService(dictionaryService);

    final FileContentStore fileContentStore = new FileContentStore();
    fileContentStore.setRootDirectory(backingStoreFolder.getAbsolutePath());
    fileContentStore.setProtocol("store");
    compressingContentStore.setBackingStore(fileContentStore);

    final FileContentStore temporaryContentStore = new FileContentStore();
    temporaryContentStore.setRootDirectory(temporaryStoreFolder.getAbsolutePath());
    temporaryContentStore.setProtocol("store");
    compressingContentStore.setTemporaryStore(temporaryContentStore);

    fileContentStore.afterPropertiesSet();
    temporaryContentStore.afterPropertiesSet();
    compressingContentStore.afterPropertiesSet();

    testCompressableMimetype(compressingContentStore, fileContentStore, MimetypeMap.MIMETYPE_TEXT_PLAIN,
            CompressorStreamFactory.GZIP);
}
Example #3
Source File: SPARQLQueryBuilderGenericTest.java From inception with Apache License 2.0
@SuppressWarnings("resource") private void importData(Repository aRepo, String aUrl) throws IOException { try (InputStream aIS = openAsStream(aUrl)) { InputStream is = new BufferedInputStream(aIS); try { // Stream is expected to be closed by caller of importData is = new CompressorStreamFactory().createCompressorInputStream(is); } catch (CompressorException e) { // Probably not compressed then or unknown format - just try as is. } // Detect the file format RDFFormat format = Rio.getParserFormatForFileName(aUrl).orElse(RDFFormat.RDFXML); try (RepositoryConnection conn = aRepo.getConnection()) { // If the RDF file contains relative URLs, then they probably start with a hash. // To avoid having two hashes here, we drop the hash from the base prefix configured // by the user. String prefix = StringUtils.removeEnd(kb.getBasePrefix(), "#"); conn.add(is, prefix, format); } } }
Example #4
Source File: NpmPackageParser.java From nexus-public with Eclipse Public License 1.0
/**
 * Parses the package.json in the supplied tar.gz if present and extractable. In all other situations, an empty map
 * will be returned indicating the absence of (or inability to extract) a valid package.json file and its contents.
 */
public Map<String, Object> parsePackageJson(final Supplier<InputStream> supplier) {
    try (InputStream is = new BufferedInputStream(supplier.get())) {
        final CompressorStreamFactory compressorStreamFactory = new CompressorStreamFactory();
        try (InputStream cis = compressorStreamFactory.createCompressorInputStream(GZIP, is)) {
            final ArchiveStreamFactory archiveFactory = new ArchiveStreamFactory();
            try (ArchiveInputStream ais = archiveFactory.createArchiveInputStream(TAR, cis)) {
                return parsePackageJsonInternal(ais);
            }
        }
    }
    catch (Exception e) {
        log.debug("Error occurred while processing package.json, returning empty map to continue", e);
        return emptyMap();
    }
}
Example #5
Source File: HalyardExportTest.java From Halyard with Apache License 2.0
private static int getTriplesCount(String uri, String compression, RDFFormat format) throws Exception {
    InputStream in = FileSystem.get(URI.create(uri), HBaseServerTestInstance.getInstanceConfig()).open(new Path(uri));
    try {
        if (compression != null) {
            in = new CompressorStreamFactory().createCompressorInputStream(compression, in);
        }
        RDFParser parser = Rio.createParser(format);
        final AtomicInteger i = new AtomicInteger();
        parser.setRDFHandler(new AbstractRDFHandler() {
            @Override
            public void handleStatement(Statement st) throws RDFHandlerException {
                i.incrementAndGet();
            }
        });
        parser.parse(in, uri);
        return i.get();
    } finally {
        in.close();
    }
}
Example #6
Source File: RPackagesBuilder.java From nexus-repository-r with Eclipse Public License 1.0
/**
 * Using collected package details builds PACKAGES.gz file and returns it as byte array.
 * <p>
 * Call this method ONLY after all information about packages is appended to packageInformation map.
 *
 * @return PACKAGES.gz as byte array.
 */
public byte[] buildPackagesGz() throws IOException {
    try (ByteArrayOutputStream os = new ByteArrayOutputStream()) {
        CompressorStreamFactory compressorStreamFactory = new CompressorStreamFactory();
        try (CompressorOutputStream cos = compressorStreamFactory.createCompressorOutputStream(GZIP, os);
             OutputStreamWriter writer = new OutputStreamWriter(cos, UTF_8)) {
            for (Entry<String, Map<String, String>> eachPackage : packageInformation.entrySet()) {
                writePackageInfo(writer, eachPackage.getValue());
            }
        }
        return os.toByteArray();
    }
    catch (CompressorException e) {
        throw new IOException("Error compressing metadata", e);
    }
}
Example #7
Source File: CompressingContentStoreTest.java From alfresco-simple-content-stores with Apache License 2.0
@Test
public void customCompression() throws Exception {
    final DictionaryService dictionaryService = EasyMock.mock(DictionaryService.class);

    final CompressingContentStore compressingContentStore = new CompressingContentStore();
    compressingContentStore.setNamespaceService(PREFIX_RESOLVER);
    compressingContentStore.setDictionaryService(dictionaryService);
    compressingContentStore.setCompressionType(CompressorStreamFactory.BZIP2);

    final FileContentStore fileContentStore = new FileContentStore();
    fileContentStore.setRootDirectory(backingStoreFolder.getAbsolutePath());
    fileContentStore.setProtocol("store");
    compressingContentStore.setBackingStore(fileContentStore);

    final FileContentStore temporaryContentStore = new FileContentStore();
    temporaryContentStore.setRootDirectory(temporaryStoreFolder.getAbsolutePath());
    temporaryContentStore.setProtocol("store");
    compressingContentStore.setTemporaryStore(temporaryContentStore);

    fileContentStore.afterPropertiesSet();
    temporaryContentStore.afterPropertiesSet();
    compressingContentStore.afterPropertiesSet();

    testCompressableMimetype(compressingContentStore, fileContentStore, MimetypeMap.MIMETYPE_TEXT_PLAIN,
            CompressorStreamFactory.BZIP2);
}
Example #8
Source File: Archives.java From wildfly-maven-plugin with GNU Lesser General Public License v2.1
private static Path getArchive(final Path path) throws IOException {
    final Path result;
    // Get the extension
    final String fileName = path.getFileName().toString();
    final String loweredFileName = fileName.toLowerCase(Locale.ENGLISH);
    if (loweredFileName.endsWith(".gz")) {
        String tempFileName = fileName.substring(0, loweredFileName.indexOf(".gz"));
        final int index = tempFileName.lastIndexOf('.');
        if (index > 0) {
            result = Files.createTempFile(tempFileName.substring(0, index), tempFileName.substring(index));
        } else {
            result = Files.createTempFile(tempFileName.substring(0, index), "");
        }
        try (CompressorInputStream in = new CompressorStreamFactory()
                .createCompressorInputStream(new BufferedInputStream(Files.newInputStream(path)))) {
            Files.copy(in, result, StandardCopyOption.REPLACE_EXISTING);
        } catch (CompressorException e) {
            throw new IOException(e);
        }
    } else {
        result = path;
    }
    return result;
}
Example #9
Source File: HalyardExportTest.java From Halyard with Apache License 2.0
public static int getLinesCount(String uri, String compression) throws Exception {
    InputStream in = FileSystem.get(URI.create(uri), HBaseServerTestInstance.getInstanceConfig()).open(new Path(uri));
    try {
        if (compression != null) {
            in = new CompressorStreamFactory().createCompressorInputStream(compression, in);
        }
        BufferedReader br = new BufferedReader(new InputStreamReader(in));
        int i = 0;
        while (br.readLine() != null) {
            i++;
        }
        return i;
    } finally {
        in.close();
    }
}
Example #10
Source File: WebDriverHandlerImpl.java From IridiumApplicationTesting with MIT License
private String extractZipDriver(
        @NotNull final String driver,
        @NotNull final String name,
        @NotNull final List<File> tempFiles) throws IOException, CompressorException {
    checkNotNull(driver);
    checkArgument(StringUtils.isNotBlank(name));

    final InputStream driverURL = getClass().getResourceAsStream(driver);

    /* The driver may not be bundled */
    if (driverURL == null) {
        throw new DriverException("The driver " + driver + " resource does not exist.");
    }

    final CompressorInputStream input = new CompressorStreamFactory()
            .createCompressorInputStream(CompressorStreamFactory.GZIP, driverURL);

    final TarArchiveInputStream tarInput = new TarArchiveInputStream(input);

    /*
        Sometimes tar files contain a "." directory, which we want to ignore.
        So loop until we get a file that isn't in a directory.
     */
    TarArchiveEntry tarArchiveEntry = tarInput.getNextTarEntry();
    while (tarArchiveEntry.getName().contains("/")) {
        tarArchiveEntry = tarInput.getNextTarEntry();
    }

    return copyDriver(tarInput, name, tempFiles);
}
Example #11
Source File: InputFile.java From kafka-connect-spooldir with Apache License 2.0
public InputStream openStream(boolean buffered) throws IOException {
    if (null != this.inputStream) {
        throw new IOException(
            String.format("File %s is already open", this.inputFile)
        );
    }

    final String extension = Files.getFileExtension(inputFile.getName());
    log.trace("openStream() - fileName = '{}' extension = '{}'", inputFile, extension);
    this.inputStream = new FileInputStream(this.inputFile);

    if (buffered) {
        log.trace(
            "openStream() - Wrapping '{}' in a BufferedInputStream with bufferSize = {}",
            this.inputFile,
            this.bufferSize
        );
        this.inputStream = new BufferedInputStream(this.inputStream, this.bufferSize);
    }

    if (SUPPORTED_COMPRESSION_TYPES.containsKey(extension)) {
        final String compressor = SUPPORTED_COMPRESSION_TYPES.get(extension);
        log.info("Decompressing {} as {}", inputFile, compressor);
        final CompressorStreamFactory compressorStreamFactory = new CompressorStreamFactory();
        try {
            this.inputStream = compressorStreamFactory.createCompressorInputStream(compressor, this.inputStream);
        } catch (CompressorException e) {
            throw new IOException("Exception thrown while creating compressor stream " + compressor, e);
        }
    }

    log.info("Creating processing flag {}", this.processingFlag);
    Files.touch(this.processingFlag);

    return inputStream;
}
Example #12
Source File: CompressionDataParser.java From datacollector with Apache License 2.0
public CompressorInput(InputStream inputStream) throws IOException {
    try {
        this.inputStream = new CompressorStreamFactory(DECOMPRESS_UNTIL_EOF)
            .createCompressorInputStream(new BufferedInputStream(inputStream));
    } catch (CompressorException e) {
        throw new IOException(e);
    }
}
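The boolean passed to the CompressorStreamFactory constructor here (DECOMPRESS_UNTIL_EOF is presumably a constant set to true in the surrounding class) corresponds to the factory's decompressUntilEOF option: when enabled, the returned stream keeps decompressing across concatenated compressed members until the underlying input is exhausted, rather than stopping after the first one. Example #22 below exercises exactly this behavior by feeding two concatenated compressed payloads through a SequenceInputStream.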
Example #13
Source File: HalyardSummary.java From Halyard with Apache License 2.0
private void setupOutput() throws IOException {
    String targetUrl = conf.get(TARGET);
    if (splitOutput || out == null) {
        if (out != null) {
            writer.endRDF();
            out.close();
        }
        targetUrl = MessageFormat.format(targetUrl, outputCounter++);
        fsOut = FileSystem.get(URI.create(targetUrl), conf).create(new Path(targetUrl));
        out = fsOut;
        try {
            if (targetUrl.endsWith(".bz2")) {
                out = new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.BZIP2, out);
                targetUrl = targetUrl.substring(0, targetUrl.length() - 4);
            } else if (targetUrl.endsWith(".gz")) {
                out = new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.GZIP, out);
                targetUrl = targetUrl.substring(0, targetUrl.length() - 3);
            }
        } catch (CompressorException ce) {
            throw new IOException(ce);
        }
        Optional<RDFFormat> form = Rio.getWriterFormatForFileName(targetUrl);
        if (!form.isPresent()) {
            throw new IOException("Unsupported target file format extension: " + targetUrl);
        }
        writer = Rio.createWriter(form.get(), out);
        writer.handleNamespace("", NAMESPACE);
        writer.handleNamespace(XMLSchema.PREFIX, XMLSchema.NAMESPACE);
        writer.handleNamespace(RDF.PREFIX, RDF.NAMESPACE);
        writer.handleNamespace(RDFS.PREFIX, RDFS.NAMESPACE);
        try (CloseableIteration<? extends Namespace, SailException> iter = sail.getNamespaces()) {
            while (iter.hasNext()) {
                Namespace ns = iter.next();
                writer.handleNamespace(ns.getPrefix(), ns.getName());
            }
        }
        writer.startRDF();
    }
}
Example #14
Source File: ModelExtractor.java From tensorflow with Apache License 2.0
/**
 * Detect the Archive and the Compressor from the file extension
 *
 * @param fileName File name with extension
 * @return Returns a tuple of the detected (Archive, Compressor). Null stands for not available archive or compressor.
 *         The (null, null) response stands for no Archive or Compressor discovered.
 */
private String[] detectArchiveAndCompressor(String fileName) {

    String normalizedFileName = fileName.trim().toLowerCase();

    if (normalizedFileName.endsWith(".tar.gz")
            || normalizedFileName.endsWith(".tgz")
            || normalizedFileName.endsWith(".taz")) {
        return new String[] { ArchiveStreamFactory.TAR, CompressorStreamFactory.GZIP };
    }
    else if (normalizedFileName.endsWith(".tar.bz2")
            || normalizedFileName.endsWith(".tbz2")
            || normalizedFileName.endsWith(".tbz")) {
        return new String[] { ArchiveStreamFactory.TAR, CompressorStreamFactory.BZIP2 };
    }
    else if (normalizedFileName.endsWith(".cpgz")) {
        return new String[] { ArchiveStreamFactory.CPIO, CompressorStreamFactory.GZIP };
    }
    else if (hasArchive(normalizedFileName)) {
        return new String[] { findArchive(normalizedFileName).get(), null };
    }
    else if (hasCompressor(normalizedFileName)) {
        return new String[] { null, findCompressor(normalizedFileName).get() };
    }
    else if (normalizedFileName.endsWith(".gzip")) {
        return new String[] { null, CompressorStreamFactory.GZIP };
    }
    else if (normalizedFileName.endsWith(".bz2") || normalizedFileName.endsWith(".bz")) {
        return new String[] { null, CompressorStreamFactory.BZIP2 };
    }

    // No archived/compressed
    return new String[] { null, null };
}
Example #15
Source File: StreamUtils.java From lucene-solr with Apache License 2.0
private InputStream inputStream(InputStream in) throws IOException {
    try {
        return csfType == null ? in : new CompressorStreamFactory().createCompressorInputStream(csfType, in);
    } catch (CompressorException e) {
        throw new IOException(e.getMessage(), e);
    }
}
Example #16
Source File: StreamUtils.java From lucene-solr with Apache License 2.0
private OutputStream outputStream(OutputStream os) throws IOException {
    try {
        return csfType == null ? os : new CompressorStreamFactory().createCompressorOutputStream(csfType, os);
    } catch (CompressorException e) {
        throw new IOException(e.getMessage(), e);
    }
}
Example #17
Source File: WriteLineDocTaskTest.java From lucene-solr with Apache License 2.0
private void doReadTest(Path file, Type fileType, String expTitle, String expDate, String expBody) throws Exception {
    InputStream in = Files.newInputStream(file);
    switch (fileType) {
        case BZIP2:
            in = csFactory.createCompressorInputStream(CompressorStreamFactory.BZIP2, in);
            break;
        case GZIP:
            in = csFactory.createCompressorInputStream(CompressorStreamFactory.GZIP, in);
            break;
        case PLAIN:
            break; // nothing to do
        default:
            assertFalse("Unknown file type!", true); // fail, should not happen
    }
    try (BufferedReader br = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
        String line = br.readLine();
        assertHeaderLine(line);
        line = br.readLine();
        assertNotNull(line);
        String[] parts = line.split(Character.toString(WriteLineDocTask.SEP));
        int numExpParts = expBody == null ? 2 : 3;
        assertEquals(numExpParts, parts.length);
        assertEquals(expTitle, parts[0]);
        assertEquals(expDate, parts[1]);
        if (expBody != null) {
            assertEquals(expBody, parts[2]);
        }
        assertNull(br.readLine());
    }
}
Example #18
Source File: CompressingContentStoreTest.java From alfresco-simple-content-stores with Apache License 2.0
@Test
public void mimetypeRestrictedCompression() throws Exception {
    final DictionaryService dictionaryService = EasyMock.mock(DictionaryService.class);

    final CompressingContentStore compressingContentStore = new CompressingContentStore();
    compressingContentStore.setNamespaceService(PREFIX_RESOLVER);
    compressingContentStore.setDictionaryService(dictionaryService);
    compressingContentStore.setMimetypesToCompress(
            Arrays.asList(MimetypeMap.MIMETYPE_TEXT_PLAIN, MimetypeMap.MIMETYPE_XML));

    final FileContentStore fileContentStore = new FileContentStore();
    fileContentStore.setRootDirectory(backingStoreFolder.getAbsolutePath());
    fileContentStore.setProtocol("store");
    compressingContentStore.setBackingStore(fileContentStore);

    final FileContentStore temporaryContentStore = new FileContentStore();
    temporaryContentStore.setRootDirectory(temporaryStoreFolder.getAbsolutePath());
    temporaryContentStore.setProtocol("store");
    compressingContentStore.setTemporaryStore(temporaryContentStore);

    fileContentStore.afterPropertiesSet();
    temporaryContentStore.afterPropertiesSet();
    compressingContentStore.afterPropertiesSet();

    testCompressableMimetype(compressingContentStore, fileContentStore, MimetypeMap.MIMETYPE_TEXT_PLAIN,
            CompressorStreamFactory.GZIP);
    testCompressableMimetype(compressingContentStore, fileContentStore, MimetypeMap.MIMETYPE_XML,
            CompressorStreamFactory.GZIP);
    testUncompressableMimetype(compressingContentStore, fileContentStore, MimetypeMap.MIMETYPE_PDF);
}
Example #19
Source File: Util.java From WikipediaEntities with GNU Affero General Public License v3.0
/**
 * Open a file, choosing a decompressor if necessary.
 *
 * @param fname Filename to open
 * @return Input stream
 * @throws FileNotFoundException When the file does not exist
 */
public static InputStream openInput(String fname) throws FileNotFoundException {
    InputStream fin = new FileInputStream(fname);
    try {
        return new CompressorStreamFactory(true).createCompressorInputStream(new BufferedInputStream(fin));
    } catch (CompressorException e) {
        return fin;
    }
}
Example #20
Source File: DecompressingContentReader.java From alfresco-simple-content-stores with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public synchronized ReadableByteChannel getReadableChannel() throws ContentIOException {
    this.ensureDelegate();

    final String mimetype = this.getMimetype();
    LOGGER.debug("Determined mimetype {} as provided via setter / content data - mimetypes to compress are {}",
            mimetype, this.mimetypesToCompress);

    final boolean shouldCompress = this.mimetypesToCompress == null || this.mimetypesToCompress.isEmpty()
            || (mimetype != null && (this.mimetypesToCompress.contains(mimetype)
                    || this.isMimetypeToCompressWildcardMatch(mimetype)));

    ReadableByteChannel channel;
    if (shouldCompress) {
        LOGGER.debug("Content will be decompressed from backing store (url={})", this.getContentUrl());

        // fall back to GZIP when no explicit compression type has been configured
        final String compressionType = this.compressionType != null && !this.compressionType.trim().isEmpty()
                ? this.compressionType
                : CompressorStreamFactory.GZIP;
        try {
            final CompressorInputStream is = COMPRESSOR_STREAM_FACTORY.createCompressorInputStream(compressionType,
                    this.delegate.getContentInputStream());
            channel = Channels.newChannel(is);
        } catch (final CompressorException e) {
            LOGGER.error("Failed to open decompressing channel", e);
            throw new ContentIOException("Failed to open channel: " + this, e);
        }
    } else {
        LOGGER.debug("Content will not be decompressed from backing store (url={})", this.getContentUrl());
        channel = super.getReadableChannel();
    }

    return channel;
}
Example #21
Source File: CompressTools.java From MyBox with Apache License 2.0
public static String detectCompressor(BufferedInputStream fileIn, String name) {
    String compressor = null;
    try {
        compressor = CompressorStreamFactory.detect(fileIn);
    } catch (Exception ex) {
        // Signature-based detection failed; probe for block LZ4, which has no signature.
        if ("lz4".equals(name)) {
            try (CompressorInputStream in = new BlockLZ4CompressorInputStream(fileIn)) {
                compressor = "lz4-block";
            } catch (Exception e) {
                // Not block LZ4 either - give up and return null.
            }
        }
    }
    return compressor;
}
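CompressorStreamFactory.detect is a static helper that sniffs the leading bytes of the stream and returns the matching compressor name; it needs a mark-supported stream (hence the BufferedInputStream parameter) so it can reset after peeking. Block LZ4 carries no magic-byte signature, which is why this method falls back to probing with BlockLZ4CompressorInputStream when signature detection fails.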
Example #22
Source File: TestCompressionInputBuilder.java From datacollector with Apache License 2.0
private void testConcatenatedCompressedFile(String compressionType) throws Exception {
    ByteArrayOutputStream bytes1 = new ByteArrayOutputStream();
    ByteArrayOutputStream bytes2 = new ByteArrayOutputStream();

    CompressorOutputStream compressed1 = new CompressorStreamFactory()
        .createCompressorOutputStream(compressionType, bytes1);
    CompressorOutputStream compressed2 = new CompressorStreamFactory()
        .createCompressorOutputStream(compressionType, bytes2);

    compressed1.write("line1\n".getBytes());
    compressed1.close();
    compressed2.write("line2".getBytes());
    compressed2.close();

    CompressionDataParser.CompressionInputBuilder compressionInputBuilder =
        new CompressionDataParser.CompressionInputBuilder(
            Compression.COMPRESSED_FILE,
            null,
            new SequenceInputStream(
                new ByteArrayInputStream(bytes1.toByteArray()),
                new ByteArrayInputStream(bytes2.toByteArray())
            ),
            "0"
        );
    CompressionDataParser.CompressionInput input = compressionInputBuilder.build();

    //verify
    Assert.assertNotNull(input);
    Assert.assertEquals("myFile::4567", input.wrapOffset("myFile::4567"));
    Assert.assertEquals("myFile::4567", input.wrapRecordId("myFile::4567"));

    InputStream myFile = input.getNextInputStream();
    BufferedReader reader = new BufferedReader(new InputStreamReader(myFile));
    Assert.assertEquals("line1", reader.readLine());
    Assert.assertEquals("line2", reader.readLine());
}
Example #23
Source File: CommonsCompressAction.java From logging-log4j2 with Apache License 2.0
/**
 * Compresses a file.
 *
 * @param name the compressor name, i.e. "gz", "bzip2", "xz", "pack200", or "deflate".
 * @param source file to compress, may not be null.
 * @param destination compressed file, may not be null.
 * @param deleteSource if true, attempt to delete file on completion. Failure to delete does not cause an exception
 *                     to be thrown or affect return value.
 *
 * @return true if source file compressed.
 * @throws IOException on IO exception.
 */
public static boolean execute(final String name, final File source, final File destination,
        final boolean deleteSource) throws IOException {
    if (!source.exists()) {
        return false;
    }
    LOGGER.debug("Starting {} compression of {}", name, source.getPath());
    try (final FileInputStream input = new FileInputStream(source);
            final BufferedOutputStream output = new BufferedOutputStream(
                new CompressorStreamFactory().createCompressorOutputStream(name,
                    new FileOutputStream(destination)))) {
        IOUtils.copy(input, output, BUF_SIZE);
        LOGGER.debug("Finished {} compression of {}", name, source.getPath());
    } catch (final CompressorException e) {
        throw new IOException(e);
    }
    if (deleteSource) {
        try {
            if (Files.deleteIfExists(source.toPath())) {
                LOGGER.debug("Deleted {}", source.toString());
            } else {
                LOGGER.warn("Unable to delete {} after {} compression. File did not exist",
                        source.toString(), name);
            }
        } catch (final Exception ex) {
            LOGGER.warn("Unable to delete {} after {} compression, {}", source.toString(), name, ex.getMessage());
        }
    }
    return true;
}
Example #24
Source File: TestTarContainerPacker.java From hadoop-ozone with Apache License 2.0
private File packContainerWithSingleFile(File file, String entryName) throws Exception {
    File targetFile = TEMP_DIR.resolve("container.tar.gz").toFile();
    try (FileOutputStream output = new FileOutputStream(targetFile);
            CompressorOutputStream gzipped = new CompressorStreamFactory()
                .createCompressorOutputStream(GZIP, output);
            ArchiveOutputStream archive = new TarArchiveOutputStream(gzipped)) {
        TarContainerPacker.includeFile(file, entryName, archive);
    }
    return targetFile;
}
Example #25
Source File: OMDBCheckpointServlet.java From hadoop-ozone with Apache License 2.0
/**
 * Write OM DB Checkpoint to an output stream as a compressed file (tgz).
 *
 * @param checkpoint checkpoint file
 * @param destination destination output stream.
 * @throws IOException
 */
public static void writeOmDBCheckpointToStream(DBCheckpoint checkpoint,
        OutputStream destination) throws IOException {
    try (CompressorOutputStream gzippedOut = new CompressorStreamFactory()
            .createCompressorOutputStream(CompressorStreamFactory.GZIP, destination)) {
        try (ArchiveOutputStream archiveOutputStream = new TarArchiveOutputStream(gzippedOut)) {
            Path checkpointPath = checkpoint.getCheckpointLocation();
            try (Stream<Path> files = Files.list(checkpointPath)) {
                for (Path path : files.collect(Collectors.toList())) {
                    if (path != null) {
                        Path fileName = path.getFileName();
                        if (fileName != null) {
                            includeFile(path.toFile(), fileName.toString(), archiveOutputStream);
                        }
                    }
                }
            }
        }
    } catch (CompressorException e) {
        throw new IOException(
            "Can't compress the checkpoint: " + checkpoint.getCheckpointLocation(), e);
    }
}
Example #26
Source File: ArchiveResource.java From embedded-cassandra with Apache License 2.0
private static ArchiveInputStreamFactory create(String archiveType, String compressorType) {
    return is -> {
        ArchiveStreamFactory af = new ArchiveStreamFactory();
        CompressorStreamFactory csf = new CompressorStreamFactory();
        return af.createArchiveInputStream(archiveType, csf.createCompressorInputStream(compressorType, is));
    };
}
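Invoked as, say, create(ArchiveStreamFactory.TAR, CompressorStreamFactory.GZIP), this yields a factory whose lambda opens .tar.gz input by layering the tar ArchiveInputStream over a gzip CompressorInputStream — the reverse of the nesting order used for writing in Examples #24 and #25.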
Example #27
Source File: KnowledgeBaseServiceImpl.java From inception with Apache License 2.0
@SuppressWarnings("resource") @Override public void importData(KnowledgeBase kb, String aFilename, InputStream aIS) throws RDFParseException, RepositoryException, IOException { if (kb.isReadOnly()) { log.warn("Knowledge base [{}] is read only, will not import!", kb.getName()); return; } InputStream is = new BufferedInputStream(aIS); try { // Stream is expected to be closed by caller of importData is = new CompressorStreamFactory().createCompressorInputStream(is); } catch (CompressorException e) { // Probably not compressed then or unknown format - just try as is. log.debug("Stream is not compressed, continue as is."); } // Detect the file format RDFFormat format = Rio.getParserFormatForFileName(aFilename).orElse(RDFFormat.RDFXML); // Load files into the repository try (RepositoryConnection conn = getConnection(kb)) { // If the RDF file contains relative URLs, then they probably start with a hash. // To avoid having two hashes here, we drop the hash from the base prefix configured // by the user. String prefix = StringUtils.removeEnd(kb.getBasePrefix(), "#"); conn.add(is, prefix, format); } }
Example #28
Source File: FilesArchiveCompressController.java From MyBox with Apache License 2.0
@Override
public void afterHandleFiles() {
    try {
        if (archiver.equalsIgnoreCase(ArchiveStreamFactory.SEVEN_Z)) {
            sevenZOutput.finish();
            sevenZOutput.close();
        } else {
            archiveOut.finish();
            archiveOut.close();
        }
        if (targetFile.exists()) {
            targetFile.delete();
        }
        if (!message("None").equals(compressor)) {
            File tmpFile = FileTools.getTempFile();
            try (BufferedInputStream inputStream = new BufferedInputStream(new FileInputStream(archiveFile));
                    BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(tmpFile));
                    CompressorOutputStream compressOut = new CompressorStreamFactory()
                        .createCompressorOutputStream(compressor, out)) {
                IOUtils.copy(inputStream, compressOut);
            }
            tmpFile.renameTo(targetFile);
        } else {
            archiveFile.renameTo(targetFile);
        }
    } catch (Exception e) {
        logger.debug(e.toString());
    }
}
Example #29
Source File: FilesDecompressUnarchiveBatchController.java From MyBox with Apache License 2.0
@Override
public boolean beforeHandleFiles() {
    try {
        cFactory = new CompressorStreamFactory();
        aFactory = new ArchiveStreamFactory();
        archiveSuccess = archiveFail = 0;
        charsetIncorrect = false;
        return true;
    } catch (Exception e) {
        logger.debug(e.toString());
        return false;
    }
}
Example #30
Source File: FileLineFetcher.java From hugegraph-loader with Apache License 2.0
private static Reader createCompressReader(InputStream stream, FileSource source) throws Exception {
    Compression compression = source.compression();
    String charset = source.charset();
    switch (compression) {
        case NONE:
            return new InputStreamReader(stream, charset);
        case GZIP:
        case BZ2:
        case XZ:
        case LZMA:
        case SNAPPY_RAW:
        case SNAPPY_FRAMED:
        case Z:
        case DEFLATE:
        case LZ4_BLOCK:
        case LZ4_FRAMED:
            CompressorStreamFactory factory = new CompressorStreamFactory();
            CompressorInputStream cis = factory.createCompressorInputStream(
                compression.string(), stream);
            return new InputStreamReader(cis, charset);
        default:
            throw new LoadException("Unsupported compression format '%s'", compression);
    }
}