org.apache.commons.compress.archivers.ar.ArArchiveInputStream Java Examples
The following examples show how to use org.apache.commons.compress.archivers.ar.ArArchiveInputStream. Each example is drawn from an open-source project; the originating source file and license are noted above each snippet.
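Before the project examples, here is a minimal, self-contained sketch of the typical read loop: wrap a FileInputStream, iterate entries with getNextArEntry(), and copy each entry's bytes while the stream is positioned at that entry. The archive path and output directory below are placeholders.

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

import org.apache.commons.compress.archivers.ar.ArArchiveEntry;
import org.apache.commons.compress.archivers.ar.ArArchiveInputStream;
import org.apache.commons.compress.utils.IOUtils;

public class ArReadExample {
    public static void main(String[] args) throws IOException {
        // Placeholder paths; point these at a real ar archive and target directory.
        File archive = new File("archive.a");
        Path targetDir = Paths.get("extracted");
        Files.createDirectories(targetDir);
        try (ArArchiveInputStream in = new ArArchiveInputStream(new FileInputStream(archive))) {
            ArArchiveEntry entry;
            while ((entry = in.getNextArEntry()) != null) {
                // After getNextArEntry(), reads from the stream return the current entry's data.
                Path out = targetDir.resolve(entry.getName());
                try (OutputStream os = Files.newOutputStream(out)) {
                    IOUtils.copy(in, os);
                }
            }
        }
    }
}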
Example #1
Source File: Packages.java From packagedrone with Eclipse Public License 1.0
public static Map<String, String> parseControlFile ( final File packageFile ) throws IOException, ParserException
{
    try ( final ArArchiveInputStream in = new ArArchiveInputStream ( new FileInputStream ( packageFile ) ) )
    {
        ArchiveEntry ar;
        while ( ( ar = in.getNextEntry () ) != null )
        {
            if ( !ar.getName ().equals ( "control.tar.gz" ) )
            {
                continue;
            }

            try ( final TarArchiveInputStream inputStream = new TarArchiveInputStream ( new GZIPInputStream ( in ) ) )
            {
                TarArchiveEntry te;
                while ( ( te = inputStream.getNextTarEntry () ) != null )
                {
                    String name = te.getName ();
                    if ( name.startsWith ( "./" ) )
                    {
                        name = name.substring ( 2 );
                    }
                    if ( !name.equals ( "control" ) )
                    {
                        continue;
                    }
                    return parseControlFile ( inputStream );
                }
            }
        }
    }
    return null;
}
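A Debian .deb package is itself an ar archive, which is why this method scans the outer stream for the control.tar.gz member. A hypothetical call site might look like the following; "example.deb" is a placeholder path, Packages and ParserException come from the packagedrone project, and "Package" and "Version" are standard Debian control field names.

import java.io.File;
import java.io.IOException;
import java.util.Map;

class ControlFileExample {
    static void printPackageInfo() throws IOException, ParserException {
        // Hypothetical usage of the method above; "example.deb" is a placeholder.
        Map<String, String> control = Packages.parseControlFile(new File("example.deb"));
        if (control != null) {
            // "Package" and "Version" are standard Debian control fields.
            System.out.println(control.get("Package") + " " + control.get("Version"));
        }
    }
}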
Example #2
Source File: CxxLibraryIntegrationTest.java From buck with Apache License 2.0
@Test
public void thinArchivesDoNotContainAbsolutePaths() throws IOException {
  CxxPlatform cxxPlatform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildRuleResolver ruleResolver = new TestActionGraphBuilder();
  assumeTrue(
      cxxPlatform
          .getAr()
          .resolve(ruleResolver, UnconfiguredTargetConfiguration.INSTANCE)
          .supportsThinArchives());
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_library", tmp);
  workspace.setUp();
  Path archive =
      workspace.buildAndReturnOutput("-c", "cxx.archive_contents=thin", "//:foo#default,static");

  // NOTE: Replace the thin header with a normal header just so the commons compress parser
  // can parse the archive contents.
  try (OutputStream outputStream =
      Files.newOutputStream(workspace.getPath(archive), StandardOpenOption.WRITE)) {
    outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER);
  }

  // Now iterate the archive and verify it contains no absolute paths.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(workspace.getPath(archive).toFile()))) {
    ArArchiveEntry entry;
    while ((entry = stream.getNextArEntry()) != null) {
      if (!entry.getName().isEmpty()) {
        assertFalse(
            "found absolute path: " + entry.getName(),
            workspace.getDestPath().getFileSystem().getPath(entry.getName()).isAbsolute());
      }
    }
  }
}
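Note the header trick above: ArArchiveInputStream rejects any file that does not begin with the standard ar magic "!<arch>\n", while GNU thin archives begin with "!<thin>\n" (Example #9 below asserts exactly that first line), so the test overwrites the magic with ObjectFileScrubbers.GLOBAL_HEADER before parsing. An illustrative helper, not part of either project, for detecting the thin variant before parsing:

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;

final class ArMagic {
    private static final byte[] THIN = "!<thin>\n".getBytes(StandardCharsets.US_ASCII);

    // Returns true if the file starts with the GNU thin-archive magic.
    static boolean isThinArchive(Path file) throws IOException {
        byte[] magic = new byte[THIN.length];
        try (InputStream in = Files.newInputStream(file)) {
            int read = in.readNBytes(magic, 0, magic.length);
            return read == magic.length && Arrays.equals(magic, THIN);
        }
    }
}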
Example #3
Source File: ArFileTree.java From gradle-plugins with MIT License
public ArFileTree(File archiveFile, ArchiveInputStreamProvider<ArArchiveInputStream> inputStreamProvider,
        File tmpDir, Chmod chmod, DirectoryFileTreeFactory directoryFileTreeFactory, FileHasher fileHasher) {
    super(archiveFile, inputStreamProvider, tmpDir, chmod, directoryFileTreeFactory, fileHasher);
}
Example #4
Source File: CompressFileOperationsImpl.java From gradle-plugins with MIT License
@Override
public FileTree arTree(Object arPath) {
    File file = fileOperations.file(arPath);
    ArFileTree arFileTree = new ArFileTree(
            file,
            f -> new ArArchiveInputStream(new FileInputStream(f)),
            getExpandDir(),
            fileSystem,
            directoryFileTreeFactory,
            fileHasher);
    return new FileTreeAdapter(arFileTree, patternSetFactory);
}
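The returned object is an ordinary Gradle FileTree, so it can be filtered and visited like any other. A sketch of one possible consumer, assuming the plugin exposes a CompressFileOperations interface matching the Impl class above; the parameter name ops and the path "libs/example.a" are illustrative.

import org.gradle.api.file.FileTree;

class ArTreeUsage {
    // Illustrative sketch: "ops" stands in for however the plugin exposes
    // CompressFileOperations; "libs/example.a" is a placeholder path.
    static void listArMembers(CompressFileOperations ops) {
        FileTree tree = ops.arTree("libs/example.a");
        tree.visit(details -> System.out.println(details.getRelativePath()));
    }
}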
Example #5
Source File: ArchiveUtils.java From dropwizard-debpkg-maven-plugin with Apache License 2.0
public static void extractAr(File file, File destination) throws IOException {
    try (final ArArchiveInputStream in = new ArArchiveInputStream(new FileInputStream(file))) {
        extractArchive(in, destination);
    }
}
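extractArchive is a helper from the same project and is not shown on this page. A plausible reconstruction over any commons-compress ArchiveInputStream (an assumption about its behavior, not the project's actual code) could look like this:

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.utils.IOUtils;

final class Archives {
    // Hypothetical reconstruction of the extractArchive helper used above.
    static void extractArchive(ArchiveInputStream in, File destination) throws IOException {
        Path dest = destination.toPath().normalize();
        ArchiveEntry entry;
        while ((entry = in.getNextEntry()) != null) {
            Path target = dest.resolve(entry.getName()).normalize();
            // Guard against entry names that would escape the destination ("zip slip").
            if (!target.startsWith(dest)) {
                throw new IOException("entry outside destination: " + entry.getName());
            }
            if (entry.isDirectory()) {
                Files.createDirectories(target);
            } else {
                Files.createDirectories(target.getParent());
                try (OutputStream out = Files.newOutputStream(target)) {
                    IOUtils.copy(in, out);
                }
            }
        }
    }
}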
Example #6
Source File: ArchiveStepIntegrationTest.java From buck with Apache License 2.0
@Test
@SuppressWarnings("PMD.AvoidUsingOctalValues")
public void thatGeneratedArchivesAreDeterministic() throws IOException, InterruptedException {
  assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver = new TestActionGraphBuilder();
  Archiver archiver =
      platform.getAr().resolve(ruleResolver, UnconfiguredTargetConfiguration.INSTANCE);
  Path output = filesystem.getPath("output.a");
  Path input = filesystem.getPath("input.dat");
  filesystem.writeContentsToPath("blah", input);
  Preconditions.checkState(filesystem.resolve(input).toFile().setExecutable(true));
  ImmutableList<String> archiverCmd =
      archiver.getCommandPrefix(ruleResolver.getSourcePathResolver());

  // Build an archive step.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(ruleResolver.getSourcePathResolver()),
          archiverCmd,
          ImmutableList.of(),
          getArchiveOptions(false),
          output,
          ImmutableList.of(input),
          archiver,
          filesystem.getPath("scratchDir"));
  FileScrubberStep fileScrubberStep =
      new FileScrubberStep(filesystem, output, archiver.getScrubbers());

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);
  exitCode = fileScrubberStep.execute(executionContext).getExitCode();
  assertEquals("archive scrub step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Now read the archive entries and verify that the timestamp, UID, and GID fields are
  // zero'd out.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    ArArchiveEntry entry = stream.getNextArEntry();
    assertEquals(
        ObjectFileCommonModificationDate.COMMON_MODIFICATION_TIME_STAMP, entry.getLastModified());
    assertEquals(0, entry.getUserId());
    assertEquals(0, entry.getGroupId());
    assertEquals(String.format("0%o", entry.getMode()), 0100644, entry.getMode());
  }

  // test the beginning of description to make sure it matches the archive command
  String desc = archiveStep.getDescription(executionContext);
  assertThat(desc, Matchers.startsWith(archiverCmd.get(0)));
}
Example #7
Source File: ArchiveStepIntegrationTest.java From buck with Apache License 2.0
@Test
public void emptyArchives() throws IOException, InterruptedException {
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(
          new CxxBuckConfig(
              FakeBuckConfig.builder()
                  .setFilesystem(filesystem)
                  .setSections(CxxToolchainUtilsForTests.configureCxxToolchainsAndGetConfig())
                  .build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver = new TestActionGraphBuilder();
  Archiver archiver =
      platform.getAr().resolve(ruleResolver, UnconfiguredTargetConfiguration.INSTANCE);
  Path output = filesystem.getPath("output.a");

  // Build an archive step.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(ruleResolver.getSourcePathResolver()),
          archiver.getCommandPrefix(ruleResolver.getSourcePathResolver()),
          ImmutableList.of(),
          getArchiveOptions(false),
          output,
          ImmutableList.of(),
          archiver,
          filesystem.getPath("scratchDir"));

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Now read the archive and verify it contains no entries.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    assertThat(stream.getNextArEntry(), Matchers.nullValue());
  }
}
Example #8
Source File: ArchiveStepIntegrationTest.java From buck with Apache License 2.0
@Test
public void inputDirs() throws IOException, InterruptedException {
  assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver = new TestActionGraphBuilder();
  Archiver archiver =
      platform.getAr().resolve(ruleResolver, UnconfiguredTargetConfiguration.INSTANCE);
  Path output = filesystem.getPath("output.a");
  Path input = filesystem.getPath("foo/blah.dat");
  filesystem.mkdirs(input.getParent());
  filesystem.writeContentsToPath("blah", input);

  // Build an archive step.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(ruleResolver.getSourcePathResolver()),
          archiver.getCommandPrefix(ruleResolver.getSourcePathResolver()),
          ImmutableList.of(),
          getArchiveOptions(false),
          output,
          ImmutableList.of(input.getParent()),
          archiver,
          filesystem.getPath("scratchDir"));

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Now read the archive and verify the directory's contents were archived under their
  // file names.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    ArArchiveEntry entry = stream.getNextArEntry();
    assertThat(entry.getName(), Matchers.equalTo("blah.dat"));
  }
}
Example #9
Source File: ArchiveStepIntegrationTest.java From buck with Apache License 2.0
@Test
public void thinArchives() throws IOException, InterruptedException {
  assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver = new TestActionGraphBuilder();
  Archiver archiver =
      platform.getAr().resolve(ruleResolver, UnconfiguredTargetConfiguration.INSTANCE);
  assumeTrue(archiver.supportsThinArchives());
  Path output = filesystem.getPath("foo/libthin.a");
  filesystem.mkdirs(output.getParent());

  // Create a really large input file so it's obvious that the archive is thin.
  Path input = filesystem.getPath("bar/blah.dat");
  filesystem.mkdirs(input.getParent());
  byte[] largeInputFile = new byte[1024 * 1024];
  byte[] fillerToRepeat = "hello\n".getBytes(StandardCharsets.UTF_8);
  for (int i = 0; i < largeInputFile.length; i++) {
    largeInputFile[i] = fillerToRepeat[i % fillerToRepeat.length];
  }
  filesystem.writeBytesToPath(largeInputFile, input);

  // Build an archive step.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(ruleResolver.getSourcePathResolver()),
          archiver.getCommandPrefix(ruleResolver.getSourcePathResolver()),
          ImmutableList.of(),
          getArchiveOptions(true),
          output,
          ImmutableList.of(input),
          archiver,
          filesystem.getPath("scratchDir"));

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Verify that the thin header is present.
  assertThat(filesystem.readFirstLine(output), Matchers.equalTo(Optional.of("!<thin>")));

  // Verify that even though the archived contents are really big, the archive is still small.
  assertThat(filesystem.getFileSize(output), Matchers.lessThan(1000L));

  // NOTE: Replace the thin header with a normal header just so the commons compress parser
  // can parse the archive contents.
  try (OutputStream outputStream =
      Files.newOutputStream(filesystem.resolve(output), StandardOpenOption.WRITE)) {
    outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER);
  }

  // Now read the archive entries and check how the input was recorded.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    ArArchiveEntry entry = stream.getNextArEntry();
    // Verify that the input names are relative paths from the output's parent dir.
    assertThat(
        entry.getName(), Matchers.equalTo(output.getParent().relativize(input).toString()));
  }
}