Java Code Examples for org.apache.flink.core.fs.FileSystem#open()
The following examples show how to use org.apache.flink.core.fs.FileSystem#open().
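Before the project examples, here is a minimal sketch of the basic call pattern: resolve the FileSystem for a Path, call open() to obtain an FSDataInputStream, and let try-with-resources close the stream. The class name and the path below are illustrative only; they are not taken from any of the projects listed here.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.flink.core.fs.FSDataInputStream;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.Path;

public class FileSystemOpenSketch {

    public static void main(String[] args) throws IOException {
        // Illustrative path; any URI with a registered Flink FileSystem scheme works the same way.
        Path path = new Path("file:///tmp/example.txt");

        // Resolve the FileSystem responsible for the path's scheme.
        FileSystem fs = path.getFileSystem();

        // open() returns an FSDataInputStream; try-with-resources guarantees it is closed.
        try (FSDataInputStream in = fs.open(path);
                BufferedReader reader = new BufferedReader(
                        new InputStreamReader(in, StandardCharsets.UTF_8))) {
            System.out.println(reader.readLine());
        }
    }
}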
Example 1
Source File: FileUtils.java From Flink-CEPplus with Apache License 2.0

private static void addToZip(Path fileOrDirectory, FileSystem fs, Path rootDir, ZipOutputStream out) throws IOException {
    String relativePath = fileOrDirectory.getPath().replace(rootDir.getPath() + '/', "");
    if (fs.getFileStatus(fileOrDirectory).isDir()) {
        out.putNextEntry(new ZipEntry(relativePath + '/'));
        for (FileStatus containedFile : fs.listStatus(fileOrDirectory)) {
            addToZip(containedFile.getPath(), fs, rootDir, out);
        }
    } else {
        ZipEntry entry = new ZipEntry(relativePath);
        out.putNextEntry(entry);
        try (FSDataInputStream in = fs.open(fileOrDirectory)) {
            IOUtils.copyBytes(in, out, false);
        }
        out.closeEntry();
    }
}
Example 2
Source File: FileUtils.java From flink with Apache License 2.0

private static void addToZip(Path fileOrDirectory, FileSystem fs, Path rootDir, ZipOutputStream out) throws IOException {
    String relativePath = fileOrDirectory.getPath().replace(rootDir.getPath() + '/', "");
    if (fs.getFileStatus(fileOrDirectory).isDir()) {
        out.putNextEntry(new ZipEntry(relativePath + '/'));
        for (FileStatus containedFile : fs.listStatus(fileOrDirectory)) {
            addToZip(containedFile.getPath(), fs, rootDir, out);
        }
    } else {
        ZipEntry entry = new ZipEntry(relativePath);
        out.putNextEntry(entry);
        try (FSDataInputStream in = fs.open(fileOrDirectory)) {
            IOUtils.copyBytes(in, out, false);
        }
        out.closeEntry();
    }
}
Example 3
Source File: FileInputFormat.java From flink with Apache License 2.0

@Override
public void run() {
    try {
        final FileSystem fs = FileSystem.get(this.split.getPath().toUri());
        this.fdis = fs.open(this.split.getPath());

        // check for canceling and close the stream in that case, because no one will obtain it
        if (this.aborted) {
            final FSDataInputStream f = this.fdis;
            this.fdis = null;
            f.close();
        }
    }
    catch (Throwable t) {
        this.error = t;
    }
}
Example 4
Source File: FileInputFormat.java From Flink-CEPplus with Apache License 2.0

@Override
public void run() {
    try {
        final FileSystem fs = FileSystem.get(this.split.getPath().toUri());
        this.fdis = fs.open(this.split.getPath());

        // check for canceling and close the stream in that case, because no one will obtain it
        if (this.aborted) {
            final FSDataInputStream f = this.fdis;
            this.fdis = null;
            f.close();
        }
    }
    catch (Throwable t) {
        this.error = t;
    }
}
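The two FileInputFormat examples above resolve the file system through the static FileSystem.get(URI) lookup rather than Path#getFileSystem(). A minimal sketch of that variant; the class name and path are illustrative:

import java.io.IOException;

import org.apache.flink.core.fs.FSDataInputStream;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.Path;

public class StaticLookupSketch {

    public static void main(String[] args) throws IOException {
        // Illustrative path, standing in for a split path from a FileInputFormat.
        Path path = new Path("file:///tmp/input/part-0");

        // Look up the FileSystem for the path's URI, as the FileInputFormat examples above do.
        FileSystem fs = FileSystem.get(path.toUri());

        try (FSDataInputStream in = fs.open(path)) {
            System.out.println("first byte: " + in.read());
        }
    }
}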
Example 5
Source File: FileUtils.java From flink with Apache License 2.0

private static void internalCopyFile(Path sourcePath, Path targetPath, boolean executable, FileSystem sFS, FileSystem tFS) throws IOException {
    try (FSDataOutputStream lfsOutput = tFS.create(targetPath, FileSystem.WriteMode.NO_OVERWRITE);
            FSDataInputStream fsInput = sFS.open(sourcePath)) {
        IOUtils.copyBytes(fsInput, lfsOutput);
        //noinspection ResultOfMethodCallIgnored
        new File(targetPath.toString()).setExecutable(executable);
    }
}
Example 6
Source File: BinaryInputFormat.java From flink with Apache License 2.0

/**
 * Fill in the statistics. The last modification time and the total input size are prefilled.
 *
 * @param files The files that are associated with this block input format.
 * @param stats The pre-filled statistics.
 */
protected SequentialStatistics createStatistics(List<FileStatus> files, FileBaseStatistics stats) throws IOException {
    if (files.isEmpty()) {
        return null;
    }

    BlockInfo blockInfo = new BlockInfo();
    long totalCount = 0;
    for (FileStatus file : files) {
        // invalid file
        if (file.getLen() < blockInfo.getInfoSize()) {
            continue;
        }

        FileSystem fs = file.getPath().getFileSystem();
        try (FSDataInputStream fdis = fs.open(file.getPath(), blockInfo.getInfoSize())) {
            fdis.seek(file.getLen() - blockInfo.getInfoSize());

            blockInfo.read(new DataInputViewStreamWrapper(fdis));
            totalCount += blockInfo.getAccumulatedRecordCount();
        }
    }

    final float avgWidth = totalCount == 0 ? 0 : ((float) stats.getTotalInputSize() / totalCount);
    return new SequentialStatistics(stats.getLastModificationTime(), stats.getTotalInputSize(), avgWidth, totalCount);
}
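The statistics example above uses the two-argument overload open(Path, int bufferSize), which additionally passes a buffer-size hint to the file system, and then seeks near the end of the file. A small sketch of that overload; the path, buffer size, and tail length are illustrative values only:

import java.io.IOException;

import org.apache.flink.core.fs.FSDataInputStream;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.Path;

public class BufferedOpenSketch {

    public static void main(String[] args) throws IOException {
        // Illustrative values only.
        Path path = new Path("file:///tmp/blocks.bin");
        int bufferSize = 64 * 1024;
        int tailLength = 16;

        FileSystem fs = path.getFileSystem();
        long fileLength = fs.getFileStatus(path).getLen();

        // The overload with a buffer-size hint; FSDataInputStream also supports seek().
        try (FSDataInputStream in = fs.open(path, bufferSize)) {
            in.seek(Math.max(0, fileLength - tailLength));
            byte[] tail = new byte[tailLength];
            int read = in.read(tail);
            System.out.println("read " + read + " trailing bytes");
        }
    }
}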
Example 7
Source File: StanfordTweetsDataSetInputFormat.java From flink-examples with MIT License

@Override
public void open(TweetFileInputSplit split) throws IOException {
    FileSystem fileSystem = getFileSystem();
    this.reader = new BufferedReader(new InputStreamReader(fileSystem.open(split.getPath())));

    // Pre-read next line to easily check if we've reached the end of an input split
    this.nextLine = reader.readLine();
}
Example 8
Source File: HadoopSwiftFileSystemITCase.java From flink with Apache License 2.0

@Test
public void testSimpleFileWriteAndRead() throws Exception {
    final Configuration conf = createConfiguration();
    final String testLine = "Hello Upload!";

    FileSystem.initialize(conf);

    final Path path = new Path("swift://" + CONTAINER + '.' + SERVICENAME + '/' + TEST_DATA_DIR + "/test.txt");
    final FileSystem fs = path.getFileSystem();

    try {
        try (FSDataOutputStream out = fs.create(path, WriteMode.OVERWRITE);
                OutputStreamWriter writer = new OutputStreamWriter(out, StandardCharsets.UTF_8)) {
            writer.write(testLine);
        }

        try (FSDataInputStream in = fs.open(path);
                InputStreamReader ir = new InputStreamReader(in, StandardCharsets.UTF_8);
                BufferedReader reader = new BufferedReader(ir)) {
            String line = reader.readLine();
            assertEquals(testLine, line);
        }
    }
    finally {
        fs.delete(path, false);
    }
}
Example 9
Source File: FileUtils.java From flink with Apache License 2.0

public static Path expandDirectory(Path file, Path targetDirectory) throws IOException {
    FileSystem sourceFs = file.getFileSystem();
    FileSystem targetFs = targetDirectory.getFileSystem();
    Path rootDir = null;
    try (ZipInputStream zis = new ZipInputStream(sourceFs.open(file))) {
        ZipEntry entry;
        while ((entry = zis.getNextEntry()) != null) {
            Path relativePath = new Path(entry.getName());
            if (rootDir == null) {
                // the first entry contains the name of the original directory that was zipped
                rootDir = relativePath;
            }

            Path newFile = new Path(targetDirectory, relativePath);
            if (entry.isDirectory()) {
                targetFs.mkdirs(newFile);
            } else {
                try (FSDataOutputStream fileStream = targetFs.create(newFile, FileSystem.WriteMode.NO_OVERWRITE)) {
                    // do not close the streams here as it prevents access to further zip entries
                    IOUtils.copyBytes(zis, fileStream, false);
                }
            }
            zis.closeEntry();
        }
    }
    return new Path(targetDirectory, rootDir);
}
Example 10
Source File: FileUtils.java From Flink-CEPplus with Apache License 2.0

public static Path expandDirectory(Path file, Path targetDirectory) throws IOException {
    FileSystem sourceFs = file.getFileSystem();
    FileSystem targetFs = targetDirectory.getFileSystem();
    Path rootDir = null;
    try (ZipInputStream zis = new ZipInputStream(sourceFs.open(file))) {
        ZipEntry entry;
        while ((entry = zis.getNextEntry()) != null) {
            Path relativePath = new Path(entry.getName());
            if (rootDir == null) {
                // the first entry contains the name of the original directory that was zipped
                rootDir = relativePath;
            }

            Path newFile = new Path(targetDirectory, relativePath);
            if (entry.isDirectory()) {
                targetFs.mkdirs(newFile);
            } else {
                try (FSDataOutputStream fileStream = targetFs.create(newFile, FileSystem.WriteMode.NO_OVERWRITE)) {
                    // do not close the streams here as it prevents access to further zip entries
                    IOUtils.copyBytes(zis, fileStream, false);
                }
            }
            zis.closeEntry();
        }
    }
    return new Path(targetDirectory, rootDir);
}
Example 11
Source File: FileUtils.java From Flink-CEPplus with Apache License 2.0

private static void internalCopyFile(Path sourcePath, Path targetPath, boolean executable, FileSystem sFS, FileSystem tFS) throws IOException {
    try (FSDataOutputStream lfsOutput = tFS.create(targetPath, FileSystem.WriteMode.NO_OVERWRITE);
            FSDataInputStream fsInput = sFS.open(sourcePath)) {
        IOUtils.copyBytes(fsInput, lfsOutput);
        //noinspection ResultOfMethodCallIgnored
        new File(targetPath.toString()).setExecutable(executable);
    }
}
Example 12
Source File: BinaryInputFormat.java From Flink-CEPplus with Apache License 2.0

/**
 * Fill in the statistics. The last modification time and the total input size are prefilled.
 *
 * @param files The files that are associated with this block input format.
 * @param stats The pre-filled statistics.
 */
protected SequentialStatistics createStatistics(List<FileStatus> files, FileBaseStatistics stats) throws IOException {
    if (files.isEmpty()) {
        return null;
    }

    BlockInfo blockInfo = new BlockInfo();
    long totalCount = 0;
    for (FileStatus file : files) {
        // invalid file
        if (file.getLen() < blockInfo.getInfoSize()) {
            continue;
        }

        FileSystem fs = file.getPath().getFileSystem();
        try (FSDataInputStream fdis = fs.open(file.getPath(), blockInfo.getInfoSize())) {
            fdis.seek(file.getLen() - blockInfo.getInfoSize());

            blockInfo.read(new DataInputViewStreamWrapper(fdis));
            totalCount += blockInfo.getAccumulatedRecordCount();
        }
    }

    final float avgWidth = totalCount == 0 ? 0 : ((float) stats.getTotalInputSize() / totalCount);
    return new SequentialStatistics(stats.getLastModificationTime(), stats.getTotalInputSize(), avgWidth, totalCount);
}
Example 13
Source File: RocksDBStateUploader.java From Flink-CEPplus with Apache License 2.0

private StreamStateHandle uploadLocalFileToCheckpointFs(
        Path filePath,
        CheckpointStreamFactory checkpointStreamFactory,
        CloseableRegistry closeableRegistry) throws IOException {
    FSDataInputStream inputStream = null;
    CheckpointStreamFactory.CheckpointStateOutputStream outputStream = null;

    try {
        final byte[] buffer = new byte[READ_BUFFER_SIZE];

        FileSystem backupFileSystem = filePath.getFileSystem();
        inputStream = backupFileSystem.open(filePath);
        closeableRegistry.registerCloseable(inputStream);

        outputStream = checkpointStreamFactory
            .createCheckpointStateOutputStream(CheckpointedStateScope.SHARED);
        closeableRegistry.registerCloseable(outputStream);

        while (true) {
            int numBytes = inputStream.read(buffer);

            if (numBytes == -1) {
                break;
            }

            outputStream.write(buffer, 0, numBytes);
        }

        StreamStateHandle result = null;
        if (closeableRegistry.unregisterCloseable(outputStream)) {
            result = outputStream.closeAndGetHandle();
            outputStream = null;
        }
        return result;
    } finally {
        if (closeableRegistry.unregisterCloseable(inputStream)) {
            IOUtils.closeQuietly(inputStream);
        }

        if (closeableRegistry.unregisterCloseable(outputStream)) {
            IOUtils.closeQuietly(outputStream);
        }
    }
}
Example 14
Source File: FlinkFileReader.java From hadoopoffice with Apache License 2.0

public InputStream openFile(Path path) throws IOException {
    FileSystem fs = FileSystem.get(path.toUri());
    return fs.open(path);
}
Example 15
Source File: BlobClient.java From flink with Apache License 2.0

/**
 * Uploads a single file to the {@link PermanentBlobService} of the given {@link BlobServer}.
 *
 * @param jobId ID of the job this blob belongs to (or <tt>null</tt> if job-unrelated)
 * @param file file to upload
 *
 * @throws IOException if the upload fails
 */
public PermanentBlobKey uploadFile(JobID jobId, Path file) throws IOException {
    final FileSystem fs = file.getFileSystem();
    try (InputStream is = fs.open(file)) {
        return (PermanentBlobKey) putInputStream(jobId, is, PERMANENT_BLOB);
    }
}