Java Code Examples for java.nio.channels.Channels#newOutputStream()
The following examples show how to use java.nio.channels.Channels#newOutputStream().
Each example is taken from an open-source project; the source file, project, and license are listed above each snippet.
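Before the project examples, here is a minimal, self-contained sketch of the basic pattern. Channels.newOutputStream(WritableByteChannel) adapts a writable channel into a blocking OutputStream, which can then be decorated like any other stream (buffered, compressed, and so on). The class name and the file name example.txt below are purely illustrative.

import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class NewOutputStreamSketch {
  public static void main(String[] args) throws IOException {
    // Open a writable channel (a FileChannel here) and adapt it into an OutputStream.
    try (FileChannel channel = FileChannel.open(
            Paths.get("example.txt"),
            StandardOpenOption.CREATE, StandardOpenOption.WRITE);
        OutputStream out = new BufferedOutputStream(Channels.newOutputStream(channel))) {
      // Bytes written to the stream are passed through to the underlying channel.
      out.write("hello".getBytes(StandardCharsets.UTF_8));
    }
  }
}

Closing the returned stream also closes the underlying channel; conversely, once the channel is closed, further writes through the stream fail with ClosedChannelException (see Example 4 below).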
Example 1
Source File: SocketServerLauncher.java From xtext-core with Eclipse Public License 2.0 | 6 votes |
public void launch(String[] args) {
  Injector injector = Guice.createInjector(getServerModule());
  try (AsynchronousServerSocketChannel serverSocket =
      AsynchronousServerSocketChannel.open().bind(getSocketAddress(args))) {
    LOG.info("Started server socket at " + getSocketAddress(args));
    while (true) {
      AsynchronousSocketChannel socketChannel = serverSocket.accept().get();
      InputStream in = Channels.newInputStream(socketChannel);
      OutputStream out = Channels.newOutputStream(socketChannel);
      PrintWriter trace = getTrace(args);
      boolean validate = shouldValidate(args);
      LanguageServerImpl languageServer = injector.getInstance(LanguageServerImpl.class);
      LOG.info("Starting Xtext Language Server for client " + socketChannel.getRemoteAddress());
      Launcher<LanguageClient> launcher =
          Launcher.createLauncher(languageServer, LanguageClient.class, in, out, validate, trace);
      languageServer.connect(launcher.getRemoteProxy());
      launcher.startListening();
      LOG.info("Xtext Language Server has been started.");
    }
  } catch (Throwable t) {
    t.printStackTrace();
  }
}
Example 2
Source File: DataOutputTest.java From stratio-cassandra with Apache License 2.0 | 6 votes |
@Test
public void testRandomAccessFile() throws IOException {
  File file = FileUtils.createTempFile("dataoutput", "test");
  try {
    final RandomAccessFile raf = new RandomAccessFile(file, "rw");
    DataOutputStreamAndChannel write =
        new DataOutputStreamAndChannel(Channels.newOutputStream(raf.getChannel()), raf.getChannel());
    DataInput canon = testWrite(write);
    write.close();
    DataInputStream test = new DataInputStream(new FileInputStream(file));
    testRead(test, canon);
    test.close();
  } finally {
    Assert.assertTrue(file.delete());
  }
}
Example 3
Source File: AbstractContentWriter.java From alfresco-repository with GNU Lesser General Public License v3.0 | 6 votes |
/**
 * @see Channels#newOutputStream(java.nio.channels.WritableByteChannel)
 */
public OutputStream getContentOutputStream() throws ContentIOException {
  try {
    WritableByteChannel channel = getWritableChannel();
    OutputStream is = new BufferedOutputStream(Channels.newOutputStream(channel));
    // done
    return is;
  } catch (Throwable e) {
    throw new ContentIOException("Failed to open stream onto channel: \n" +
        "   writer: " + this, e);
  }
}
Example 4
Source File: ChannelsTest.java From j2objc with Apache License 2.0 | 6 votes |
public void testNewOutputStreamWritableByteChannel() throws Exception {
  byte[] writebuf = new byte[this.testNum];
  ByteBuffer writebcbuf = ByteBuffer.allocateDirect(this.testNum);
  this.fouts = new FileOutputStream(tmpFile);
  WritableByteChannel writebc = this.fouts.getChannel();
  assertTrue(writebc.isOpen());
  OutputStream testouts = Channels.newOutputStream(writebc);
  // the stream and the channel write through the same file position
  testouts.write(writebuf);
  this.assertFileSizeSame(tmpFile, this.testNum);
  writebc.write(writebcbuf);
  this.assertFileSizeSame(tmpFile, this.testNum * 2);
  testouts.write(writebuf);
  this.assertFileSizeSame(tmpFile, this.testNum * 3);
  // closing the channel also invalidates the stream
  writebc.close();
  assertFalse(writebc.isOpen());
  try {
    testouts.write(writebuf);
    fail();
  } catch (ClosedChannelException e) {
    // correct
  }
}
Example 5
Source File: ParquetIO.java From beam with Apache License 2.0 | 6 votes |
@Override
public void open(WritableByteChannel channel) throws IOException {
  checkNotNull(getJsonSchema(), "Schema cannot be null");
  Schema schema = new Schema.Parser().parse(getJsonSchema());
  BeamParquetOutputFile beamParquetOutputFile =
      new BeamParquetOutputFile(Channels.newOutputStream(channel));
  AvroParquetWriter.Builder<GenericRecord> builder =
      AvroParquetWriter.<GenericRecord>builder(beamParquetOutputFile)
          .withSchema(schema)
          .withCompressionCodec(getCompressionCodec())
          .withWriteMode(OVERWRITE);
  if (getConfiguration() != null) {
    builder = builder.withConf(getConfiguration().get());
  }
  this.writer = builder.build();
}
Example 6
Source File: FileUtil.java From FairEmail with GNU General Public License v3.0 | 6 votes |
/**
 * Copies data from the input channel to the output file channel.
 *
 * @param input the input channel to copy from.
 * @param output the output channel to copy to.
 * @throws IOException if there is an I/O error.
 */
@SuppressLint("LambdaLast")
public static void copy(@NonNull ReadableByteChannel input, @NonNull FileChannel output)
    throws IOException {
  try {
    if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
      output.transferFrom(input, 0, Long.MAX_VALUE);
    } else {
      InputStream inputStream = Channels.newInputStream(input);
      OutputStream outputStream = Channels.newOutputStream(output);
      int length;
      byte[] buffer = new byte[1024 * 4];
      while ((length = inputStream.read(buffer)) > 0) {
        outputStream.write(buffer, 0, length);
      }
    }
    output.force(false);
  } finally {
    input.close();
    output.close();
  }
}
Example 7
Source File: GoogleCloudStorageNewIntegrationTest.java From hadoop-connectors with Apache License 2.0 | 5 votes |
@Test
public void create_gzipEncodedFile() throws Exception {
  String testBucket = gcsfsIHelper.sharedBucketName1;
  StorageResourceId testFile = new StorageResourceId(testBucket, getTestResource());

  TrackingHttpRequestInitializer gcsRequestsTracker =
      new TrackingHttpRequestInitializer(httpRequestsInitializer);
  GoogleCloudStorage gcs = new GoogleCloudStorageImpl(gcsOptions, gcsRequestsTracker);

  try (OutputStream os =
      new GZIPOutputStream(Channels.newOutputStream(gcs.create(testFile, GZIP_CREATE_OPTIONS)))) {
    os.write("content".getBytes(UTF_8));
  }

  assertThat(gcsRequestsTracker.getAllRequestStrings())
      .containsExactly(
          getRequestString(testBucket, testFile.getObjectName()),
          resumableUploadRequestString(
              testBucket,
              testFile.getObjectName(),
              /* generationId= */ 1,
              /* replaceGenerationId= */ true),
          resumableUploadChunkRequestString(
              testBucket, testFile.getObjectName(), /* generationId= */ 2, /* uploadId= */ 1));

  assertThat(gcs.getItemInfo(testFile).getContentEncoding()).isEqualTo("gzip");
}
Example 8
Source File: BigQueryRowWriter.java From beam with Apache License 2.0 | 5 votes |
BigQueryRowWriter(String basename, String mimeType) throws Exception {
  String uId = UUID.randomUUID().toString();
  resourceId = FileSystems.matchNewResource(basename + uId, false);
  LOG.info("Opening {} to {}.", this.getClass().getSimpleName(), resourceId);
  channel = FileSystems.create(resourceId, mimeType);
  out = new CountingOutputStream(Channels.newOutputStream(channel));
}
Example 9
Source File: NamedPipe.java From jkube with Eclipse Public License 2.0 | 5 votes |
@Override
public OutputStream getOutputStream() throws IOException {
  if (!channel.isOpen()) {
    throw new SocketException("Socket is closed");
  }
  if (outputShutdown) {
    throw new SocketException("Socket output is shutdown");
  }
  return new FilterOutputStream(Channels.newOutputStream(channel)) {
    @Override
    public void write(byte[] b, int off, int len) throws IOException {
      if (log.isDebugEnabled()) {
        String request = new String(b, off, len, StandardCharsets.UTF_8);
        String logValue = isAscii(request) ? request : "not logged due to non-ASCII characters. ";
        log.debug("REQUEST %s", logValue);
      }
      out.write(b, off, len);
    }

    @Override
    public void close() throws IOException {
      shutdownOutput();
    }
  };
}
Example 10
Source File: SocketSodepService.java From jolie with GNU Lesser General Public License v2.1 | 5 votes |
public SocketSodepService(ServiceFactory factory, URI location, Value protocolConfiguration)
    throws IOException {
  super(factory, location);
  socketChannel = SocketChannel.open(new InetSocketAddress(location.getHost(), location.getPort()));
  protocol = new SodepProtocol(new ClosedVariablePath(new Pair[0], protocolConfiguration));
  istream = Channels.newInputStream(socketChannel);
  ostream = Channels.newOutputStream(socketChannel);
}
Example 11
Source File: AvroByteReaderTest.java From beam with Apache License 2.0 | 5 votes |
/** Write input elements to a file and return information about the Avro-encoded file. */
private <T> AvroFileInfo<T> initInputFile(List<List<T>> elemsList, Coder<T> coder)
    throws Exception {
  File tmpFile = tmpFolder.newFile("file.avro");
  AvroFileInfo<T> fileInfo = new AvroFileInfo<>();
  fileInfo.filename = tmpFile.getPath();

  // Write the data.
  OutputStream outStream =
      Channels.newOutputStream(
          FileSystems.create(
              FileSystems.matchNewResource(fileInfo.filename, false), MimeTypes.BINARY));
  Schema schema = Schema.create(Schema.Type.BYTES);
  DatumWriter<ByteBuffer> datumWriter = new GenericDatumWriter<>(schema);
  try (DataFileWriter<ByteBuffer> fileWriter = new DataFileWriter<>(datumWriter)) {
    fileWriter.create(schema, outStream);
    boolean first = true;
    for (List<T> elems : elemsList) {
      if (first) {
        first = false;
      } else {
        // Ensure a block boundary here.
        long syncPoint = fileWriter.sync();
        fileInfo.syncPoints.add(syncPoint);
      }
      for (T elem : elems) {
        byte[] encodedElement = CoderUtils.encodeToByteArray(coder, elem);
        fileWriter.append(ByteBuffer.wrap(encodedElement));
        fileInfo.elementSizes.add(encodedElement.length);
        fileInfo.totalElementEncodedSize += encodedElement.length;
      }
    }
  }
  return fileInfo;
}
Example 12
Source File: ObjectSocketChannelStream.java From database with GNU General Public License v2.0 | 5 votes |
public ObjectOutputStream getOutputStream() {
  if (outStr == null) {
    try {
      this.outStr = new ObjectOutputStream(Channels.newOutputStream(channel));
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
  return outStr;
}
Example 13
Source File: AbstractWorkspace.java From buck with Apache License 2.0 | 5 votes |
/** Stamp the buck-out directory if it exists and isn't stamped already */
private void stampBuckVersion() throws IOException {
  if (!Files.exists(destPath.resolve(BuckConstant.getBuckOutputPath()))) {
    return;
  }
  try (OutputStream outputStream =
      new BufferedOutputStream(
          Channels.newOutputStream(
              Files.newByteChannel(
                  destPath.resolve(BuckConstant.getBuckOutputPath().resolve(".currentversion")),
                  ImmutableSet.<OpenOption>of(
                      StandardOpenOption.CREATE, StandardOpenOption.WRITE))))) {
    outputStream.write(BuckVersion.getVersion().getBytes(Charsets.UTF_8));
  }
}
Example 14
Source File: FileSystemProvider.java From Bytecoder with Apache License 2.0 | 4 votes |
/**
 * Opens or creates a file, returning an output stream that may be used to
 * write bytes to the file. This method works in exactly the manner
 * specified by the {@link Files#newOutputStream} method.
 *
 * <p> The default implementation of this method opens a channel to the file
 * as if by invoking the {@link #newByteChannel} method and constructs a
 * stream that writes bytes to the channel. This method should be overridden
 * where appropriate.
 *
 * @param path
 *          the path to the file to open or create
 * @param options
 *          options specifying how the file is opened
 *
 * @return a new output stream
 *
 * @throws IllegalArgumentException
 *          if {@code options} contains an invalid combination of options
 * @throws UnsupportedOperationException
 *          if an unsupported option is specified
 * @throws IOException
 *          if an I/O error occurs
 * @throws SecurityException
 *          In the case of the default provider, and a security manager is
 *          installed, the {@link SecurityManager#checkWrite(String) checkWrite}
 *          method is invoked to check write access to the file. The {@link
 *          SecurityManager#checkDelete(String) checkDelete} method is
 *          invoked to check delete access if the file is opened with the
 *          {@code DELETE_ON_CLOSE} option.
 */
public OutputStream newOutputStream(Path path, OpenOption... options) throws IOException {
  int len = options.length;
  Set<OpenOption> opts;
  if (len == 0) {
    opts = DEFAULT_OPEN_OPTIONS;
  } else {
    opts = new HashSet<>();
    for (OpenOption opt : options) {
      if (opt == StandardOpenOption.READ)
        throw new IllegalArgumentException("READ not allowed");
      opts.add(opt);
    }
    opts.add(StandardOpenOption.WRITE);
  }
  WritableByteChannel wbc = newByteChannel(path, opts);
  if (wbc instanceof FileChannelImpl) {
    ((FileChannelImpl) wbc).setUninterruptible();
  }
  return Channels.newOutputStream(wbc);
}
Example 15
Source File: CSVSink.java From dlp-dataflow-deidentification with Apache License 2.0 | 4 votes |
@Override
public void open(WritableByteChannel channel) throws IOException {
  writer = new PrintWriter(Channels.newOutputStream(channel));
}
Example 16
Source File: ThriftIO.java From beam with Apache License 2.0 | 4 votes |
@Override
public void open(WritableByteChannel channel) throws IOException {
  this.writer = new ThriftWriter<>(Channels.newOutputStream(channel), getProtocolFactory());
}
Example 17
Source File: StreamWriter.java From stratio-cassandra with Apache License 2.0 | 4 votes |
/**
 * Streams the specified sections of the file to the given channel.
 *
 * StreamWriter uses LZF compression on the wire to decrease the size to transfer.
 *
 * @param channel where this writes data to
 * @throws IOException on any I/O error
 */
public void write(WritableByteChannel channel) throws IOException {
  long totalSize = totalSize();
  RandomAccessReader file = sstable.openDataReader();
  ChecksumValidator validator =
      new File(sstable.descriptor.filenameFor(Component.CRC)).exists()
          ? DataIntegrityMetadata.checksumValidator(sstable.descriptor)
          : null;
  transferBuffer = validator == null ? new byte[DEFAULT_CHUNK_SIZE] : new byte[validator.chunkSize];

  // setting up data compression stream
  compressedOutput = new LZFOutputStream(Channels.newOutputStream(channel));
  long progress = 0L;

  try {
    // stream each of the required sections of the file
    for (Pair<Long, Long> section : sections) {
      long start = validator == null ? section.left : validator.chunkStart(section.left);
      int readOffset = (int) (section.left - start);
      // seek to the beginning of the section
      file.seek(start);
      if (validator != null)
        validator.seek(start);

      // length of the section to read
      long length = section.right - start;
      // tracks write progress
      long bytesRead = 0;
      while (bytesRead < length) {
        long lastBytesRead = write(file, validator, readOffset, length, bytesRead);
        bytesRead += lastBytesRead;
        progress += (lastBytesRead - readOffset);
        session.progress(sstable.descriptor, ProgressInfo.Direction.OUT, progress, totalSize);
        readOffset = 0;
      }

      // make sure that the current section is sent
      compressedOutput.flush();
    }
  } finally {
    // no matter what happens, close the file
    FileUtils.closeQuietly(file);
    FileUtils.closeQuietly(validator);
  }
}
Example 18
Source File: VectorSerializer.java From Bats with Apache License 2.0 | 4 votes |
private Writer(WritableByteChannel channel) {
  this.channel = channel;
  output = Channels.newOutputStream(channel);
}
Example 19
Source File: NamedPipeSocket.java From docker-java with Apache License 2.0 | 4 votes |
@Override
public OutputStream getOutputStream() {
  return Channels.newOutputStream(channel);
}
Example 20
Source File: GoogleHadoopOutputStream.java From hadoop-connectors with Apache License 2.0 | 4 votes |
private static OutputStream createOutputStream(
    WritableByteChannel channel, GoogleCloudStorageOptions gcsOptions) {
  OutputStream out = Channels.newOutputStream(channel);
  int bufferSize = gcsOptions.getWriteChannelOptions().getBufferSize();
  return bufferSize > 0 ? new BufferedOutputStream(out, bufferSize) : out;
}