java.nio.channels.FileChannel Scala Examples
The following examples show how to use java.nio.channels.FileChannel.
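For orientation before the project examples, here is a minimal, self-contained sketch of the core FileChannel calls they all build on. The file name and contents are placeholders, not taken from any project below:

import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import java.nio.file.{Paths, StandardOpenOption}

object FileChannelBasics {
  def main(args: Array[String]): Unit = {
    val path = Paths.get("example.txt")

    // Open for writing, creating the file if needed and truncating any old content.
    val out = FileChannel.open(path,
      StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE)
    val buf = ByteBuffer.wrap("hello, channel".getBytes("UTF-8"))
    while (buf.hasRemaining) out.write(buf) // write() may not drain the buffer in one call
    out.force(false)                        // flush file content (not metadata) to storage
    out.close()

    // Re-open for reading.
    val in = FileChannel.open(path, StandardOpenOption.READ)
    val dst = ByteBuffer.allocate(in.size.toInt)
    while (dst.hasRemaining && in.read(dst) != -1) {} // read until the buffer is full or EOF
    in.close()
    dst.flip()
    println(new String(dst.array, 0, dst.limit, "UTF-8"))
  }
}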
Example 1
Source File: NIOLogFileWriter.scala, from scribe (MIT License). A log-file writer that opens a FileChannel in append or truncate mode and retries partial writes until the buffer is drained.
package scribe.writer.file

import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import java.nio.file.{OpenOption, StandardOpenOption}

import scala.annotation.tailrec

class NIOLogFileWriter(lf: LogFile) extends LogFileWriter {
  private lazy val options: List[OpenOption] = if (lf.append) {
    List(StandardOpenOption.WRITE, StandardOpenOption.APPEND, StandardOpenOption.CREATE)
  } else {
    List(StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE)
  }

  private lazy val channel: FileChannel = FileChannel.open(lf.path, options: _*)

  override def write(output: String): Unit = {
    val bytes = output.getBytes(lf.charset)
    val buffer = ByteBuffer.wrap(bytes)
    writeBuffer(buffer)
    buffer.clear()
  }

  @tailrec
  private def writeBuffer(buffer: ByteBuffer): Unit = if (buffer.hasRemaining) {
    channel.write(buffer)
    writeBuffer(buffer)
  }

  override def flush(): Unit = channel.force(false)

  override def dispose(): Unit = if (channel.isOpen) {
    channel.close()
  }
}
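Two details are worth noting here: FileChannel.write may consume fewer bytes than the buffer holds, so writeBuffer recurses (the @tailrec annotation guarantees it compiles to a loop) until buffer.hasRemaining is false; and flush delegates to channel.force(false), which forces file content to storage without also forcing a metadata update.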
Example 2
Source File: Using.scala, from Argus-SAF (Apache License 2.0). A loan-pattern resource manager with combinators for streams, readers, zip/jar files, and FileChannels.
package org.argus.jawa.core.compiler.compile.io

import java.io.{Closeable, FileInputStream, FileOutputStream, InputStream, OutputStream, File => JavaFile}
import java.io.{BufferedInputStream, BufferedOutputStream, InputStreamReader, OutputStreamWriter}
import java.io.{BufferedReader, BufferedWriter}
import java.util.zip.GZIPInputStream
import java.net.URL
import java.nio.channels.FileChannel
import java.nio.charset.Charset
import java.util.jar.{JarFile, JarInputStream, JarOutputStream}
import java.util.zip.{GZIPOutputStream, ZipEntry, ZipFile, ZipInputStream, ZipOutputStream}

import ErrorHandling.translate

import scala.reflect.{Manifest => SManifest}

abstract class Using[Source, T] {
  protected def open(src: Source): T

  def apply[R](src: Source)(f: T => R): R = {
    val resource = open(src)
    try { f(resource) }
    finally { close(resource) }
  }

  protected def close(out: T): Unit
}

abstract class WrapUsing[Source, T](implicit srcMf: SManifest[Source], targetMf: SManifest[T]) extends Using[Source, T] {
  protected def label[S](m: SManifest[S]): String = m.runtimeClass.getSimpleName
  protected def openImpl(source: Source): T
  protected final def open(source: Source): T =
    translate("Error wrapping " + label(srcMf) + " in " + label(targetMf) + ": ") { openImpl(source) }
}

trait OpenFile[T] extends Using[JavaFile, T] {
  protected def openImpl(file: JavaFile): T
  protected final def open(file: JavaFile): T = {
    val parent = file.getParentFile
    if (parent != null) IO.createDirectory(parent)
    openImpl(file)
  }
}

object Using {
  def wrap[Source, T <: Closeable](openF: Source => T)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): Using[Source, T] =
    wrap(openF, closeCloseable)
  def wrap[Source, T](openF: Source => T, closeF: T => Unit)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): Using[Source, T] =
    new WrapUsing[Source, T] {
      def openImpl(source: Source): T = openF(source)
      def close(t: T): Unit = closeF(t)
    }

  def resource[Source, T <: Closeable](openF: Source => T): Using[Source, T] =
    resource(openF, closeCloseable)
  def resource[Source, T](openF: Source => T, closeF: T => Unit): Using[Source, T] =
    new Using[Source, T] {
      def open(s: Source): T = openF(s)
      def close(s: T): Unit = closeF(s)
    }

  def file[T <: Closeable](openF: JavaFile => T): OpenFile[T] = file(openF, closeCloseable)
  def file[T](openF: JavaFile => T, closeF: T => Unit): OpenFile[T] =
    new OpenFile[T] {
      def openImpl(file: JavaFile): T = openF(file)
      def close(t: T): Unit = closeF(t)
    }

  private def closeCloseable[T <: Closeable]: T => Unit = _.close()

  def bufferedOutputStream: Using[OutputStream, BufferedOutputStream] = wrap((out: OutputStream) => new BufferedOutputStream(out))
  def bufferedInputStream: Using[InputStream, BufferedInputStream] = wrap((in: InputStream) => new BufferedInputStream(in))
  def fileOutputStream(append: Boolean = false): OpenFile[BufferedOutputStream] = file(f => new BufferedOutputStream(new FileOutputStream(f, append)))
  def fileInputStream: OpenFile[BufferedInputStream] = file(f => new BufferedInputStream(new FileInputStream(f)))
  def urlInputStream: Using[URL, BufferedInputStream] = resource((u: URL) => translate("Error opening " + u + ": ")(new BufferedInputStream(u.openStream)))
  def fileOutputChannel: OpenFile[FileChannel] = file(f => new FileOutputStream(f).getChannel)
  def fileInputChannel: OpenFile[FileChannel] = file(f => new FileInputStream(f).getChannel)
  def fileWriter(charset: Charset = IO.utf8, append: Boolean = false): OpenFile[BufferedWriter] =
    file(f => new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f, append), charset)))
  def fileReader(charset: Charset): OpenFile[BufferedReader] =
    file(f => new BufferedReader(new InputStreamReader(new FileInputStream(f), charset)))
  def urlReader(charset: Charset): Using[URL, BufferedReader] =
    resource((u: URL) => new BufferedReader(new InputStreamReader(u.openStream, charset)))
  def jarFile(verify: Boolean): OpenFile[JarFile] = file(f => new JarFile(f, verify), (_: JarFile).close())
  def zipFile: OpenFile[ZipFile] = file(f => new ZipFile(f), (_: ZipFile).close())
  def streamReader: Using[(InputStream, Charset), InputStreamReader] = wrap {
    (_: (InputStream, Charset)) match { case (in, charset) => new InputStreamReader(in, charset) }
  }
  def gzipInputStream: Using[InputStream, GZIPInputStream] = wrap((in: InputStream) => new GZIPInputStream(in, 8192))
  def zipInputStream: Using[InputStream, ZipInputStream] = wrap((in: InputStream) => new ZipInputStream(in))
  def zipOutputStream: Using[OutputStream, ZipOutputStream] = wrap((out: OutputStream) => new ZipOutputStream(out))
  def gzipOutputStream: Using[OutputStream, GZIPOutputStream] = wrap((out: OutputStream) => new GZIPOutputStream(out, 8192), (_: GZIPOutputStream).finish())
  def jarOutputStream: Using[OutputStream, JarOutputStream] = wrap((out: OutputStream) => new JarOutputStream(out))
  def jarInputStream: Using[InputStream, JarInputStream] = wrap((in: InputStream) => new JarInputStream(in))
  def zipEntry(zip: ZipFile): Using[ZipEntry, InputStream] = resource((entry: ZipEntry) =>
    translate("Error opening " + entry.getName + " in " + zip + ": ") { zip.getInputStream(entry) })
}
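As a hypothetical usage sketch (the file names are invented for illustration), the loan-pattern combinators above can be nested to copy one file to another through the two FileChannel helpers; Using closes both channels for you:

import java.io.{File => JavaFile}

val copied: Long =
  Using.fileInputChannel(new JavaFile("in.bin")) { in =>
    Using.fileOutputChannel(new JavaFile("out.bin")) { out =>
      in.transferTo(0, in.size, out) // channel-to-channel copy, no intermediate buffer
    }
  }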
Example 3
Source File: FileIO.scala, from swave (Mozilla Public License 2.0). File read/write helpers, path resolution with ~ expansion, and a quiet-close utility for FileChannel.
package swave.core.io.files

import java.io.File
import java.nio.channels.FileChannel
import java.nio.file.{FileSystems, Files, Path, StandardOpenOption}

import scala.util.control.NonFatal
import com.typesafe.config.Config
import swave.core.impl.util.SettingsCompanion
import swave.core.io.Bytes
import swave.core.macros._

object FileIO extends SpoutFromFiles with DrainToFiles {

  lazy val userHomePath: Path = FileSystems.getDefault.getPath(System getProperty "user.home")

  def resolveFileSystemPath(pathName: String): Path =
    if (pathName.length >= 2 && pathName.charAt(0) == '~' && pathName.charAt(1) == File.separatorChar) {
      userHomePath.resolve(pathName substring 2)
    } else FileSystems.getDefault.getPath(pathName)

  val WriteCreateOptions: Set[StandardOpenOption] = {
    import StandardOpenOption._
    Set(CREATE, TRUNCATE_EXISTING, WRITE)
  }

  final case class Settings(defaultFileReadingChunkSize: Int, defaultFileWritingChunkSize: Int) {
    requireArg(defaultFileReadingChunkSize > 0, "`defaultFileChunkSize` must be > 0")
    requireArg(defaultFileWritingChunkSize >= 0, "`defaultFileWritingChunkSize` must be >= 0")

    def withDefaultFileReadingChunkSize(defaultFileReadingChunkSize: Int) =
      copy(defaultFileReadingChunkSize = defaultFileReadingChunkSize)
    def withDefaultFileWritingChunkSize(defaultFileWritingChunkSize: Int) =
      copy(defaultFileWritingChunkSize = defaultFileWritingChunkSize)
  }

  object Settings extends SettingsCompanion[Settings]("swave.core.file-io") {
    def fromSubConfig(c: Config): Settings =
      Settings(
        defaultFileReadingChunkSize = c getInt "default-file-reading-chunk-size",
        defaultFileWritingChunkSize = c getInt "default-file-writing-chunk-size")
  }

  def writeFile[T: Bytes](fileName: String, data: T): Unit = writeFile(resolveFileSystemPath(fileName), data)
  def writeFile[T: Bytes](file: File, data: T): Unit = writeFile(file.toPath, data)
  def writeFile[T: Bytes](path: Path, data: T, options: StandardOpenOption*): Unit = {
    implicit def decorator(value: T): Bytes.Decorator[T] = Bytes.decorator(value)
    Files.write(path, data.toArray, options: _*)
    ()
  }

  def readFile[T: Bytes](fileName: String): T = readFile(resolveFileSystemPath(fileName))
  def readFile[T: Bytes](file: File): T = readFile(file.toPath)
  def readFile[T: Bytes](path: Path): T = implicitly[Bytes[T]].apply(Files.readAllBytes(path))

  private[io] def quietClose(channel: FileChannel): Unit =
    try channel.close()
    catch { case NonFatal(_) ⇒ }
}
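One detail that is easy to miss: resolveFileSystemPath expands a leading "~" followed by the platform separator into the user's home directory; everything else falls through to the default file system. A one-line sketch with a hypothetical path:

val p = FileIO.resolveFileSystemPath("~/data/out.bin")
// on Unix-like systems: <user.home>/data/out.bin

quietClose is the usual best-effort close helper: it swallows any non-fatal exception thrown by FileChannel.close, which is appropriate in cleanup paths where a close failure should not mask the original error.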
Example 4
Source File: RecoverLog.scala, from polynote (Apache License 2.0). Replays a write-ahead log read from a FileChannel to reconstruct a notebook and print it as ipynb.
package polynote

import java.nio.channels.FileChannel
import java.nio.file.{Files, Paths, StandardOpenOption}
import java.time.Instant

import cats.effect.Effect
import polynote.app.{Args, MainArgs}
import polynote.kernel.logging.Logging
import polynote.messages.{Message, Notebook, NotebookUpdate, ShortList}
import polynote.server.AppEnv
import zio.{Ref, Runtime, Task, UIO, ZIO}
import zio.ZIO.effectTotal
import zio.blocking.effectBlocking
import fs2.Stream
import polynote.server.repository.{FileBasedRepository, NotebookContent}
import polynote.server.repository.format.ipynb.IPythonFormat
import polynote.server.repository.fs.WAL
import polynote.server.taskConcurrent
import scodec.bits.ByteVector
import scodec.stream.decode
import scodec.codecs
import scodec.stream.decode.StreamDecoder

object RecoverLog {

  def replay(messages: Stream[Task, (Instant, Message)], ref: Ref[Notebook], log: Logging.Service): UIO[Unit] =
    messages.map(_._2).evalMap {
      case nb: Notebook => ref.set(nb)
      case upd: NotebookUpdate => ref.update {
        nb =>
          try {
            upd.applyTo(nb)
          } catch {
            case err: Throwable =>
              log.errorSync(Some("Dropped update because an error occurred when applying it"), err)
              nb
          }
      }
      case _ => ZIO.unit
    }.compile.drain.catchAll {
      err => log.error(Some("Error occurred while replaying the log; printing the final state anyway."), err)
    }

  def main(implicit ev: Effect[Task]): ZIO[AppEnv, String, Int] = for {
    args     <- ZIO.access[MainArgs](_.get[Args].rest)
    path     <- ZIO(args.head).flatMap(pathStr => effectBlocking(Paths.get(pathStr).toRealPath())).orDie
    is       <- effectBlocking(FileChannel.open(path, StandardOpenOption.READ)).orDie
    log      <- Logging.access
    _        <- Logging.info(s"Reading log entries from ${path}...")
    messages  = WAL.decoder.decodeMmap(is)
    ref      <- Ref.make(Notebook("", ShortList.Nil, None))
    _        <- replay(messages, ref, log)
    format    = new IPythonFormat
    result   <- ref.get
    encoded  <- format.encodeNotebook(NotebookContent(result.cells, result.config)).orDie
    _        <- effectTotal(println(encoded))
  } yield 0
}
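The entry point hands the raw FileChannel to WAL.decoder.decodeMmap, whose name suggests the log is consumed through a memory-mapped buffer rather than streaming reads. For reference only, a plain-JDK sketch of read-only memory mapping (the file name is assumed, and this is not polynote's code):

import java.nio.channels.FileChannel
import java.nio.channels.FileChannel.MapMode
import java.nio.file.{Paths, StandardOpenOption}

val ch = FileChannel.open(Paths.get("notebook.wal"), StandardOpenOption.READ)
val mapped = ch.map(MapMode.READ_ONLY, 0, ch.size) // MappedByteBuffer over the whole file
// ... decode from `mapped`, then:
ch.close()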
Example 5
Source File: CheckerTest.scala, from spark-bam (Apache License 2.0). Tests a BAM-record checker over seekable uncompressed bytes backed by a FileChannel.
package org.hammerlab.bam.check.full

import java.nio.channels.FileChannel

import hammerlab.path._
import org.hammerlab.bam.check.full.error.{Flags, InvalidCigarOp, NoReadName, Result, Success}
import org.hammerlab.bam.header.ContigLengths
import org.hammerlab.bam.test.resources.bam2
import org.hammerlab.bgzf.Pos
import org.hammerlab.bgzf.block.SeekableUncompressedBytes
import org.hammerlab.channel.SeekableByteChannel.ChannelByteChannel
import org.hammerlab.hadoop.Configuration
import org.hammerlab.test.Suite

class CheckerTest extends Suite {

  def check(path: Path, pos: Pos, expected: Result): Unit = {
    val uncompressedBytes =
      SeekableUncompressedBytes(
        ChannelByteChannel(FileChannel.open(path))
      )

    implicit val conf = Configuration()

    val checker =
      Checker(
        uncompressedBytes,
        ContigLengths(path)
      )

    checker(pos) should be(expected)
  }

  test("true positive") {
    check(
      bam2,
      Pos(439897, 52186),
      Success(10)
    )
  }

  test("2 checks fail in header") {
    check(
      bam2,
      Pos(0, 5649),
      Flags(
        tooFewFixedBlockBytes = false,
        readPosError = None,
        nextReadPosError = None,
        readNameError = Some(NoReadName),
        cigarOpsError = Some(InvalidCigarOp),
        tooFewRemainingBytesImplied = false,
        readsBeforeError = 0
      )
    )
  }

  test("EoF") {
    check(
      bam2,
      Pos(1006167, 15243),
      Flags(
        tooFewFixedBlockBytes = true,
        None, None, None, None,
        false,
        readsBeforeError = 0
      )
    )
  }
}
Example 6
Source File: MetadataStreamTest.scala, from spark-bam (Apache License 2.0). Tests streaming BGZF block metadata from a FileChannel, including reuse of the channel after resetting its position.
package org.hammerlab.bgzf.block

import java.nio.channels.FileChannel

import org.hammerlab.bam.test.resources.bam2
import org.hammerlab.channel.SeekableByteChannel._
import org.hammerlab.test.Suite

class MetadataStreamTest extends Suite {

  test("metadata") {
    val ch = FileChannel.open(bam2)

    MetadataStream(ch)
      .take(10)
      .toList should be(
        List(
          Metadata(     0, 26169, 65498),
          Metadata( 26169, 24080, 65498),
          Metadata( 50249, 25542, 65498),
          Metadata( 75791, 22308, 65498),
          Metadata( 98099, 20688, 65498),
          Metadata(118787, 19943, 65498),
          Metadata(138730, 20818, 65498),
          Metadata(159548, 21957, 65498),
          Metadata(181505, 19888, 65498),
          Metadata(201393, 20517, 65498)
        )
      )

    ch.position(0)

    val stream = MetadataStream(ch)
    stream.size should be(25)

    ch.isOpen should be(true)
    stream.close()
    ch.isOpen should be(false)
  }
}
Example 7
Source File: ByteStreamTest.scala, from spark-bam (Apache License 2.0). Tests reading and seeking BAM data through uncompressed byte streams layered over a FileChannel.
package org.hammerlab.bgzf.block

import java.nio.channels.FileChannel

import org.hammerlab.bgzf.Pos
import org.hammerlab.channel.ByteChannel
import org.hammerlab.bam.test.resources.bam2
import org.hammerlab.test.Suite

class ByteStreamTest extends Suite {

  def checkHeader(implicit byteStream: UncompressedBytesI[_], byteChannel: ByteChannel): Unit = {
    byteChannel.readString(4, includesNull = false) should be("BAM\1")

    val headerTextLength = 4253
    byteChannel.getInt should be(headerTextLength)

    val headerStr = byteChannel.readString(headerTextLength)
    headerStr.take(100) should be(
      """@HD VN:1.5 GO:none SO:coordinate
        |@SQ SN:1 LN:249250621
        |@SQ SN:2 LN:243199373
        |@SQ SN:3 LN:198022430
        |@"""
        .stripMargin
    )

    val numReferenceSequences = byteChannel.getInt
    numReferenceSequences should be(84)

    // skip to last reference-sequence-length
    byteChannel.skip(5646 - byteChannel.position().toInt)
    byteStream.curPos should be(Some(Pos(0, 5646)))

    byteChannel.getInt should be(547496)  // Last reference sequence length
    byteStream.curPos should be(Some(Pos(0, 5650)))

    byteChannel.getInt should be(620)  // First record length, in (uncompressed) bytes
    byteStream.curPos should be(Some(Pos(0, 5654)))

    val firstBlockLength = 65498

    // Skip to 4 bytes from the end of this block
    byteChannel.skip(firstBlockLength - 4 - byteChannel.position().toInt)
    byteStream.clear()
    byteStream.curPos should be(Some(Pos(0, firstBlockLength - 4)))

    byteChannel.getInt
    byteStream.curPos should be(Some(Pos(26169, 0)))
  }

  test("ByteStream") {
    implicit val byteStream = UncompressedBytes(bam2.inputStream)
    implicit val byteChannel: ByteChannel = byteStream
    checkHeader
  }

  test("SeekableByteStream") {
    implicit val byteStream = SeekableUncompressedBytes(bam2)
    implicit val byteChannel: ByteChannel = byteStream
    checkHeader

    def checkRead(): Unit = {
      byteStream.seek(Pos(26169, 16277))
      byteChannel.getInt should be(642)
      byteChannel.getInt should be(0)
      byteChannel.getInt should be(12815)
      val readNameLen = byteChannel.getInt & 0xff
      byteChannel.skip(20)
      byteChannel.readString(readNameLen) should be("HWI-ST807:461:C2P0JACXX:4:2311:16471:84756")
    }

    checkRead()
    checkRead()

    byteStream.seek(Pos(0, 0))
    val freshByteChannel: ByteChannel = byteStream
    checkHeader(byteStream, freshByteChannel)
  }
}