java.nio.channels.SocketChannel Scala Examples
The following examples show how to use java.nio.channels.SocketChannel in Scala. Each example is taken from an open-source project; its source file, project, and license are noted in the header above the code.
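As a warm-up before the project examples, here is a minimal, self-contained sketch of the core API: open a blocking channel, write a request, read the reply. The localhost echo endpoint and port below are placeholders for this sketch only, not part of any example that follows.

import java.net.InetSocketAddress
import java.nio.ByteBuffer
import java.nio.channels.SocketChannel
import java.nio.charset.StandardCharsets

object SocketChannelHello {
  def main(args: Array[String]): Unit = {
    // Open a blocking channel to a local echo server (host and port are placeholders).
    val channel = SocketChannel.open(new InetSocketAddress("localhost", 7))
    try {
      // write() may not drain the buffer in a single call, so loop until empty.
      val out = ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8))
      while (out.hasRemaining) channel.write(out)

      // read() returns -1 on end-of-stream.
      val in = ByteBuffer.allocate(64)
      if (channel.read(in) > 0) {
        in.flip()
        println(StandardCharsets.UTF_8.decode(in).toString)
      }
    } finally channel.close()
  }
}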
Example 1
Source File: TcpIncomingConnection.scala, from the perf_tester project (Apache License 2.0)
package akka.io

import java.nio.channels.SocketChannel
import scala.collection.immutable
import akka.actor.ActorRef
import akka.io.Inet.SocketOption

// An actor handling a single accepted (incoming) TCP connection.
private[io] class TcpIncomingConnection(
    _tcp: TcpExt,
    _channel: SocketChannel,
    registry: ChannelRegistry,
    bindHandler: ActorRef,
    options: immutable.Traversable[SocketOption],
    readThrottling: Boolean)
  extends TcpConnection(_tcp, _channel, readThrottling) {

  // Die together with the actor that bound the listening socket.
  signDeathPact(bindHandler)

  registry.register(channel, initialOps = 0)

  def receive = {
    case registration: ChannelRegistration ⇒ completeConnect(registration, bindHandler, options)
  }
}
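TcpIncomingConnection is private[io], so applications never construct it directly; Akka's TCP manager spawns one per accepted socket. For context, a minimal sketch of the public Akka I/O API that drives it (the port is arbitrary, and EchoListener/EchoHandler are illustrative names, not part of the example above):

import java.net.InetSocketAddress
import akka.actor.{ Actor, ActorSystem, Props }
import akka.io.{ IO, Tcp }

// Binds a listener; each accepted socket is backed internally by a
// TcpIncomingConnection actor like the one shown above.
class EchoListener extends Actor {
  import Tcp._
  import context.system

  IO(Tcp) ! Bind(self, new InetSocketAddress("localhost", 9099))

  def receive = {
    case Bound(localAddress)     => println(s"Listening on $localAddress")
    case CommandFailed(_: Bind)  => context.stop(self)
    case _: Connected =>
      // Registering a handler completes the connection setup
      // (the completeConnect step seen in TcpIncomingConnection).
      sender() ! Register(context.actorOf(Props[EchoHandler]))
  }
}

class EchoHandler extends Actor {
  import Tcp._
  def receive = {
    case Received(data)      => sender() ! Write(data) // echo bytes back
    case _: ConnectionClosed => context.stop(self)
  }
}

object EchoMain extends App {
  ActorSystem("echo-demo").actorOf(Props[EchoListener])
}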
Example 2
Source File: SocketGenerator.scala, from the flink-demos project (Apache License 2.0)
package com.dataartisans.flink.example.eventpattern.Socket

import java.net.{InetAddress, InetSocketAddress}
import java.nio.{ByteOrder, ByteBuffer}
import java.nio.channels.SocketChannel

import com.dataartisans.flink.example.eventpattern.{StandaloneGeneratorBase, Event}
import org.apache.flink.util.Collector

object SocketGenerator extends StandaloneGeneratorBase {

  val BASE_PORT = 51762

  def main(args: Array[String]): Unit = {
    val numPartitions = 4 //args(0).toInt
    val collectors = new Array[SocketCollector](numPartitions)

    // create the generator threads
    for (i <- 0 until collectors.length) {
      collectors(i) = new SocketCollector(BASE_PORT + i)
    }

    runGenerator(collectors)
  }
}

// Pushes generated events over a TCP socket, 8 bytes (two little-endian ints) per event.
class SocketCollector(val port: Int) extends Collector[Event] {

  val channel = SocketChannel.open(new InetSocketAddress(InetAddress.getByName("localhost"), port))
  channel.configureBlocking(true)
  channel.finishConnect()

  val buffer = ByteBuffer.allocateDirect(4096).order(ByteOrder.LITTLE_ENDIAN)

  override def collect(t: Event): Unit = {
    // flush once fewer than 8 bytes (one event) remain in the buffer
    if (buffer.remaining() < 8) {
      buffer.flip()
      channel.write(buffer)
      buffer.clear()
    }
    buffer.putInt(t.sourceAddress)
    buffer.putInt(t.event)
  }

  override def close(): Unit = {
    // flush any buffered events before closing
    if (buffer.position() > 0) {
      buffer.flip()
      channel.write(buffer)
    }
    channel.close()
  }
}
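SocketGenerator only writes, so any consumer must agree on the byte order and record size. A minimal sketch of a reader for one partition, assuming the layout implied above (little-endian, two 4-byte ints per event, first partition on BASE_PORT); the object name is illustrative:

import java.net.InetSocketAddress
import java.nio.{ ByteBuffer, ByteOrder }
import java.nio.channels.ServerSocketChannel

object SocketEventReader {
  def main(args: Array[String]): Unit = {
    // Listen where the first generator partition connects (BASE_PORT above).
    val server = ServerSocketChannel.open()
    server.bind(new InetSocketAddress("localhost", 51762))
    val channel = server.accept()

    // Mirror the writer: little-endian buffer, 8 bytes (two ints) per event.
    val buffer = ByteBuffer.allocateDirect(4096).order(ByteOrder.LITTLE_ENDIAN)
    while (channel.read(buffer) != -1) {
      buffer.flip()
      while (buffer.remaining() >= 8) {
        val sourceAddress = buffer.getInt()
        val event = buffer.getInt()
        println(s"source=$sourceAddress event=$event")
      }
      buffer.compact() // keep any partial event for the next read
    }
    channel.close()
    server.close()
  }
}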
Example 3
Source File: DataStreamSink.scala, from the spark-vector project (Apache License 2.0)
package com.actian.spark_vector.datastream.writer

import java.nio.ByteBuffer
import java.nio.channels.SocketChannel

import com.actian.spark_vector.datastream.{ padding, DataStreamConnector }
import com.actian.spark_vector.colbuffer.WriteColumnBuffer

// NOTE: the enclosing class declaration and the writeDataHeader/writeDataColumn
// helpers are elided in this excerpt.
  def write(len: Int, numTuples: Int, columnBufs: Seq[WriteColumnBuffer[_]]): Unit = {
    writeDataHeader(len, numTuples)
    columnBufs.foreach { cb =>
      cb.flip()
      writeDataColumn(cb)
      cb.clear()
    }
  }
}
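The excerpt leaves out its enclosing class and the writeDataHeader/writeDataColumn helpers, but the pattern it depends on is standard NIO: fill a buffer, flip it, write it until drained, clear it. A small sketch under that assumption; writeHeader is a purely hypothetical stand-in, since the real header layout is not visible here.

import java.nio.ByteBuffer
import java.nio.channels.SocketChannel

object DrainExample {
  // A single write() is not guaranteed to drain the buffer on a socket,
  // so writers loop until nothing remains.
  def writeFully(socket: SocketChannel, buffer: ByteBuffer): Unit =
    while (buffer.hasRemaining) socket.write(buffer)

  // Hypothetical header writer in the spirit of writeDataHeader above:
  // two ints, total byte length then tuple count (the actual layout is
  // not shown in the excerpt).
  def writeHeader(socket: SocketChannel, len: Int, numTuples: Int): Unit = {
    val header = ByteBuffer.allocate(8)
    header.putInt(len).putInt(numTuples)
    header.flip()
    writeFully(socket, header)
  }
}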
Example 4
Source File: DataStreamTap.scala, from the spark-vector project (Apache License 2.0)
package com.actian.spark_vector.datastream.reader

import com.actian.spark_vector.util.ResourceUtil.closeResourceOnFailure
import com.actian.spark_vector.colbuffer.ColumnBuffer
import com.actian.spark_vector.util.Logging

import java.nio.ByteBuffer
import java.nio.channels.SocketChannel

private[reader] case class DataStreamTap(implicit val socket: SocketChannel) extends Logging with Serializable {
  import DataStreamReader._

  private final val SuccessCode = 0 // X100CPT_SUCCESS
  private final val BinaryDataCode = 5 // X100CPT_BINARY_DATA_V2
  private final val NumTuplesIndex = 4

  private var vector: ByteBuffer = _
  private var isNextVectorBuffered = false
  private var remaining = true

  // Reads one vector packet and flags end-of-stream when it carries zero tuples.
  private def readVector(reuseBuffer: ByteBuffer): ByteBuffer = readWithByteBuffer(Option(reuseBuffer)) { vectors =>
    val packetType = vectors.getInt()
    if (packetType != BinaryDataCode && packetType != SuccessCode) {
      throw new Exception(s"Invalid packet type code = ${packetType}.")
    }
    if (vectors.getInt(NumTuplesIndex) == 0) {
      logDebug(s"Empty data stream.")
      remaining = false
    }
    vectors
  }

  def read()(implicit reuseBuffer: ByteBuffer): ByteBuffer = {
    if (!remaining) throw new NoSuchElementException("Empty data stream.")
    if (isNextVectorBuffered) {
      isNextVectorBuffered = false
    } else {
      vector = readVector(reuseBuffer)
    }
    vector
  }

  // Peek: buffer the next vector (if any) and report whether the stream is exhausted.
  def isEmpty()(implicit reuseBuffer: ByteBuffer): Boolean = {
    if (remaining) read()
    isNextVectorBuffered = true
    !remaining
  }

  def close(): Unit = socket.close
}
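All framing in DataStreamTap is delegated to DataStreamReader.readWithByteBuffer, which the excerpt does not include. Purely as an illustration of what such a helper can look like, here is a sketch that assumes a 4-byte length prefix per message; readMessage is hypothetical and is not spark-vector's actual implementation.

import java.nio.ByteBuffer
import java.nio.channels.SocketChannel

object FramedRead {
  // Read exactly buffer.remaining() bytes; a single read() can return a short count.
  private def readFully(socket: SocketChannel, buffer: ByteBuffer): Unit =
    while (buffer.hasRemaining) {
      if (socket.read(buffer) == -1)
        throw new java.io.EOFException("Connection closed mid-message")
    }

  // Hypothetical framing: a 4-byte length prefix followed by the payload,
  // optionally reusing a caller-supplied buffer.
  def readMessage(socket: SocketChannel, reuse: Option[ByteBuffer]): ByteBuffer = {
    val lenBuf = ByteBuffer.allocate(4)
    readFully(socket, lenBuf)
    lenBuf.flip()
    val len = lenBuf.getInt()
    val payload = reuse.filter(_.capacity >= len).getOrElse(ByteBuffer.allocate(len))
    payload.clear().limit(len)
    readFully(socket, payload)
    payload.flip()
    payload
  }
}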
Example 5
Source File: VectorSRPServer.scala, from the spark-vector project (Apache License 2.0)
package com.actian.spark_vector.srp

import java.nio.channels.SocketChannel

import com.actian.spark_vector.datastream.reader.DataStreamReader._
import com.actian.spark_vector.datastream.writer.DataStreamWriter._
import com.actian.spark_vector.util.Logging
import com.actian.spark_vector.vector.{ ErrorCodes, VectorException }

// NOTE: the enclosing object declaration (supplying authCode, sBCode, MCode,
// serverMCode, M, and getSessionWithClientParameters) is elided in this excerpt.
  def authenticate(implicit socket: SocketChannel): Unit = {
    // Step 1: receive the client's username and public value A.
    val (username, aVal) = readWithByteBuffer() { in =>
      if (!readCode(in, authCode)) throw new VectorException(AuthError, "Authentication failed: didn't receive auth code on authentication")
      val I = readString(in)
      val Aval = readByteArray(in)
      (I, Aval)
    }
    logTrace(s"Received username $username and A=${aVal.toHexString}")
    getSessionWithClientParameters(username, aVal) match {
      case Some((sesVal, kVal, s, bVal)) => {
        logTrace(s"S = ${sesVal.toHexString}, K = ${kVal.toHexString}, s = ${s.toHexString}, B = ${bVal.toHexString}")
        // Step 2: send the salt s and the server public value B.
        writeWithByteBuffer { out =>
          writeCode(out, sBCode)
          writeString(out, s.toHexString)
          writeByteArray(out, bVal)
        }
        // Step 3: verify the client's proof M, then answer with the server proof.
        val clientM = readWithByteBuffer() { in =>
          if (!readCode(in, MCode)) throw new VectorException(AuthError, "Authentication failed: unable to read code before verification of client M key")
          readByteArray(in)
        }
        logTrace(s"Received clientM=${clientM.toHexString}, server version is ${VectorSRPClient.M(username, s, aVal, bVal, kVal).toHexString}")
        if (!clientM.sameElements(VectorSRPClient.M(username, s, aVal, bVal, kVal))) throw new VectorException(AuthError, "Authentication failed: client M differs from server M")
        writeWithByteBuffer { out =>
          writeCode(out, serverMCode)
          writeByteArray(out, M(aVal, clientM, kVal))
        }
      }
      case None => throw new VectorException(AuthError, s"Authentication failed: username $username is not recognized")
    }
  }
}
Example 6
Source File: VectorSRPClient.scala, from the spark-vector project (Apache License 2.0)
package com.actian.spark_vector.srp

import java.nio.channels.SocketChannel

import scala.BigInt
import scala.annotation.tailrec

import com.actian.spark_vector.datastream.writer.DataStreamWriter
import com.actian.spark_vector.datastream.reader.DataStreamReader
import com.actian.spark_vector.util.Logging
import com.actian.spark_vector.vector.ErrorCodes._
import com.actian.spark_vector.vector.VectorException

// NOTE: the enclosing class declaration (supplying a, A, u, x, S, H, M and the
// read/write helpers) is elided in this excerpt.
  def authenticate(username: String, password: String)(implicit socket: SocketChannel): Unit = {
    // Step 1: send the username and the client public value A.
    val a = super.a
    val A = super.A(a)
    writeWithByteBuffer { out =>
      writeCode(out, authCode)
      writeString(out, username)
      writeByteArray(out, A)
    }
    // Step 2: receive the salt s and the server public value B.
    val (s, b): (Array[Byte], Array[Byte]) = readWithByteBuffer[(Array[Byte], Array[Byte])]() { in =>
      if (!readCode(in, sBCode)) {
        throw new VectorException(AuthError, "Authentication failed: unable to read Ok code after exchanging username and A")
      }
      (BigInt(readString(in), 16), readByteArray(in))
    }
    val B = b
    // Step 3: derive the shared session key K and exchange proofs.
    val u = super.u(A, B)
    val x = super.x(s, username, password)
    val S = super.S(x, B, a, u)
    val K = H(S)
    val clientM = M(username, s, A, B, K)
    writeWithByteBuffer { out =>
      writeCode(out, MCode)
      writeByteArray(out, clientM)
    }
    val serverM = readWithByteBuffer[Array[Byte]]() { in =>
      if (!readCode(in, serverMCode)) {
        throw new VectorException(AuthError, "Authentication failed: unable to read code before verification of server M key")
      }
      readByteArray(in)
    }
    if (!H(A ++ clientM ++ K).sameElements(serverM)) {
      throw new VectorException(AuthError, "Authentication failed: M and serverM differ")
    }
  }
}
// scalastyle:on magic.number
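A design note on both SRP excerpts: the proofs are compared with sameElements, which returns as soon as a byte differs. For authentication proofs a constant-time comparison is the customary choice, and the JDK ships one; this is a suggestion, not something the project does:

import java.security.MessageDigest

// Constant-time byte-array equality: examines every byte instead of
// short-circuiting at the first difference.
def proofsMatch(received: Array[Byte], expected: Array[Byte]): Boolean =
  MessageDigest.isEqual(received, expected)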
Example 7
Source File: VectorNet.scala, from the spark-vector project (Apache License 2.0)
package com.actian.spark_vector.vector

import java.nio.ByteBuffer
import java.nio.channels.SocketChannel

import com.actian.spark_vector.datastream.reader.DataStreamReader.readWithByteBuffer
import com.actian.spark_vector.datastream.writer.DataStreamWriter.writeWithByteBuffer

// NOTE: the enclosing object declaration (supplying ClientPacketType,
// SuccessPacketType, and checkPacketType) is elided in this excerpt.
  def readClientType(expectedClientType: Int)(implicit socket: SocketChannel): Unit = {
    readWithByteBuffer() { in =>
      checkPacketType(ClientPacketType, in)
      val clientType = in.getInt
      if (clientType != expectedClientType) {
        throw new VectorException(ErrorCodes.CommunicationError, s"Unexpected client type($clientType). Expected($expectedClientType)")
      }
    }
    // Acknowledge the handshake with a success packet.
    writeWithByteBuffer { _.writeInt(SuccessPacketType) }
  }
}
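For completeness, the client side of this handshake could be expressed with the same (elided) helpers. The counterpart below is hypothetical, inferred only from the reads and writes visible above, and is not part of the spark-vector source shown here:

// Hypothetical client-side counterpart to readClientType: announce the
// client type, then expect the server's success packet.
def writeClientType(clientType: Int)(implicit socket: SocketChannel): Unit = {
  writeWithByteBuffer { out =>
    out.writeInt(ClientPacketType)
    out.writeInt(clientType)
  }
  readWithByteBuffer() { in => checkPacketType(SuccessPacketType, in) }
}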