Java Code Examples for org.apache.flink.core.memory.MemorySegment#wrap()
The following examples show how to use org.apache.flink.core.memory.MemorySegment#wrap(). They are taken from open source projects; the header above each example names the original project and source file.
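Before the project code, here is a minimal, self-contained sketch of the contract that the tests below assert: wrap(offset, length) returns a ByteBuffer view of the segment whose position is offset, whose limit is offset + length, and whose remaining() equals length. The returned buffer is a view over the segment's own memory rather than a copy, so writes through it are visible via the segment's accessors. The class name and the chosen offsets are illustrative only and do not come from any of the projects listed here.

import java.nio.ByteBuffer;

import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;

public class WrapSketch {

    public static void main(String[] args) {
        // Allocate a 1 KiB unpooled segment (for illustration only).
        MemorySegment segment = MemorySegmentFactory.allocateUnpooledSegment(1024);

        // Wrap bytes [64, 64 + 128) of the segment as a ByteBuffer view.
        ByteBuffer view = segment.wrap(64, 128);

        // The view starts at the requested offset and spans exactly 'length' bytes.
        System.out.println(view.position());  // 64
        System.out.println(view.limit());     // 192
        System.out.println(view.remaining()); // 128

        // Writes through the view are visible in the segment (shared backing memory).
        view.put((byte) 42);
        System.out.println(segment.get(64));  // 42
    }
}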
Example 1
Source File: MemorySegmentSimpleTest.java (from Flink-CEPplus, Apache License 2.0)
@Test
public void testByteBufferWrapping() {
    try {
        MemorySegment seg = MemorySegmentFactory.allocateUnpooledSegment(1024);

        ByteBuffer buf1 = seg.wrap(13, 47);
        assertEquals(13, buf1.position());
        assertEquals(60, buf1.limit());
        assertEquals(47, buf1.remaining());

        ByteBuffer buf2 = seg.wrap(500, 267);
        assertEquals(500, buf2.position());
        assertEquals(767, buf2.limit());
        assertEquals(267, buf2.remaining());

        ByteBuffer buf3 = seg.wrap(0, 1024);
        assertEquals(0, buf3.position());
        assertEquals(1024, buf3.limit());
        assertEquals(1024, buf3.remaining());
    }
    catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example 2
Source File: MemorySegmentSimpleTest.java (from flink, Apache License 2.0)
@Test
public void testByteBufferWrapping() {
    try {
        MemorySegment seg = MemorySegmentFactory.allocateUnpooledSegment(1024);

        ByteBuffer buf1 = seg.wrap(13, 47);
        assertEquals(13, buf1.position());
        assertEquals(60, buf1.limit());
        assertEquals(47, buf1.remaining());

        ByteBuffer buf2 = seg.wrap(500, 267);
        assertEquals(500, buf2.position());
        assertEquals(767, buf2.limit());
        assertEquals(267, buf2.remaining());

        ByteBuffer buf3 = seg.wrap(0, 1024);
        assertEquals(0, buf3.position());
        assertEquals(1024, buf3.limit());
        assertEquals(1024, buf3.remaining());
    }
    catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example 3
Source File: BufferReaderWriterUtil.java (from flink, Apache License 2.0)
@Nullable
static Buffer readFromByteChannel(
        FileChannel channel,
        ByteBuffer headerBuffer,
        MemorySegment memorySegment,
        BufferRecycler bufferRecycler) throws IOException {

    headerBuffer.clear();
    if (!tryReadByteBuffer(channel, headerBuffer)) {
        return null;
    }
    headerBuffer.flip();

    final ByteBuffer targetBuf;
    final int header;
    final int size;

    try {
        header = headerBuffer.getInt();
        size = headerBuffer.getInt();
        targetBuf = memorySegment.wrap(0, size);
    }
    catch (BufferUnderflowException | IllegalArgumentException e) {
        // buffer underflow if header buffer is undersized
        // IllegalArgumentException if size is outside memory segment size
        throwCorruptDataException();
        return null; // silence compiler
    }

    readByteBufferFully(channel, targetBuf);

    return bufferFromMemorySegment(memorySegment, bufferRecycler, size, header == HEADER_VALUE_IS_EVENT);
}
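In this example (and the nearly identical one that follows, which differs only in how it parses the header), wrap() supplies the target ByteBuffer so that the buffer payload is read from the FileChannel directly into the MemorySegment, with no intermediate copy. Below is a stripped-down sketch of that read pattern; the class and method names are mine, not Flink API, and error handling is reduced to a bare end-of-file check.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

import org.apache.flink.core.memory.MemorySegment;

final class SegmentReadSketch {

    /** Reads exactly 'size' bytes from the channel into the start of the segment. */
    static void readFully(FileChannel channel, MemorySegment segment, int size) throws IOException {
        // wrap() exposes bytes [0, size) of the segment as the read target,
        // so the channel writes directly into the segment's memory.
        ByteBuffer target = segment.wrap(0, size);
        while (target.hasRemaining()) {
            if (channel.read(target) < 0) {
                throw new IOException("Premature end of file");
            }
        }
    }
}

FileChannel.read may return fewer bytes than requested, which is why the call loops until the wrapped view has no bytes remaining.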
Example 4
Source File: BufferReaderWriterUtil.java (from flink, Apache License 2.0)
@Nullable
static Buffer readFromByteChannel(
        FileChannel channel,
        ByteBuffer headerBuffer,
        MemorySegment memorySegment,
        BufferRecycler bufferRecycler) throws IOException {

    headerBuffer.clear();
    if (!tryReadByteBuffer(channel, headerBuffer)) {
        return null;
    }
    headerBuffer.flip();

    final ByteBuffer targetBuf;
    final boolean isEvent;
    final boolean isCompressed;
    final int size;

    try {
        isEvent = headerBuffer.getShort() == HEADER_VALUE_IS_EVENT;
        isCompressed = headerBuffer.getShort() == BUFFER_IS_COMPRESSED;
        size = headerBuffer.getInt();
        targetBuf = memorySegment.wrap(0, size);
    }
    catch (BufferUnderflowException | IllegalArgumentException e) {
        // buffer underflow if header buffer is undersized
        // IllegalArgumentException if size is outside memory segment size
        throwCorruptDataException();
        return null; // silence compiler
    }

    readByteBufferFully(channel, targetBuf);

    Buffer.DataType dataType = isEvent ? Buffer.DataType.EVENT_BUFFER : Buffer.DataType.DATA_BUFFER;
    return new NetworkBuffer(memorySegment, bufferRecycler, dataType, isCompressed, size);
}
Example 5
Source File: SpillingAdaptiveSpanningRecordDeserializer.java (from Flink-CEPplus, Apache License 2.0)
private void addNextChunkFromMemorySegment(MemorySegment segment, int offset, int numBytes) throws IOException {
    int segmentPosition = offset;
    int segmentRemaining = numBytes;

    // check where to go. if we have a partial length, we need to complete it first
    if (this.lengthBuffer.position() > 0) {
        int toPut = Math.min(this.lengthBuffer.remaining(), segmentRemaining);
        segment.get(segmentPosition, this.lengthBuffer, toPut);

        // did we complete the length?
        if (this.lengthBuffer.hasRemaining()) {
            return;
        }
        else {
            this.recordLength = this.lengthBuffer.getInt(0);

            this.lengthBuffer.clear();
            segmentPosition += toPut;
            segmentRemaining -= toPut;

            if (this.recordLength > THRESHOLD_FOR_SPILLING) {
                this.spillingChannel = createSpillingChannel();
            }
            else {
                ensureBufferCapacity(this.recordLength);
            }
        }
    }

    // copy as much as we need or can for this next spanning record
    int needed = this.recordLength - this.accumulatedRecordBytes;
    int toCopy = Math.min(needed, segmentRemaining);

    if (spillingChannel != null) {
        // spill to file
        ByteBuffer toWrite = segment.wrap(segmentPosition, toCopy);
        FileUtils.writeCompletely(this.spillingChannel, toWrite);
    }
    else {
        segment.get(segmentPosition, buffer, this.accumulatedRecordBytes, toCopy);
    }

    this.accumulatedRecordBytes += toCopy;

    if (toCopy < segmentRemaining) {
        // there is more data in the segment
        this.leftOverData = segment;
        this.leftOverStart = segmentPosition + toCopy;
        this.leftOverLimit = numBytes + offset;
    }

    if (accumulatedRecordBytes == recordLength) {
        // we have the full record
        if (spillingChannel == null) {
            this.serializationReadBuffer.setBuffer(buffer, 0, recordLength);
        }
        else {
            spillingChannel.close();

            BufferedInputStream inStream = new BufferedInputStream(new FileInputStream(spillFile), 2 * 1024 * 1024);
            this.spillFileReader = new DataInputViewStreamWrapper(inStream);
        }
    }
}
Example 6
Source File: SpillingAdaptiveSpanningRecordDeserializer.java (from flink, Apache License 2.0)
private void addNextChunkFromMemorySegment(MemorySegment segment, int offset, int numBytes) throws IOException {
    int segmentPosition = offset;
    int segmentRemaining = numBytes;

    // check where to go. if we have a partial length, we need to complete it first
    if (this.lengthBuffer.position() > 0) {
        int toPut = Math.min(this.lengthBuffer.remaining(), segmentRemaining);
        segment.get(segmentPosition, this.lengthBuffer, toPut);

        // did we complete the length?
        if (this.lengthBuffer.hasRemaining()) {
            return;
        }
        else {
            this.recordLength = this.lengthBuffer.getInt(0);

            this.lengthBuffer.clear();
            segmentPosition += toPut;
            segmentRemaining -= toPut;

            if (this.recordLength > THRESHOLD_FOR_SPILLING) {
                this.spillingChannel = createSpillingChannel();
            }
            else {
                ensureBufferCapacity(this.recordLength);
            }
        }
    }

    // copy as much as we need or can for this next spanning record
    int needed = this.recordLength - this.accumulatedRecordBytes;
    int toCopy = Math.min(needed, segmentRemaining);

    if (spillingChannel != null) {
        // spill to file
        ByteBuffer toWrite = segment.wrap(segmentPosition, toCopy);
        FileUtils.writeCompletely(this.spillingChannel, toWrite);
    }
    else {
        segment.get(segmentPosition, buffer, this.accumulatedRecordBytes, toCopy);
    }

    this.accumulatedRecordBytes += toCopy;

    if (toCopy < segmentRemaining) {
        // there is more data in the segment
        this.leftOverData = segment;
        this.leftOverStart = segmentPosition + toCopy;
        this.leftOverLimit = numBytes + offset;
    }

    if (accumulatedRecordBytes == recordLength) {
        // we have the full record
        if (spillingChannel == null) {
            this.serializationReadBuffer.setBuffer(buffer, 0, recordLength);
        }
        else {
            spillingChannel.close();

            BufferedInputStream inStream = new BufferedInputStream(new FileInputStream(spillFile), 2 * 1024 * 1024);
            this.spillFileReader = new DataInputViewStreamWrapper(inStream);
        }
    }
}
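The two spilling-deserializer examples above use wrap() in the opposite direction: the wrapped region [segmentPosition, segmentPosition + toCopy) becomes the source that is written out to the spill channel. A minimal sketch of that write path follows; the class name, method name, and looping are my own (Flink's code delegates the draining to FileUtils.writeCompletely).

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

import org.apache.flink.core.memory.MemorySegment;

final class SegmentSpillSketch {

    /** Writes bytes [offset, offset + length) of the segment to the channel. */
    static void writeRegion(FileChannel channel, MemorySegment segment, int offset, int length) throws IOException {
        // wrap() exposes the requested region as a ByteBuffer without copying it first.
        ByteBuffer source = segment.wrap(offset, length);
        while (source.hasRemaining()) {
            channel.write(source);
        }
    }
}

Because FileChannel.write may write only part of the buffer in a single call, the loop continues until the wrapped view is fully drained.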