org.apache.flink.testutils.serialization.types.IntType Java Examples
The following examples show how to use
org.apache.flink.testutils.serialization.types.IntType.
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: SpanningRecordSerializationTest.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * Verifies that the record serializer round-trips a mixed stream of small
 * fixed-size records and large, multi-segment records without corruption.
 *
 * @throws Exception if the serialization round trip fails
 */
@Test
public void testHandleMixedLargeRecords() throws Exception {
	final int numValues = 99;
	final int segmentSize = 32 * 1024;

	// Pre-size for all records. The previous hint of (numValues + 1) / 2 only
	// covered the even-index entries and forced a mid-loop resize.
	List<SerializationTestType> originalRecords = new ArrayList<>(numValues);
	LargeObjectType genLarge = new LargeObjectType();
	Random rnd = new Random();

	for (int i = 0; i < numValues; i++) {
		if (i % 2 == 0) {
			// small fixed-size record
			originalRecords.add(new IntType(42));
		} else {
			// randomly sized large record that spans several memory segments
			originalRecords.add(genLarge.getRandom(rnd));
		}
	}

	testSerializationRoundTrip(originalRecords, segmentSize);
}
Example #2
Source File: SpanningRecordSerializationTest.java From flink with Apache License 2.0 | 6 votes |
/**
 * Serialization round trip over an alternating sequence of small integer
 * records and large multi-segment records.
 *
 * @throws Exception if the round trip fails
 */
@Test
public void testHandleMixedLargeRecords() throws Exception {
	final int numValues = 99;
	final int segmentSize = 32 * 1024;

	final List<SerializationTestType> records = new ArrayList<>((numValues + 1) / 2);
	final LargeObjectType largeGenerator = new LargeObjectType();
	final Random random = new Random();

	// even positions get a small record, odd positions a random large one
	for (int i = 0; i < numValues; i++) {
		records.add(i % 2 == 0 ? new IntType(42) : largeGenerator.getRandom(random));
	}

	testSerializationRoundTrip(records, segmentSize);
}
Example #3
Source File: SpanningRecordSerializationTest.java From flink with Apache License 2.0 | 6 votes |
/**
 * Checks that a mixed stream of small and large records survives a full
 * serialization/deserialization round trip.
 *
 * @throws Exception if the round trip fails
 */
@Test
public void testHandleMixedLargeRecords() throws Exception {
	final int recordCount = 99;
	final int bufferSegmentSize = 32 * 1024;

	final List<SerializationTestType> testRecords = new ArrayList<>((recordCount + 1) / 2);
	final LargeObjectType largeObjectFactory = new LargeObjectType();
	final Random rng = new Random();

	for (int index = 0; index < recordCount; index++) {
		final boolean smallRecord = (index % 2 == 0);
		if (smallRecord) {
			testRecords.add(new IntType(42));
		} else {
			testRecords.add(largeObjectFactory.getRandom(rng));
		}
	}

	testSerializationRoundTrip(testRecords, bufferSegmentSize);
}
Example #4
Source File: BroadcastRecordWriterTest.java From flink with Apache License 2.0 | 5 votes |
/**
 * FLINK-17780: Tests that a shared buffer(or memory segment) of a buffer builder is only freed when all consumers
 * are closed.
 */
@Test
public void testRandomEmitAndBufferRecycling() throws Exception {
	int recordSize = 8;

	// pool of exactly two buffers, each large enough for two records
	final TestPooledBufferProvider bufferProvider = new TestPooledBufferProvider(2, 2 * recordSize);
	final KeepingPartitionWriter partitionWriter = new KeepingPartitionWriter(bufferProvider) {
		@Override
		public int getNumberOfSubpartitions() {
			return 2;
		}
	};
	final BroadcastRecordWriter<SerializationTestType> writer = new BroadcastRecordWriter<>(partitionWriter, 0, "test");

	// force materialization of both buffers for easier availability tests
	List<Buffer> buffers = Arrays.asList(bufferProvider.requestBuffer(), bufferProvider.requestBuffer());
	buffers.forEach(Buffer::recycleBuffer);
	assertEquals(2, bufferProvider.getNumberOfAvailableBuffers());

	// fill first buffer (randomEmit to subpartition 0, then a broadcast)
	writer.randomEmit(new IntType(1), 0);
	writer.broadcastEmit(new IntType(2));
	assertEquals(1, bufferProvider.getNumberOfAvailableBuffers());

	// simulate consumption of first buffer consumer; this should not free buffers
	// (the broadcast consumer on subpartition 1 still references the segment)
	assertEquals(1, partitionWriter.getAddedBufferConsumers(0).size());
	closeConsumer(partitionWriter, 0, 2 * recordSize);
	assertEquals(1, bufferProvider.getNumberOfAvailableBuffers());

	// use second buffer
	writer.broadcastEmit(new IntType(3));
	assertEquals(0, bufferProvider.getNumberOfAvailableBuffers());

	// fully free first buffer: closing the last remaining consumer returns it to the pool
	assertEquals(2, partitionWriter.getAddedBufferConsumers(1).size());
	closeConsumer(partitionWriter, 1, recordSize);
	assertEquals(1, bufferProvider.getNumberOfAvailableBuffers());
}