org.apache.flink.api.common.typeutils.base.LongSerializer Java Examples
The following examples show how to use
org.apache.flink.api.common.typeutils.base.LongSerializer.
The examples are taken from open-source projects; the source file, originating project, and license are noted above each example.
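Before the project examples, a minimal round-trip sketch may help show what LongSerializer itself does: it is a stateless singleton that writes a long as eight bytes to a DataOutputView and reads it back from a DataInputView. The snippet below is illustrative only (the class name LongSerializerRoundTrip and the sample value are made up for this sketch); it uses Flink's DataOutputSerializer and DataInputDeserializer as in-memory views.

import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class LongSerializerRoundTrip {

    public static void main(String[] args) throws Exception {
        // LongSerializer is stateless; the shared singleton is used everywhere.
        LongSerializer serializer = LongSerializer.INSTANCE;

        // Serialize a value into an in-memory DataOutputView.
        DataOutputSerializer out = new DataOutputSerializer(8);
        serializer.serialize(42L, out);

        // Deserialize it back from the written bytes.
        DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
        long restored = serializer.deserialize(in);

        // Fixed-length type: getLength() reports 8 bytes per record.
        System.out.println(restored + " (" + serializer.getLength() + " bytes)");
    }
}

Most of the examples that follow pass LongSerializer.INSTANCE to a state descriptor or another composite serializer rather than calling serialize/deserialize directly.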
Example #1
Source File: StatefulOperatorChainedTaskTest.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Override
public void initializeState(StateInitializationContext context) throws Exception {
    super.initializeState(context);

    counterState = context
        .getKeyedStateStore()
        .getState(new ValueStateDescriptor<>(prefix + "counter-state", LongSerializer.INSTANCE));

    // set key manually to make RocksDBListState get the serialized key.
    setCurrentKey("10");

    if (context.isRestored()) {
        counter = counterState.value();
        assertEquals(snapshotOutData, counter);
        counterState.clear();
    }
}
Example #2
Source File: KVStateRequestSerializerRocksDBTest.java From flink with Apache License 2.0 | 6 votes |
/**
 * Tests list serialization and deserialization match.
 *
 * @see KvStateRequestSerializerTest#testListSerialization()
 * KvStateRequestSerializerTest#testListSerialization() using the heap state back-end test
 */
@Test
public void testListSerialization() throws Exception {
    final long key = 0L;
    final RocksDBKeyedStateBackend<Long> longHeapKeyedStateBackend = RocksDBTestUtils
        .builderForTestDefaults(temporaryFolder.getRoot(), LongSerializer.INSTANCE)
        .build();
    longHeapKeyedStateBackend.setCurrentKey(key);

    final InternalListState<Long, VoidNamespace, Long> listState =
        longHeapKeyedStateBackend.createInternalState(
            VoidNamespaceSerializer.INSTANCE,
            new ListStateDescriptor<>("test", LongSerializer.INSTANCE));

    KvStateRequestSerializerTest.testListSerialization(key, listState);
    longHeapKeyedStateBackend.dispose();
}
Example #3
Source File: IntervalJoinOperator.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Override
public void initializeState(StateInitializationContext context) throws Exception {
    super.initializeState(context);

    this.leftBuffer = context.getKeyedStateStore().getMapState(new MapStateDescriptor<>(
        LEFT_BUFFER,
        LongSerializer.INSTANCE,
        new ListSerializer<>(new BufferEntrySerializer<>(leftTypeSerializer))
    ));

    this.rightBuffer = context.getKeyedStateStore().getMapState(new MapStateDescriptor<>(
        RIGHT_BUFFER,
        LongSerializer.INSTANCE,
        new ListSerializer<>(new BufferEntrySerializer<>(rightTypeSerializer))
    ));
}
Example #4
Source File: TupleComparatorTTT1Test.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") @Override protected TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>> createSerializer() { return new TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>( (Class<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>) (Class<?>) Tuple3.class, new TypeSerializer[]{ new TupleSerializer<Tuple2<String, Double>> ( (Class<Tuple2<String, Double>>) (Class<?>) Tuple2.class, new TypeSerializer[]{ StringSerializer.INSTANCE, DoubleSerializer.INSTANCE }), new TupleSerializer<Tuple2<Long, Long>> ( (Class<Tuple2<Long, Long>>) (Class<?>) Tuple2.class, new TypeSerializer[]{ LongSerializer.INSTANCE, LongSerializer.INSTANCE }), new TupleSerializer<Tuple2<Integer, Long>> ( (Class<Tuple2<Integer, Long>>) (Class<?>) Tuple2.class, new TypeSerializer[]{ IntSerializer.INSTANCE, LongSerializer.INSTANCE }) }); }
Example #5
Source File: CopyOnWriteSkipListStateMapBasicOpTest.java From flink with Apache License 2.0 | 6 votes |
/**
 * This tests the internal capability of using a partial {@link ByteBuffer}, making sure the internal methods
 * work when putting/getting state with a key stored at a non-zero offset of a ByteBuffer.
 */
@Test
public void testPutAndGetNodeWithNoneZeroOffset() {
    final int key = 10;
    final long namespace = 0L;
    final String valueString = "test";
    SkipListKeySerializer<Integer, Long> skipListKeySerializer =
        new SkipListKeySerializer<>(IntSerializer.INSTANCE, LongSerializer.INSTANCE);
    SkipListValueSerializer<String> skipListValueSerializer =
        new SkipListValueSerializer<>(StringSerializer.INSTANCE);
    byte[] keyBytes = skipListKeySerializer.serialize(key, namespace);
    byte[] constructedKeyBytes = new byte[keyBytes.length + 1];
    System.arraycopy(keyBytes, 0, constructedKeyBytes, 1, keyBytes.length);
    MemorySegment keySegment = MemorySegmentFactory.wrap(constructedKeyBytes);
    int keyLen = keyBytes.length;
    byte[] value = skipListValueSerializer.serialize(valueString);
    stateMap.putValue(keySegment, 1, keyLen, value, false);
    String state = stateMap.getNode(keySegment, 1, keyLen);
    assertThat(state, is(valueString));
}
Example #6
Source File: StatefulOperatorChainedTaskTest.java From flink with Apache License 2.0 | 6 votes |
@Override
public void initializeState(StateInitializationContext context) throws Exception {
    super.initializeState(context);

    counterState = context
        .getKeyedStateStore()
        .getState(new ValueStateDescriptor<>(prefix + "counter-state", LongSerializer.INSTANCE));

    // set key manually to make RocksDBListState get the serialized key.
    setCurrentKey("10");

    if (context.isRestored()) {
        counter = counterState.value();
        assertEquals(snapshotOutData, counter);
        counterState.clear();
    }
}
Example #7
Source File: SharedBuffer.java From flink with Apache License 2.0 | 6 votes |
public SharedBuffer(KeyedStateStore stateStore, TypeSerializer<V> valueSerializer) {
    this.eventsBuffer = stateStore.getMapState(
        new MapStateDescriptor<>(
            eventsStateName,
            EventId.EventIdSerializer.INSTANCE,
            new Lockable.LockableTypeSerializer<>(valueSerializer)));

    this.entries = stateStore.getMapState(
        new MapStateDescriptor<>(
            entriesStateName,
            new NodeId.NodeIdSerializer(),
            new Lockable.LockableTypeSerializer<>(new SharedBufferNode.SharedBufferNodeSerializer())));

    this.eventsCount = stateStore.getMapState(
        new MapStateDescriptor<>(
            eventsCountStateName,
            LongSerializer.INSTANCE,
            IntSerializer.INSTANCE));
}
Example #8
Source File: TupleComparatorTTT3Test.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") @Override protected TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>> createSerializer() { return new TupleSerializer<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>( (Class<Tuple3<Tuple2<String, Double>, Tuple2<Long, Long>, Tuple2<Integer, Long>>>) (Class<?>) Tuple3.class, new TypeSerializer[]{ new TupleSerializer<Tuple2<String, Double>> ( (Class<Tuple2<String, Double>>) (Class<?>) Tuple2.class, new TypeSerializer[]{ StringSerializer.INSTANCE, DoubleSerializer.INSTANCE }), new TupleSerializer<Tuple2<Long, Long>> ( (Class<Tuple2<Long, Long>>) (Class<?>) Tuple2.class, new TypeSerializer[]{ LongSerializer.INSTANCE, LongSerializer.INSTANCE }), new TupleSerializer<Tuple2<Integer, Long>> ( (Class<Tuple2<Integer, Long>>) (Class<?>) Tuple2.class, new TypeSerializer[]{ IntSerializer.INSTANCE, LongSerializer.INSTANCE }) }); }
Example #9
Source File: StateBackendBenchmarkUtils.java From flink with Apache License 2.0 | 6 votes |
private static HeapKeyedStateBackend<Long> createHeapKeyedStateBackend(File rootDir) throws IOException {
    File recoveryBaseDir = prepareDirectory(recoveryDirName, rootDir);
    KeyGroupRange keyGroupRange = new KeyGroupRange(0, 1);
    int numberOfKeyGroups = keyGroupRange.getNumberOfKeyGroups();
    ExecutionConfig executionConfig = new ExecutionConfig();
    HeapPriorityQueueSetFactory priorityQueueSetFactory =
        new HeapPriorityQueueSetFactory(keyGroupRange, numberOfKeyGroups, 128);
    HeapKeyedStateBackendBuilder<Long> backendBuilder = new HeapKeyedStateBackendBuilder<>(
        null,
        new LongSerializer(),
        Thread.currentThread().getContextClassLoader(),
        numberOfKeyGroups,
        keyGroupRange,
        executionConfig,
        TtlTimeProvider.DEFAULT,
        Collections.emptyList(),
        AbstractStateBackend.getCompressionDecorator(executionConfig),
        new LocalRecoveryConfig(false, new LocalRecoveryDirectoryProviderImpl(recoveryBaseDir, new JobID(), new JobVertexID(), 0)),
        priorityQueueSetFactory,
        false,
        new CloseableRegistry());
    return backendBuilder.build();
}
Example #10
Source File: CopyOnWriteSkipListStateMapBasicOpTest.java From flink with Apache License 2.0 | 6 votes |
/**
 * Test state map iterator illegal next call.
 */
@Test
public void testNamespaceNodeIteratorIllegalNextInvocation() {
    SkipListKeySerializer<Integer, Long> skipListKeySerializer =
        new SkipListKeySerializer<>(IntSerializer.INSTANCE, LongSerializer.INSTANCE);
    byte[] namespaceBytes = skipListKeySerializer.serializeNamespace(namespace);
    MemorySegment namespaceSegment = MemorySegmentFactory.wrap(namespaceBytes);
    Iterator<Long> iterator = stateMap.new NamespaceNodeIterator(namespaceSegment, 0, namespaceBytes.length);
    while (iterator.hasNext()) {
        iterator.next();
    }
    try {
        iterator.next();
        fail("Should have thrown NoSuchElementException.");
    } catch (NoSuchElementException e) {
        // expected
    }
}
Example #11
Source File: NestedRowDataTest.java From flink with Apache License 2.0 | 5 votes |
private BinaryRowData getBinaryRowData() {
    BinaryRowData row = new BinaryRowData(1);
    BinaryRowWriter writer = new BinaryRowWriter(row);

    GenericTypeInfo<MyObj> info = new GenericTypeInfo<>(MyObj.class);
    TypeSerializer<MyObj> genericSerializer = info.createSerializer(new ExecutionConfig());
    GenericRowData gRow = new GenericRowData(5);
    gRow.setField(0, 1);
    gRow.setField(1, 5L);
    gRow.setField(2, StringData.fromString("12345678"));
    gRow.setField(3, null);
    gRow.setField(4, RawValueData.fromObject(new MyObj(15, 5)));

    RowDataSerializer serializer = new RowDataSerializer(
        new LogicalType[]{
            DataTypes.INT().getLogicalType(),
            DataTypes.BIGINT().getLogicalType(),
            DataTypes.STRING().getLogicalType(),
            DataTypes.STRING().getLogicalType(),
            DataTypes.RAW(info).getLogicalType()
        },
        new TypeSerializer[]{
            IntSerializer.INSTANCE,
            LongSerializer.INSTANCE,
            StringDataSerializer.INSTANCE,
            StringDataSerializer.INSTANCE,
            new RawValueDataSerializer<>(genericSerializer)
        });
    writer.writeRow(0, gRow, serializer);
    writer.complete();

    return row;
}
Example #12
Source File: MigrationUtils.java From flink with Apache License 2.0 | 5 votes |
static <T> Queue<ComputationState> deserializeComputationStates(
        org.apache.flink.cep.nfa.SharedBuffer<T> sharedBuffer,
        TypeSerializer<T> eventSerializer,
        DataInputView source) throws IOException {

    Queue<ComputationState> computationStates = new LinkedList<>();
    StringSerializer stateNameSerializer = StringSerializer.INSTANCE;
    LongSerializer timestampSerializer = LongSerializer.INSTANCE;
    DeweyNumber.DeweyNumberSerializer versionSerializer = DeweyNumber.DeweyNumberSerializer.INSTANCE;

    int computationStateNo = source.readInt();
    for (int i = 0; i < computationStateNo; i++) {
        String state = stateNameSerializer.deserialize(source);
        String prevState = stateNameSerializer.deserialize(source);
        long timestamp = timestampSerializer.deserialize(source);
        DeweyNumber version = versionSerializer.deserialize(source);
        long startTimestamp = timestampSerializer.deserialize(source);
        int counter = source.readInt();

        T event = null;
        if (source.readBoolean()) {
            event = eventSerializer.deserialize(source);
        }

        NodeId nodeId;
        EventId startEventId;
        if (prevState != null) {
            nodeId = sharedBuffer.getNodeId(prevState, timestamp, counter, event);
            startEventId = sharedBuffer.getStartEventId(version.getRun());
        } else {
            nodeId = null;
            startEventId = null;
        }

        computationStates.add(ComputationState.createState(state, nodeId, version, startTimestamp, startEventId));
    }
    return computationStates;
}
Example #13
Source File: KvStateRequestSerializerTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Tests key and namespace deserialization utils with too many bytes.
 */
@Test(expected = IOException.class)
public void testKeyAndNamespaceDeserializationTooMany2() throws Exception {
    // Long + null String + 2 bytes
    KvStateSerializer.deserializeKeyAndNamespace(
        new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 42, 0, 2, 2},
        LongSerializer.INSTANCE,
        StringSerializer.INSTANCE);
}
Example #14
Source File: KvStateRequestSerializerTest.java From flink with Apache License 2.0 | 5 votes |
/**
 * Tests value deserialization with too many bytes.
 */
@Test(expected = IOException.class)
public void testDeserializeValueTooMany2() throws Exception {
    // Long + 2 bytes
    KvStateSerializer.deserializeValue(
        new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 2, 2},
        LongSerializer.INSTANCE);
}
Example #15
Source File: StreamSQLTestProgram.java From flink with Apache License 2.0 | 5 votes |
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
    state = context.getOperatorStateStore().getListState(
        new ListStateDescriptor<Long>("state", LongSerializer.INSTANCE));

    for (Long l : state.get()) {
        ms += l;
    }
}
Example #16
Source File: KvStateRequestSerializerTest.java From flink with Apache License 2.0 | 5 votes |
/**
 * Verifies that the serialization of a list using the given list state
 * matches the deserialization with {@link KvStateSerializer#deserializeList}.
 *
 * @param key key of the list state
 * @param listState list state using the {@link VoidNamespace}, must also be a {@link InternalKvState} instance
 *
 * @throws Exception
 */
public static void testListSerialization(
        final long key,
        final InternalListState<Long, VoidNamespace, Long> listState) throws Exception {

    TypeSerializer<Long> valueSerializer = LongSerializer.INSTANCE;

    listState.setCurrentNamespace(VoidNamespace.INSTANCE);

    // List
    final int numElements = 10;

    final List<Long> expectedValues = new ArrayList<>();
    for (int i = 0; i < numElements; i++) {
        final long value = ThreadLocalRandom.current().nextLong();
        expectedValues.add(value);
        listState.add(value);
    }

    final byte[] serializedKey = KvStateSerializer.serializeKeyAndNamespace(
        key, LongSerializer.INSTANCE,
        VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE);

    final byte[] serializedValues = listState.getSerializedValue(
        serializedKey,
        listState.getKeySerializer(),
        listState.getNamespaceSerializer(),
        listState.getValueSerializer());

    List<Long> actualValues = KvStateSerializer.deserializeList(serializedValues, valueSerializer);
    assertEquals(expectedValues, actualValues);

    // Single value
    long expectedValue = ThreadLocalRandom.current().nextLong();
    byte[] serializedValue = KvStateSerializer.serializeValue(expectedValue, valueSerializer);
    List<Long> actualValue = KvStateSerializer.deserializeList(serializedValue, valueSerializer);
    assertEquals(1, actualValue.size());
    assertEquals(expectedValue, actualValue.get(0).longValue());
}
Example #17
Source File: CopyOnWriteSkipListStateMapTestUtils.java From flink with Apache License 2.0 | 5 votes |
@Nonnull
static CopyOnWriteSkipListStateMap<Integer, Long, String> createStateMapForTesting(
        int keysToDelete,
        float logicalKeysRemoveRatio,
        Allocator spaceAllocator) {
    return new CopyOnWriteSkipListStateMap<>(
        IntSerializer.INSTANCE,
        LongSerializer.INSTANCE,
        StringSerializer.INSTANCE,
        spaceAllocator,
        keysToDelete,
        logicalKeysRemoveRatio);
}
Example #18
Source File: KvStateRequestSerializerTest.java From flink with Apache License 2.0 | 5 votes |
/**
 * Tests map deserialization with too few bytes.
 */
@Test(expected = IOException.class)
public void testDeserializeMapTooShort3() throws Exception {
    // Long (Key1) + Boolean (false) + Long (Value1) + 1 byte (incomplete Key2)
    KvStateSerializer.deserializeMap(
        new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3},
        LongSerializer.INSTANCE,
        LongSerializer.INSTANCE);
}
Example #19
Source File: KvStateRequestSerializerTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Tests map deserialization with too few bytes.
 */
@Test(expected = IOException.class)
public void testDeserializeMapTooShort2() throws Exception {
    // Long (Key) + 1 byte (incomplete Value)
    KvStateSerializer.deserializeMap(
        new byte[]{1, 1, 1, 1, 1, 1, 1, 1, 0},
        LongSerializer.INSTANCE,
        LongSerializer.INSTANCE);
}
Example #20
Source File: TupleComparatorILD2Test.java From flink with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") @Override protected TupleSerializer<Tuple3<Integer, Long, Double>> createSerializer() { return new TupleSerializer<Tuple3<Integer, Long, Double>>( (Class<Tuple3<Integer, Long, Double>>) (Class<?>) Tuple3.class, new TypeSerializer[]{ new IntSerializer(), new LongSerializer(), new DoubleSerializer()}); }
Example #21
Source File: SerializationProxiesTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testKeyedBackendSerializationProxyRoundtrip() throws Exception {
    TypeSerializer<?> keySerializer = IntSerializer.INSTANCE;
    TypeSerializer<?> namespaceSerializer = LongSerializer.INSTANCE;
    TypeSerializer<?> stateSerializer = DoubleSerializer.INSTANCE;

    List<StateMetaInfoSnapshot> stateMetaInfoList = new ArrayList<>();

    stateMetaInfoList.add(new RegisteredKeyValueStateBackendMetaInfo<>(
        StateDescriptor.Type.VALUE, "a", namespaceSerializer, stateSerializer).snapshot());
    stateMetaInfoList.add(new RegisteredKeyValueStateBackendMetaInfo<>(
        StateDescriptor.Type.VALUE, "b", namespaceSerializer, stateSerializer).snapshot());
    stateMetaInfoList.add(new RegisteredKeyValueStateBackendMetaInfo<>(
        StateDescriptor.Type.VALUE, "c", namespaceSerializer, stateSerializer).snapshot());

    KeyedBackendSerializationProxy<?> serializationProxy =
        new KeyedBackendSerializationProxy<>(keySerializer, stateMetaInfoList, true);

    byte[] serialized;
    try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
        serializationProxy.write(new DataOutputViewStreamWrapper(out));
        serialized = out.toByteArray();
    }

    serializationProxy = new KeyedBackendSerializationProxy<>(Thread.currentThread().getContextClassLoader());

    try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
        serializationProxy.read(new DataInputViewStreamWrapper(in));
    }

    Assert.assertTrue(serializationProxy.isUsingKeyGroupCompression());
    Assert.assertTrue(serializationProxy.getKeySerializerSnapshot() instanceof IntSerializer.IntSerializerSnapshot);

    assertEqualStateMetaInfoSnapshotsLists(stateMetaInfoList, serializationProxy.getStateMetaInfoSnapshots());
}
Example #22
Source File: StreamTaskNetworkInputTest.java From flink with Apache License 2.0 | 5 votes |
private void serializeRecord(long value, BufferBuilder bufferBuilder) throws IOException {
    RecordSerializer<SerializationDelegate<StreamElement>> serializer = new SpanningRecordSerializer<>();
    SerializationDelegate<StreamElement> serializationDelegate =
        new SerializationDelegate<>(
            new StreamElementSerializer<>(LongSerializer.INSTANCE));
    serializationDelegate.setInstance(new StreamRecord<>(value));
    serializer.serializeRecord(serializationDelegate);

    assertFalse(serializer.copyToBufferBuilder(bufferBuilder).isFullBuffer());
}
Example #23
Source File: TtlStateFactory.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") private <UK, UV> IS createMapState() throws Exception { MapStateDescriptor<UK, UV> mapStateDesc = (MapStateDescriptor<UK, UV>) stateDesc; MapStateDescriptor<UK, TtlValue<UV>> ttlDescriptor = new MapStateDescriptor<>( stateDesc.getName(), mapStateDesc.getKeySerializer(), new TtlSerializer<>(LongSerializer.INSTANCE, mapStateDesc.getValueSerializer())); return (IS) new TtlMapState<>(createTtlStateContext(ttlDescriptor)); }
Example #24
Source File: EventTimeOrderingOperator.java From flink-connectors with Apache License 2.0 | 5 votes |
@Override
public void initializeState(StateInitializationContext context) throws Exception {
    super.initializeState(context);

    // create a map-based queue to buffer input elements
    if (elementQueueState == null) {
        elementQueueState = getRuntimeContext().getMapState(
            new MapStateDescriptor<>(
                EVENT_QUEUE_STATE_NAME,
                LongSerializer.INSTANCE,
                new ListSerializer<>(inputSerializer)
            )
        );
    }
}
Example #25
Source File: LegacyStatefulJobSavepointMigrationITCase.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Override
public void open() throws Exception {
    super.open();

    timerService = getInternalTimerService(
        "timer",
        LongSerializer.INSTANCE,
        this);
}
Example #26
Source File: TtlStateFactory.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") private IS createReducingState() throws Exception { ReducingStateDescriptor<SV> reducingStateDesc = (ReducingStateDescriptor<SV>) stateDesc; ReducingStateDescriptor<TtlValue<SV>> ttlDescriptor = new ReducingStateDescriptor<>( stateDesc.getName(), new TtlReduceFunction<>(reducingStateDesc.getReduceFunction(), ttlConfig, timeProvider), new TtlSerializer<>(LongSerializer.INSTANCE, stateDesc.getSerializer())); return (IS) new TtlReducingState<>(createTtlStateContext(ttlDescriptor)); }
Example #27
Source File: StatefulJobSavepointMigrationITCase.java From flink with Apache License 2.0 | 5 votes |
@Override
public void onProcessingTime(InternalTimer<Long, Long> timer) throws Exception {
    ValueState<Long> state = getKeyedStateBackend().getPartitionedState(
        timer.getNamespace(),
        LongSerializer.INSTANCE,
        stateDescriptor);

    assertEquals(state.value(), timer.getNamespace());
    getRuntimeContext().getAccumulator(SUCCESSFUL_PROCESSING_TIME_CHECK_ACCUMULATOR).add(1);
}
Example #28
Source File: TtlStateFactory.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@SuppressWarnings({"deprecation", "unchecked"}) private <T> IS createFoldingState() throws Exception { FoldingStateDescriptor<T, SV> foldingStateDescriptor = (FoldingStateDescriptor<T, SV>) stateDesc; SV initAcc = stateDesc.getDefaultValue(); TtlValue<SV> ttlInitAcc = initAcc == null ? null : new TtlValue<>(initAcc, Long.MAX_VALUE); FoldingStateDescriptor<T, TtlValue<SV>> ttlDescriptor = new FoldingStateDescriptor<>( stateDesc.getName(), ttlInitAcc, new TtlFoldFunction<>(foldingStateDescriptor.getFoldFunction(), ttlConfig, timeProvider, initAcc), new TtlSerializer<>(LongSerializer.INSTANCE, stateDesc.getSerializer())); return (IS) new TtlFoldingState<>(createTtlStateContext(ttlDescriptor)); }
Example #29
Source File: StatefulJobSavepointMigrationITCase.java From flink with Apache License 2.0 | 5 votes |
@Override
public void onProcessingTime(InternalTimer<Long, Long> timer) throws Exception {
    ValueState<Long> state = getKeyedStateBackend().getPartitionedState(
        timer.getNamespace(),
        LongSerializer.INSTANCE,
        stateDescriptor);

    assertEquals(state.value(), timer.getNamespace());
    getRuntimeContext().getAccumulator(SUCCESSFUL_PROCESSING_TIME_CHECK_ACCUMULATOR).add(1);
}
Example #30
Source File: TupleComparatorILD3Test.java From flink with Apache License 2.0 | 5 votes |
@Override
protected TupleComparator<Tuple3<Integer, Long, Double>> createComparator(boolean ascending) {
    return new TupleComparator<Tuple3<Integer, Long, Double>>(
        new int[]{0, 1, 2},
        new TypeComparator[]{
            new IntComparator(ascending),
            new LongComparator(ascending),
            new DoubleComparator(ascending)
        },
        new TypeSerializer[]{
            IntSerializer.INSTANCE,
            LongSerializer.INSTANCE,
            DoubleSerializer.INSTANCE
        });
}