org.apache.flink.core.memory.DataInputDeserializer Java Examples
The following examples show how to use
org.apache.flink.core.memory.DataInputDeserializer.
Each example notes the source project and file it comes from, along with its license.
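Most of the examples pair DataInputDeserializer with org.apache.flink.core.memory.DataOutputSerializer: values are written to the output view, the resulting bytes are handed to a DataInputDeserializer (via the constructor or setBuffer()), and the values are read back in the same order. The following minimal round-trip sketch illustrates that pattern; it is an orientation example written for this page, not taken from any of the projects below.

import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

import java.io.IOException;

public class DataInputDeserializerRoundTrip {

    public static void main(String[] args) throws IOException {
        // Write a few values into an in-memory output view.
        DataOutputSerializer out = new DataOutputSerializer(64);
        out.writeInt(42);
        out.writeUTF("hello");
        out.writeBoolean(true);

        // Wrap the produced bytes and read them back in the same order.
        DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
        int number = in.readInt();        // 42
        String text = in.readUTF();       // "hello"
        boolean flag = in.readBoolean();  // true

        // available() reports how many unread bytes remain; 0 means fully consumed.
        System.out.println(number + " " + text + " " + flag + ", remaining=" + in.available());
    }
}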
Example #1
Source File: RocksDBPriorityQueueSetFactory.java From Flink-CEPplus with Apache License 2.0

RocksDBPriorityQueueSetFactory(
        KeyGroupRange keyGroupRange,
        int keyGroupPrefixBytes,
        int numberOfKeyGroups,
        Map<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation,
        RocksDB db,
        RocksDBWriteBatchWrapper writeBatchWrapper,
        RocksDBNativeMetricMonitor nativeMetricMonitor,
        Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory) {
    this.keyGroupRange = keyGroupRange;
    this.keyGroupPrefixBytes = keyGroupPrefixBytes;
    this.numberOfKeyGroups = numberOfKeyGroups;
    this.kvStateInformation = kvStateInformation;
    this.db = db;
    this.writeBatchWrapper = writeBatchWrapper;
    this.nativeMetricMonitor = nativeMetricMonitor;
    this.columnFamilyOptionsFactory = columnFamilyOptionsFactory;
    this.sharedElementOutView = new DataOutputSerializer(128);
    this.sharedElementInView = new DataInputDeserializer();
}
Example #2
Source File: KeyGroupPartitionedPriorityQueueWithRocksDBStoreTest.java From flink with Apache License 2.0

private KeyGroupPartitionedPriorityQueue.PartitionQueueSetFactory<
        TestElement, RocksDBCachingPriorityQueueSet<TestElement>> newFactory() {
    return (keyGroupId, numKeyGroups, keyExtractorFunction, elementComparator) -> {
        DataOutputSerializer outputStreamWithPos = new DataOutputSerializer(128);
        DataInputDeserializer inputStreamWithPos = new DataInputDeserializer();
        int keyGroupPrefixBytes = RocksDBKeySerializationUtils.computeRequiredBytesInKeyGroupPrefix(numKeyGroups);
        TreeOrderedSetCache orderedSetCache = new TreeOrderedSetCache(32);
        return new RocksDBCachingPriorityQueueSet<>(
            keyGroupId,
            keyGroupPrefixBytes,
            rocksDBResource.getRocksDB(),
            rocksDBResource.getReadOptions(),
            rocksDBResource.getDefaultColumnFamily(),
            TestElementSerializer.INSTANCE,
            outputStreamWithPos,
            inputStreamWithPos,
            rocksDBResource.getBatchWrapper(),
            orderedSetCache);
    };
}
Example #3
Source File: KvStateSerializer.java From flink with Apache License 2.0

/**
 * Deserializes all kv pairs with the given serializer.
 *
 * @param serializedValue Serialized value of type Map<UK, UV>
 * @param keySerializer Serializer for UK
 * @param valueSerializer Serializer for UV
 * @param <UK> Type of the key
 * @param <UV> Type of the value.
 * @return Deserialized map or <code>null</code> if the serialized value is <code>null</code>
 * @throws IOException On failure during deserialization
 */
public static <UK, UV> Map<UK, UV> deserializeMap(
        byte[] serializedValue,
        TypeSerializer<UK> keySerializer,
        TypeSerializer<UV> valueSerializer) throws IOException {
    if (serializedValue != null) {
        DataInputDeserializer in = new DataInputDeserializer(serializedValue, 0, serializedValue.length);

        Map<UK, UV> result = new HashMap<>();
        while (in.available() > 0) {
            UK key = keySerializer.deserialize(in);
            boolean isNull = in.readBoolean();
            UV value = isNull ? null : valueSerializer.deserialize(in);
            result.put(key, value);
        }

        return result;
    } else {
        return null;
    }
}
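The read loop above implies the wire layout of a serialized map: for each entry, the key, a boolean null marker, and then the value when the marker is false. A matching write side could look roughly like the following; this is a hypothetical helper sketched for illustration, not the method from the source file above.

// Hypothetical write-side counterpart to deserializeMap(); mirrors the layout read above.
public static <UK, UV> byte[] serializeMap(
        Map<UK, UV> map,
        TypeSerializer<UK> keySerializer,
        TypeSerializer<UV> valueSerializer) throws IOException {
    DataOutputSerializer out = new DataOutputSerializer(128);
    for (Map.Entry<UK, UV> entry : map.entrySet()) {
        keySerializer.serialize(entry.getKey(), out);          // key
        boolean isNull = entry.getValue() == null;
        out.writeBoolean(isNull);                              // null marker
        if (!isNull) {
            valueSerializer.serialize(entry.getValue(), out);  // value, only if present
        }
    }
    return out.getCopyOfBuffer();
}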
Example #4
Source File: StreamElementSerializerTest.java From flink with Apache License 2.0

@SuppressWarnings("unchecked")
private static <T, X extends StreamElement> X serializeAndDeserialize(
        X record,
        StreamElementSerializer<T> serializer) throws IOException {
    DataOutputSerializer output = new DataOutputSerializer(32);
    serializer.serialize(record, output);

    // additional binary copy step
    DataInputDeserializer copyInput = new DataInputDeserializer(output.getByteArray(), 0, output.length());
    DataOutputSerializer copyOutput = new DataOutputSerializer(32);
    serializer.copy(copyInput, copyOutput);

    DataInputDeserializer input = new DataInputDeserializer(copyOutput.getByteArray(), 0, copyOutput.length());
    return (X) serializer.deserialize(input);
}
Example #5
Source File: KvStateSerializer.java From Flink-CEPplus with Apache License 2.0

/**
 * Deserializes the value with the given serializer.
 *
 * @param serializedValue Serialized value of type T
 * @param serializer Serializer for T
 * @param <T> Type of the value
 * @return Deserialized value or <code>null</code> if the serialized value is <code>null</code>
 * @throws IOException On failure during deserialization
 */
public static <T> T deserializeValue(byte[] serializedValue, TypeSerializer<T> serializer) throws IOException {
    if (serializedValue == null) {
        return null;
    } else {
        final DataInputDeserializer deser = new DataInputDeserializer(
            serializedValue, 0, serializedValue.length);
        final T value = serializer.deserialize(deser);
        if (deser.available() > 0) {
            throw new IOException(
                "Unconsumed bytes in the deserialized value. " +
                    "This indicates a mismatch in the value serializers " +
                    "used by the KvState instance and this access.");
        }
        return value;
    }
}
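A possible call site for this helper, sketched here for orientation only: serialize a value with a Flink TypeSerializer, then hand the bytes to deserializeValue(). The method name and setup below are illustrative and not taken from the source file above.

// Illustrative usage of deserializeValue(); assumes Flink's built-in StringSerializer
// (org.apache.flink.api.common.typeutils.base) and the KvStateSerializer shown above.
static String kvStateValueRoundTrip() throws IOException {
    DataOutputSerializer out = new DataOutputSerializer(32);
    StringSerializer.INSTANCE.serialize("state-value", out);
    byte[] serializedValue = out.getCopyOfBuffer();

    // Throws IOException if unconsumed bytes remain after reading the value.
    return KvStateSerializer.deserializeValue(serializedValue, StringSerializer.INSTANCE);
}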
Example #6
Source File: RocksDBPriorityQueueSetFactory.java From flink with Apache License 2.0

RocksDBPriorityQueueSetFactory(
        KeyGroupRange keyGroupRange,
        int keyGroupPrefixBytes,
        int numberOfKeyGroups,
        Map<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation,
        RocksDB db,
        RocksDBWriteBatchWrapper writeBatchWrapper,
        RocksDBNativeMetricMonitor nativeMetricMonitor,
        Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory) {
    this.keyGroupRange = keyGroupRange;
    this.keyGroupPrefixBytes = keyGroupPrefixBytes;
    this.numberOfKeyGroups = numberOfKeyGroups;
    this.kvStateInformation = kvStateInformation;
    this.db = db;
    this.writeBatchWrapper = writeBatchWrapper;
    this.nativeMetricMonitor = nativeMetricMonitor;
    this.columnFamilyOptionsFactory = columnFamilyOptionsFactory;
    this.sharedElementOutView = new DataOutputSerializer(128);
    this.sharedElementInView = new DataInputDeserializer();
}
Example #7
Source File: RocksDBKeySerializationUtilsTest.java From flink with Apache License 2.0

@Test
public void testNamespaceSerializationAndDeserialization() throws Exception {
    final DataOutputSerializer outputView = new DataOutputSerializer(8);
    final DataInputDeserializer inputView = new DataInputDeserializer();
    for (int orgNamespace = 0; orgNamespace < 100; ++orgNamespace) {
        outputView.clear();
        RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, false);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        int deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, false);
        Assert.assertEquals(orgNamespace, deserializedNamespace);
        RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, true);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, true);
        Assert.assertEquals(orgNamespace, deserializedNamespace);
    }
}
Example #8
Source File: StreamElementSerializerTest.java From Flink-CEPplus with Apache License 2.0

@SuppressWarnings("unchecked")
private static <T, X extends StreamElement> X serializeAndDeserialize(
        X record,
        StreamElementSerializer<T> serializer) throws IOException {
    DataOutputSerializer output = new DataOutputSerializer(32);
    serializer.serialize(record, output);

    // additional binary copy step
    DataInputDeserializer copyInput = new DataInputDeserializer(output.getByteArray(), 0, output.length());
    DataOutputSerializer copyOutput = new DataOutputSerializer(32);
    serializer.copy(copyInput, copyOutput);

    DataInputDeserializer input = new DataInputDeserializer(copyOutput.getByteArray(), 0, copyOutput.length());
    return (X) serializer.deserialize(input);
}
Example #9
Source File: RocksDBMapState.java From flink with Apache License 2.0

RocksDBMapEntry(
        @Nonnull final RocksDB db,
        @Nonnegative final int userKeyOffset,
        @Nonnull final byte[] rawKeyBytes,
        @Nonnull final byte[] rawValueBytes,
        @Nonnull final TypeSerializer<UK> keySerializer,
        @Nonnull final TypeSerializer<UV> valueSerializer,
        @Nonnull DataInputDeserializer dataInputView) {
    this.db = db;
    this.userKeyOffset = userKeyOffset;
    this.keySerializer = keySerializer;
    this.valueSerializer = valueSerializer;
    this.rawKeyBytes = rawKeyBytes;
    this.rawValueBytes = rawValueBytes;
    this.deleted = false;
    this.dataInputView = dataInputView;
}
Example #10
Source File: RocksDBKeySerializationUtilsTest.java From flink with Apache License 2.0

@Test
public void testKeySerializationAndDeserialization() throws Exception {
    final DataOutputSerializer outputView = new DataOutputSerializer(8);
    final DataInputDeserializer inputView = new DataInputDeserializer();

    // test for key
    for (int orgKey = 0; orgKey < 100; ++orgKey) {
        outputView.clear();
        RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, false);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        int deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, false);
        Assert.assertEquals(orgKey, deserializedKey);
        RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, true);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, true);
        Assert.assertEquals(orgKey, deserializedKey);
    }
}
Example #11
Source File: RocksDBCachingPriorityQueueSet.java From flink with Apache License 2.0

RocksDBCachingPriorityQueueSet(
        @Nonnegative int keyGroupId,
        @Nonnegative int keyGroupPrefixBytes,
        @Nonnull RocksDB db,
        @Nonnull ReadOptions readOptions,
        @Nonnull ColumnFamilyHandle columnFamilyHandle,
        @Nonnull TypeSerializer<E> byteOrderProducingSerializer,
        @Nonnull DataOutputSerializer outputStream,
        @Nonnull DataInputDeserializer inputStream,
        @Nonnull RocksDBWriteBatchWrapper batchWrapper,
        @Nonnull OrderedByteArraySetCache orderedByteArraySetCache) {
    this.db = db;
    this.readOptions = readOptions;
    this.columnFamilyHandle = columnFamilyHandle;
    this.byteOrderProducingSerializer = byteOrderProducingSerializer;
    this.batchWrapper = batchWrapper;
    this.outputView = outputStream;
    this.inputView = inputStream;
    this.orderedCache = orderedByteArraySetCache;
    this.allElementsInCache = false;
    this.groupPrefixBytes = createKeyGroupBytes(keyGroupId, keyGroupPrefixBytes);
    this.seekHint = groupPrefixBytes;
    this.internalIndex = HeapPriorityQueueElement.NOT_CONTAINED;
}
Example #12
Source File: RocksStateKeysIterator.java From flink with Apache License 2.0

public RocksStateKeysIterator(
        @Nonnull RocksIteratorWrapper iterator,
        @Nonnull String state,
        @Nonnull TypeSerializer<K> keySerializer,
        int keyGroupPrefixBytes,
        boolean ambiguousKeyPossible,
        @Nonnull byte[] namespaceBytes) {
    this.iterator = iterator;
    this.state = state;
    this.keySerializer = keySerializer;
    this.keyGroupPrefixBytes = keyGroupPrefixBytes;
    this.namespaceBytes = namespaceBytes;
    this.nextKey = null;
    this.previousKey = null;
    this.ambiguousKeyPossible = ambiguousKeyPossible;
    this.byteArrayDataInputView = new DataInputDeserializer();
}
Example #13
Source File: KvStateSerializer.java From flink with Apache License 2.0

/**
 * Deserializes the value with the given serializer.
 *
 * @param serializedValue Serialized value of type T
 * @param serializer Serializer for T
 * @param <T> Type of the value
 * @return Deserialized value or <code>null</code> if the serialized value is <code>null</code>
 * @throws IOException On failure during deserialization
 */
public static <T> T deserializeValue(byte[] serializedValue, TypeSerializer<T> serializer) throws IOException {
    if (serializedValue == null) {
        return null;
    } else {
        final DataInputDeserializer deser = new DataInputDeserializer(
            serializedValue, 0, serializedValue.length);
        final T value = serializer.deserialize(deser);
        if (deser.available() > 0) {
            throw new IOException(
                "Unconsumed bytes in the deserialized value. " +
                    "This indicates a mismatch in the value serializers " +
                    "used by the KvState instance and this access.");
        }
        return value;
    }
}
Example #14
Source File: AbstractRocksDBState.java From flink with Apache License 2.0

/**
 * Creates a new RocksDB backed state.
 *
 * @param columnFamily The RocksDB column family that this state is associated to.
 * @param namespaceSerializer The serializer for the namespace.
 * @param valueSerializer The serializer for the state.
 * @param defaultValue The default value for the state.
 * @param backend The backend that this state is bound to.
 */
protected AbstractRocksDBState(
        ColumnFamilyHandle columnFamily,
        TypeSerializer<N> namespaceSerializer,
        TypeSerializer<V> valueSerializer,
        V defaultValue,
        RocksDBKeyedStateBackend<K> backend) {
    this.namespaceSerializer = namespaceSerializer;
    this.backend = backend;
    this.columnFamily = columnFamily;
    this.writeOptions = backend.getWriteOptions();
    this.valueSerializer = Preconditions.checkNotNull(valueSerializer, "State value serializer");
    this.defaultValue = defaultValue;
    this.dataOutputView = new DataOutputSerializer(128);
    this.dataInputView = new DataInputDeserializer();
    this.sharedKeyNamespaceSerializer = backend.getSharedRocksKeyBuilder();
}
Example #15
Source File: RocksDBKeySerializationUtilsTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testNamespaceSerializationAndDeserialization() throws Exception {
    final DataOutputSerializer outputView = new DataOutputSerializer(8);
    final DataInputDeserializer inputView = new DataInputDeserializer();
    for (int orgNamespace = 0; orgNamespace < 100; ++orgNamespace) {
        outputView.clear();
        RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, false);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        int deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, false);
        Assert.assertEquals(orgNamespace, deserializedNamespace);
        RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, true);
        inputView.setBuffer(outputView.getCopyOfBuffer());
        deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, true);
        Assert.assertEquals(orgNamespace, deserializedNamespace);
    }
}
Example #16
Source File: RocksDBSerializedCompositeKeyBuilderTest.java From flink with Apache License 2.0

private <K, N, U> void assertKeyGroupKeyNamespaceUserKeyBytes(
        K key,
        int keyGroup,
        int prefixBytes,
        TypeSerializer<K> keySerializer,
        N namespace,
        TypeSerializer<N> namespaceSerializer,
        U userKey,
        TypeSerializer<U> userKeySerializer,
        DataInputDeserializer deserializer,
        boolean ambiguousCompositeKeyPossible) throws IOException {
    assertKeyGroupKeyNamespaceBytes(
        key,
        keyGroup,
        prefixBytes,
        keySerializer,
        namespace,
        namespaceSerializer,
        deserializer,
        ambiguousCompositeKeyPossible);
    Assert.assertEquals(userKey, userKeySerializer.deserialize(deserializer));
}
Example #17
Source File: RocksDBKeySerializationUtils.java From flink with Apache License 2.0

public static <K> K readKey(
        TypeSerializer<K> keySerializer,
        DataInputDeserializer inputView,
        boolean ambiguousKeyPossible) throws IOException {
    int beforeRead = inputView.getPosition();
    K key = keySerializer.deserialize(inputView);
    if (ambiguousKeyPossible) {
        int length = inputView.getPosition() - beforeRead;
        readVariableIntBytes(inputView, length);
    }
    return key;
}
Example #18
Source File: RocksDBSerializedCompositeKeyBuilderTest.java From flink with Apache License 2.0

private <K> void testSetKeyInternal(TypeSerializer<K> serializer, Collection<K> testKeys, int maxParallelism) throws IOException {
    final int prefixBytes = maxParallelism > Byte.MAX_VALUE ? 2 : 1;
    RocksDBSerializedCompositeKeyBuilder<K> keyBuilder =
        createRocksDBSerializedCompositeKeyBuilder(serializer, prefixBytes);
    final DataInputDeserializer deserializer = new DataInputDeserializer();
    for (K testKey : testKeys) {
        int keyGroup = setKeyAndReturnKeyGroup(keyBuilder, testKey, maxParallelism);
        byte[] result = dataOutputSerializer.getCopyOfBuffer();
        deserializer.setBuffer(result);
        assertKeyKeyGroupBytes(testKey, keyGroup, prefixBytes, serializer, deserializer, false);
        Assert.assertEquals(0, deserializer.available());
    }
}
Example #19
Source File: AbstractRocksDBState.java From flink with Apache License 2.0

public void migrateSerializedValue(
        DataInputDeserializer serializedOldValueInput,
        DataOutputSerializer serializedMigratedValueOutput,
        TypeSerializer<V> priorSerializer,
        TypeSerializer<V> newSerializer) throws StateMigrationException {

    try {
        V value = priorSerializer.deserialize(serializedOldValueInput);
        newSerializer.serialize(value, serializedMigratedValueOutput);
    } catch (Exception e) {
        throw new StateMigrationException("Error while trying to migrate RocksDB state.", e);
    }
}
Example #20
Source File: CompositeTypeSerializerSnapshotTest.java From flink with Apache License 2.0

@Test
public void testRestoreCompositeTypeSerializer() throws IOException {
    // the target compatibilities of the nested serializers don't matter,
    // because we're only testing the restore serializer
    TypeSerializer<?>[] testNestedSerializers = {
        new NestedSerializer(TargetCompatibility.COMPATIBLE_AS_IS),
        new NestedSerializer(TargetCompatibility.INCOMPATIBLE),
        new NestedSerializer(TargetCompatibility.COMPATIBLE_AFTER_MIGRATION)
    };

    TestCompositeTypeSerializer testSerializer = new TestCompositeTypeSerializer("outer-config", testNestedSerializers);

    TypeSerializerSnapshot<String> testSerializerSnapshot = testSerializer.snapshotConfiguration();

    DataOutputSerializer out = new DataOutputSerializer(128);
    TypeSerializerSnapshot.writeVersionedSnapshot(out, testSerializerSnapshot);

    DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
    testSerializerSnapshot = TypeSerializerSnapshot.readVersionedSnapshot(
        in, Thread.currentThread().getContextClassLoader());

    // now, restore the composite type serializer;
    // the restored nested serializer should be a RestoredNestedSerializer
    testSerializer = (TestCompositeTypeSerializer) testSerializerSnapshot.restoreSerializer();
    Assert.assertTrue(testSerializer.getNestedSerializers()[0].getClass() == RestoredNestedSerializer.class);
    Assert.assertTrue(testSerializer.getNestedSerializers()[1].getClass() == RestoredNestedSerializer.class);
    Assert.assertTrue(testSerializer.getNestedSerializers()[2].getClass() == RestoredNestedSerializer.class);
}
Example #21
Source File: RocksDBListState.java From flink with Apache License 2.0

@Override
public void migrateSerializedValue(
        DataInputDeserializer serializedOldValueInput,
        DataOutputSerializer serializedMigratedValueOutput,
        TypeSerializer<List<V>> priorSerializer,
        TypeSerializer<List<V>> newSerializer) throws StateMigrationException {

    Preconditions.checkArgument(priorSerializer instanceof ListSerializer);
    Preconditions.checkArgument(newSerializer instanceof ListSerializer);

    TypeSerializer<V> priorElementSerializer =
        ((ListSerializer<V>) priorSerializer).getElementSerializer();

    TypeSerializer<V> newElementSerializer =
        ((ListSerializer<V>) newSerializer).getElementSerializer();

    try {
        while (serializedOldValueInput.available() > 0) {
            V element = deserializeNextElement(serializedOldValueInput, priorElementSerializer);
            newElementSerializer.serialize(element, serializedMigratedValueOutput);
            if (serializedOldValueInput.available() > 0) {
                serializedMigratedValueOutput.write(DELIMITER);
            }
        }
    } catch (Exception e) {
        throw new StateMigrationException("Error while trying to migrate RocksDB list state.", e);
    }
}
Example #22
Source File: AvroSerializerSnapshotTest.java From flink with Apache License 2.0

/**
 * Serialize an (avro)TypeSerializerSnapshot and deserialize it.
 */
private static <T> AvroSerializerSnapshot<T> roundTrip(TypeSerializerSnapshot<T> original) throws IOException {
    // writeSnapshot();
    DataOutputSerializer out = new DataOutputSerializer(1024);
    original.writeSnapshot(out);

    // init
    AvroSerializerSnapshot<T> restored = new AvroSerializerSnapshot<>();

    // readSnapshot();
    DataInputView in = new DataInputDeserializer(out.wrapAsByteBuffer());
    restored.readSnapshot(restored.getCurrentVersion(), in, original.getClass().getClassLoader());

    return restored;
}
Example #23
Source File: ProtobufSerializerTest.java From stateful-functions with Apache License 2.0

@Test
public void duplicatedSerializerCanDeserialize() throws IOException {
    ProtobufSerializer<SimpleMessage> serializer = ProtobufSerializer.forMessageGeneratedClass(SimpleMessage.class);

    DataOutputSerializer out = new DataOutputSerializer(512);
    serializer.serialize(originalMessage, out);

    DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
    SimpleMessage foo = serializer.duplicate().deserialize(in);

    assertThat(foo, is(originalMessage));
}
Example #24
Source File: RocksDBListState.java From flink with Apache License 2.0

private static <V> V deserializeNextElement(DataInputDeserializer in, TypeSerializer<V> elementSerializer) {
    try {
        if (in.available() > 0) {
            V element = elementSerializer.deserialize(in);
            if (in.available() > 0) {
                in.readByte();
            }
            return element;
        }
    } catch (IOException e) {
        throw new FlinkRuntimeException("Unexpected list element deserialization failure", e);
    }
    return null;
}
Example #25
Source File: OutputStreamBasedPartFileWriter.java From flink with Apache License 2.0

@Override
public InProgressFileRecoverable deserialize(int version, byte[] serialized) throws IOException {
    switch (version) {
        case 1:
            DataInputView dataInputView = new DataInputDeserializer(serialized);
            validateMagicNumber(dataInputView);
            return deserializeV1(dataInputView);
        default:
            throw new IOException("Unrecognized version or corrupt state: " + version);
    }
}
Example #26
Source File: RocksStateKeysIterator.java From flink with Apache License 2.0

private K deserializeKey(byte[] keyBytes, DataInputDeserializer readView) throws IOException {
    readView.setBuffer(keyBytes, keyGroupPrefixBytes, keyBytes.length - keyGroupPrefixBytes);
    return RocksDBKeySerializationUtils.readKey(
        keySerializer,
        byteArrayDataInputView,
        ambiguousKeyPossible);
}