org.apache.flink.util.InstantiationUtil Java Examples
The following examples show how to use
org.apache.flink.util.InstantiationUtil.
Each example notes the project it comes from and its license.
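Before the examples, here is a minimal, self-contained sketch of the two InstantiationUtil calls that appear most often below: reflective instantiation (optionally checked against an expected supertype) and a Java-serialization round-trip. The Payload class and the main method are hypothetical and exist only to make the sketch runnable; they are not taken from any of the projects listed here.

import java.io.Serializable;

import org.apache.flink.util.InstantiationUtil;

public class InstantiationUtilSketch {

    /** A hypothetical serializable type, used only to demonstrate the round-trip. */
    public static class Payload implements Serializable {
        private static final long serialVersionUID = 1L;
        String value = "hello";
    }

    public static void main(String[] args) throws Exception {
        // Reflective instantiation via the public no-argument constructor.
        StringBuilder sb = InstantiationUtil.instantiate(StringBuilder.class);

        // Checked variant: the instantiated class must be assignable to the given supertype.
        CharSequence checked = InstantiationUtil.instantiate(StringBuilder.class, CharSequence.class);

        // Java-serialization round-trip: serialize to a byte[] and deserialize
        // it again with a given class loader.
        Payload original = new Payload();
        byte[] bytes = InstantiationUtil.serializeObject(original);
        Payload copy = InstantiationUtil.deserializeObject(
            bytes, Thread.currentThread().getContextClassLoader());

        System.out.println(sb.getClass() + " / " + checked.getClass() + " / " + copy.value);
    }
}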
Example #1
Source File: TypeExtractor.java From Flink-CEPplus with Apache License 2.0

/**
 * Returns the type information factory for a type using the factory registry or annotations.
 */
@Internal
public static <OUT> TypeInfoFactory<OUT> getTypeInfoFactory(Type t) {
    final Class<?> factoryClass;
    if (registeredTypeInfoFactories.containsKey(t)) {
        factoryClass = registeredTypeInfoFactories.get(t);
    } else {
        if (!isClassType(t) || !typeToClass(t).isAnnotationPresent(TypeInfo.class)) {
            return null;
        }
        final TypeInfo typeInfoAnnotation = typeToClass(t).getAnnotation(TypeInfo.class);
        factoryClass = typeInfoAnnotation.value();
        // check for valid factory class
        if (!TypeInfoFactory.class.isAssignableFrom(factoryClass)) {
            throw new InvalidTypesException("TypeInfo annotation does not specify a valid TypeInfoFactory.");
        }
    }

    // instantiate
    return (TypeInfoFactory<OUT>) InstantiationUtil.instantiate(factoryClass);
}
Example #2
Source File: Optimizer.java From Flink-CEPplus with Apache License 2.0

private OptimizerPostPass getPostPassFromPlan(Plan program) {
    final String className = program.getPostPassClassName();
    if (className == null) {
        throw new CompilerException("Optimizer Post Pass class description is null");
    }
    try {
        Class<? extends OptimizerPostPass> clazz = Class.forName(className).asSubclass(OptimizerPostPass.class);
        try {
            return InstantiationUtil.instantiate(clazz, OptimizerPostPass.class);
        } catch (RuntimeException rtex) {
            // unwrap the source exception
            if (rtex.getCause() != null) {
                throw new CompilerException("Cannot instantiate optimizer post pass: " + rtex.getMessage(), rtex.getCause());
            } else {
                throw rtex;
            }
        }
    } catch (ClassNotFoundException cnfex) {
        throw new CompilerException("Cannot load Optimizer post-pass class '" + className + "'.", cnfex);
    } catch (ClassCastException ccex) {
        throw new CompilerException("Class '" + className + "' is not an optimizer post-pass.", ccex);
    }
}
Example #3
Source File: FileCacheReadsFromBlobTest.java From flink with Apache License 2.0

@Test
public void testFileDownloadedFromBlob() throws Exception {
    JobID jobID = new JobID();
    ExecutionAttemptID attemptID = new ExecutionAttemptID();

    final String fileName = "test_file";
    // copy / create the file
    final DistributedCache.DistributedCacheEntry entry = new DistributedCache.DistributedCacheEntry(
        fileName,
        false,
        InstantiationUtil.serializeObject(permanentBlobKey));
    Future<Path> copyResult = fileCache.createTmpFile(fileName, entry, jobID, attemptID);

    final Path dstPath = copyResult.get();
    final String actualContent = Files.toString(new File(dstPath.toUri()), StandardCharsets.UTF_8);
    assertTrue(dstPath.getFileSystem().exists(dstPath));
    assertEquals(testFileContent, actualContent);
}
Example #4
Source File: GenericCsvInputFormat.java From Alink with Apache License 2.0

private void initializeParsers() {
    Class<?>[] fieldClasses = extractTypeClasses(fieldTypes);

    // instantiate the parsers
    FieldParser<?>[] parsers = new FieldParser<?>[fieldClasses.length];

    for (int i = 0; i < fieldClasses.length; i++) {
        if (fieldClasses[i] != null) {
            Class<? extends FieldParser<?>> parserType = FieldParser.getParserForType(fieldClasses[i]);
            if (parserType == null) {
                throw new RuntimeException("No parser available for type '" + fieldClasses[i].getName() + "'.");
            }

            FieldParser<?> p = InstantiationUtil.instantiate(parserType, FieldParser.class);

            p.setCharset(charset);
            parsers[i] = p;
        }
    }
    this.fieldParsers = parsers;
    this.holders = new Object[fieldTypes.length];
    for (int i = 0; i < fieldTypes.length; i++) {
        holders[i] = fieldParsers[i].createValue();
    }
}
Example #5
Source File: JavaSerializer.java From flink with Apache License 2.0

@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public T read(Kryo kryo, Input input, Class aClass) {
    try {
        ObjectMap graphContext = kryo.getGraphContext();
        ObjectInputStream objectStream = (ObjectInputStream) graphContext.get(this);
        if (objectStream == null) {
            // make sure we use Kryo's classloader
            objectStream = new InstantiationUtil.ClassLoaderObjectInputStream(input, kryo.getClassLoader());
            graphContext.put(this, objectStream);
        }
        return (T) objectStream.readObject();
    } catch (Exception ex) {
        throw new KryoException("Error during Java deserialization.", ex);
    }
}
Example #6
Source File: JobManagerWatermarkTracker.java From Flink-CEPplus with Apache License 2.0

@Override
public Map<String, WatermarkState> add(byte[] valueBytes, Map<String, WatermarkState> accumulator) {
    addCount++;

    final WatermarkUpdate value;
    try {
        value = InstantiationUtil.deserializeObject(valueBytes, this.getClass().getClassLoader());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    WatermarkState ws = accumulator.get(value.id);
    if (ws == null) {
        accumulator.put(value.id, ws = new WatermarkState());
    }
    ws.watermark = value.watermark;
    ws.lastUpdated = System.currentTimeMillis();
    return accumulator;
}
Example #7
Source File: KafkaShortRetentionTestBase.java From Flink-CEPplus with Apache License 2.0

@BeforeClass
public static void prepare() throws ClassNotFoundException {
    LOG.info("-------------------------------------------------------------------------");
    LOG.info(" Starting KafkaShortRetentionTestBase ");
    LOG.info("-------------------------------------------------------------------------");

    // dynamically load the implementation for the test
    Class<?> clazz = Class.forName("org.apache.flink.streaming.connectors.kafka.KafkaTestEnvironmentImpl");
    kafkaServer = (KafkaTestEnvironment) InstantiationUtil.instantiate(clazz);

    LOG.info("Starting KafkaTestBase.prepare() for Kafka " + kafkaServer.getVersion());

    if (kafkaServer.isSecureRunSupported()) {
        secureProps = kafkaServer.getSecureProperties();
    }

    Properties specificProperties = new Properties();
    specificProperties.setProperty("log.retention.hours", "0");
    specificProperties.setProperty("log.retention.minutes", "0");
    specificProperties.setProperty("log.retention.ms", "250");
    specificProperties.setProperty("log.retention.check.interval.ms", "100");
    kafkaServer.prepare(kafkaServer.createConfig().setKafkaServerProperties(specificProperties));

    standardProps = kafkaServer.getStandardProperties();
}
Example #8
Source File: TypeSerializerSerializationUtil.java From flink with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
public void read(DataInputView in) throws IOException {
    super.read(in);

    // read in a way that allows the stream to recover from exceptions
    int serializerBytes = in.readInt();
    byte[] buffer = new byte[serializerBytes];
    in.readFully(buffer);

    ClassLoader previousClassLoader = Thread.currentThread().getContextClassLoader();
    try (
        InstantiationUtil.FailureTolerantObjectInputStream ois =
            new InstantiationUtil.FailureTolerantObjectInputStream(new ByteArrayInputStream(buffer), userClassLoader)) {

        Thread.currentThread().setContextClassLoader(userClassLoader);
        typeSerializer = (TypeSerializer<T>) ois.readObject();
    } catch (Exception e) {
        throw new UnloadableTypeSerializerException(e, buffer);
    } finally {
        Thread.currentThread().setContextClassLoader(previousClassLoader);
    }
}
Example #9
Source File: AbstractIterativeTask.java From flink with Apache License 2.0

private void reinstantiateDriver() throws Exception {
    if (this.driver instanceof ResettableDriver) {
        final ResettableDriver<?, ?> resDriver = (ResettableDriver<?, ?>) this.driver;
        resDriver.reset();
    } else {
        Class<? extends Driver<S, OT>> driverClass = this.config.getDriver();
        this.driver = InstantiationUtil.instantiate(driverClass, Driver.class);

        try {
            this.driver.setup(this);
        } catch (Throwable t) {
            throw new Exception("The pact driver setup for '" + this.getEnvironment().getTaskInfo().getTaskName() +
                "' , caused an error: " + t.getMessage(), t);
        }
    }
}
Example #10
Source File: KryoRegistrationSerializerConfigSnapshot.java From flink with Apache License 2.0

@Override
public void write(DataOutputView out) throws IOException {
    out.writeUTF(kryoRegistration.getRegisteredClass().getName());

    final KryoRegistration.SerializerDefinitionType serializerDefinitionType = kryoRegistration.getSerializerDefinitionType();
    out.writeInt(serializerDefinitionType.ordinal());
    switch (serializerDefinitionType) {
        case UNSPECIFIED:
            // nothing else to write
            break;

        case CLASS:
            out.writeUTF(kryoRegistration.getSerializerClass().getName());
            break;

        case INSTANCE:
            try (final DataOutputViewStream outViewWrapper = new DataOutputViewStream(out)) {
                InstantiationUtil.serializeObject(outViewWrapper, kryoRegistration.getSerializableSerializerInstance());
            }
            break;

        default:
            // this should not happen; adding as a guard for the future
            throw new IllegalStateException(
                "Unrecognized Kryo registration serializer definition type: " + serializerDefinitionType);
    }
}
Example #11
Source File: StatefulComplexPayloadSerializer.java From flink with Apache License 2.0

@Override
public ComplexPayload copy(ComplexPayload from) {
    try {
        Thread currentThread = Thread.currentThread();
        if (currentOwnerThread.compareAndSet(null, currentThread)) {
            return InstantiationUtil.deserializeObject(
                InstantiationUtil.serializeObject(from), currentThread.getContextClassLoader());
        } else {
            throw new IllegalStateException("Concurrent access to type serializer detected!");
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        currentOwnerThread.set(null);
    }
}
Example #12
Source File: TaskEventTest.java From flink with Apache License 2.0

/**
 * This test checks the serialization/deserialization of {@link IntegerTaskEvent} objects.
 */
@Test
public void testIntegerTaskEvent() {
    try {
        final IntegerTaskEvent orig = new IntegerTaskEvent(11);
        final IntegerTaskEvent copy = InstantiationUtil.createCopyWritable(orig);

        assertEquals(orig.getInteger(), copy.getInteger());
        assertEquals(orig.hashCode(), copy.hashCode());
        assertTrue(orig.equals(copy));
    } catch (IOException ioe) {
        fail(ioe.getMessage());
    }
}
Example #13
Source File: PojoSerializerSnapshotData.java From flink with Apache License 2.0

private static <T> PojoSerializerSnapshotData<T> readSnapshotData(DataInputView in, ClassLoader userCodeClassLoader) throws IOException {
    Class<T> pojoClass = InstantiationUtil.resolveClassByName(in, userCodeClassLoader);

    LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots = readOptionalMap(
        in, fieldReader(userCodeClassLoader), snapshotReader(userCodeClassLoader));
    LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>> registeredSubclassSerializerSnapshots = readOptionalMap(
        in, classReader(userCodeClassLoader), snapshotReader(userCodeClassLoader));
    LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>> nonRegisteredSubclassSerializerSnapshots = readOptionalMap(
        in, classReader(userCodeClassLoader), snapshotReader(userCodeClassLoader));

    return new PojoSerializerSnapshotData<>(pojoClass, fieldSerializerSnapshots, registeredSubclassSerializerSnapshots, nonRegisteredSubclassSerializerSnapshots);
}
Example #14
Source File: FlinkStateInternals.java From flink-dataflow with Apache License 2.0

@Override
public void persistState(StateCheckpointWriter checkpointBuilder) throws IOException {
    if (!contents.isEmpty()) {
        // serialize the coder.
        byte[] coder = InstantiationUtil.serializeObject(elemCoder);

        checkpointBuilder.addListUpdatesBuilder()
            .setTag(stateKey)
            .setData(coder)
            .writeInt(contents.size());

        for (T item : contents) {
            // encode the element
            ByteString.Output stream = ByteString.newOutput();
            elemCoder.encode(item, stream, Coder.Context.OUTER);
            ByteString data = stream.toByteString();

            // add the data to the checkpoint.
            checkpointBuilder.setData(data);
        }
    }
}
Example #15
Source File: GenericTypeComparator.java From Flink-CEPplus with Apache License 2.0

@Override
public int getNormalizeKeyLen() {
    if (this.reference == null) {
        this.reference = InstantiationUtil.instantiate(this.type);
    }

    NormalizableKey<?> key = (NormalizableKey<?>) this.reference;
    return key.getMaxNormalizedKeyLen();
}
Example #16
Source File: TupleSerializerConfigSnapshot.java From flink with Apache License 2.0

@Override
public void write(DataOutputView out) throws IOException {
    super.write(out);

    try (final DataOutputViewStream outViewWrapper = new DataOutputViewStream(out)) {
        InstantiationUtil.serializeObject(outViewWrapper, tupleClass);
    }
}
Example #17
Source File: StatefulComplexPayloadSerializer.java From flink with Apache License 2.0

@Override
public void serialize(ComplexPayload record, DataOutputView target) throws IOException {
    try {
        if (currentOwnerThread.compareAndSet(null, Thread.currentThread())) {
            target.write(InstantiationUtil.serializeObject(record));
        } else {
            throw new IllegalStateException("Concurrent access to type serializer detected!");
        }
    } finally {
        currentOwnerThread.set(null);
    }
}
Example #18
Source File: StreamConfig.java From flink with Apache License 2.0

public void setOutputSelectors(List<OutputSelector<?>> outputSelectors) {
    try {
        InstantiationUtil.writeObjectToConfig(outputSelectors, this.config, OUTPUT_SELECTOR_WRAPPER);
    } catch (IOException e) {
        throw new StreamTaskException("Could not serialize output selectors", e);
    }
}
Example #19
Source File: TaskConfig.java From Flink-CEPplus with Apache License 2.0

/**
 * Sets the default convergence criterion of a {@link DeltaIteration}
 *
 * @param aggregatorName
 * @param convCriterion
 */
public void setImplicitConvergenceCriterion(String aggregatorName, ConvergenceCriterion<?> convCriterion) {
    try {
        InstantiationUtil.writeObjectToConfig(convCriterion, this.config, ITERATION_IMPLICIT_CONVERGENCE_CRITERION);
    } catch (IOException e) {
        throw new RuntimeException("Error while writing the implicit convergence criterion object to the task configuration.");
    }
    this.config.setString(ITERATION_IMPLICIT_CONVERGENCE_CRITERION_AGG_NAME, aggregatorName);
}
Example #20
Source File: StreamConfig.java From flink with Apache License 2.0

public <T> List<OutputSelector<T>> getOutputSelectors(ClassLoader userCodeClassloader) {
    try {
        List<OutputSelector<T>> selectors =
            InstantiationUtil.readObjectFromConfig(this.config, OUTPUT_SELECTOR_WRAPPER, userCodeClassloader);

        return selectors == null ? Collections.<OutputSelector<T>>emptyList() : selectors;
    } catch (Exception e) {
        throw new StreamTaskException("Could not read output selectors", e);
    }
}
Example #21
Source File: CheckpointCoordinatorTestingUtils.java From flink with Apache License 2.0

static Tuple2<byte[], List<long[]>> serializeTogetherAndTrackOffsets(
        List<List<? extends Serializable>> serializables) throws IOException {

    List<long[]> offsets = new ArrayList<>(serializables.size());
    List<byte[]> serializedGroupValues = new ArrayList<>();

    int runningGroupsOffset = 0;
    for (List<? extends Serializable> list : serializables) {
        long[] currentOffsets = new long[list.size()];
        offsets.add(currentOffsets);

        for (int i = 0; i < list.size(); ++i) {
            currentOffsets[i] = runningGroupsOffset;
            byte[] serializedValue = InstantiationUtil.serializeObject(list.get(i));
            serializedGroupValues.add(serializedValue);
            runningGroupsOffset += serializedValue.length;
        }
    }

    //write all generated values in a single byte array, which is index by groupOffsetsInFinalByteArray
    byte[] allSerializedValuesConcatenated = new byte[runningGroupsOffset];
    runningGroupsOffset = 0;
    for (byte[] serializedGroupValue : serializedGroupValues) {
        System.arraycopy(
            serializedGroupValue, 0, allSerializedValuesConcatenated, runningGroupsOffset, serializedGroupValue.length);
        runningGroupsOffset += serializedGroupValue.length;
    }

    return new Tuple2<>(allSerializedValuesConcatenated, offsets);
}
Example #22
Source File: InternalTimersSnapshotReaderWriters.java From flink with Apache License 2.0

@Override
protected void writeKeyAndNamespaceSerializers(DataOutputView out) throws IOException {
    // the pre-versioned format only serializes the serializers, without their configuration snapshots
    try (ByteArrayOutputStreamWithPos stream = new ByteArrayOutputStreamWithPos()) {
        InstantiationUtil.serializeObject(stream, keySerializer);
        InstantiationUtil.serializeObject(stream, namespaceSerializer);

        out.write(stream.getBuf(), 0, stream.getPosition());
    }
}
Example #23
Source File: JobMasterTest.java From flink with Apache License 2.0

@Nonnull
private static List<InputSplit> getInputSplits(int numberInputSplits, Supplier<SerializedInputSplit> nextInputSplit) throws Exception {
    final List<InputSplit> actualInputSplits = new ArrayList<>(numberInputSplits);

    for (int i = 0; i < numberInputSplits; i++) {
        final SerializedInputSplit serializedInputSplit = nextInputSplit.get();

        assertThat(serializedInputSplit.isEmpty(), is(false));

        actualInputSplits.add(InstantiationUtil.deserializeObject(serializedInputSplit.getInputSplitData(), ClassLoader.getSystemClassLoader()));
    }

    return actualInputSplits;
}
Example #24
Source File: StreamConfig.java From Flink-CEPplus with Apache License 2.0

public void setStreamOperator(StreamOperator<?> operator) {
    if (operator != null) {
        config.setClass(USER_FUNCTION, operator.getClass());

        try {
            InstantiationUtil.writeObjectToConfig(operator, this.config, SERIALIZEDUDF);
        } catch (IOException e) {
            throw new StreamTaskException("Cannot serialize operator object " + operator.getClass() + ".", e);
        }
    }
}
Example #25
Source File: FileCache.java From flink with Apache License 2.0

CopyFromBlobProcess(DistributedCacheEntry e, JobID jobID, PermanentBlobService blobService, Path target) throws Exception {
    this.isExecutable = e.isExecutable;
    this.isDirectory = e.isZipped;
    this.jobID = jobID;
    this.blobService = blobService;
    this.blobKey = InstantiationUtil.deserializeObject(e.blobKey, Thread.currentThread().getContextClassLoader());
    this.target = target;
}
Example #26
Source File: TaskConfig.java From flink with Apache License 2.0

public DataDistribution getOutputDataDistribution(int outputNum, final ClassLoader cl) throws ClassNotFoundException {
    final String className = this.config.getString(OUTPUT_DATA_DISTRIBUTION_CLASS, null);
    if (className == null) {
        return null;
    }

    final Class<? extends DataDistribution> clazz;
    try {
        clazz = Class.forName(className, true, cl).asSubclass(DataDistribution.class);
    } catch (ClassCastException ccex) {
        throw new CorruptConfigurationException("The class noted in the configuration as the data distribution " +
            "is no subclass of DataDistribution.");
    }

    final DataDistribution distribution = InstantiationUtil.instantiate(clazz, DataDistribution.class);

    final byte[] stateEncoded = this.config.getBytes(OUTPUT_DATA_DISTRIBUTION_PREFIX + outputNum, null);
    if (stateEncoded == null) {
        throw new CorruptConfigurationException(
            "The configuration contained the data distribution type, but no serialized state.");
    }

    final ByteArrayInputStream bais = new ByteArrayInputStream(stateEncoded);
    final DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(bais);

    try {
        distribution.read(in);
        return distribution;
    } catch (Exception ex) {
        throw new RuntimeException("The deserialization of the encoded data distribution state caused an error" +
            (ex.getMessage() == null ? "." : ": " + ex.getMessage()), ex);
    }
}
Example #27
Source File: StreamConfig.java From Flink-CEPplus with Apache License 2.0

public StateBackend getStateBackend(ClassLoader cl) {
    try {
        return InstantiationUtil.readObjectFromConfig(this.config, STATE_BACKEND, cl);
    } catch (Exception e) {
        throw new StreamTaskException("Could not instantiate statehandle provider.", e);
    }
}
Example #28
Source File: CheckpointCoordinatorTest.java From Flink-CEPplus with Apache License 2.0

private static void collectResult(int opIdx, OperatorStateHandle operatorStateHandle, List<String> resultCollector) throws Exception {
    try (FSDataInputStream in = operatorStateHandle.openInputStream()) {
        for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> entry :
                operatorStateHandle.getStateNameToPartitionOffsets().entrySet()) {

            for (long offset : entry.getValue().getOffsets()) {
                in.seek(offset);
                Integer state = InstantiationUtil.
                    deserializeObject(in, Thread.currentThread().getContextClassLoader());
                resultCollector.add(opIdx + " : " + entry.getKey() + " : " + state);
            }
        }
    }
}
Example #29
Source File: TaskConfig.java From flink with Apache License 2.0

@SuppressWarnings("unchecked")
public <T> UserCodeWrapper<T> getStubWrapper(ClassLoader cl) {
    try {
        return (UserCodeWrapper<T>) InstantiationUtil.readObjectFromConfig(this.config, STUB_OBJECT, cl);
    } catch (ClassNotFoundException | IOException e) {
        throw new CorruptConfigurationException("Could not read the user code wrapper: " + e.getMessage(), e);
    }
}