org.apache.flink.api.java.ClosureCleaner Java Examples
The following examples show how to use
org.apache.flink.api.java.ClosureCleaner.
Each example notes its original project and source file on the attribution line above the listing.
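Before diving in, it helps to know the two static entry points these examples exercise: ClosureCleaner.clean(function, level, checkSerializability) nulls out unused references that anonymous classes and lambdas keep to their enclosing scope, and ClosureCleaner.ensureSerializable(object) throws InvalidProgramException if the object still cannot be serialized. The following minimal sketch shows the typical call pattern; the MapFunction and its enclosing class are illustrative, not taken from the examples below.

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.ClosureCleaner;

// Deliberately not Serializable: the anonymous MapFunction built below
// captures a hidden this$0 reference to this enclosing instance.
public class ClosureCleanerSketch {

    MapFunction<Integer, Integer> buildMap() {
        return new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer value) {
                return value + 1;
            }
        };
    }

    public static void main(String[] args) throws Exception {
        MapFunction<Integer, Integer> map = new ClosureCleanerSketch().buildMap();

        // Null out the unused outer reference; the final 'false' skips the
        // serializability check during cleaning.
        ClosureCleaner.clean(map, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);

        // Verify the cleaned function now serializes; this would throw
        // InvalidProgramException if a non-serializable reference remained.
        ClosureCleaner.ensureSerializable(map);

        System.out.println(map.map(3)); // prints 4
    }
}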
Example #1
Source File: KeyedTwoInputStreamOperatorTestHarness.java From Flink-CEPplus with Apache License 2.0
public KeyedTwoInputStreamOperatorTestHarness(
        TwoInputStreamOperator<IN1, IN2, OUT> operator,
        KeySelector<IN1, K> keySelector1,
        KeySelector<IN2, K> keySelector2,
        TypeInformation<K> keyType,
        int maxParallelism,
        int numSubtasks,
        int subtaskIndex) throws Exception {
    super(operator, maxParallelism, numSubtasks, subtaskIndex);

    ClosureCleaner.clean(keySelector1, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    ClosureCleaner.clean(keySelector2, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    config.setStatePartitioner(0, keySelector1);
    config.setStatePartitioner(1, keySelector2);
    config.setStateKeySerializer(keyType.createSerializer(executionConfig));
}
Example #2
Source File: FlinkPulsarProducer.java From pulsar with Apache License 2.0
public FlinkPulsarProducer(String serviceUrl,
                           String defaultTopicName,
                           Authentication authentication,
                           SerializationSchema<T> serializationSchema,
                           PulsarKeyExtractor<T> keyExtractor,
                           PulsarPropertiesExtractor<T> propertiesExtractor) {
    checkArgument(StringUtils.isNotBlank(serviceUrl), "Service url cannot be blank");
    checkArgument(StringUtils.isNotBlank(defaultTopicName), "TopicName cannot be blank");
    checkNotNull(authentication, "auth cannot be null, set disabled for no auth");

    clientConf = new ClientConfigurationData();
    producerConf = new ProducerConfigurationData();
    this.clientConf.setServiceUrl(serviceUrl);
    this.clientConf.setAuthentication(authentication);
    this.producerConf.setTopicName(defaultTopicName);

    this.schema = checkNotNull(serializationSchema, "Serialization Schema not set");
    this.flinkPulsarKeyExtractor = getOrNullKeyExtractor(keyExtractor);
    this.flinkPulsarPropertiesExtractor = getOrNullPropertiesExtractor(propertiesExtractor);
    ClosureCleaner.ensureSerializable(serializationSchema);
}
Example #3
Source File: FlinkPravegaReaderTest.java From flink-connectors with Apache License 2.0
/**
 * Creates a {@link TestableFlinkPravegaReader} with event time and watermarking.
 */
private static TestableFlinkPravegaReader<Integer> createReaderWithWatermark(
        AssignerWithTimeWindows<Integer> assignerWithTimeWindows) {
    ClientConfig clientConfig = ClientConfig.builder().build();
    ReaderGroupConfig rgConfig = ReaderGroupConfig.builder().stream(SAMPLE_STREAM).build();
    boolean enableMetrics = true;

    try {
        ClosureCleaner.clean(assignerWithTimeWindows, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
        SerializedValue<AssignerWithTimeWindows<Integer>> serializedAssigner =
                new SerializedValue<>(assignerWithTimeWindows);
        return new TestableFlinkPravegaReader<>(
                "hookUid", clientConfig, rgConfig, SAMPLE_SCOPE, GROUP_NAME,
                DESERIALIZATION_SCHEMA, serializedAssigner, READER_TIMEOUT, CHKPT_TIMEOUT, enableMetrics);
    } catch (IOException e) {
        throw new IllegalArgumentException("The given assigner is not serializable", e);
    }
}
Example #4
Source File: Pattern.java From Flink-CEPplus with Apache License 2.0
/**
 * Applies a stop condition for a looping state. It allows cleaning the underlying state.
 *
 * @param untilCondition a condition an event has to satisfy to stop collecting events into the looping state
 * @return The same pattern with the until condition applied
 */
public Pattern<T, F> until(IterativeCondition<F> untilCondition) {
    Preconditions.checkNotNull(untilCondition, "The condition cannot be null");

    if (this.untilCondition != null) {
        throw new MalformedPatternException("Only one until condition can be applied.");
    }

    if (!quantifier.hasProperty(Quantifier.QuantifierProperty.LOOPING)) {
        throw new MalformedPatternException("The until condition is only applicable to looping states.");
    }

    ClosureCleaner.clean(untilCondition, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
    this.untilCondition = untilCondition;

    return this;
}
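Since until() rejects non-looping patterns, it is typically chained after a looping quantifier such as oneOrMore(). A brief usage sketch; the Event type and the condition are invented for illustration:

Pattern<Event, Event> pattern = Pattern.<Event>begin("start")
    .oneOrMore()  // gives the state the LOOPING property that until() requires
    .until(new SimpleCondition<Event>() {
        @Override
        public boolean filter(Event event) {
            return "stop".equals(event.getName());
        }
    });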
Example #5
Source File: Pattern.java From flink with Apache License 2.0
/**
 * Applies a stop condition for a looping state. It allows cleaning the underlying state.
 *
 * @param untilCondition a condition an event has to satisfy to stop collecting events into the looping state
 * @return The same pattern with the until condition applied
 */
public Pattern<T, F> until(IterativeCondition<F> untilCondition) {
    Preconditions.checkNotNull(untilCondition, "The condition cannot be null");

    if (this.untilCondition != null) {
        throw new MalformedPatternException("Only one until condition can be applied.");
    }

    if (!quantifier.hasProperty(Quantifier.QuantifierProperty.LOOPING)) {
        throw new MalformedPatternException("The until condition is only applicable to looping states.");
    }

    ClosureCleaner.clean(untilCondition, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
    this.untilCondition = untilCondition;

    return this;
}
Example #6
Source File: KeyedTwoInputStreamOperatorTestHarness.java From flink with Apache License 2.0
public KeyedTwoInputStreamOperatorTestHarness(
        TwoInputStreamOperator<IN1, IN2, OUT> operator,
        KeySelector<IN1, K> keySelector1,
        KeySelector<IN2, K> keySelector2,
        TypeInformation<K> keyType,
        int maxParallelism,
        int numSubtasks,
        int subtaskIndex) throws Exception {
    super(operator, maxParallelism, numSubtasks, subtaskIndex);

    ClosureCleaner.clean(keySelector1, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    ClosureCleaner.clean(keySelector2, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    config.setStatePartitioner(0, keySelector1);
    config.setStatePartitioner(1, keySelector2);
    config.setStateKeySerializer(keyType.createSerializer(executionConfig));
}
Example #7
Source File: KeyedOneInputStreamOperatorTestHarness.java From flink with Apache License 2.0
public KeyedOneInputStreamOperatorTestHarness(
        OneInputStreamOperator<IN, OUT> operator,
        final KeySelector<IN, K> keySelector,
        TypeInformation<K> keyType,
        int maxParallelism,
        int numSubtasks,
        int subtaskIndex) throws Exception {
    super(operator, maxParallelism, numSubtasks, subtaskIndex);

    ClosureCleaner.clean(keySelector, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    config.setStatePartitioner(0, keySelector);
    config.setStateKeySerializer(keyType.createSerializer(executionConfig));
}
Example #8
Source File: StreamExecutionEnvironment.java From Flink-CEPplus with Apache License 2.0
/** * Returns a "closure-cleaned" version of the given function. Cleans only if closure cleaning * is not disabled in the {@link org.apache.flink.api.common.ExecutionConfig} */ @Internal public <F> F clean(F f) { if (getConfig().isClosureCleanerEnabled()) { ClosureCleaner.clean(f, getConfig().getClosureCleanerLevel(), true); } ClosureCleaner.ensureSerializable(f); return f; }
Example #9
Source File: RpcGlobalAggregateManager.java From Flink-CEPplus with Apache License 2.0
@Override
public <IN, ACC, OUT> OUT updateGlobalAggregate(
        String aggregateName,
        Object aggregand,
        AggregateFunction<IN, ACC, OUT> aggregateFunction) throws IOException {
    ClosureCleaner.clean(aggregateFunction, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
    byte[] serializedAggregateFunction = InstantiationUtil.serializeObject(aggregateFunction);

    Object result = null;
    try {
        result = jobMasterGateway.updateGlobalAggregate(aggregateName, aggregand, serializedAggregateFunction).get();
    } catch (Exception e) {
        throw new IOException("Error updating global aggregate.", e);
    }
    return (OUT) result;
}
Example #10
Source File: Pattern.java From Flink-CEPplus with Apache License 2.0
/**
 * Adds a condition that has to be satisfied by an event
 * in order to be considered a match. If another condition has already been
 * set, the new one is combined with the previous one with a
 * logical {@code OR}. Otherwise, this becomes the only condition.
 *
 * @param condition The condition as an {@link IterativeCondition}.
 * @return The pattern with the new condition set.
 */
public Pattern<T, F> or(IterativeCondition<F> condition) {
    Preconditions.checkNotNull(condition, "The condition cannot be null.");

    ClosureCleaner.clean(condition, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

    if (this.condition == null) {
        this.condition = condition;
    } else {
        this.condition = new RichOrCondition<>(this.condition, condition);
    }
    return this;
}
Example #11
Source File: Pattern.java From Flink-CEPplus with Apache License 2.0
/**
 * Adds a condition that has to be satisfied by an event
 * in order to be considered a match. If another condition has already been
 * set, the new one is combined with the previous one with a
 * logical {@code AND}. Otherwise, this becomes the only condition.
 *
 * @param condition The condition as an {@link IterativeCondition}.
 * @return The pattern with the new condition set.
 */
public Pattern<T, F> where(IterativeCondition<F> condition) {
    Preconditions.checkNotNull(condition, "The condition cannot be null.");

    ClosureCleaner.clean(condition, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

    if (this.condition == null) {
        this.condition = condition;
    } else {
        this.condition = new RichAndCondition<>(this.condition, condition);
    }
    return this;
}
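In practice where() and or() are chained while composing a CEP pattern, and each supplied condition is closure-cleaned on the way in. A short usage sketch; the Event type and both conditions are invented for illustration:

Pattern<Event, Event> pattern = Pattern.<Event>begin("start")
    .where(new SimpleCondition<Event>() {
        @Override
        public boolean filter(Event event) {
            return event.getId() > 0;               // first condition
        }
    })
    .or(new SimpleCondition<Event>() {
        @Override
        public boolean filter(Event event) {
            return event.getName().startsWith("a"); // combined with logical OR
        }
    });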
Example #12
Source File: KeyedOneInputStreamOperatorTestHarness.java From Flink-CEPplus with Apache License 2.0
public KeyedOneInputStreamOperatorTestHarness(
        OneInputStreamOperator<IN, OUT> operator,
        final KeySelector<IN, K> keySelector,
        TypeInformation<K> keyType,
        int maxParallelism,
        int numSubtasks,
        int subtaskIndex) throws Exception {
    super(operator, maxParallelism, numSubtasks, subtaskIndex);

    ClosureCleaner.clean(keySelector, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    config.setStatePartitioner(0, keySelector);
    config.setStateKeySerializer(keyType.createSerializer(executionConfig));
}
Example #13
Source File: ClosureCleanerTest.java From Flink-CEPplus with Apache License 2.0
@Test(expected = InvalidProgramException.class)
public void testNonSerializable() throws Exception {
    MapCreator creator = new NonSerializableMapCreator();
    MapFunction<Integer, Integer> map = creator.getMap();

    ClosureCleaner.ensureSerializable(map);

    int result = map.map(3);
    Assert.assertEquals(result, 4);
}
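The MapCreator fixtures referenced by this test live elsewhere in ClosureCleanerTest and are not shown on this page. A plausible sketch of why the created map is not serializable (the class names follow the test; the bodies are assumptions):

interface MapCreator {
    MapFunction<Integer, Integer> getMap();
}

// The creator itself is not Serializable, and the anonymous MapFunction it
// returns keeps a hidden this$0 reference to it, so serialization fails
// unless the closure cleaner strips that reference first.
class NonSerializableMapCreator implements MapCreator {
    @Override
    public MapFunction<Integer, Integer> getMap() {
        return new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer value) {
                return value + 1;
            }
        };
    }
}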
Example #14
Source File: AbstractParameterToolTest.java From Flink-CEPplus with Apache License 2.0
protected void validate(ParameterTool parameter) {
    ClosureCleaner.ensureSerializable(parameter);
    internalValidate(parameter);

    // -------- test behaviour after serialization ------------
    try {
        byte[] b = InstantiationUtil.serializeObject(parameter);
        final ParameterTool copy = InstantiationUtil.deserializeObject(b, getClass().getClassLoader());
        internalValidate(copy);
    } catch (IOException | ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
}
Example #15
Source File: StreamExecutionEnvironment.java From flink with Apache License 2.0
/** * Returns a "closure-cleaned" version of the given function. Cleans only if closure cleaning * is not disabled in the {@link org.apache.flink.api.common.ExecutionConfig} */ @Internal public <F> F clean(F f) { if (getConfig().isClosureCleanerEnabled()) { ClosureCleaner.clean(f, getConfig().getClosureCleanerLevel(), true); } ClosureCleaner.ensureSerializable(f); return f; }
Example #16
Source File: OneInputStreamTaskTestHarness.java From flink with Apache License 2.0
public <K> void configureForKeyedStream(
        KeySelector<IN, K> keySelector,
        TypeInformation<K> keyType) {
    ClosureCleaner.clean(keySelector, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    streamConfig.setStatePartitioner(0, keySelector);
    streamConfig.setStateKeySerializer(keyType.createSerializer(executionConfig));
}
Example #17
Source File: KeyedOneInputStreamOperatorTestHarness.java From flink with Apache License 2.0
public KeyedOneInputStreamOperatorTestHarness(
        final OneInputStreamOperator<IN, OUT> operator,
        final KeySelector<IN, K> keySelector,
        final TypeInformation<K> keyType,
        final MockEnvironment environment) throws Exception {
    super(operator, environment);

    ClosureCleaner.clean(keySelector, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    config.setStatePartitioner(0, keySelector);
    config.setStateKeySerializer(keyType.createSerializer(executionConfig));
}
Example #18
Source File: OneInputStreamTaskTestHarness.java From Flink-CEPplus with Apache License 2.0
public <K> void configureForKeyedStream(
        KeySelector<IN, K> keySelector,
        TypeInformation<K> keyType) {
    ClosureCleaner.clean(keySelector, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    streamConfig.setStatePartitioner(0, keySelector);
    streamConfig.setStateKeySerializer(keyType.createSerializer(executionConfig));
}
Example #19
Source File: FlinkPulsarProducer.java From pulsar with Apache License 2.0
public FlinkPulsarProducer(ClientConfigurationData clientConfigurationData,
                           ProducerConfigurationData producerConfigurationData,
                           SerializationSchema<T> serializationSchema,
                           PulsarKeyExtractor<T> keyExtractor,
                           PulsarPropertiesExtractor<T> propertiesExtractor) {
    this.clientConf = checkNotNull(clientConfigurationData, "client conf can not be null");
    this.producerConf = checkNotNull(producerConfigurationData, "producer conf can not be null");
    this.schema = checkNotNull(serializationSchema, "Serialization Schema not set");
    this.flinkPulsarKeyExtractor = getOrNullKeyExtractor(keyExtractor);
    this.flinkPulsarPropertiesExtractor = getOrNullPropertiesExtractor(propertiesExtractor);
    ClosureCleaner.ensureSerializable(serializationSchema);
}
Example #20
Source File: KeyedOneInputStreamOperatorTestHarness.java From Flink-CEPplus with Apache License 2.0
public KeyedOneInputStreamOperatorTestHarness(
        final OneInputStreamOperator<IN, OUT> operator,
        final KeySelector<IN, K> keySelector,
        final TypeInformation<K> keyType,
        final MockEnvironment environment) throws Exception {
    super(operator, environment);

    ClosureCleaner.clean(keySelector, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    config.setStatePartitioner(0, keySelector);
    config.setStateKeySerializer(keyType.createSerializer(executionConfig));
}
Example #21
Source File: ClosureCleanerTest.java From flink with Apache License 2.0
@Test
public void testWriteReplace() {
    WithWriteReplace.SerializablePayload writeReplace =
            new WithWriteReplace.SerializablePayload(new WithWriteReplace.Payload("text"));
    Assert.assertEquals("text", writeReplace.get().getRaw());
    ClosureCleaner.clean(writeReplace, ExecutionConfig.ClosureCleanerLevel.TOP_LEVEL, true);
}
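TOP_LEVEL cleaning has to tolerate classes that rely on Java's writeReplace() serialization hook instead of serializing their fields directly. A rough sketch of the kind of fixture this test exercises; only the names come from the test, the nested-class layout and bodies are assumptions:

class WithWriteReplace {

    static class Payload {
        private final String raw;

        Payload(String raw) {
            this.raw = raw;
        }

        String getRaw() {
            return raw;
        }
    }

    static class SerializablePayload implements java.io.Serializable {
        private final Payload payload;

        SerializablePayload(Payload payload) {
            this.payload = payload;
        }

        Payload get() {
            return payload;
        }

        // Java serialization writes the object returned here instead of this
        // instance, so the non-serializable Payload field never hits the wire.
        private Object writeReplace() {
            return payload.getRaw();
        }
    }
}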
Example #22
Source File: ClosureCleanerTest.java From flink with Apache License 2.0
@Test
public void testRecursiveClass() {
    RecursiveClass recursiveClass = new RecursiveClass(new RecursiveClass());

    ClosureCleaner.clean(recursiveClass, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

    ClosureCleaner.ensureSerializable(recursiveClass);
}
Example #23
Source File: ClosureCleanerTest.java From flink with Apache License 2.0
@Test
public void testWrapperClass() throws Exception {
    MapCreator creator = new NonSerializableMapCreator();
    MapFunction<Integer, Integer> notCleanedMap = creator.getMap();

    WrapperMapFunction wrapped = new WrapperMapFunction(notCleanedMap);

    ClosureCleaner.clean(wrapped, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
    ClosureCleaner.ensureSerializable(wrapped);

    int result = wrapped.map(3);
    Assert.assertEquals(result, 4);
}
Example #24
Source File: ClosureCleanerTest.java From flink with Apache License 2.0
@Test(expected = InvalidProgramException.class)
public void testNestedNonSerializable() throws Exception {
    MapCreator creator = new NestedNonSerializableMapCreator(1);
    MapFunction<Integer, Integer> map = creator.getMap();

    ClosureCleaner.clean(map, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

    ClosureCleaner.ensureSerializable(map);

    int result = map.map(3);
    Assert.assertEquals(result, 4);
}
Example #25
Source File: ClosureCleanerTest.java From flink with Apache License 2.0
@Test
public void testNestedSerializable() throws Exception {
    MapCreator creator = new NestedSerializableMapCreator(1);
    MapFunction<Integer, Integer> map = creator.getMap();

    ClosureCleaner.clean(map, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

    ClosureCleaner.ensureSerializable(map);

    int result = map.map(3);
    Assert.assertEquals(result, 4);
}
Example #26
Source File: ClosureCleanerTest.java From flink with Apache License 2.0
@Test
public void testSerializable() throws Exception {
    MapCreator creator = new SerializableMapCreator(1);
    MapFunction<Integer, Integer> map = creator.getMap();

    ClosureCleaner.clean(map, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

    int result = map.map(3);
    Assert.assertEquals(result, 4);
}
Example #27
Source File: ClosureCleanerTest.java From flink with Apache License 2.0
@Test
public void testCleanedNonSerializable() throws Exception {
    MapCreator creator = new NonSerializableMapCreator();
    MapFunction<Integer, Integer> map = creator.getMap();

    ClosureCleaner.clean(map, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

    int result = map.map(3);
    Assert.assertEquals(result, 4);
}
Example #28
Source File: ClosureCleanerTest.java From flink with Apache License 2.0
@Test(expected = InvalidProgramException.class)
public void testNonSerializable() throws Exception {
    MapCreator creator = new NonSerializableMapCreator();
    MapFunction<Integer, Integer> map = creator.getMap();

    ClosureCleaner.ensureSerializable(map);

    int result = map.map(3);
    Assert.assertEquals(result, 4);
}
Example #29
Source File: FlinkKafkaProducerBase.java From flink with Apache License 2.0
/**
 * The main constructor for creating a FlinkKafkaProducer.
 *
 * @param defaultTopicId The default topic to write data to
 * @param serializationSchema A serializable serialization schema for turning user objects into a kafka-consumable byte[] supporting key/value messages
 * @param producerConfig Configuration properties for the KafkaProducer. 'bootstrap.servers' is the only required argument.
 * @param customPartitioner A serializable partitioner for assigning messages to Kafka partitions. Passing null will use Kafka's partitioner.
 */
public FlinkKafkaProducerBase(
        String defaultTopicId,
        KeyedSerializationSchema<IN> serializationSchema,
        Properties producerConfig,
        FlinkKafkaPartitioner<IN> customPartitioner) {
    requireNonNull(defaultTopicId, "TopicID not set");
    requireNonNull(serializationSchema, "serializationSchema not set");
    requireNonNull(producerConfig, "producerConfig not set");
    ClosureCleaner.clean(customPartitioner, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
    ClosureCleaner.ensureSerializable(serializationSchema);

    this.defaultTopicId = defaultTopicId;
    this.schema = serializationSchema;
    this.producerConfig = producerConfig;
    this.flinkKafkaPartitioner = customPartitioner;

    // set the producer configuration properties for kafka record key value serializers.
    if (!producerConfig.containsKey(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG)) {
        this.producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    } else {
        LOG.warn("Overwriting the '{}' is not recommended", ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG);
    }

    if (!producerConfig.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)) {
        this.producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    } else {
        LOG.warn("Overwriting the '{}' is not recommended", ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG);
    }

    // eagerly ensure that bootstrap servers are set.
    if (!this.producerConfig.containsKey(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
        throw new IllegalArgumentException(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG + " must be supplied in the producer config properties.");
    }

    this.topicPartitionsMap = new HashMap<>();
}
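For reference, a minimal sketch of producer properties that satisfy the eager bootstrap-server check in this constructor; the broker address is a placeholder:

Properties props = new Properties();
// Required: the constructor throws IllegalArgumentException without it.
props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
// key.serializer / value.serializer are defaulted to ByteArraySerializer
// when absent, so they can be omitted here.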
Example #30
Source File: AbstractParameterToolTest.java From flink with Apache License 2.0
protected void validate(ParameterTool parameter) {
    ClosureCleaner.ensureSerializable(parameter);
    internalValidate(parameter);

    // -------- test behaviour after serialization ------------
    try {
        byte[] b = InstantiationUtil.serializeObject(parameter);
        final ParameterTool copy = InstantiationUtil.deserializeObject(b, getClass().getClassLoader());
        internalValidate(copy);
    } catch (IOException | ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
}