org.apache.flink.api.common.typeinfo.TypeInformation Java Examples
The following examples show how to use
org.apache.flink.api.common.typeinfo.TypeInformation.
Example #1
Source File: CsvSinkBatchOp.java From Alink with Apache License 2.0
@Override
public CsvSinkBatchOp sinkFrom(BatchOperator in) {
    final String filePath = getFilePath();
    final String fieldDelim = getFieldDelimiter();
    final int numFiles = getNumFiles();
    final TypeInformation[] types = in.getColTypes();
    final Character quoteChar = getQuoteChar();

    FileSystem.WriteMode mode = FileSystem.WriteMode.NO_OVERWRITE;
    if (getOverwriteSink()) {
        mode = FileSystem.WriteMode.OVERWRITE;
    }

    DataSet<String> textLines = ((DataSet<Row>) in.getDataSet())
        .map(new CsvUtil.FormatCsvFunc(types, fieldDelim, quoteChar))
        .map(new MapFunction<Row, String>() {
            @Override
            public String map(Row value) throws Exception {
                return (String) value.getField(0);
            }
        });

    textLines.writeAsText(filePath, mode).name("csv_sink").setParallelism(numFiles);
    return this;
}
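For orientation, a caller would link this sink at the end of an Alink batch pipeline. The sketch below is illustrative only: it assumes an upstream BatchOperator named source, and the setter names mirror the params read above (file path, overwrite flag); exact setter signatures may differ across Alink versions.

// Hypothetical usage of the sink; `source` is an assumed upstream operator.
BatchOperator<?> csvSink = new CsvSinkBatchOp()
        .setFilePath("/tmp/result.csv")   // assumed output path
        .setOverwriteSink(true);          // flips WriteMode to OVERWRITE, as in sinkFrom above
source.link(csvSink);
BatchOperator.execute();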
Example #2
Source File: DataStream.java From flink with Apache License 2.0
/**
 * Applies the given {@link ProcessFunction} on the input stream, thereby
 * creating a transformed output stream.
 *
 * <p>The function will be called for every element in the input streams and can produce zero
 * or more output elements.
 *
 * @param processFunction The {@link ProcessFunction} that is called for each element
 *                        in the stream.
 * @param <R> The type of elements emitted by the {@code ProcessFunction}.
 *
 * @return The transformed {@link DataStream}.
 */
@PublicEvolving
public <R> SingleOutputStreamOperator<R> process(ProcessFunction<T, R> processFunction) {

    TypeInformation<R> outType = TypeExtractor.getUnaryOperatorReturnType(
        processFunction,
        ProcessFunction.class,
        0,
        1,
        TypeExtractor.NO_INDEX,
        getType(),
        Utils.getCallLocationName(),
        true);

    return process(processFunction, outType);
}
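As a usage sketch (the stream contents and function body are assumptions, not part of the source): the overload above infers the output TypeInformation through the TypeExtractor; when inference fails, e.g. for generic lambdas, the two-argument process(processFunction, outType) variant it delegates to can be called with an explicit type.

// Minimal sketch: map each string to its length via a ProcessFunction.
DataStream<String> words = env.fromElements("a", "bb", "ccc");  // `env` assumed
SingleOutputStreamOperator<Integer> lengths = words.process(
        new ProcessFunction<String, Integer>() {
            @Override
            public void processElement(String value, Context ctx, Collector<Integer> out) {
                out.collect(value.length());
            }
        });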
Example #3
Source File: KeyedStateDeduplication.java From flink-learning with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    ValueStateDescriptor<Boolean> keyedStateDuplicated =
        new ValueStateDescriptor<>("KeyedStateDeduplication",
            TypeInformation.of(new TypeHint<Boolean>() {}));
    // State TTL configuration: entries expire 36 hours after creation or last write.
    StateTtlConfig ttlConfig = StateTtlConfig
        .newBuilder(Time.hours(36))
        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
        .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
        .cleanupInRocksdbCompactFilter(50000000L)
        .build();
    // Enable TTL on the state descriptor.
    keyedStateDuplicated.enableTimeToLive(ttlConfig);
    // Restore the state handle from the state backend.
    isExist = getRuntimeContext().getState(keyedStateDuplicated);
}
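The anonymous TypeHint subclass is the standard way to capture a full type at runtime. For non-generic types it is interchangeable with TypeInformation.of(Class); for generic types, only the TypeHint form preserves the type arguments. A minimal sketch:

// Both describe Boolean; the TypeHint form also handles generics,
// which the Class form cannot express.
TypeInformation<Boolean> byClass = TypeInformation.of(Boolean.class);
TypeInformation<Boolean> byHint = TypeInformation.of(new TypeHint<Boolean>() {});
TypeInformation<Tuple2<String, Long>> generic =
        TypeInformation.of(new TypeHint<Tuple2<String, Long>>() {});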
Example #4
Source File: SelectorFunctionKeysTest.java From flink with Apache License 2.0
@Test
public void testOriginalTypes2() throws Exception {
    final TupleTypeInfo<Tuple2<Integer, String>> t1 = new TupleTypeInfo<>(
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO
    );
    TypeInformation<PojoWithMultiplePojos> t2 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);

    Keys<PojoWithMultiplePojos> sk = new Keys.SelectorFunctionKeys<>(
        new KeySelector3(),
        t2,
        t1
    );

    Assert.assertArrayEquals(
        new TypeInformation<?>[] { t1 },
        sk.getOriginalKeyFieldTypes()
    );
}
Example #5
Source File: AllWindowTranslationTest.java From flink with Apache License 2.0
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testFoldWithEvictor() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple3<String, String, Integer>> window1 = source
        .windowAll(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
        .evictor(CountEvictor.of(100))
        .fold(new Tuple3<>("", "", 1), new DummyFolder());

    OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
        (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof EvictingWindowOperator);
    EvictingWindowOperator<String, Tuple2<String, Integer>, ?, ?> winOperator =
        (EvictingWindowOperator<String, Tuple2<String, Integer>, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
    Assert.assertTrue(winOperator.getEvictor() instanceof CountEvictor);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof ListStateDescriptor);

    winOperator.setOutputType((TypeInformation) window1.getType(), new ExecutionConfig());
    processElementAndEnsureOutput(
        winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example #6
Source File: MLEnvironment.java From Alink with Apache License 2.0
/**
 * Factory method to create a stream {@link Table}.
 *
 * <p>The stream table is created with the session-shared StreamExecutionEnvironment.
 *
 * @param rows list of rows to create the table from.
 * @param colNames the column names of the table.
 * @return the created stream table.
 * @see MLEnvironment#getStreamExecutionEnvironment()
 * @see MLEnvironment#getStreamTableEnvironment()
 */
public Table createStreamTable(List<Row> rows, String[] colNames) {
    if (rows == null || rows.size() < 1) {
        throw new IllegalArgumentException("Values can not be empty.");
    }

    Row first = rows.iterator().next();
    int arity = first.getArity();

    TypeInformation<?>[] types = new TypeInformation[arity];

    for (int i = 0; i < arity; ++i) {
        types[i] = TypeExtractor.getForObject(first.getField(i));
    }

    DataStream<Row> dataSet = getStreamExecutionEnvironment().fromCollection(rows);
    return DataStreamConversionUtil.toTable(this, dataSet, colNames, types);
}
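A hedged usage sketch (the session instance and row values are assumptions): note that the column types are extracted from the first row via TypeExtractor.getForObject, so the first row must have no null fields.

// Illustrative: build a two-column stream table from in-memory rows.
MLEnvironment mlEnv = MLEnvironmentFactory.getDefault();  // assumes Alink's default session
List<Row> rows = Arrays.asList(Row.of("alice", 1L), Row.of("bob", 2L));
Table users = mlEnv.createStreamTable(rows, new String[] {"name", "id"});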
Example #7
Source File: JavaApiPostPass.java From flink with Apache License 2.0
@SuppressWarnings("unchecked") private <T> TypeComparatorFactory<?> createComparator(TypeInformation<T> typeInfo, FieldList keys, boolean[] sortOrder) { TypeComparator<T> comparator; if (typeInfo instanceof CompositeType) { comparator = ((CompositeType<T>) typeInfo).createComparator(keys.toArray(), sortOrder, 0, executionConfig); } else if (typeInfo instanceof AtomicType) { // handle grouping of atomic types comparator = ((AtomicType<T>) typeInfo).createComparator(sortOrder[0], executionConfig); } else { throw new RuntimeException("Unrecognized type: " + typeInfo); } return new RuntimeComparatorFactory<T>(comparator); }
Example #8
Source File: PlanRightUnwrappingCoGroupOperator.java From flink with Apache License 2.0
public PlanRightUnwrappingCoGroupOperator(
        CoGroupFunction<I1, I2, OUT> udf,
        int[] key1,
        Keys.SelectorFunctionKeys<I2, K> key2,
        String name,
        TypeInformation<OUT> resultType,
        TypeInformation<I1> typeInfo1,
        TypeInformation<Tuple2<K, I2>> typeInfoWithKey2) {

    super(
        new TupleRightUnwrappingCoGrouper<I1, I2, OUT, K>(udf),
        new BinaryOperatorInformation<I1, Tuple2<K, I2>, OUT>(
            typeInfo1,
            typeInfoWithKey2,
            resultType),
        key1,
        key2.computeLogicalKeyPositions(),
        name);
}
Example #9
Source File: OneInputStreamTaskTestHarness.java From flink with Apache License 2.0
public OneInputStreamTaskTestHarness(
        Function<Environment, ? extends StreamTask<OUT, ?>> taskFactory,
        int numInputGates,
        int numInputChannelsPerGate,
        TypeInformation<IN> inputType,
        TypeInformation<OUT> outputType,
        File localRootDir) {
    super(taskFactory, outputType, localRootDir);

    this.inputType = inputType;
    inputSerializer = inputType.createSerializer(executionConfig);

    this.numInputGates = numInputGates;
    this.numInputChannelsPerGate = numInputChannelsPerGate;

    streamConfig.setStateKeySerializer(inputSerializer);
}
Example #10
Source File: FoldApplyProcessWindowFunction.java From flink with Apache License 2.0
@Override
public void setOutputType(TypeInformation<R> outTypeInfo, ExecutionConfig executionConfig) {
    accSerializer = accTypeInformation.createSerializer(executionConfig);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(baos);

    try {
        accSerializer.serialize(initialValue, out);
    } catch (IOException ioe) {
        throw new RuntimeException("Unable to serialize initial value of type " +
            initialValue.getClass().getSimpleName() + " of fold window function.", ioe);
    }

    serializedInitialValue = baos.toByteArray();
}
Example #11
Source File: KMeansModelMapper.java From Alink with Apache License 2.0
public KMeansModelMapper(TableSchema modelSchema, TableSchema dataSchema, Params params) {
    super(modelSchema, dataSchema, params);
    String[] reservedColNames = this.params.get(KMeansPredictParams.RESERVED_COLS);
    String predResultColName = this.params.get(KMeansPredictParams.PREDICTION_COL);
    isPredDetail = params.contains(KMeansPredictParams.PREDICTION_DETAIL_COL);
    isPredDistance = params.contains(KMeansPredictParams.PREDICTION_DISTANCE_COL);

    List<String> outputCols = new ArrayList<>();
    List<TypeInformation> outputTypes = new ArrayList<>();
    outputCols.add(predResultColName);
    outputTypes.add(Types.LONG);
    if (isPredDetail) {
        outputCols.add(params.get(KMeansPredictParams.PREDICTION_DETAIL_COL));
        outputTypes.add(Types.STRING);
    }
    if (isPredDistance) {
        outputCols.add(params.get(KMeansPredictParams.PREDICTION_DISTANCE_COL));
        outputTypes.add(Types.DOUBLE);
    }

    this.outputColsHelper = new OutputColsHelper(
        dataSchema,
        outputCols.toArray(new String[0]),
        outputTypes.toArray(new TypeInformation[0]),
        reservedColNames);
}
Example #12
Source File: SemanticPropUtil.java From flink with Apache License 2.0
private static boolean areFieldsCompatible(String sourceField, TypeInformation<?> inType,
        String targetField, TypeInformation<?> outType, boolean throwException) {
    try {
        // get source type information
        TypeInformation<?> sourceType = getExpressionTypeInformation(sourceField, inType);
        // get target type information
        TypeInformation<?> targetType = getExpressionTypeInformation(targetField, outType);
        return sourceType.equals(targetType);
    } catch (InvalidFieldReferenceException e) {
        if (throwException) {
            throw e;
        } else {
            return false;
        }
    }
}
Example #13
Source File: OutputTag.java From flink with Apache License 2.0
/**
 * Creates a new named {@code OutputTag} with the given id and output {@link TypeInformation}.
 *
 * @param id The id of the created {@code OutputTag}.
 * @param typeInfo The {@code TypeInformation} for the side output.
 */
public OutputTag(String id, TypeInformation<T> typeInfo) {
    Preconditions.checkNotNull(id, "OutputTag id cannot be null.");
    Preconditions.checkArgument(!id.isEmpty(), "OutputTag id must not be empty.");
    this.id = id;
    this.typeInfo = Preconditions.checkNotNull(typeInfo, "TypeInformation cannot be null.");
}
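Two construction styles follow from this. The one-argument constructor relies on the tag being created as an anonymous subclass so the type parameter can be extracted reflectively; the two-argument form above supplies the TypeInformation explicitly and needs no subclassing. A minimal sketch (tag ids are illustrative):

// Anonymous subclass keeps <String> recoverable at runtime:
final OutputTag<String> lateData = new OutputTag<String>("late-data") {};
// Equivalent, with the side-output type given explicitly:
final OutputTag<String> lateDataExplicit =
        new OutputTag<>("late-data", TypeInformation.of(String.class));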
Example #14
Source File: TypeExtractorTest.java From Flink-CEPplus with Apache License 2.0
@SuppressWarnings({ "rawtypes", "unchecked" }) @Test public void testBasicArray() { // use getCoGroupReturnTypes() RichCoGroupFunction<?, ?, ?> function = new RichCoGroupFunction<String[], String[], String[]>() { private static final long serialVersionUID = 1L; @Override public void coGroup(Iterable<String[]> first, Iterable<String[]> second, Collector<String[]> out) throws Exception { // nothing to do } }; TypeInformation<?> ti = TypeExtractor.getCoGroupReturnTypes(function, (TypeInformation) TypeInformation.of(new TypeHint<String[]>(){}), (TypeInformation) TypeInformation.of(new TypeHint<String[]>(){})); Assert.assertFalse(ti.isBasicType()); Assert.assertFalse(ti.isTupleType()); // Due to a Java 6 bug the classification can be slightly wrong Assert.assertTrue(ti instanceof BasicArrayTypeInfo<?,?> || ti instanceof ObjectArrayTypeInfo<?,?>); if(ti instanceof BasicArrayTypeInfo<?,?>) { Assert.assertEquals(BasicArrayTypeInfo.STRING_ARRAY_TYPE_INFO, ti); } else { Assert.assertEquals(BasicTypeInfo.STRING_TYPE_INFO, ((ObjectArrayTypeInfo<?,?>) ti).getComponentInfo()); } }
Example #15
Source File: ElasticsearchUpsertTableSinkBase.java From flink with Apache License 2.0
/**
 * Validates the types that are used for conversion to string.
 */
private void validateKeyTypes(int[] keyFieldIndices) {
    final TypeInformation<?>[] types = getFieldTypes();
    for (int keyFieldIndex : keyFieldIndices) {
        final TypeInformation<?> type = types[keyFieldIndex];
        if (!TypeCheckUtils.isSimpleStringRepresentation(type)) {
            throw new ValidationException(
                "Only simple types that can be safely converted into a string representation " +
                    "can be used as keys. But was: " + type);
        }
    }
}
Example #16
Source File: FormatDescriptor.java From alchemy with Apache License 2.0
private <T> T newProttostuff(TypeInformation<Row> typeInformation, Class clazz, Boolean isDeserialization) {
    if (isDeserialization) {
        return (T) new ProtostuffRowDeserializationSchema(typeInformation, clazz);
    } else {
        return (T) new ProtostuffRowSerializationSchema(clazz, typeInformation);
    }
}
Example #17
Source File: CsvRowDeserializationSchema.java From Flink-CEPplus with Apache License 2.0
private static RuntimeConverter createObjectArrayRuntimeConverter(
        TypeInformation<?> elementType,
        boolean ignoreParseErrors) {
    final Class<?> elementClass = elementType.getTypeClass();
    final RuntimeConverter elementConverter = createNullableRuntimeConverter(elementType, ignoreParseErrors);

    return (node) -> {
        final int nodeSize = node.size();
        final Object[] array = (Object[]) Array.newInstance(elementClass, nodeSize);
        for (int i = 0; i < nodeSize; i++) {
            array[i] = elementConverter.convert(node.get(i));
        }
        return array;
    };
}
Example #18
Source File: TestRowDataCsvInputFormat.java From flink with Apache License 2.0
private Object convertStringToInternal(String value, TypeInformation type) {
    if (type.equals(Types.INT)) {
        return Integer.parseInt(value);
    } else if (type.equals(Types.LONG)) {
        return Long.parseLong(value);
    } else if (type.equals(Types.STRING)) {
        return StringData.fromString(value);
    } else {
        throw new UnsupportedOperationException("Unsupported partition type: " + type);
    }
}
Example #19
Source File: StreamProjection.java From flink with Apache License 2.0
public static TypeInformation<?>[] extractFieldTypes(int[] fields, TypeInformation<?> inType) {
    TupleTypeInfo<?> inTupleType = (TupleTypeInfo<?>) inType;
    TypeInformation<?>[] fieldTypes = new TypeInformation[fields.length];

    for (int i = 0; i < fields.length; i++) {
        fieldTypes[i] = inTupleType.getTypeAt(fields[i]);
    }

    return fieldTypes;
}
Example #20
Source File: OutageProcessFunction.java From flink-learning with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    TypeInformation<OutageMetricEvent> outageInfo =
        TypeInformation.of(new TypeHint<OutageMetricEvent>() {});
    TypeInformation<Boolean> recoverInfo =
        TypeInformation.of(new TypeHint<Boolean>() {});
    outageMetricState = getRuntimeContext().getState(new ValueStateDescriptor<>("outage_zhisheng", outageInfo));
    recover = getRuntimeContext().getState(new ValueStateDescriptor<>("recover_zhisheng", recoverInfo));
}
Example #21
Source File: NGramMapperTest.java From Alink with Apache License 2.0
@Test
public void testDefault() throws Exception {
    TableSchema schema = new TableSchema(
        new String[] {"sentence"},
        new TypeInformation<?>[] {Types.STRING}
    );
    Params params = new Params()
        .set(NGramParams.SELECTED_COL, "sentence");

    NGramMapper mapper = new NGramMapper(schema, params);

    assertEquals(mapper.map(Row.of("This is a unit test for mapper")).getField(0),
        "This_is is_a a_unit unit_test test_for for_mapper");
    assertEquals(mapper.getOutputSchema(), schema);
}
Example #22
Source File: JsonRowSerializationSchemaTest.java From flink with Apache License 2.0
@Test
public void testSerializeRowWithInvalidNumberOfFields() {
    final TypeInformation<Row> rowSchema = Types.ROW_NAMED(
        new String[]{"f1", "f2", "f3"},
        Types.INT, Types.BOOLEAN, Types.STRING);

    final Row row = new Row(1);
    row.setField(0, 1);

    final JsonRowSerializationSchema serializationSchema = new JsonRowSerializationSchema.Builder(rowSchema)
        .build();
    assertThat(row, whenSerializedWith(serializationSchema).failsWithException(instanceOf(RuntimeException.class)));
}
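For contrast, Types.ROW_NAMED pairs field names with types positionally, so the schema above describes rows of arity 3; the arity-1 row is what triggers the expected failure. A row that would serialize cleanly looks like this (values are illustrative):

// A row matching the three-field schema above.
final Row valid = new Row(3);
valid.setField(0, 1);        // f1: INT
valid.setField(1, true);     // f2: BOOLEAN
valid.setField(2, "hello");  // f3: STRING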
Example #23
Source File: Keys.java From Flink-CEPplus with Apache License 2.0
@Override
public <E> void validateCustomPartitioner(Partitioner<E> partitioner, TypeInformation<E> typeInfo) {

    if (keyFields.size() != 1) {
        throw new InvalidProgramException("Custom partitioners can only be used with keys that have one key field.");
    }

    if (typeInfo == null) {
        // try to extract key type from partitioner
        try {
            typeInfo = TypeExtractor.getPartitionerTypes(partitioner);
        }
        catch (Throwable t) {
            // best effort check, so we ignore exceptions
        }
    }

    if (typeInfo != null && !(typeInfo instanceof GenericTypeInfo)) {
        // only check type compatibility if type is known and not a generic type
        TypeInformation<?> keyType = keyFields.get(0).getType();
        if (!keyType.equals(typeInfo)) {
            throw new InvalidProgramException("The partitioner is incompatible with the key type. "
                + "Partitioner type: " + typeInfo + " , key type: " + keyType);
        }
    }
}
Example #24
Source File: SiddhiStream.java From flink-siddhi with Apache License 2.0
/**
 * @apiNote This function cannot be used with dynamic partitioning, because policies are loaded dynamically.
 * @param outStreamIds The <code>streamIds</code> to return as data streams.
 * @param <T> Type information should match the stream definition.
 *            During the execution phase, type information is built automatically from the stream definition.
 * @return The output stream id and data as a Tuple2
 * @see SiddhiTypeFactory
 */
public <T extends Tuple> DataStream<Tuple2<String, T>> returns(List<String> outStreamIds) {
    for (String outStreamId : outStreamIds) {
        TypeInformation<T> typeInformation =
            SiddhiTypeFactory.getTupleTypeInformation(siddhiContext.getAllEnrichedExecutionPlan(), outStreamId);
        siddhiContext.setOutputStreamType(outStreamId, typeInformation);
    }

    return returnsInternal(siddhiContext, executionPlanId);
}
Example #25
Source File: JavaTableEnvironmentITCase.java From flink with Apache License 2.0
@Test
public void testFromNonAtomicAndNonComposite() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env, config());

    List<Either<String, Integer>> data = new ArrayList<>();
    data.add(new Either.Left<>("Hello"));
    data.add(new Either.Right<>(42));
    data.add(new Either.Left<>("World"));

    Table table = tableEnv
        .fromDataSet(
            env.fromCollection(
                data,
                TypeInformation.of(new TypeHint<Either<String, Integer>>() { })
            ),
            "either")
        .select("either");

    DataSet<Row> ds = tableEnv.toDataSet(table, Row.class);
    List<Row> results = ds.collect();
    String expected = "Left(Hello)\n" + "Left(World)\n" + "Right(42)\n";
    compareResultAsText(results, expected);
}
Example #26
Source File: CoGroupedStreams.java From Flink-CEPplus with Apache License 2.0
/**
 * Specifies a {@link KeySelector} for elements from the second input with explicit type information for the key type.
 *
 * @param keySelector The KeySelector to be used for extracting the key for partitioning.
 * @param keyType The type information describing the key type.
 */
public EqualTo equalTo(KeySelector<T2, KEY> keySelector, TypeInformation<KEY> keyType) {
    Preconditions.checkNotNull(keySelector);
    Preconditions.checkNotNull(keyType);

    if (!keyType.equals(this.keyType)) {
        throw new IllegalArgumentException("The keys for the two inputs are not equal: " +
            "first key = " + this.keyType + " , second key = " + keyType);
    }

    return new EqualTo(input2.clean(keySelector));
}
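In context, equalTo completes a where(...)/equalTo(...) key specification on a co-group; passing the key type explicitly on both sides avoids type-erasure problems with lambda key selectors. A usage sketch under assumed inputs (element values, window choice, and the co-group body are illustrative):

// Illustrative: co-group two streams on their String key, with explicit key types.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Tuple2<String, Integer>> one = env.fromElements(Tuple2.of("a", 1));
DataStream<Tuple2<String, Integer>> two = env.fromElements(Tuple2.of("a", 2));
TypeInformation<String> keyType = BasicTypeInfo.STRING_TYPE_INFO;

DataStream<String> matched = one.coGroup(two)
        .where(t -> t.f0, keyType)
        .equalTo(t -> t.f0, keyType)
        .window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
        .apply(new CoGroupFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, String>() {
            @Override
            public void coGroup(Iterable<Tuple2<String, Integer>> first,
                    Iterable<Tuple2<String, Integer>> second, Collector<String> out) {
                out.collect("co-grouped one key group");
            }
        });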
Example #27
Source File: FlinkUniverse.java From stateful-functions with Apache License 2.0
private SingleOutputStreamOperator<Message> functionOperator(
        DataStream<Message> input,
        Map<EgressIdentifier<?>, OutputTag<Object>> sideOutputs) {

    TypeInformation<Message> typeInfo = input.getType();
    FunctionGroupDispatchFactory operatorFactory = new FunctionGroupDispatchFactory(sideOutputs);

    return DataStreamUtils.reinterpretAsKeyedStream(input, new MessageKeySelector())
        .transform(StatefulFunctionsJobConstants.FUNCTION_OPERATOR_NAME, typeInfo, operatorFactory)
        .uid(StatefulFunctionsJobConstants.FUNCTION_OPERATOR_UID);
}
Example #28
Source File: SlidingWindowCheckMapper.java From flink with Apache License 2.0
@Override
public void open(Configuration parameters) {
    ValueStateDescriptor<List<Tuple2<Event, Integer>>> previousWindowDescriptor =
        new ValueStateDescriptor<>("eventsSeenSoFar",
            new ListTypeInfo<>(new TupleTypeInfo<>(TypeInformation.of(Event.class), BasicTypeInfo.INT_TYPE_INFO)));

    eventsSeenSoFar = getRuntimeContext().getState(previousWindowDescriptor);

    ValueStateDescriptor<Long> lastSequenceNumberDescriptor =
        new ValueStateDescriptor<>("lastSequenceNumber", BasicTypeInfo.LONG_TYPE_INFO);

    lastSequenceNumber = getRuntimeContext().getState(lastSequenceNumberDescriptor);
}
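Composite type informations nest freely: here a ListTypeInfo wraps a TupleTypeInfo, which in turn wraps the element type infos. The same pattern extends to other containers; a minimal sketch (the map shape is an arbitrary example):

// Illustrative: type information for a Map<String, List<Long>>.
MapTypeInfo<String, List<Long>> mapInfo = new MapTypeInfo<>(
        BasicTypeInfo.STRING_TYPE_INFO,
        new ListTypeInfo<>(BasicTypeInfo.LONG_TYPE_INFO));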
Example #29
Source File: StreamExecutionEnvironment.java From Flink-CEPplus with Apache License 2.0
private <OUT> DataStreamSource<OUT> createFileInput(FileInputFormat<OUT> inputFormat,
                                                    TypeInformation<OUT> typeInfo,
                                                    String sourceName,
                                                    FileProcessingMode monitoringMode,
                                                    long interval) {

    Preconditions.checkNotNull(inputFormat, "Unspecified file input format.");
    Preconditions.checkNotNull(typeInfo, "Unspecified output type information.");
    Preconditions.checkNotNull(sourceName, "Unspecified name for the source.");
    Preconditions.checkNotNull(monitoringMode, "Unspecified monitoring mode.");

    Preconditions.checkArgument(monitoringMode.equals(FileProcessingMode.PROCESS_ONCE) ||
            interval >= ContinuousFileMonitoringFunction.MIN_MONITORING_INTERVAL,
        "The path monitoring interval cannot be less than " +
            ContinuousFileMonitoringFunction.MIN_MONITORING_INTERVAL + " ms.");

    ContinuousFileMonitoringFunction<OUT> monitoringFunction =
        new ContinuousFileMonitoringFunction<>(inputFormat, monitoringMode, getParallelism(), interval);

    ContinuousFileReaderOperator<OUT> reader = new ContinuousFileReaderOperator<>(inputFormat);

    SingleOutputStreamOperator<OUT> source = addSource(monitoringFunction, sourceName)
        .transform("Split Reader: " + sourceName, typeInfo, reader);

    return new DataStreamSource<>(source);
}
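This private helper backs the public readFile entry points. A caller-side sketch (the path and poll interval are assumptions):

// Illustrative: continuously monitor a directory for new text files.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
TextInputFormat format = new TextInputFormat(new Path("/tmp/in"));  // assumed path
DataStream<String> lines = env.readFile(
        format,
        "/tmp/in",
        FileProcessingMode.PROCESS_CONTINUOUSLY,
        10_000L);  // rescan every 10 s; must not be below MIN_MONITORING_INTERVAL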