org.apache.flink.table.types.logical.BigIntType Java Examples
The following examples show how to use
org.apache.flink.table.types.logical.BigIntType.
Each example is taken from an open-source project; the project, source file, and license are noted in the header above the code.
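As a quick orientation before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the class name BigIntTypeSketch is purely illustrative) showing how a BigIntType is constructed directly and what its basic properties look like:

import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.LogicalType;

public class BigIntTypeSketch {

    public static void main(String[] args) {
        // The default constructor yields a nullable BIGINT; pass false for BIGINT NOT NULL.
        LogicalType nullableBigInt = new BigIntType();
        LogicalType notNullBigInt = new BigIntType(false);

        System.out.println(nullableBigInt.asSerializableString()); // BIGINT
        System.out.println(notNullBigInt.asSerializableString());  // BIGINT NOT NULL
        System.out.println(nullableBigInt.isNullable());           // true
    }
}

Most of the examples below use BigIntType in one of two ways: as a field type inside a RowType, or as an entry in a LogicalType[] describing an accumulator or serializer layout.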
Example #1
Source File: LastValueWithRetractAggFunction.java From flink with Apache License 2.0
@Override
public TypeInformation<GenericRowData> getAccumulatorType() {
    LogicalType[] fieldTypes = new LogicalType[] {
        fromTypeInfoToLogicalType(getResultType()),
        new BigIntType(),
        new TypeInformationRawType<>(new MapViewTypeInfo<>(getResultType(), new ListTypeInfo<>(Types.LONG), false, false)),
        new TypeInformationRawType<>(new MapViewTypeInfo<>(Types.LONG, new ListTypeInfo<>(getResultType()), false, false))
    };
    String[] fieldNames = new String[] {
        "lastValue",
        "lastOrder",
        "valueToOrderMapView",
        "orderToValueMapView"
    };
    return (TypeInformation) new RowDataTypeInfo(fieldTypes, fieldNames);
}
Example #2
Source File: FirstValueWithRetractAggFunction.java From flink with Apache License 2.0
@Override
public TypeInformation<GenericRowData> getAccumulatorType() {
    LogicalType[] fieldTypes = new LogicalType[] {
        fromTypeInfoToLogicalType(getResultType()),
        new BigIntType(),
        new TypeInformationRawType<>(new MapViewTypeInfo<>(getResultType(), new ListTypeInfo<>(Types.LONG), false, false)),
        new TypeInformationRawType<>(new MapViewTypeInfo<>(Types.LONG, new ListTypeInfo<>(getResultType()), false, false))
    };
    String[] fieldNames = new String[] {
        "firstValue",
        "firstOrder",
        "valueToOrderMapView",
        "orderToValueMapView"
    };
    return (TypeInformation) new RowDataTypeInfo(fieldTypes, fieldNames);
}
Example #3
Source File: DataTypeExtractorTest.java From flink with Apache License 2.0
/**
 * Testing data type shared with the Scala tests.
 */
public static DataType getPojoWithCustomOrderDataType(Class<?> pojoClass) {
    final StructuredType.Builder builder = StructuredType.newBuilder(pojoClass);
    builder.attributes(
        Arrays.asList(
            new StructuredAttribute("z", new BigIntType()),
            new StructuredAttribute("y", new BooleanType()),
            new StructuredAttribute("x", new IntType())));
    builder.setFinal(true);
    builder.setInstantiable(true);
    final StructuredType structuredType = builder.build();
    final List<DataType> fieldDataTypes = Arrays.asList(
        DataTypes.BIGINT(),
        DataTypes.BOOLEAN(),
        DataTypes.INT()
    );
    return new FieldsDataType(structuredType, pojoClass, fieldDataTypes);
}
Example #4
Source File: RowDataSerializerTest.java From flink with Apache License 2.0
public RowDataSerializerTest() {
    super(
        new DeeplyEqualsChecker()
            .withCustomCheck(
                (o1, o2) -> o1 instanceof RowData && o2 instanceof RowData,
                (o1, o2, checker) -> {
                    LogicalType[] fieldTypes = new LogicalType[] {
                        new BigIntType(),
                        new BigIntType()
                    };
                    RowDataSerializer serializer = new RowDataSerializer(new ExecutionConfig(), fieldTypes);
                    return deepEqualsRowData(
                        (RowData) o1,
                        (RowData) o2,
                        (RowDataSerializer) serializer.duplicate(),
                        (RowDataSerializer) serializer.duplicate());
                }
            ));
}
Example #5
Source File: BytesHashMapTest.java From flink with Apache License 2.0
public BytesHashMapTest() {
    this.keyTypes = new LogicalType[] {
        new IntType(),
        new VarCharType(VarCharType.MAX_LENGTH),
        new DoubleType(),
        new BigIntType(),
        new BooleanType(),
        new FloatType(),
        new SmallIntType()
    };
    this.valueTypes = new LogicalType[] {
        new DoubleType(),
        new BigIntType(),
        new BooleanType(),
        new FloatType(),
        new SmallIntType()
    };
    this.keySerializer = new BinaryRowDataSerializer(keyTypes.length);
    this.valueSerializer = new BinaryRowDataSerializer(valueTypes.length);
    this.defaultValue = valueSerializer.createInstance();
    int valueSize = defaultValue.getFixedLengthPartSize();
    this.defaultValue.pointTo(MemorySegmentFactory.wrap(new byte[valueSize]), 0, valueSize);
}
Example #6
Source File: LastValueWithRetractAggFunction.java From flink with Apache License 2.0
@Override
public TypeInformation<GenericRow> getAccumulatorType() {
    LogicalType[] fieldTypes = new LogicalType[] {
        fromTypeInfoToLogicalType(getResultType()),
        new BigIntType(),
        new TypeInformationAnyType<>(new MapViewTypeInfo<>(getResultType(), new ListTypeInfo<>(Types.LONG), false, false)),
        new TypeInformationAnyType<>(new MapViewTypeInfo<>(Types.LONG, new ListTypeInfo<>(getResultType()), false, false))
    };
    String[] fieldNames = new String[] {
        "lastValue",
        "lastOrder",
        "valueToOrderMapView",
        "orderToValueMapView"
    };
    return (TypeInformation) new BaseRowTypeInfo(fieldTypes, fieldNames);
}
Example #7
Source File: FirstValueWithRetractAggFunction.java From flink with Apache License 2.0
@Override
public TypeInformation<GenericRow> getAccumulatorType() {
    LogicalType[] fieldTypes = new LogicalType[] {
        fromTypeInfoToLogicalType(getResultType()),
        new BigIntType(),
        new TypeInformationAnyType<>(new MapViewTypeInfo<>(getResultType(), new ListTypeInfo<>(Types.LONG), false, false)),
        new TypeInformationAnyType<>(new MapViewTypeInfo<>(Types.LONG, new ListTypeInfo<>(getResultType()), false, false))
    };
    String[] fieldNames = new String[] {
        "firstValue",
        "firstOrder",
        "valueToOrderMapView",
        "orderToValueMapView"
    };
    return (TypeInformation) new BaseRowTypeInfo(fieldTypes, fieldNames);
}
Example #8
Source File: PythonScalarFunctionOperatorTestBase.java From flink with Apache License 2.0
private OneInputStreamOperatorTestHarness<IN, OUT> getTestHarness(Configuration config) throws Exception {
    RowType dataType = new RowType(Arrays.asList(
        new RowType.RowField("f1", new VarCharType()),
        new RowType.RowField("f2", new VarCharType()),
        new RowType.RowField("f3", new BigIntType())));
    AbstractPythonScalarFunctionOperator<IN, OUT, UDFIN> operator = getTestOperator(
        config,
        new PythonFunctionInfo[] {
            new PythonFunctionInfo(
                AbstractPythonScalarFunctionRunnerTest.DummyPythonFunction.INSTANCE,
                new Integer[]{0})
        },
        dataType,
        dataType,
        new int[]{2},
        new int[]{0, 1}
    );

    OneInputStreamOperatorTestHarness<IN, OUT> testHarness = new OneInputStreamOperatorTestHarness<>(operator);
    testHarness.getStreamConfig().setManagedMemoryFraction(0.5);
    testHarness.setup(getOutputTypeSerializer(dataType));
    return testHarness;
}
Example #9
Source File: BytesHashMapTest.java From flink with Apache License 2.0
public BytesHashMapTest() {
    this.keyTypes = new LogicalType[] {
        new IntType(),
        new VarCharType(VarCharType.MAX_LENGTH),
        new DoubleType(),
        new BigIntType(),
        new BooleanType(),
        new FloatType(),
        new SmallIntType()
    };
    this.valueTypes = new LogicalType[] {
        new DoubleType(),
        new BigIntType(),
        new BooleanType(),
        new FloatType(),
        new SmallIntType()
    };
    this.keySerializer = new BinaryRowSerializer(keyTypes.length);
    this.valueSerializer = new BinaryRowSerializer(valueTypes.length);
    this.defaultValue = valueSerializer.createInstance();
    int valueSize = defaultValue.getFixedLengthPartSize();
    this.defaultValue.pointTo(MemorySegmentFactory.wrap(new byte[valueSize]), 0, valueSize);
}
Example #10
Source File: PythonTableFunctionRunnerTest.java From flink with Apache License 2.0
private AbstractPythonTableFunctionRunner<Row> createUDTFRunner(
        JobBundleFactory jobBundleFactory, FnDataReceiver<byte[]> receiver) throws IOException {
    PythonFunctionInfo pythonFunctionInfo = new PythonFunctionInfo(
        AbstractPythonScalarFunctionRunnerTest.DummyPythonFunction.INSTANCE,
        new Integer[]{0});

    RowType rowType = new RowType(Collections.singletonList(new RowType.RowField("f1", new BigIntType())));

    final PythonEnvironmentManager environmentManager = createTestEnvironmentManager();

    return new PythonTableFunctionRunnerTestHarness(
        "testPythonRunner",
        receiver,
        pythonFunctionInfo,
        environmentManager,
        rowType,
        rowType,
        jobBundleFactory,
        PythonTestUtils.createMockFlinkMetricContainer());
}
Example #11
Source File: AbstractPythonScalarFunctionRunnerTest.java From flink with Apache License 2.0
protected AbstractPythonScalarFunctionRunner<IN> createMultipleUDFRunner() throws Exception {
    PythonFunctionInfo[] pythonFunctionInfos = new PythonFunctionInfo[] {
        new PythonFunctionInfo(
            DummyPythonFunction.INSTANCE,
            new Integer[]{0, 1}),
        new PythonFunctionInfo(
            DummyPythonFunction.INSTANCE,
            new Integer[]{0, 2})
    };
    RowType inputType = new RowType(Arrays.asList(
        new RowType.RowField("f1", new BigIntType()),
        new RowType.RowField("f2", new BigIntType()),
        new RowType.RowField("f3", new BigIntType())));
    RowType outputType = new RowType(Arrays.asList(
        new RowType.RowField("f1", new BigIntType()),
        new RowType.RowField("f2", new BigIntType())));
    return createPythonScalarFunctionRunner(pythonFunctionInfos, inputType, outputType);
}
Example #12
Source File: PythonTypeUtilsTest.java From flink with Apache License 2.0
@Test
public void testLogicalTypeToBlinkTypeSerializer() {
    List<RowType.RowField> rowFields = new ArrayList<>();
    rowFields.add(new RowType.RowField("f1", new BigIntType()));
    RowType rowType = new RowType(rowFields);
    TypeSerializer baseSerializer = PythonTypeUtils.toBlinkTypeSerializer(rowType);

    assertTrue(baseSerializer instanceof RowDataSerializer);
    assertEquals(1, ((RowDataSerializer) baseSerializer).getArity());
}
Example #13
Source File: FirstValueAggFunction.java From flink with Apache License 2.0
@Override
public TypeInformation<GenericRow> getAccumulatorType() {
    LogicalType[] fieldTypes = new LogicalType[] {
        fromTypeInfoToLogicalType(getResultType()),
        new BigIntType()
    };
    String[] fieldNames = new String[] {
        "value",
        "time"
    };
    return (TypeInformation) new BaseRowTypeInfo(fieldTypes, fieldNames);
}
Example #14
Source File: AbstractPythonTableFunctionRunnerTest.java From flink with Apache License 2.0
AbstractPythonTableFunctionRunner<IN> createUDTFRunner() throws Exception {
    PythonFunctionInfo pythonFunctionInfo = new PythonFunctionInfo(
        AbstractPythonScalarFunctionRunnerTest.DummyPythonFunction.INSTANCE,
        new Integer[]{0});

    RowType rowType = new RowType(Collections.singletonList(new RowType.RowField("f1", new BigIntType())));
    return createPythonTableFunctionRunner(pythonFunctionInfo, rowType, rowType);
}
Example #15
Source File: LastValueAggFunction.java From flink with Apache License 2.0
@Override
public TypeInformation<GenericRowData> getAccumulatorType() {
    LogicalType[] fieldTypes = new LogicalType[] {
        fromTypeInfoToLogicalType(getResultType()),
        new BigIntType()
    };
    String[] fieldNames = new String[] {
        "value",
        "time"
    };
    return (TypeInformation) new RowDataTypeInfo(fieldTypes, fieldNames);
}
Example #16
Source File: LogicalTypesTest.java From flink with Apache License 2.0
@Test
public void testBigIntType() {
    testAll(
        new BigIntType(),
        "BIGINT",
        "BIGINT",
        new Class[]{Long.class, long.class},
        new Class[]{Long.class},
        new LogicalType[]{},
        new BigIntType(false)
    );
}
Example #17
Source File: LogicalTypeDuplicatorTest.java From flink with Apache License 2.0
@Parameters(name = "{index}: {0}")
public static List<Object[]> testData() {
    return Arrays.asList(
        new Object[][]{
            {new CharType(2), new CharType(2)},
            {createMultisetType(new IntType()), createMultisetType(new BigIntType())},
            {createArrayType(new IntType()), createArrayType(new BigIntType())},
            {createMapType(new IntType()), createMapType(new BigIntType())},
            {createRowType(new IntType()), createRowType(new BigIntType())},
            {createDistinctType(new IntType()), createDistinctType(new BigIntType())},
            {createUserType(new IntType()), createUserType(new BigIntType())},
            {createHumanType(), createHumanType()}
        }
    );
}
Example #18
Source File: PythonTypeUtilsTest.java From flink with Apache License 2.0
@Test
public void testLogicalTypeToProto() {
    List<RowType.RowField> rowFields = new ArrayList<>();
    rowFields.add(new RowType.RowField("f1", new BigIntType()));
    RowType rowType = new RowType(rowFields);
    FlinkFnApi.Schema.FieldType protoType =
        rowType.accept(new PythonTypeUtils.LogicalTypeToProtoTypeConverter());
    FlinkFnApi.Schema schema = protoType.getRowSchema();
    assertEquals(1, schema.getFieldsCount());
    assertEquals("f1", schema.getFields(0).getName());
    assertEquals(FlinkFnApi.Schema.TypeName.BIGINT, schema.getFields(0).getType().getTypeName());
}
Example #19
Source File: PythonScalarFunctionRunnerTest.java From flink with Apache License 2.0
private AbstractGeneralPythonScalarFunctionRunner<Row> createUDFRunner(
        JobBundleFactory jobBundleFactory, FnDataReceiver<byte[]> receiver) {
    PythonFunctionInfo[] pythonFunctionInfos = new PythonFunctionInfo[] {
        new PythonFunctionInfo(
            DummyPythonFunction.INSTANCE,
            new Integer[]{0})
    };

    RowType rowType = new RowType(Collections.singletonList(new RowType.RowField("f1", new BigIntType())));

    final PythonEnvironmentManager environmentManager = createTestEnvironmentManager();

    return new PassThroughPythonScalarFunctionRunner<Row>(
        "testPythonRunner",
        receiver,
        pythonFunctionInfos,
        environmentManager,
        rowType,
        rowType,
        Collections.emptyMap(),
        jobBundleFactory,
        PythonTestUtils.createMockFlinkMetricContainer()) {
        @Override
        public TypeSerializer<Row> getInputTypeSerializer() {
            return (RowSerializer) PythonTypeUtils.toFlinkTypeSerializer(getInputType());
        }
    };
}
Example #20
Source File: FlinkTypeToType.java From iceberg with Apache License 2.0
@SuppressWarnings("checkstyle:CyclomaticComplexity")
@Override
public Type atomic(AtomicDataType type) {
    LogicalType inner = type.getLogicalType();
    if (inner instanceof VarCharType || inner instanceof CharType) {
        return Types.StringType.get();
    } else if (inner instanceof BooleanType) {
        return Types.BooleanType.get();
    } else if (inner instanceof IntType || inner instanceof SmallIntType || inner instanceof TinyIntType) {
        return Types.IntegerType.get();
    } else if (inner instanceof BigIntType) {
        return Types.LongType.get();
    } else if (inner instanceof VarBinaryType) {
        return Types.BinaryType.get();
    } else if (inner instanceof BinaryType) {
        BinaryType binaryType = (BinaryType) inner;
        return Types.FixedType.ofLength(binaryType.getLength());
    } else if (inner instanceof FloatType) {
        return Types.FloatType.get();
    } else if (inner instanceof DoubleType) {
        return Types.DoubleType.get();
    } else if (inner instanceof DateType) {
        return Types.DateType.get();
    } else if (inner instanceof TimeType) {
        return Types.TimeType.get();
    } else if (inner instanceof TimestampType) {
        return Types.TimestampType.withoutZone();
    } else if (inner instanceof LocalZonedTimestampType) {
        return Types.TimestampType.withZone();
    } else if (inner instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) inner;
        return Types.DecimalType.of(decimalType.getPrecision(), decimalType.getScale());
    } else {
        throw new UnsupportedOperationException("Not a supported type: " + type.toString());
    }
}
Example #21
Source File: AbstractPythonScalarFunctionRunnerTest.java From flink with Apache License 2.0
protected AbstractPythonScalarFunctionRunner<IN> createSingleUDFRunner() throws Exception {
    PythonFunctionInfo[] pythonFunctionInfos = new PythonFunctionInfo[] {
        new PythonFunctionInfo(
            DummyPythonFunction.INSTANCE,
            new Integer[]{0})
    };
    RowType rowType = new RowType(Collections.singletonList(new RowType.RowField("f1", new BigIntType())));
    return createPythonScalarFunctionRunner(pythonFunctionInfos, rowType, rowType);
}
Example #22
Source File: PythonTypeUtilsTest.java From flink with Apache License 2.0
@Test
public void testLogicalTypeToFlinkTypeSerializer() {
    List<RowType.RowField> rowFields = new ArrayList<>();
    rowFields.add(new RowType.RowField("f1", new BigIntType()));
    RowType rowType = new RowType(rowFields);
    TypeSerializer rowSerializer = PythonTypeUtils.toFlinkTypeSerializer(rowType);

    assertTrue(rowSerializer instanceof RowSerializer);
    assertEquals(1, ((RowSerializer) rowSerializer).getArity());
}
Example #23
Source File: FirstValueAggFunction.java From flink with Apache License 2.0
@Override
public TypeInformation<GenericRowData> getAccumulatorType() {
    LogicalType[] fieldTypes = new LogicalType[] {
        fromTypeInfoToLogicalType(getResultType()),
        new BigIntType()
    };
    String[] fieldNames = new String[] {
        "value",
        "time"
    };
    return (TypeInformation) new RowDataTypeInfo(fieldTypes, fieldNames);
}
Example #24
Source File: RowDataSerializerTest.java From flink with Apache License 2.0
@Override
protected TypeSerializer<RowData> createSerializer() {
    TypeSerializer<?>[] fieldTypeSerializers = {
        LongSerializer.INSTANCE,
        LongSerializer.INSTANCE
    };
    LogicalType[] fieldTypes = {
        new BigIntType(),
        new BigIntType()
    };
    return new org.apache.flink.table.runtime.typeutils.serializers.python.RowDataSerializer(
        fieldTypes,
        fieldTypeSerializers);
}
Example #25
Source File: ArrayDataSerializerTest.java From flink with Apache License 2.0
@Override
protected ArrayData[] getTestData() {
    BinaryArrayData elementArray = BinaryArrayData.fromPrimitiveArray(new long[]{100L});
    ArrayDataSerializer elementTypeSerializer =
        new ArrayDataSerializer(new BigIntType(), LongSerializer.INSTANCE);
    BinaryArrayData array = new BinaryArrayData();
    BinaryArrayWriter writer = new BinaryArrayWriter(array, 1, 8);
    writer.writeArray(0, elementArray, elementTypeSerializer);
    writer.complete();
    return new BinaryArrayData[]{array};
}
Example #26
Source File: AbstractPythonScalarFunctionRunnerTest.java From flink with Apache License 2.0
protected AbstractPythonScalarFunctionRunner<IN> createChainedUDFRunner() throws Exception {
    PythonFunctionInfo[] pythonFunctionInfos = new PythonFunctionInfo[] {
        new PythonFunctionInfo(
            DummyPythonFunction.INSTANCE,
            new Integer[]{0, 1}),
        new PythonFunctionInfo(
            DummyPythonFunction.INSTANCE,
            new Object[]{
                0,
                new PythonFunctionInfo(
                    DummyPythonFunction.INSTANCE,
                    new Integer[]{1, 2})
            }),
        new PythonFunctionInfo(
            DummyPythonFunction.INSTANCE,
            new Object[]{
                new PythonFunctionInfo(
                    DummyPythonFunction.INSTANCE,
                    new Integer[]{1, 3}),
                new PythonFunctionInfo(
                    DummyPythonFunction.INSTANCE,
                    new Integer[]{3, 4})
            })
    };
    RowType inputType = new RowType(Arrays.asList(
        new RowType.RowField("f1", new BigIntType()),
        new RowType.RowField("f2", new BigIntType()),
        new RowType.RowField("f3", new BigIntType()),
        new RowType.RowField("f4", new BigIntType()),
        new RowType.RowField("f5", new BigIntType())));
    RowType outputType = new RowType(Arrays.asList(
        new RowType.RowField("f1", new BigIntType()),
        new RowType.RowField("f2", new BigIntType()),
        new RowType.RowField("f3", new BigIntType())));
    return createPythonScalarFunctionRunner(pythonFunctionInfos, inputType, outputType);
}
Example #27
Source File: PythonTypeUtils.java From flink with Apache License 2.0
@Override
public FlinkFnApi.Schema.FieldType visit(BigIntType bigIntType) {
    return FlinkFnApi.Schema.FieldType.newBuilder()
        .setTypeName(FlinkFnApi.Schema.TypeName.BIGINT)
        .setNullable(bigIntType.isNullable())
        .build();
}
Example #28
Source File: PythonTableFunctionOperatorTestBase.java From flink with Apache License 2.0
private OneInputStreamOperatorTestHarness<IN, OUT> getTestHarness(
        Configuration config, JoinRelType joinRelType) throws Exception {
    RowType inputType = new RowType(Arrays.asList(
        new RowType.RowField("f1", new VarCharType()),
        new RowType.RowField("f2", new VarCharType()),
        new RowType.RowField("f3", new BigIntType())));
    RowType outputType = new RowType(Arrays.asList(
        new RowType.RowField("f1", new VarCharType()),
        new RowType.RowField("f2", new VarCharType()),
        new RowType.RowField("f3", new BigIntType()),
        new RowType.RowField("f4", new BigIntType())));
    AbstractPythonTableFunctionOperator<IN, OUT, UDTFIN> operator = getTestOperator(
        config,
        new PythonFunctionInfo(
            AbstractPythonScalarFunctionRunnerTest.DummyPythonFunction.INSTANCE,
            new Integer[]{0}),
        inputType,
        outputType,
        new int[]{2},
        joinRelType
    );

    OneInputStreamOperatorTestHarness<IN, OUT> testHarness = new OneInputStreamOperatorTestHarness<>(operator);
    testHarness.getStreamConfig().setManagedMemoryFraction(0.5);
    return testHarness;
}