org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo Java Examples
The following examples show how to use
org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo.
Each example is taken from a real open-source project; the source file and license are noted above each one.
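Before the project examples, here is a minimal, self-contained sketch of the core VarcharTypeInfo API: constructing a bounded varchar type, parsing one from a Hive DDL type string, and reading back its length and type name. The Hive classes and methods are real; the demo class itself is hypothetical.

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

public class VarcharTypeInfoDemo {
    public static void main(String[] args) {
        // Construct via the factory, which caches qualified type instances.
        VarcharTypeInfo byFactory = TypeInfoFactory.getVarcharTypeInfo(10);
        System.out.println(byFactory.getTypeName()); // varchar(10)
        System.out.println(byFactory.getLength());   // 10

        // Equivalent: parse the type from its DDL string form.
        VarcharTypeInfo parsed =
                (VarcharTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("varchar(10)");
        System.out.println(parsed.equals(byFactory)); // true
    }
}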
Example #1
Source File: SqoopHCatImportHelper.java, from aliyun-maxcompute-data-collectors (Apache License 2.0)
private Object convertClobType(Object val, HCatFieldSchema hfs) {
    HCatFieldSchema.Type hfsType = hfs.getType();
    ClobRef cr = (ClobRef) val;
    // Inline CLOBs carry their data directly; external ones only a reference string.
    String s = cr.isExternal() ? cr.toString() : cr.getData();
    if (hfsType == HCatFieldSchema.Type.STRING) {
        return s;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
        VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
        HiveVarchar hvc = new HiveVarchar(s, vti.getLength());
        return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
        CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
        HiveChar hc = new HiveChar(s, cti.getLength());
        return hc;
    }
    return null;
}
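One detail the Sqoop helpers rely on implicitly: the HiveVarchar constructor itself enforces the declared maximum length by truncating longer input, so no separate length check is needed. A tiny hypothetical snippet to illustrate (variable names made up, API calls real):

VarcharTypeInfo vti = TypeInfoFactory.getVarcharTypeInfo(5);
HiveVarchar hvc = new HiveVarchar("truncate me", vti.getLength());
System.out.println(hvc.getValue()); // prints "trunc"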
Example #2
Source File: HiveTypeConverter.java, from metacat (Apache License 2.0)
private static Type getPrimitiveType(final ObjectInspector fieldInspector) {
    final PrimitiveCategory primitiveCategory = ((PrimitiveObjectInspector) fieldInspector)
        .getPrimitiveCategory();
    if (HiveTypeMapping.getHIVE_TO_CANONICAL().containsKey(primitiveCategory.name())) {
        return HiveTypeMapping.getHIVE_TO_CANONICAL().get(primitiveCategory.name());
    }
    switch (primitiveCategory) {
        case DECIMAL:
            final DecimalTypeInfo decimalTypeInfo =
                (DecimalTypeInfo) ((PrimitiveObjectInspector) fieldInspector).getTypeInfo();
            return DecimalType.createDecimalType(decimalTypeInfo.precision(), decimalTypeInfo.getScale());
        case CHAR:
            final int cLength =
                ((CharTypeInfo) ((PrimitiveObjectInspector) fieldInspector).getTypeInfo()).getLength();
            return CharType.createCharType(cLength);
        case VARCHAR:
            final int vLength =
                ((VarcharTypeInfo) ((PrimitiveObjectInspector) fieldInspector).getTypeInfo()).getLength();
            return VarcharType.createVarcharType(vLength);
        default:
            return null;
    }
}
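DECIMAL, CHAR, and VARCHAR fall outside the name-keyed HIVE_TO_CANONICAL lookup because they carry parameters; the switch pulls the concrete TypeInfo from the inspector so the precision/scale or length survives the conversion.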
Example #3
Source File: HiveType.java, from presto (Apache License 2.0)
public static Type getPrimitiveType(PrimitiveTypeInfo typeInfo)
{
    switch (typeInfo.getPrimitiveCategory()) {
        case BOOLEAN:
            return BOOLEAN;
        case BYTE:
            return TINYINT;
        case SHORT:
            return SMALLINT;
        case INT:
            return INTEGER;
        case LONG:
            return BIGINT;
        case FLOAT:
            return REAL;
        case DOUBLE:
            return DOUBLE;
        case STRING:
            return createUnboundedVarcharType();
        case VARCHAR:
            return createVarcharType(((VarcharTypeInfo) typeInfo).getLength());
        case CHAR:
            return createCharType(((CharTypeInfo) typeInfo).getLength());
        case DATE:
            return DATE;
        case TIMESTAMP:
            return TIMESTAMP;
        case BINARY:
            return VARBINARY;
        case DECIMAL:
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
            return createDecimalType(decimalTypeInfo.precision(), decimalTypeInfo.scale());
        default:
            return null;
    }
}
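Note the asymmetry: Hive STRING maps to Presto's unbounded varchar via createUnboundedVarcharType(), while Hive VARCHAR(n) keeps its bound by passing VarcharTypeInfo.getLength() to createVarcharType.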
Example #4
Source File: TestHiveFileFormats.java, from presto (Apache License 2.0)
@Test
public void testFailForLongVarcharPartitionColumn()
        throws Exception
{
    TestColumn partitionColumn = new TestColumn(
            "partition_column",
            getPrimitiveJavaObjectInspector(new VarcharTypeInfo(3)),
            "test",
            utf8Slice("tes"),
            true);
    TestColumn varcharColumn = new TestColumn(
            "varchar_column",
            getPrimitiveJavaObjectInspector(new VarcharTypeInfo(3)),
            new HiveVarchar("tes", 3),
            utf8Slice("tes"));

    List<TestColumn> columns = ImmutableList.of(partitionColumn, varcharColumn);

    HiveErrorCode expectedErrorCode = HiveErrorCode.HIVE_INVALID_PARTITION_VALUE;
    String expectedMessage = "Invalid partition value 'test' for varchar(3) partition key: partition_column";

    assertThatFileFormat(RCTEXT)
            .withColumns(columns)
            .isFailingForPageSource(new RcFilePageSourceFactory(TYPE_MANAGER, HDFS_ENVIRONMENT, STATS), expectedErrorCode, expectedMessage)
            .isFailingForRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);

    assertThatFileFormat(RCBINARY)
            .withColumns(columns)
            .isFailingForPageSource(new RcFilePageSourceFactory(TYPE_MANAGER, HDFS_ENVIRONMENT, STATS), expectedErrorCode, expectedMessage)
            .isFailingForRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);

    assertThatFileFormat(ORC)
            .withColumns(columns)
            .isFailingForPageSource(new OrcPageSourceFactory(new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS), expectedErrorCode, expectedMessage);

    assertThatFileFormat(PARQUET)
            .withColumns(columns)
            .withSession(PARQUET_SESSION)
            .isFailingForPageSource(new ParquetPageSourceFactory(HDFS_ENVIRONMENT, STATS, new ParquetReaderConfig()), expectedErrorCode, expectedMessage);

    assertThatFileFormat(SEQUENCEFILE)
            .withColumns(columns)
            .isFailingForRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);

    assertThatFileFormat(TEXTFILE)
            .withColumns(columns)
            .isFailingForRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
}
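The point of the test: the partition value 'test' has four characters but the partition key is declared varchar(3), so every file format is expected to fail with HIVE_INVALID_PARTITION_VALUE rather than silently truncate. The varchar_column data, by contrast, already fits its bound.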
Example #5
Source File: HiveTypeUtil.java, from flink (Apache License 2.0)
private static DataType toFlinkPrimitiveType(PrimitiveTypeInfo hiveType) {
    checkNotNull(hiveType, "hiveType cannot be null");

    switch (hiveType.getPrimitiveCategory()) {
        case CHAR:
            return DataTypes.CHAR(((CharTypeInfo) hiveType).getLength());
        case VARCHAR:
            return DataTypes.VARCHAR(((VarcharTypeInfo) hiveType).getLength());
        case STRING:
            return DataTypes.STRING();
        case BOOLEAN:
            return DataTypes.BOOLEAN();
        case BYTE:
            return DataTypes.TINYINT();
        case SHORT:
            return DataTypes.SMALLINT();
        case INT:
            return DataTypes.INT();
        case LONG:
            return DataTypes.BIGINT();
        case FLOAT:
            return DataTypes.FLOAT();
        case DOUBLE:
            return DataTypes.DOUBLE();
        case DATE:
            return DataTypes.DATE();
        case TIMESTAMP:
            return DataTypes.TIMESTAMP();
        case BINARY:
            return DataTypes.BYTES();
        case DECIMAL:
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) hiveType;
            return DataTypes.DECIMAL(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
        default:
            throw new UnsupportedOperationException(
                String.format("Flink doesn't support Hive primitive type %s yet", hiveType));
    }
}
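The declared length flows straight through: VarcharTypeInfo.getLength() becomes the n in DataTypes.VARCHAR(n), and likewise for CHAR; only categories with no Flink counterpart reach the UnsupportedOperationException.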
Example #6
Source File: SqoopHCatImportHelper.java, from aliyun-maxcompute-data-collectors (Apache License 2.0)
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
    HCatFieldSchema.Type hfsType = hfs.getType();
    if (hfsType == HCatFieldSchema.Type.STRING
            || hfsType == HCatFieldSchema.Type.VARCHAR
            || hfsType == HCatFieldSchema.Type.CHAR) {
        String str = val.toString();
        if (doHiveDelimsReplacement) {
            str = FieldFormatter.hiveStringReplaceDelims(str,
                    hiveDelimsReplacement, hiveDelimiters);
        }
        if (hfsType == HCatFieldSchema.Type.STRING) {
            return str;
        } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
            VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
            HiveVarchar hvc = new HiveVarchar(str, vti.getLength());
            return hvc;
        } else if (hfsType == HCatFieldSchema.Type.CHAR) {
            CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
            // Note: wraps val.toString(), not the delimiter-replaced str.
            HiveChar hc = new HiveChar(val.toString(), cti.getLength());
            return hc;
        }
    } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
        BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
        HiveDecimal hd = HiveDecimal.create(bd);
        return hd;
    }
    return null;
}
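As in convertClobType, the length recorded in the schema's VarcharTypeInfo bounds the resulting HiveVarchar. Note the asymmetry flagged in the comment: the CHAR branch wraps the original value rather than the delimiter-replaced string.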
Example #7
Source File: SqoopHCatImportHelper.java, from aliyun-maxcompute-data-collectors (Apache License 2.0)
private Object convertBooleanTypes(Object val, HCatFieldSchema hfs) {
    HCatFieldSchema.Type hfsType = hfs.getType();
    Boolean b = (Boolean) val;
    if (hfsType == HCatFieldSchema.Type.BOOLEAN) {
        return b;
    } else if (hfsType == HCatFieldSchema.Type.TINYINT) {
        return (byte) (b ? 1 : 0);
    } else if (hfsType == HCatFieldSchema.Type.SMALLINT) {
        return (short) (b ? 1 : 0);
    } else if (hfsType == HCatFieldSchema.Type.INT) {
        return (int) (b ? 1 : 0);
    } else if (hfsType == HCatFieldSchema.Type.BIGINT) {
        return (long) (b ? 1 : 0);
    } else if (hfsType == HCatFieldSchema.Type.FLOAT) {
        return (float) (b ? 1 : 0);
    } else if (hfsType == HCatFieldSchema.Type.DOUBLE) {
        return (double) (b ? 1 : 0);
    } else if (hfsType == HCatFieldSchema.Type.STRING) {
        return val.toString();
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
        VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
        HiveVarchar hvc = new HiveVarchar(val.toString(), vti.getLength());
        return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
        CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
        HiveChar hChar = new HiveChar(val.toString(), cti.getLength());
        return hChar;
    }
    return null;
}
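Booleans widen to the numeric types as 1/0. For VARCHAR and CHAR, the string form ('true' or 'false') is wrapped with the length declared in the schema, so a varchar(1) column would end up holding just 't' or 'f'.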
Example #8
Source File: HiveTypeUtil.java, from flink (Apache License 2.0)
private static DataType toFlinkPrimitiveType(PrimitiveTypeInfo hiveType) {
    checkNotNull(hiveType, "hiveType cannot be null");

    switch (hiveType.getPrimitiveCategory()) {
        case CHAR:
            return DataTypes.CHAR(((CharTypeInfo) hiveType).getLength());
        case VARCHAR:
            return DataTypes.VARCHAR(((VarcharTypeInfo) hiveType).getLength());
        case STRING:
            return DataTypes.STRING();
        case BOOLEAN:
            return DataTypes.BOOLEAN();
        case BYTE:
            return DataTypes.TINYINT();
        case SHORT:
            return DataTypes.SMALLINT();
        case INT:
            return DataTypes.INT();
        case LONG:
            return DataTypes.BIGINT();
        case FLOAT:
            return DataTypes.FLOAT();
        case DOUBLE:
            return DataTypes.DOUBLE();
        case DATE:
            return DataTypes.DATE();
        case TIMESTAMP:
            return DataTypes.TIMESTAMP(9);
        case BINARY:
            return DataTypes.BYTES();
        case DECIMAL:
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) hiveType;
            return DataTypes.DECIMAL(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
        default:
            throw new UnsupportedOperationException(
                String.format("Flink doesn't support Hive primitive type %s yet", hiveType));
    }
}
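This is the same mapping as Example #5 from a different revision of HiveTypeUtil; the only difference is TIMESTAMP, which here maps to DataTypes.TIMESTAMP(9) to preserve the nanosecond precision of Hive timestamps.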
Example #9
Source File: TestHiveFileFormats.java, from presto (Apache License 2.0)
@Test
public void testTruncateVarcharColumn()
        throws Exception
{
    TestColumn writeColumn = new TestColumn(
            "varchar_column",
            getPrimitiveJavaObjectInspector(new VarcharTypeInfo(4)),
            new HiveVarchar("test", 4),
            utf8Slice("test"));
    TestColumn readColumn = new TestColumn(
            "varchar_column",
            getPrimitiveJavaObjectInspector(new VarcharTypeInfo(3)),
            new HiveVarchar("tes", 3),
            utf8Slice("tes"));

    assertThatFileFormat(RCTEXT)
            .withWriteColumns(ImmutableList.of(writeColumn))
            .withReadColumns(ImmutableList.of(readColumn))
            .isReadableByPageSource(new RcFilePageSourceFactory(TYPE_MANAGER, HDFS_ENVIRONMENT, STATS))
            .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));

    assertThatFileFormat(RCBINARY)
            .withWriteColumns(ImmutableList.of(writeColumn))
            .withReadColumns(ImmutableList.of(readColumn))
            .isReadableByPageSource(new RcFilePageSourceFactory(TYPE_MANAGER, HDFS_ENVIRONMENT, STATS))
            .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));

    assertThatFileFormat(ORC)
            .withWriteColumns(ImmutableList.of(writeColumn))
            .withReadColumns(ImmutableList.of(readColumn))
            .isReadableByPageSource(new OrcPageSourceFactory(new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS));

    assertThatFileFormat(PARQUET)
            .withWriteColumns(ImmutableList.of(writeColumn))
            .withReadColumns(ImmutableList.of(readColumn))
            .withSession(PARQUET_SESSION)
            .isReadableByPageSource(new ParquetPageSourceFactory(HDFS_ENVIRONMENT, STATS, new ParquetReaderConfig()));

    assertThatFileFormat(AVRO)
            .withWriteColumns(ImmutableList.of(writeColumn))
            .withReadColumns(ImmutableList.of(readColumn))
            .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));

    assertThatFileFormat(SEQUENCEFILE)
            .withWriteColumns(ImmutableList.of(writeColumn))
            .withReadColumns(ImmutableList.of(readColumn))
            .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));

    assertThatFileFormat(TEXTFILE)
            .withWriteColumns(ImmutableList.of(writeColumn))
            .withReadColumns(ImmutableList.of(readColumn))
            .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));
}
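Here the column is written as varchar(4) holding 'test' and read back as varchar(3); each format is expected to return the truncated value 'tes', i.e. narrowing the varchar bound on read truncates rather than fails.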
Example #10
Source File: ExcelSerde.java, from hadoopoffice (Apache License 2.0)
/**
 * Deserializes an object of type {@link #getSerializedClass()}.
 * Note: some Java types, such as BigDecimal, are converted to Hive-specific datatypes.
 *
 * @param arg0 object of type {@link #getSerializedClass()}
 * @return array containing objects of primitive Java type (e.g. String, Byte, Integer)
 *         or Hive type (e.g. HiveDecimal, HiveVarchar)
 */
@Override
public Object deserialize(Writable arg0) throws SerDeException {
    if ((arg0 == null) || (arg0 instanceof NullWritable)) {
        return this.nullRow;
    }
    Object[] primitiveRow = this.readConverter
        .getDataAccordingToSchema((SpreadSheetCellDAO[]) ((ArrayWritable) arg0).get());
    // Check each value against the declared schema and convert to a Hive type if necessary.
    for (int i = 0; i < primitiveRow.length; i++) {
        PrimitiveTypeInfo ti = (PrimitiveTypeInfo) this.columnTypes.get(i);
        switch (ti.getPrimitiveCategory()) {
            case STRING:
            case BYTE:
            case SHORT:
            case INT:
            case LONG:
            case FLOAT:
            case DOUBLE:
            case BOOLEAN:
            case TIMESTAMP:
                // Already the correct Java representation; no conversion needed.
                break;
            case DATE:
                if (primitiveRow[i] != null) {
                    primitiveRow[i] = new java.sql.Date(((Date) primitiveRow[i]).getTime());
                }
                break;
            case DECIMAL:
                if (primitiveRow[i] != null) {
                    primitiveRow[i] = HiveDecimal.create((BigDecimal) primitiveRow[i]);
                }
                break;
            case CHAR:
                if (primitiveRow[i] != null) {
                    primitiveRow[i] = new HiveChar((String) primitiveRow[i], ((CharTypeInfo) ti).getLength());
                }
                break;
            case VARCHAR:
                if (primitiveRow[i] != null) {
                    primitiveRow[i] = new HiveVarchar((String) primitiveRow[i], ((VarcharTypeInfo) ti).getLength());
                }
                break;
            default:
                throw new SerDeException("Unsupported type " + ti);
        }
    }
    if (this.columnNames.size() > primitiveRow.length) {
        // Can happen in rare cases where a row does not contain all columns.
        Object[] tempRow = new Object[this.columnNames.size()];
        for (int i = 0; i < primitiveRow.length; i++) {
            tempRow[i] = primitiveRow[i];
        }
        primitiveRow = tempRow;
    }
    return primitiveRow;
}
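The VARCHAR case is the one relevant to this page: the raw String from the spreadsheet cell is wrapped in a HiveVarchar bounded by the length stored in the column's VarcharTypeInfo, mirroring the conversions in the Sqoop and Flink examples above.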