Java Code Examples for org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory#stringTypeInfo
The following examples show how to use
org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory#stringTypeInfo (a public static field, not a method).
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: HiveTypeUtil.java From flink with Apache License 2.0 | 6 votes |
@Override public TypeInfo visit(VarCharType varCharType) { // Flink's StringType is defined as VARCHAR(Integer.MAX_VALUE) // We don't have more information in LogicalTypeRoot to distinguish StringType and a VARCHAR(Integer.MAX_VALUE) instance // Thus always treat VARCHAR(Integer.MAX_VALUE) as StringType if (varCharType.getLength() == Integer.MAX_VALUE) { return TypeInfoFactory.stringTypeInfo; } if (varCharType.getLength() > HiveVarchar.MAX_VARCHAR_LENGTH) { throw new CatalogException( String.format("HiveCatalog doesn't support varchar type with length of '%d'. " + "The maximum length is %d", varCharType.getLength(), HiveVarchar.MAX_VARCHAR_LENGTH)); } return TypeInfoFactory.getVarcharTypeInfo(varCharType.getLength()); }
Example 2
Source File: HiveTypeUtil.java From flink with Apache License 2.0 | 6 votes |
@Override public TypeInfo visit(CharType charType) { // Flink and Hive have different length limit for CHAR. Promote it to STRING if it exceeds the limits of // Hive and we're told not to check precision. This can be useful when calling Hive UDF to process data. if (charType.getLength() > HiveChar.MAX_CHAR_LENGTH || charType.getLength() < 1) { if (checkPrecision) { throw new CatalogException( String.format("HiveCatalog doesn't support char type with length of '%d'. " + "The supported length is [%d, %d]", charType.getLength(), 1, HiveChar.MAX_CHAR_LENGTH)); } else { return TypeInfoFactory.stringTypeInfo; } } return TypeInfoFactory.getCharTypeInfo(charType.getLength()); }
Example 3
Source File: HiveTypeUtil.java From flink with Apache License 2.0 | 6 votes |
@Override public TypeInfo visit(VarCharType varCharType) { // Flink's StringType is defined as VARCHAR(Integer.MAX_VALUE) // We don't have more information in LogicalTypeRoot to distinguish StringType and a VARCHAR(Integer.MAX_VALUE) instance // Thus always treat VARCHAR(Integer.MAX_VALUE) as StringType if (varCharType.getLength() == Integer.MAX_VALUE) { return TypeInfoFactory.stringTypeInfo; } // Flink and Hive have different length limit for VARCHAR. Promote it to STRING if it exceeds the limits of // Hive and we're told not to check precision. This can be useful when calling Hive UDF to process data. if (varCharType.getLength() > HiveVarchar.MAX_VARCHAR_LENGTH || varCharType.getLength() < 1) { if (checkPrecision) { throw new CatalogException( String.format("HiveCatalog doesn't support varchar type with length of '%d'. " + "The supported length is [%d, %d]", varCharType.getLength(), 1, HiveVarchar.MAX_VARCHAR_LENGTH)); } else { return TypeInfoFactory.stringTypeInfo; } } return TypeInfoFactory.getVarcharTypeInfo(varCharType.getLength()); }
Example 4
Source File: BlurObjectInspectorGenerator.java From incubator-retired-blur with Apache License 2.0 | 6 votes |
private TypeInfo getTypeInfo(String fieldType) { if (fieldType.equals(TEXT) || fieldType.equals(STRING) || fieldType.equals(STORED)) { return TypeInfoFactory.stringTypeInfo; } else if (fieldType.equals(LONG)) { return TypeInfoFactory.longTypeInfo; } else if (fieldType.equals(INT)) { return TypeInfoFactory.intTypeInfo; } else if (fieldType.equals(FLOAT)) { return TypeInfoFactory.floatTypeInfo; } else if (fieldType.equals(DOUBLE)) { return TypeInfoFactory.doubleTypeInfo; } else if (fieldType.equals(DATE)) { return TypeInfoFactory.dateTypeInfo; } else if (fieldType.equals(GEO_POINTVECTOR) || fieldType.equals(GEO_RECURSIVEPREFIX) || fieldType.equals(GEO_TERMPREFIX)) { List<TypeInfo> typeInfos = Arrays.asList((TypeInfo) TypeInfoFactory.floatTypeInfo, (TypeInfo) TypeInfoFactory.floatTypeInfo); return TypeInfoFactory.getStructTypeInfo(Arrays.asList(LATITUDE, LONGITUDE), typeInfos); } // Return string for anything that is not a built in type. return TypeInfoFactory.stringTypeInfo; }
Example 5
Source File: PredicateHandlerTest.java From accumulo-hive-storage-manager with Apache License 2.0 | 6 votes |
/** Verifies that an equality predicate on the row id yields exactly one search condition. */
@Test
public void getRowIDSearchCondition() {
    setup();
    // Build the expression tree for: rid = "hi"
    ExprNodeDesc ridColumn = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc literal = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "hi");
    List<ExprNodeDesc> args = Lists.newArrayList(ridColumn, literal);
    ExprNodeDesc equality =
        new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), args);
    assertNotNull(equality);
    // Hand the serialized filter to the predicate handler via the job conf.
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(equality));
    try {
        List<IndexSearchCondition> conditions = handler.getSearchConditions(conf);
        assertEquals(conditions.size(), 1);
    } catch (Exception e) {
        fail("Error getting search conditions");
    }
}
Example 6
Source File: PredicateHandlerTest.java From accumulo-hive-storage-manager with Apache License 2.0 | 6 votes |
/** An equality predicate on the row id should produce a single [aaa, aab) range. */
@Test
public void rangeEqual() {
    setup();
    // Build the expression tree for: rid = "aaa"
    ExprNodeDesc ridColumn = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc literal = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> args = Lists.newArrayList(ridColumn, literal);
    ExprNodeDesc equality =
        new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), args);
    assertNotNull(equality);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(equality));
    try {
        Collection<Range> ranges = handler.getRanges(conf);
        assertEquals(ranges.size(), 1);
        Range range = ranges.iterator().next();
        // Equality maps to an inclusive start and exclusive end.
        assertTrue(range.isStartKeyInclusive());
        assertFalse(range.isEndKeyInclusive());
        assertTrue(range.contains(new Key(new Text("aaa"))));
        assertTrue(range.afterEndKey(new Key(new Text("aab"))));
        assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
    } catch (Exception e) {
        fail("Error getting search conditions");
    }
}
Example 7
Source File: PredicateHandlerTest.java From accumulo-hive-storage-manager with Apache License 2.0 | 6 votes |
/** A &gt;= predicate on the row id should produce a single range starting at the value. */
@Test
public void rangeGreaterThanOrEqual() {
    setup();
    // Build the expression tree for: rid >= "aaa"
    ExprNodeDesc ridColumn = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc literal = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> args = Lists.newArrayList(ridColumn, literal);
    ExprNodeDesc geq = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPEqualOrGreaterThan(), args);
    assertNotNull(geq);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(geq));
    try {
        Collection<Range> ranges = handler.getRanges(conf);
        assertEquals(ranges.size(), 1);
        Range range = ranges.iterator().next();
        assertTrue(range.isStartKeyInclusive());
        assertFalse(range.isEndKeyInclusive());
        // The boundary value itself and everything above it is in range.
        assertTrue(range.contains(new Key(new Text("aaa"))));
        assertFalse(range.afterEndKey(new Key(new Text("ccccc"))));
        assertTrue(range.contains(new Key(new Text("aab"))));
        assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
    } catch (Exception e) {
        fail("Error getting search conditions");
    }
}
Example 8
Source File: PredicateHandlerTest.java From accumulo-hive-storage-manager with Apache License 2.0 | 6 votes |
/** Verifies that an int equality condition converts to the expected pushdown tuple. */
@Test
public void pushdownTuple() {
    setup();
    // Build the expression tree for: field1 = 5
    ExprNodeDesc field1 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field1", null, false);
    ExprNodeDesc five = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
    List<ExprNodeDesc> args = Lists.newArrayList(field1, five);
    ExprNodeDesc equality =
        new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), args);
    assertNotNull(equality);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(equality));
    try {
        List<IndexSearchCondition> conditions = handler.getSearchConditions(conf);
        assertEquals(conditions.size(), 1);
        AccumuloPredicateHandler.PushdownTuple tuple =
            new AccumuloPredicateHandler.PushdownTuple(conditions.get(0));
        // The constant should be serialized as a 4-byte big-endian int.
        byte[] expectedVal = new byte[4];
        ByteBuffer.wrap(expectedVal).putInt(5);
        assertEquals(tuple.getConstVal(), expectedVal);
        assertEquals(tuple.getcOpt().getClass(), Equal.class);
        assertEquals(tuple.getpCompare().getClass(), IntCompare.class);
    } catch (Exception e) {
        fail(StringUtils.stringifyException(e));
    }
}
Example 9
Source File: PredicateHandlerTest.java From accumulo-hive-storage-manager with Apache License 2.0 | 5 votes |
/** A &gt; predicate on the row id should produce a single range excluding the value. */
@Test
public void rangeGreaterThan() {
    setup();
    // Build the expression tree for: rid > "aaa"
    ExprNodeDesc ridColumn = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc literal = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> args = Lists.newArrayList(ridColumn, literal);
    ExprNodeDesc gt = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPGreaterThan(), args);
    assertNotNull(gt);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(gt));
    try {
        Collection<Range> ranges = handler.getRanges(conf);
        assertEquals(ranges.size(), 1);
        Range range = ranges.iterator().next();
        assertTrue(range.isStartKeyInclusive());
        assertFalse(range.isEndKeyInclusive());
        // Strict inequality: the boundary value itself is excluded.
        assertFalse(range.contains(new Key(new Text("aaa"))));
        assertFalse(range.afterEndKey(new Key(new Text("ccccc"))));
        assertTrue(range.contains(new Key(new Text("aab"))));
        assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
        assertTrue(range.beforeStartKey(new Key(new Text("aaa"))));
    } catch (Exception e) {
        fail("Error getting search conditions");
    }
}
Example 10
Source File: PredicateHandlerTest.java From accumulo-hive-storage-manager with Apache License 2.0 | 5 votes |
/** A &lt; predicate on the row id should produce a single range ending before the value. */
@Test
public void rangeLessThan() {
    setup();
    // Build the expression tree for: rid < "aaa"
    ExprNodeDesc ridColumn = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc literal = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> args = Lists.newArrayList(ridColumn, literal);
    ExprNodeDesc lt = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPLessThan(), args);
    assertNotNull(lt);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(lt));
    try {
        Collection<Range> ranges = handler.getRanges(conf);
        assertEquals(ranges.size(), 1);
        Range range = ranges.iterator().next();
        assertTrue(range.isStartKeyInclusive());
        assertFalse(range.isEndKeyInclusive());
        // Strict inequality: the boundary value and everything above it is excluded.
        assertFalse(range.contains(new Key(new Text("aaa"))));
        assertTrue(range.afterEndKey(new Key(new Text("ccccc"))));
        assertTrue(range.contains(new Key(new Text("aa"))));
        assertTrue(range.afterEndKey(new Key(new Text("aab"))));
        assertTrue(range.afterEndKey(new Key(new Text("aaa"))));
    } catch (Exception e) {
        fail("Error getting search conditions");
    }
}
Example 11
Source File: PredicateHandlerTest.java From accumulo-hive-storage-manager with Apache License 2.0 | 5 votes |
/** A &lt;= predicate on the row id should produce a single range including the value. */
@Test
public void rangeLessThanOrEqual() {
    setup();
    // Build the expression tree for: rid <= "aaa"
    ExprNodeDesc ridColumn = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc literal = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> args = Lists.newArrayList(ridColumn, literal);
    ExprNodeDesc leq = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPEqualOrLessThan(), args);
    assertNotNull(leq);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(leq));
    try {
        Collection<Range> ranges = handler.getRanges(conf);
        assertEquals(ranges.size(), 1);
        Range range = ranges.iterator().next();
        assertTrue(range.isStartKeyInclusive());
        assertFalse(range.isEndKeyInclusive());
        // The boundary value itself and everything below it is in range.
        assertTrue(range.contains(new Key(new Text("aaa"))));
        assertTrue(range.afterEndKey(new Key(new Text("ccccc"))));
        assertTrue(range.contains(new Key(new Text("aa"))));
        assertTrue(range.afterEndKey(new Key(new Text("aab"))));
        assertFalse(range.afterEndKey(new Key(new Text("aaa"))));
    } catch (Exception e) {
        fail("Error getting search conditions");
    }
}
Example 12
Source File: PredicateHandlerTest.java From accumulo-hive-storage-manager with Apache License 2.0 | 5 votes |
/** An AND of (rid &lt;= "aaa") and (rid &gt; "bbb") should yield two disjoint ranges. */
@Test
public void multiRange() {
    setup();
    // First predicate: rid <= "aaa"
    ExprNodeDesc ridColumnA = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc literalA = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> argsA = Lists.newArrayList(ridColumnA, literalA);
    ExprNodeDesc leq = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPEqualOrLessThan(), argsA);
    assertNotNull(leq);
    // Second predicate: rid > "bbb"
    ExprNodeDesc ridColumnB = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc literalB = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "bbb");
    List<ExprNodeDesc> argsB = Lists.newArrayList(ridColumnB, literalB);
    ExprNodeDesc gt = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPGreaterThan(), argsB);
    assertNotNull(gt);
    // Conjoin the two predicates with AND.
    List<ExprNodeDesc> conjuncts = Lists.newArrayList();
    conjuncts.add(leq);
    conjuncts.add(gt);
    ExprNodeDesc conjunction = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPAnd(), conjuncts);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(conjunction));
    try {
        Collection<Range> ranges = handler.getRanges(conf);
        assertEquals(ranges.size(), 2);
        Iterator<Range> itr = ranges.iterator();
        Range first = itr.next();
        Range second = itr.next();
        // Disjoint ranges have no intersection.
        assertNull(first.clip(second, true));
    } catch (Exception e) {
        fail("Error getting search conditions");
    }
}
Example 13
Source File: PredicateHandlerTest.java From accumulo-hive-storage-manager with Apache License 2.0 | 5 votes |
/**
 * Verifies that predicates on the row id column produce no Accumulo iterators:
 * row-id conditions are handled via ranges, not iterator pushdown.
 */
@Test
public void iteratorIgnoreRowIDFields() {
    setup();
    // First predicate: rid <= "aaa"
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPEqualOrLessThan(), children);
    assertNotNull(node);
    // Second predicate: rid > "bbb"
    ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "bbb");
    List<ExprNodeDesc> children2 = Lists.newArrayList();
    children2.add(column2);
    children2.add(constant2);
    ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPGreaterThan(), children2);
    assertNotNull(node2);
    // Conjoin the two predicates with AND.
    List<ExprNodeDesc> bothFilters = Lists.newArrayList();
    bothFilters.add(node);
    bothFilters.add(node2);
    ExprNodeDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPAnd(), bothFilters);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(both));
    try {
        List<IteratorSetting> iterators = handler.getIterators(conf);
        assertEquals(iterators.size(), 0);
    } catch (SerDeException e) {
        // BUG FIX: the original discarded StringUtils.stringifyException(e), silently
        // swallowing the exception and letting the test pass spuriously. Fail with the
        // stringified exception instead, consistent with the other tests in this class.
        fail(StringUtils.stringifyException(e));
    }
}
Example 14
Source File: PredicateHandlerTest.java From accumulo-hive-storage-manager with Apache License 2.0 | 5 votes |
/** When NO_ITERATOR_PUSHDOWN is set, no iterators should be generated for any predicate. */
@Test
public void ignoreIteratorPushdown() {
    setup();
    // Declare the table's columns, their types, and their Accumulo mappings.
    conf.set(serdeConstants.LIST_COLUMNS, "field1,field2,rid");
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,string");
    conf.set(AccumuloSerde.COLUMN_MAPPINGS, "cf|f1,cf|f2,rowID");
    // First predicate: field1 <= "aaa"
    ExprNodeDesc field1Column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false);
    ExprNodeDesc stringLiteral = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> argsA = Lists.newArrayList(field1Column, stringLiteral);
    ExprNodeDesc leq = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPEqualOrLessThan(), argsA);
    assertNotNull(leq);
    // Second predicate: field2 > 5
    ExprNodeDesc field2Column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field2", null, false);
    ExprNodeDesc intLiteral = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
    List<ExprNodeDesc> argsB = Lists.newArrayList(field2Column, intLiteral);
    ExprNodeDesc gt = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPGreaterThan(), argsB);
    assertNotNull(gt);
    // Conjoin the two predicates with AND.
    List<ExprNodeDesc> conjuncts = Lists.newArrayList();
    conjuncts.add(leq);
    conjuncts.add(gt);
    ExprNodeDesc conjunction = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPAnd(), conjuncts);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(conjunction));
    // Disable iterator pushdown entirely.
    conf.setBoolean(AccumuloSerde.NO_ITERATOR_PUSHDOWN, true);
    try {
        List<IteratorSetting> iterators = handler.getIterators(conf);
        assertEquals(iterators.size(), 0);
    } catch (Exception e) {
        fail(StringUtils.stringifyException(e));
    }
}
Example 15
Source File: TestMapTreeWriter.java From hive-dwrf with Apache License 2.0 | 4 votes |
@SuppressWarnings({"override", "UnusedDeclaration", "RedundantCast"}) // FB Hive public PrimitiveTypeInfo getTypeInfo() { return (PrimitiveTypeInfo) TypeInfoFactory.stringTypeInfo; }
Example 16
Source File: OrcLazyStringObjectInspector.java From hive-dwrf with Apache License 2.0 | 4 votes |
@SuppressWarnings({"override", "UnusedDeclaration", "RedundantCast"}) // FB Hive public PrimitiveTypeInfo getTypeInfo() { return (PrimitiveTypeInfo) TypeInfoFactory.stringTypeInfo; }
Example 17
Source File: HiveDynamoDBStringType.java From emr-dynamodb-connector with Apache License 2.0 | 4 votes |
/**
 * Returns the Hive type this DynamoDB string type maps to.
 *
 * @return Hive's string type info
 */
@Override
public TypeInfo getSupportedHiveType() {
    // DynamoDB strings always surface to Hive as the STRING type.
    return TypeInfoFactory.stringTypeInfo;
}
Example 18
Source File: HiveInspectors.java From flink with Apache License 2.0 | 4 votes |
/**
 * Resolves the Hive {@code ObjectInspector} for a Java or Hadoop Writable class.
 *
 * <p>Each supported Java boxed/value class and its Writable counterpart map to the
 * same Hive primitive type info, which is then handed to the type-info based
 * {@code getObjectInspector} overload.
 *
 * @param clazz the Java or Writable class to inspect
 * @return the object inspector for the corresponding Hive primitive type
 * @throws FlinkHiveUDFException if the class has no Hive primitive mapping
 */
public static ObjectInspector getObjectInspector(Class clazz) {
    if (clazz.equals(String.class) || clazz.equals(Text.class)) {
        return getObjectInspector(TypeInfoFactory.stringTypeInfo);
    }
    if (clazz.equals(Boolean.class) || clazz.equals(BooleanWritable.class)) {
        return getObjectInspector(TypeInfoFactory.booleanTypeInfo);
    }
    if (clazz.equals(Byte.class) || clazz.equals(ByteWritable.class)) {
        return getObjectInspector(TypeInfoFactory.byteTypeInfo);
    }
    if (clazz.equals(Short.class) || clazz.equals(ShortWritable.class)) {
        return getObjectInspector(TypeInfoFactory.shortTypeInfo);
    }
    if (clazz.equals(Integer.class) || clazz.equals(IntWritable.class)) {
        return getObjectInspector(TypeInfoFactory.intTypeInfo);
    }
    if (clazz.equals(Long.class) || clazz.equals(LongWritable.class)) {
        return getObjectInspector(TypeInfoFactory.longTypeInfo);
    }
    if (clazz.equals(Float.class) || clazz.equals(FloatWritable.class)) {
        return getObjectInspector(TypeInfoFactory.floatTypeInfo);
    }
    if (clazz.equals(Double.class) || clazz.equals(DoubleWritable.class)) {
        return getObjectInspector(TypeInfoFactory.doubleTypeInfo);
    }
    if (clazz.equals(Date.class) || clazz.equals(DateWritable.class)) {
        return getObjectInspector(TypeInfoFactory.dateTypeInfo);
    }
    if (clazz.equals(Timestamp.class) || clazz.equals(TimestampWritable.class)) {
        return getObjectInspector(TypeInfoFactory.timestampTypeInfo);
    }
    if (clazz.equals(byte[].class) || clazz.equals(BytesWritable.class)) {
        return getObjectInspector(TypeInfoFactory.binaryTypeInfo);
    }
    if (clazz.equals(HiveChar.class) || clazz.equals(HiveCharWritable.class)) {
        return getObjectInspector(TypeInfoFactory.charTypeInfo);
    }
    if (clazz.equals(HiveVarchar.class) || clazz.equals(HiveVarcharWritable.class)) {
        return getObjectInspector(TypeInfoFactory.varcharTypeInfo);
    }
    if (clazz.equals(HiveDecimal.class) || clazz.equals(HiveDecimalWritable.class)) {
        return getObjectInspector(TypeInfoFactory.decimalTypeInfo);
    }
    throw new FlinkHiveUDFException(
        String.format("Class %s is not supported yet", clazz.getName()));
}