Java Code Examples for org.apache.hadoop.hive.metastore.api.FieldSchema#getName()
The following examples show how to use org.apache.hadoop.hive.metastore.api.FieldSchema#getName().
Each example is drawn from an open-source project; the source file and project are noted above it.
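For context, FieldSchema is the Thrift struct the Hive metastore uses to describe a single column: getName() returns the column name, getType() the Hive type string, and getComment() an optional comment. A minimal sketch of the call (the metastore client variable and the database/table names below are placeholders, not taken from any of the examples):

// Minimal sketch: print each column's name and type for one table.
// "client" is assumed to be a connected HiveMetaStoreClient;
// "mydb" and "mytable" are placeholder names.
// (getFields throws checked Thrift/metastore exceptions, omitted here.)
List<FieldSchema> cols = client.getFields("mydb", "mytable");
for (FieldSchema field : cols) {
    System.out.println(field.getName() + " : " + field.getType());
}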
Example 1
Source File: HiveTableUtil.java From flink with Apache License 2.0
/**
 * Creates a Flink TableSchema from a Hive table's columns and partition keys.
 */
public static TableSchema createTableSchema(List<FieldSchema> cols, List<FieldSchema> partitionKeys) {
    List<FieldSchema> allCols = new ArrayList<>(cols);
    allCols.addAll(partitionKeys);

    String[] colNames = new String[allCols.size()];
    DataType[] colTypes = new DataType[allCols.size()];

    for (int i = 0; i < allCols.size(); i++) {
        FieldSchema fs = allCols.get(i);
        colNames[i] = fs.getName();
        colTypes[i] = HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(fs.getType()));
    }

    return TableSchema.builder()
            .fields(colNames, colTypes)
            .build();
}
Example 2
Source File: HiveStatsUtil.java From flink with Apache License 2.0
private static ColumnStatistics createHiveColumnStatistics(
        Map<String, CatalogColumnStatisticsDataBase> colStats,
        StorageDescriptor sd,
        ColumnStatisticsDesc desc) {
    List<ColumnStatisticsObj> colStatsList = new ArrayList<>();

    for (FieldSchema field : sd.getCols()) {
        String hiveColName = field.getName();
        String hiveColType = field.getType();
        CatalogColumnStatisticsDataBase flinkColStat = colStats.get(field.getName());
        if (null != flinkColStat) {
            ColumnStatisticsData statsData = getColumnStatisticsData(
                    HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(hiveColType)),
                    flinkColStat);
            ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj(hiveColName, hiveColType, statsData);
            colStatsList.add(columnStatisticsObj);
        }
    }

    return new ColumnStatistics(desc, colStatsList);
}
Example 3
Source File: HiveClientWrapper.java From pxf with Apache License 2.0
private String serializePartitionKeys(HiveTablePartition partData) {
    if (partData.partition == null) {
        /* this is a simple hive table - there are no partitions */
        return HiveDataFragmenter.HIVE_NO_PART_TBL;
    }

    StringBuilder partitionKeys = new StringBuilder();
    String prefix = "";
    ListIterator<String> valsIter = partData.partition.getValues().listIterator();
    ListIterator<FieldSchema> keysIter = partData.partitionKeys.listIterator();
    while (valsIter.hasNext() && keysIter.hasNext()) {
        FieldSchema key = keysIter.next();
        String name = key.getName();
        String type = key.getType();
        String val = valsIter.next();
        String oneLevel = prefix + name + HiveDataFragmenter.HIVE_1_PART_DELIM + type
                + HiveDataFragmenter.HIVE_1_PART_DELIM + val;
        partitionKeys.append(oneLevel);
        prefix = HiveDataFragmenter.HIVE_PARTITIONS_DELIM;
    }

    return partitionKeys.toString();
}
Example 4
Source File: HiveMetaStoreBridge.java From atlas with Apache License 2.0
private String getCreateTableString(Table table, String location) {
    String colString = "";
    List<FieldSchema> colList = table.getAllCols();
    if (colList != null) {
        for (FieldSchema col : colList) {
            colString += col.getName() + " " + col.getType() + ",";
        }
        if (colList.size() > 0) {
            colString = colString.substring(0, colString.length() - 1);
            colString = "(" + colString + ")";
        }
    }
    String query = "create external table " + table.getTableName() + colString + " location '" + location + "'";
    return query;
}
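To make the string-building concrete: for a table named employees with columns id (int) and name (string) at location /tmp/employees (all names here are illustrative, not from the source), the method would produce:

create external table employees(id int,name string) location '/tmp/employees'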
Example 5
Source File: PartitionUtil.java From metacat with Apache License 2.0
/**
 * Retrieves the partition values from the partition name. This method also validates the partition keys
 * against those of the table.
 *
 * @param tableQName table name
 * @param table      table
 * @param partName   partition name
 * @return list of partition values
 */
public static List<String> getPartValuesFromPartName(final QualifiedName tableQName, final Table table, final String partName) {
    if (Strings.isNullOrEmpty(partName)) {
        throw new InvalidMetaException(tableQName, partName, null);
    }
    final LinkedHashMap<String, String> partSpec = new LinkedHashMap<>();
    Warehouse.makeSpecFromName(partSpec, new Path(partName));

    final List<String> values = new ArrayList<>();
    for (FieldSchema field : table.getPartitionKeys()) {
        final String key = field.getName();
        final String val = partSpec.get(key);
        if (val == null) {
            throw new InvalidMetaException(tableQName, partName, null);
        }
        values.add(val);
    }
    return values;
}
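As an illustration (the partition name below is hypothetical): Warehouse.makeSpecFromName parses the key=value segments of a Hive partition path, and the loop then collects the values in the table's partition-key order.

// Hypothetical call: table is partitioned by (year, month).
List<String> values = PartitionUtil.getPartValuesFromPartName(
        tableQName, table, "year=2020/month=01");
// values -> ["2020", "01"]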
Example 6
Source File: HiveMetaStoreBridge.java From incubator-atlas with Apache License 2.0
private String getCreateTableString(Table table, String location) {
    String colString = "";
    List<FieldSchema> colList = table.getAllCols();
    if (colList != null) {
        for (FieldSchema col : colList) {
            colString += col.getName() + " " + col.getType() + ",";
        }
        if (colList.size() > 0) {
            colString = colString.substring(0, colString.length() - 1);
            colString = "(" + colString + ")";
        }
    }
    String query = "create external table " + table.getTableName() + colString + " location '" + location + "'";
    return query;
}
Example 7
Source File: HiveTableUtil.java From flink with Apache License 2.0
/**
 * Creates a Flink TableSchema from a Hive table's columns and partition keys.
 */
public static TableSchema createTableSchema(List<FieldSchema> cols, List<FieldSchema> partitionKeys,
        Set<String> notNullColumns, UniqueConstraint primaryKey) {
    List<FieldSchema> allCols = new ArrayList<>(cols);
    allCols.addAll(partitionKeys);

    String[] colNames = new String[allCols.size()];
    DataType[] colTypes = new DataType[allCols.size()];

    for (int i = 0; i < allCols.size(); i++) {
        FieldSchema fs = allCols.get(i);
        colNames[i] = fs.getName();
        colTypes[i] = HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(fs.getType()));
        if (notNullColumns.contains(colNames[i])) {
            colTypes[i] = colTypes[i].notNull();
        }
    }

    TableSchema.Builder builder = TableSchema.builder().fields(colNames, colTypes);
    if (primaryKey != null) {
        builder.primaryKey(primaryKey.getName(), primaryKey.getColumns().toArray(new String[0]));
    }
    return builder.build();
}
Example 8
Source File: HiveStatsUtil.java From flink with Apache License 2.0
private static ColumnStatistics createHiveColumnStatistics(
        Map<String, CatalogColumnStatisticsDataBase> colStats,
        StorageDescriptor sd,
        ColumnStatisticsDesc desc,
        String hiveVersion) {
    List<ColumnStatisticsObj> colStatsList = new ArrayList<>();

    for (FieldSchema field : sd.getCols()) {
        String hiveColName = field.getName();
        String hiveColType = field.getType();
        CatalogColumnStatisticsDataBase flinkColStat = colStats.get(field.getName());
        if (null != flinkColStat) {
            ColumnStatisticsData statsData = getColumnStatisticsData(
                    HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(hiveColType)),
                    flinkColStat,
                    hiveVersion);
            ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj(hiveColName, hiveColType, statsData);
            colStatsList.add(columnStatisticsObj);
        }
    }

    return new ColumnStatistics(desc, colStatsList);
}
Example 9
Source File: HiveAvroORCQueryGenerator.java From incubator-gobblin with Apache License 2.0
/**
 * Uses the destination table schema to generate the column mapping.
 *
 * @param hiveColumns optional map to populate with the generated Hive columns, for the caller's reference
 * @param destinationTableMeta destination table metadata
 * @return generated Hive columns with types for the given Avro schema
 */
private static String generateDestinationToHiveColumnMapping(
        Optional<Map<String, String>> hiveColumns,
        Table destinationTableMeta) {
    StringBuilder columns = new StringBuilder();
    boolean isFirst = true;
    List<FieldSchema> fieldList = destinationTableMeta.getSd().getCols();
    for (FieldSchema field : fieldList) {
        if (isFirst) {
            isFirst = false;
        } else {
            columns.append(", \n");
        }
        String name = field.getName();
        String type = escapeHiveType(field.getType());
        String comment = field.getComment();
        if (hiveColumns.isPresent()) {
            hiveColumns.get().put(name, type);
        }
        columns.append(String.format(" `%s` %s COMMENT '%s'", name, type, escapeStringForHive(comment)));
    }

    return columns.toString();
}
Example 10
Source File: HiveUtilities.java From pxf with Apache License 2.0
/**
 * Checks whether a Hive type is supported and, if so, returns its matching GPDB
 * type. Unsupported types result in an exception.
 * <br>
 * The supported mappings are:
 * <ul>
 * <li>{@code tinyint -> int2}</li>
 * <li>{@code smallint -> int2}</li>
 * <li>{@code int -> int4}</li>
 * <li>{@code bigint -> int8}</li>
 * <li>{@code boolean -> bool}</li>
 * <li>{@code float -> float4}</li>
 * <li>{@code double -> float8}</li>
 * <li>{@code string -> text}</li>
 * <li>{@code binary -> bytea}</li>
 * <li>{@code timestamp -> timestamp}</li>
 * <li>{@code date -> date}</li>
 * <li>{@code decimal(precision, scale) -> numeric(precision, scale)}</li>
 * <li>{@code varchar(size) -> varchar(size)}</li>
 * <li>{@code char(size) -> bpchar(size)}</li>
 * <li>{@code array<dataType> -> text}</li>
 * <li>{@code map<keyDataType, valueDataType> -> text}</li>
 * <li>{@code struct<field1:dataType,...,fieldN:dataType> -> text}</li>
 * <li>{@code uniontype<...> -> text}</li>
 * </ul>
 *
 * @param hiveColumn hive column schema
 * @return field with mapped GPDB type and modifiers
 * @throws UnsupportedTypeException if the column type is not supported
 * @see EnumHiveToGpdbType
 */
public static Metadata.Field mapHiveType(FieldSchema hiveColumn) throws UnsupportedTypeException {
    String fieldName = hiveColumn.getName();
    String hiveType = hiveColumn.getType(); // type name and modifiers, if any
    String hiveTypeName;                    // type name
    String[] modifiers = null;              // modifiers
    EnumHiveToGpdbType hiveToGpdbType = EnumHiveToGpdbType.getHiveToGpdbType(hiveType);
    EnumGpdbType gpdbType = hiveToGpdbType.getGpdbType();

    if (hiveToGpdbType.getSplitExpression() != null) {
        String[] tokens = hiveType.split(hiveToGpdbType.getSplitExpression());
        hiveTypeName = tokens[0];
        if (gpdbType.getModifiersNum() > 0) {
            modifiers = Arrays.copyOfRange(tokens, 1, tokens.length);
            if (modifiers.length != gpdbType.getModifiersNum()) {
                throw new UnsupportedTypeException(
                        "GPDB does not support type " + hiveType
                                + " (Field " + fieldName + "), "
                                + "expected number of modifiers: " + gpdbType.getModifiersNum()
                                + ", actual number of modifiers: " + modifiers.length);
            }
            if (!verifyIntegerModifiers(modifiers)) {
                throw new UnsupportedTypeException("GPDB does not support type " + hiveType
                        + " (Field " + fieldName + "), modifiers should be integers");
            }
        }
    } else {
        hiveTypeName = hiveType;
    }

    return new Metadata.Field(fieldName, gpdbType, hiveToGpdbType.isComplexType(), hiveTypeName, modifiers);
}
Example 11
Source File: HiveSchemaConverter.java From dremio-oss with Apache License 2.0
/**
 * Iterates over all fields of a table and checks whether any field exceeds the
 * maximum allowed nesting level.
 *
 * @param table           the table to check
 * @param maxNestedLevels the maximum allowed nesting depth
 */
public static void checkFieldNestedLevels(final Table table, int maxNestedLevels) {
    for (FieldSchema hiveField : table.getSd().getCols()) {
        final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(hiveField.getType());
        int depth = findFieldDepth(typeInfo);
        if (depth > maxNestedLevels) {
            throw new ColumnNestedTooDeepException(hiveField.getName(), maxNestedLevels);
        }
    }
}
Example 12
Source File: SchemaUtils.java From beam with Apache License 2.0
private static Schema.Field toBeamField(FieldSchema field) {
    String name = field.getName();
    HCatFieldSchema hCatFieldSchema;

    try {
        hCatFieldSchema = HCatSchemaUtils.getHCatFieldSchema(field);
    } catch (HCatException e) {
        // Converting checked Exception to unchecked Exception.
        throw new UnsupportedOperationException(
                "Error while converting FieldSchema to HCatFieldSchema", e);
    }

    switch (hCatFieldSchema.getCategory()) {
        case PRIMITIVE:
            {
                if (!HCAT_TO_BEAM_TYPES_MAP.containsKey(hCatFieldSchema.getType())) {
                    throw new UnsupportedOperationException(
                            "The Primitive HCat type '"
                                    + field.getType()
                                    + "' of field '"
                                    + name
                                    + "' cannot be converted to Beam FieldType");
                }
                FieldType fieldType = HCAT_TO_BEAM_TYPES_MAP.get(hCatFieldSchema.getType());
                return Schema.Field.of(name, fieldType).withNullable(true);
            }
            // TODO: Add Support for Complex Types i.e. ARRAY, MAP, STRUCT
        default:
            throw new UnsupportedOperationException(
                    "The category '" + hCatFieldSchema.getCategory() + "' is not supported.");
    }
}
Example 13
Source File: HiveHook.java From incubator-atlas with Apache License 2.0
private Pair<String, String> findChangedColNames(List<FieldSchema> oldColList, List<FieldSchema> newColList) {
    // Index both column lists by FieldSchema; note this assumes the old and
    // new lists have the same size.
    HashMap<FieldSchema, Integer> oldColHashMap = new HashMap<>();
    HashMap<FieldSchema, Integer> newColHashMap = new HashMap<>();
    for (int i = 0; i < oldColList.size(); i++) {
        oldColHashMap.put(oldColList.get(i), i);
        newColHashMap.put(newColList.get(i), i);
    }

    String changedColStringOldName = oldColList.get(0).getName();
    String changedColStringNewName = changedColStringOldName;

    for (FieldSchema oldCol : oldColList) {
        if (!newColHashMap.containsKey(oldCol)) {
            changedColStringOldName = oldCol.getName();
            break;
        }
    }

    for (FieldSchema newCol : newColList) {
        if (!oldColHashMap.containsKey(newCol)) {
            changedColStringNewName = newCol.getName();
            break;
        }
    }

    return Pair.of(changedColStringOldName, changedColStringNewName);
}
Example 14
Source File: TestSchemaConversion.java From kite with Apache License 2.0
@Override
public String apply(@Nullable FieldSchema input) {
    if (input != null) {
        return input.getName();
    } else {
        return null;
    }
}
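This override appears to implement a Guava-style Function<FieldSchema, String> that extracts column names. A sketch of how such a function is typically applied (the variable names are illustrative; GET_NAME stands for an instance of the function above):

// Hypothetical usage with Guava: map FieldSchema objects to their names.
List<String> columnNames = Lists.transform(fieldSchemas, GET_NAME);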
Example 15
Source File: ThriftMetastoreUtil.java From presto with Apache License 2.0
private static Column fromMetastoreApiFieldSchema(FieldSchema fieldSchema) {
    return new Column(fieldSchema.getName(), HiveType.valueOf(fieldSchema.getType()), Optional.ofNullable(fieldSchema.getComment()));
}
Example 16
Source File: HiveClient.java From ranger with Apache License 2.0
private List<String> getClmListFromHM(String columnNameMatching, List<String> dbList, List<String> tblList, List<String> colList) throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HiveClient.getClmListFromHM() columnNameMatching: " + columnNameMatching
                + " dbList :" + dbList + " tblList: " + tblList + " colList: " + colList);
    }

    List<String> ret = new ArrayList<String>();
    String columnNameMatchingRegEx = null;

    if (columnNameMatching != null && !columnNameMatching.isEmpty()) {
        columnNameMatchingRegEx = columnNameMatching;
    }

    if (hiveClient != null && dbList != null && !dbList.isEmpty() && tblList != null && !tblList.isEmpty()) {
        for (String db : dbList) {
            for (String tbl : tblList) {
                try {
                    List<FieldSchema> hiveSch = hiveClient.getFields(db, tbl);
                    if (hiveSch != null) {
                        for (FieldSchema sch : hiveSch) {
                            String columnName = sch.getName();
                            if (colList != null && colList.contains(columnName)) {
                                continue;
                            }
                            if (columnNameMatchingRegEx == null) {
                                ret.add(columnName);
                            } else if (FilenameUtils.wildcardMatch(columnName, columnNameMatchingRegEx)) {
                                ret.add(columnName);
                            }
                        }
                    }
                } catch (TException e) {
                    String msgDesc = "Unable to get Columns.";
                    HadoopException hdpException = new HadoopException(msgDesc, e);
                    hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("<== HiveClient.getClmListFromHM() Error : ", e);
                    }
                    throw hdpException;
                }
            }
        }
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveClient.getClmListFromHM() " + ret);
    }

    return ret;
}