Java Code Examples for org.apache.hadoop.hive.ql.metadata.Table#setFields()
The following examples show how to use
org.apache.hadoop.hive.ql.metadata.Table#setFields().
You can vote up the examples you like or vote down the ones you don't like,
and follow the links above each example to go to the original project or source file. You may also check out the related API usage on the sidebar.
Example 1
Source File: AvroHiveUtil.java From streamx with Apache License 2.0 | 6 votes |
/**
 * Builds an external Hive table definition backed by Avro files for the given topic.
 *
 * @param database    Hive database the table belongs to
 * @param tableName   name of the table (also used to derive the HDFS directory)
 * @param schema      Connect schema converted into Hive columns
 * @param partitioner supplies the partition columns for the table
 * @return the fully populated (not yet persisted) {@link Table}
 * @throws HiveMetaStoreException if the Avro input/output format classes cannot be resolved
 */
private Table constructAvroTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException {
  Table avroTable = new Table(database, tableName);
  avroTable.setTableType(TableType.EXTERNAL_TABLE);
  // Mark as EXTERNAL so dropping the table leaves the underlying data in place.
  avroTable.getParameters().put("EXTERNAL", "TRUE");

  String location = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  avroTable.setDataLocation(new Path(location));
  avroTable.setSerializationLib(avroSerde);
  try {
    avroTable.setInputFormatClass(avroInputFormat);
    avroTable.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }

  // Map the Connect schema onto Hive column definitions.
  List<FieldSchema> hiveColumns = HiveSchemaConverter.convertSchema(schema);
  avroTable.setFields(hiveColumns);
  avroTable.setPartCols(partitioner.partitionFields());
  // Embed the Avro schema literal so readers can deserialize without external lookup.
  avroTable.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
  return avroTable;
}
Example 2
Source File: ParquetHiveUtil.java From streamx with Apache License 2.0 | 6 votes |
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException { Table table = new Table(database, tableName); table.setTableType(TableType.EXTERNAL_TABLE); table.getParameters().put("EXTERNAL", "TRUE"); String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName); table.setDataLocation(new Path(tablePath)); table.setSerializationLib(getHiveParquetSerde()); try { table.setInputFormatClass(getHiveParquetInputFormat()); table.setOutputFormatClass(getHiveParquetOutputFormat()); } catch (HiveException e) { throw new HiveMetaStoreException("Cannot find input/output format:", e); } // convert copycat schema schema to Hive columns List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema); table.setFields(columns); table.setPartCols(partitioner.partitionFields()); return table; }
Example 3
Source File: HiveMetaStoreBridgeTest.java From atlas with Apache License 2.0 | 5 votes |
/**
 * Creates an in-memory external test {@link Table} with a single string column
 * ("col1") and a dummy HDFS data location; nothing is persisted to the metastore.
 *
 * @param databaseName Hive database the table belongs to
 * @param tableName    name of the test table
 * @return the populated table object
 * @throws HiveException if the input format class cannot be set
 */
private Table createTestTable(String databaseName, String tableName) throws HiveException {
  Table table = new Table(databaseName, tableName);
  table.setInputFormatClass(TextInputFormat.class);
  // Plain list instead of double-brace initialization: the anonymous subclass
  // created by "new ArrayList<>() {{ ... }}" holds a hidden reference to the
  // enclosing test instance, which can leak and breaks equals() expectations.
  ArrayList<FieldSchema> columns = new ArrayList<>();
  columns.add(new FieldSchema("col1", "string", "comment1"));
  table.setFields(columns);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.setDataLocation(new Path("somehdfspath"));
  return table;
}
Example 4
Source File: ParquetHiveUtil.java From streamx with Apache License 2.0 | 5 votes |
/**
 * Replaces the column definitions of an existing Hive table with the columns
 * derived from the given Connect schema, then persists the change via the metastore.
 *
 * @param database  Hive database containing the table
 * @param tableName table whose schema is being updated
 * @param schema    Connect schema providing the new column layout
 */
@Override
public void alterSchema(String database, String tableName, Schema schema) {
  Table existing = hiveMetaStore.getTable(database, tableName);
  existing.setFields(HiveSchemaConverter.convertSchema(schema));
  hiveMetaStore.alterTable(existing);
}
Example 5
Source File: HiveMetaStoreBridgeTest.java From incubator-atlas with Apache License 2.0 | 5 votes |
/**
 * Creates an in-memory external test {@link Table} with a single string column
 * ("col1") and a dummy HDFS data location; nothing is persisted to the metastore.
 *
 * @param databaseName Hive database the table belongs to
 * @param tableName    name of the test table
 * @return the populated table object
 * @throws HiveException if the input format class cannot be set
 */
private Table createTestTable(String databaseName, String tableName) throws HiveException {
  Table table = new Table(databaseName, tableName);
  table.setInputFormatClass(TextInputFormat.class);
  // Plain list instead of double-brace initialization: the anonymous subclass
  // created by "new ArrayList<>() {{ ... }}" holds a hidden reference to the
  // enclosing test instance, which can leak and breaks equals() expectations.
  ArrayList<FieldSchema> columns = new ArrayList<>();
  columns.add(new FieldSchema("col1", "string", "comment1"));
  table.setFields(columns);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.setDataLocation(new Path("somehdfspath"));
  return table;
}