Java Code Examples for org.apache.hadoop.hive.ql.plan.TableDesc#getProperties()
The following examples show how to use org.apache.hadoop.hive.ql.plan.TableDesc#getProperties().
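All of the examples below follow the same pattern: TableDesc#getProperties() returns the table's metadata as a java.util.Properties object (the TBLPROPERTIES plus SerDe settings such as the column list), and the storage handler copies the entries it needs into the job-level String map so they are visible at runtime. Here is a minimal sketch of that pattern; the class name MinimalStorageHandler and the property key my.handler.url are hypothetical, not from any of the projects below.

    import java.util.Map;
    import java.util.Properties;
    import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    public class MinimalStorageHandler extends DefaultStorageHandler {
        @Override
        public void configureInputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
            // getProperties() exposes the table's TBLPROPERTIES and SerDe metadata.
            Properties tblProps = tableDesc.getProperties();
            // Hypothetical key: forward a single table property into the job configuration.
            String url = tblProps.getProperty("my.handler.url");
            if (url != null) {
                jobProperties.put("my.handler.url", url);
            }
        }
    }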
Example 1
Source File: JdbcStorageHandler.java From HiveJdbcStorageHandler with Apache License 2.0
private void configureJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("tableDesc: " + tableDesc);
        LOG.debug("jobProperties: " + jobProperties);
    }

    String tblName = tableDesc.getTableName();
    Properties tblProps = tableDesc.getProperties();
    String columnNames = tblProps.getProperty(Constants.LIST_COLUMNS);

    jobProperties.put(DBConfiguration.INPUT_CLASS_PROPERTY, DbRecordWritable.class.getName());
    jobProperties.put(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, tblName);
    jobProperties.put(DBConfiguration.OUTPUT_TABLE_NAME_PROPERTY, tblName);
    jobProperties.put(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY, columnNames);
    jobProperties.put(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY, columnNames);

    // Forward any "mapred.jdbc.*" table properties to the job configuration.
    for (String key : tblProps.stringPropertyNames()) {
        if (key.startsWith("mapred.jdbc.")) {
            String value = tblProps.getProperty(key);
            jobProperties.put(key, value);
        }
    }
}
Example 2
Source File: SolrStorageHandler.java From hive-solr with MIT License
@Override
public void configureOutputJobProperties(TableDesc tbl, Map<String, String> jobProperties) {
    final Properties properties = tbl.getProperties();
    // Copy the table properties into the runtime jobconf.
    Conf.copyProperties(properties, jobProperties);
}
Example 3
Source File: KafkaStorageHandler.java From HiveKa with Apache License 2.0
@Override
public void configureInputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    Properties tableProperties = tableDesc.getProperties();
    new KafkaBackedTableProperties().initialize(tableProperties, jobProperties, tableDesc);
}
Example 4
Source File: KafkaStorageHandler.java From HiveKa with Apache License 2.0
@Override
public void configureOutputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    Properties tableProperties = tableDesc.getProperties();
    new KafkaBackedTableProperties().initialize(tableProperties, jobProperties, tableDesc);
}
Example 5
Source File: AccumuloStorageHandler.java From accumulo-hive-storage-manager with Apache License 2.0
/**
 * @param desc     table description
 * @param jobProps job properties to populate
 */
@Override
public void configureTableJobProperties(TableDesc desc, Map<String, String> jobProps) {
    Properties tblProperties = desc.getProperties();
    jobProps.put(AccumuloSerde.COLUMN_MAPPINGS,
            tblProperties.getProperty(AccumuloSerde.COLUMN_MAPPINGS));
    String tableName = tblProperties.getProperty(AccumuloSerde.TABLE_NAME);
    jobProps.put(AccumuloSerde.TABLE_NAME, tableName);
    String useIterators = tblProperties.getProperty(AccumuloSerde.NO_ITERATOR_PUSHDOWN);
    if (useIterators != null) {
        jobProps.put(AccumuloSerde.NO_ITERATOR_PUSHDOWN, useIterators);
    }
}
Example 6
Source File: AccumuloStorageHandler.java From accumulo-hive-storage-manager with Apache License 2.0
@Override
public void configureInputJobProperties(TableDesc tableDesc, Map<String, String> properties) {
    Properties props = tableDesc.getProperties();
    properties.put(AccumuloSerde.COLUMN_MAPPINGS, props.getProperty(AccumuloSerde.COLUMN_MAPPINGS));
    properties.put(AccumuloSerde.TABLE_NAME, props.getProperty(AccumuloSerde.TABLE_NAME));
    String useIterators = props.getProperty(AccumuloSerde.NO_ITERATOR_PUSHDOWN);
    if (useIterators != null) {
        properties.put(AccumuloSerde.NO_ITERATOR_PUSHDOWN, useIterators);
    }
}
Example 7
Source File: SMStorageHandler.java From spliceengine with GNU Affero General Public License v3.0
public void configureTableJobProperties(TableDesc tableDesc, Map<String, String> jobProperties,
        boolean isInputJob) throws Exception {
    Properties tableProperties = tableDesc.getProperties();
    String connStr = tableProperties.getProperty(MRConstants.SPLICE_JDBC_STR);
    if (connStr == null)
        throw new Exception("Error: wrong param. Did you mean '" + MRConstants.SPLICE_JDBC_STR + "'?");
    // TODO JL
    if (sqlUtil == null)
        sqlUtil = SMSQLUtil.getInstance(connStr);
    // Input and output jobs both read the table name from the same property,
    // so there is no need to branch on isInputJob.
    String tableName = tableProperties.getProperty(MRConstants.SPLICE_TABLE_NAME);
    if (tableName == null)
        throw new Exception("Error: wrong param. Did you mean '" + MRConstants.SPLICE_TABLE_NAME + "'?");
    tableName = tableName.trim();
    if (parentConn == null) {
        parentTxnId = startWriteJobParentTxn(connStr, tableName);
    }
    jobProperties.put(MRConstants.SPLICE_TRANSACTION_ID, parentTxnId);
    jobProperties.put(MRConstants.SPLICE_TABLE_NAME, tableName);
    jobProperties.put(MRConstants.SPLICE_JDBC_STR, connStr);
}
Example 8
Source File: SolrStorageHandler.java From hive-solr with MIT License
@Override
public void configureInputJobProperties(TableDesc tbl, Map<String, String> jobProperties) {
    final Properties properties = tbl.getProperties();
    // Copy the table properties into the runtime jobconf.
    Conf.copyProperties(properties, jobProperties);
}
Example 9
Source File: KuduStorageHandler.java From HiveKudu-Handler with Apache License 2.0
private void configureJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    // This name is always qualified with the Hive database name;
    // don't use it to set the Kudu table name.
    String tblName = tableDesc.getTableName();
    LOG.debug("Hive Table Name: " + tblName);

    Properties tblProps = tableDesc.getProperties();
    String columnNames = tblProps.getProperty(HiveKuduConstants.LIST_COLUMNS);
    String columnTypes = tblProps.getProperty(HiveKuduConstants.LIST_COLUMN_TYPES);
    LOG.debug("Column names: " + columnNames);
    LOG.debug("Column types: " + columnTypes);
    if (columnNames.length() == 0) {
        // TODO: Placeholder for SerDe helper code that connects to Kudu to extract column names.
        LOG.warn("SerDe currently doesn't support column names and types. Please provide them explicitly");
    }

    // Set MapReduce properties.
    jobProperties.put(HiveKuduConstants.MR_INPUT_TABLE_NAME, tblProps.getProperty(HiveKuduConstants.TABLE_NAME));
    jobProperties.put(HiveKuduConstants.MR_OUTPUT_TABLE_NAME, tblProps.getProperty(HiveKuduConstants.TABLE_NAME));
    jobProperties.put(HiveKuduConstants.MR_MASTER_ADDRESS_NAME, tblProps.getProperty(HiveKuduConstants.MASTER_ADDRESS_NAME));
    LOG.debug("Kudu Table Name: " + tblProps.getProperty(HiveKuduConstants.TABLE_NAME));
    LOG.debug("Kudu Master Addresses: " + tblProps.getProperty(HiveKuduConstants.MASTER_ADDRESS_NAME));

    // Set configuration properties.
    conf.set(HiveKuduConstants.MR_INPUT_TABLE_NAME, tblProps.getProperty(HiveKuduConstants.TABLE_NAME));
    conf.set(HiveKuduConstants.MR_OUTPUT_TABLE_NAME, tblProps.getProperty(HiveKuduConstants.TABLE_NAME));
    conf.set(HiveKuduConstants.MR_MASTER_ADDRESS_NAME, tblProps.getProperty(HiveKuduConstants.MASTER_ADDRESS_NAME));
    conf.set(HiveKuduConstants.TABLE_NAME, tblProps.getProperty(HiveKuduConstants.TABLE_NAME));
    conf.set(HiveKuduConstants.MASTER_ADDRESS_NAME, tblProps.getProperty(HiveKuduConstants.MASTER_ADDRESS_NAME));

    // Set class variables.
    kuduMaster = conf.get(HiveKuduConstants.MASTER_ADDRESS_NAME);
    kuduTableName = conf.get(HiveKuduConstants.TABLE_NAME);

    // Forward any table properties carrying the MapReduce property prefix.
    for (String key : tblProps.stringPropertyNames()) {
        if (key.startsWith(HiveKuduConstants.MR_PROPERTY_PREFIX)) {
            String value = tblProps.getProperty(key);
            jobProperties.put(key, value);
            // Also set the configuration for non-MapReduce Hive calls to the handler.
            conf.set(key, value);
        }
    }
}