org.apache.hadoop.hive.shims.Utils Java Examples
The following examples show how to use org.apache.hadoop.hive.shims.Utils. Each example is drawn from a real open-source project; the originating project, source file, and license are noted above the code.
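The examples below exercise two Utils methods: Utils.getUGI() resolves the current UserGroupInformation (Kerberos-aware when security is enabled), and Utils.setTokenStr(ugi, tokenStr, tokenService) attaches a string-encoded delegation token to a UGI under a service name. Here is a minimal sketch combining the two, assuming a real encoded token string obtained elsewhere; the class and method names are illustrative, not from any of the projects below:

import java.io.IOException;
import javax.security.auth.login.LoginException;

import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;

public class ShimsUtilsSketch {
    // Attach an encoded delegation token to the current user's UGI.
    // encodedToken must be a real encoded token (for example, the value
    // returned by the metastore's getDelegationToken call); the service
    // name is an arbitrary label that a HiveConf can later reference via
    // hive.metastore.token.signature.
    static UserGroupInformation attachToken(String encodedToken, String service)
            throws IOException, LoginException {
        UserGroupInformation ugi = Utils.getUGI(); // current (possibly Kerberos) user
        Utils.setTokenStr(ugi, encodedToken, service);
        return ugi;
    }
}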
Example #1
Source File: HiveClientImpl.java from dremio-oss with Apache License 2.0
/**
 * Helper method that gets the delegation token using <i>processHiveClient</i> for given <i>proxyUserName</i>
 * and sets it in proxy user UserGroupInformation and proxy user HiveConf.
 */
static void getAndSetDelegationToken(final HiveConf proxyUserHiveConf,
                                     final UserGroupInformation proxyUGI,
                                     final HiveClient processHiveClient) {
  checkNotNull(processHiveClient, "process user Hive client required");
  checkNotNull(proxyUserHiveConf, "Proxy user HiveConf required");
  checkNotNull(proxyUGI, "Proxy user UserGroupInformation required");

  try {
    final String delegationToken = processHiveClient.getDelegationToken(proxyUGI.getUserName());
    Utils.setTokenStr(proxyUGI, delegationToken, "DremioDelegationTokenForHiveMetaStoreServer");
    proxyUserHiveConf.set("hive.metastore.token.signature", "DremioDelegationTokenForHiveMetaStoreServer");
  } catch (Exception e) {
    final String processUsername = HiveImpersonationUtil.getProcessUserUGI().getShortUserName();
    throw UserException.permissionError(e)
        .message("Failed to generate Hive metastore delegation token for user %s. " +
                "Check Hadoop services (including metastore) have correct proxy user impersonation settings (%s, %s) " +
                "and services are restarted after applying those settings.",
            proxyUGI.getUserName(),
            String.format("hadoop.proxyuser.%s.hosts", processUsername),
            String.format("hadoop.proxyuser.%s.groups", processUsername))
        .addContext("Proxy user", proxyUGI.getUserName())
        .build(logger);
  }
}
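In context, a caller builds the proxy user's HiveConf and UGI first and then invokes this helper. A minimal sketch of such a call site follows; "alice" and processClient are placeholder names, and since the method is package-private the caller would need to live in the same package as HiveClientImpl:

// Hypothetical call site for getAndSetDelegationToken; "alice" and
// processClient are placeholders for illustration only.
static UserGroupInformation impersonate(HiveClient processClient) throws IOException {
  HiveConf proxyConf = new HiveConf();
  UserGroupInformation proxyUgi =
      UserGroupInformation.createProxyUser("alice", UserGroupInformation.getLoginUser());

  // Fetches a metastore delegation token as the process user and attaches
  // it to the proxy UGI via Utils.setTokenStr.
  HiveClientImpl.getAndSetDelegationToken(proxyConf, proxyUgi, processClient);
  return proxyUgi;
}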
Example #2
Source File: HiveHook.java from atlas with Apache License 2.0
@Override
public void run(HookContext hookContext) throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HiveHook.run({})", hookContext.getOperationName());
    }

    try {
        HiveOperation        oper    = OPERATION_MAP.get(hookContext.getOperationName());
        AtlasHiveHookContext context = new AtlasHiveHookContext(this, oper, hookContext, getKnownObjects());
        BaseHiveEvent        event   = null;

        switch (oper) {
            case CREATEDATABASE:
                event = new CreateDatabase(context);
                break;

            case DROPDATABASE:
                event = new DropDatabase(context);
                break;

            case ALTERDATABASE:
            case ALTERDATABASE_OWNER:
            case ALTERDATABASE_LOCATION:
                event = new AlterDatabase(context);
                break;

            case CREATETABLE:
                event = new CreateTable(context, true);
                break;

            case DROPTABLE:
            case DROPVIEW:
                event = new DropTable(context);
                break;

            case CREATETABLE_AS_SELECT:
            case CREATEVIEW:
            case ALTERVIEW_AS:
            case LOAD:
            case EXPORT:
            case IMPORT:
            case QUERY:
                event = new CreateHiveProcess(context, true);
                break;

            case ALTERTABLE_FILEFORMAT:
            case ALTERTABLE_CLUSTER_SORT:
            case ALTERTABLE_BUCKETNUM:
            case ALTERTABLE_PROPERTIES:
            case ALTERVIEW_PROPERTIES:
            case ALTERTABLE_SERDEPROPERTIES:
            case ALTERTABLE_SERIALIZER:
            case ALTERTABLE_ADDCOLS:
            case ALTERTABLE_REPLACECOLS:
            case ALTERTABLE_PARTCOLTYPE:
            case ALTERTABLE_LOCATION:
                event = new AlterTable(context);
                break;

            case ALTERTABLE_RENAME:
            case ALTERVIEW_RENAME:
                event = new AlterTableRename(context);
                break;

            case ALTERTABLE_RENAMECOL:
                event = new AlterTableRenameCol(context);
                break;

            default:
                if (LOG.isDebugEnabled()) {
                    LOG.debug("HiveHook.run({}): operation ignored", hookContext.getOperationName());
                }
                break;
        }

        if (event != null) {
            final UserGroupInformation ugi = hookContext.getUgi() == null ? Utils.getUGI() : hookContext.getUgi();

            super.notifyEntities(event.getNotificationMessages(), ugi);
        }
    } catch (Throwable t) {
        LOG.error("HiveHook.run(): failed to process operation {}", hookContext.getOperationName(), t);
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveHook.run({})", hookContext.getOperationName());
    }
}
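The only Utils call in this hook is the UGI fallback near the end: if the HookContext did not supply a UGI, the hook resolves one itself via the shims layer. Isolated into a helper, the pattern is simply (the method name resolveUgi is illustrative):

// Prefer the UGI carried by the hook context; fall back to the shims layer.
static UserGroupInformation resolveUgi(HookContext hookContext) throws Exception {
    return hookContext.getUgi() != null ? hookContext.getUgi() : Utils.getUGI();
}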
Example #3
Source File: HiveMetastoreHookImpl.java from atlas with Apache License 2.0
public void handleEvent(HiveOperationContext operContext) {
    ListenerEvent listenerEvent = operContext.getEvent();

    if (!listenerEvent.getStatus()) {
        return;
    }

    try {
        HiveOperation        oper    = operContext.getOperation();
        AtlasHiveHookContext context = new AtlasHiveHookContext(hiveHook, oper, hiveHook.getKnownObjects(), this, listenerEvent);
        BaseHiveEvent        event   = null;

        switch (oper) {
            case CREATEDATABASE:
                event = new CreateDatabase(context);
                break;

            case DROPDATABASE:
                event = new DropDatabase(context);
                break;

            case ALTERDATABASE:
                event = new AlterDatabase(context);
                break;

            case CREATETABLE:
                event = new CreateTable(context, true);
                break;

            case DROPTABLE:
                event = new DropTable(context);
                break;

            case ALTERTABLE_PROPERTIES:
                event = new AlterTable(context);
                break;

            case ALTERTABLE_RENAME:
                event = new AlterTableRename(context);
                break;

            case ALTERTABLE_RENAMECOL:
                FieldSchema columnOld = operContext.getColumnOld();
                FieldSchema columnNew = operContext.getColumnNew();

                event = new AlterTableRenameCol(columnOld, columnNew, context);
                break;

            default:
                if (LOG.isDebugEnabled()) {
                    LOG.debug("HiveMetastoreHook.handleEvent({}): operation ignored.", listenerEvent);
                }
                break;
        }

        if (event != null) {
            final UserGroupInformation ugi = SecurityUtils.getUGI() == null ? Utils.getUGI() : SecurityUtils.getUGI();

            super.notifyEntities(event.getNotificationMessages(), ugi);
        }
    } catch (Throwable t) {
        LOG.error("HiveMetastoreHook.handleEvent({}): failed to process operation {}", listenerEvent, t);
    }
}
Example #4
Source File: HiveHook.java from incubator-atlas with Apache License 2.0
@Override
public void run(final HookContext hookContext) throws Exception {
    // clone to avoid concurrent access
    try {
        final HiveEventContext event = new HiveEventContext();
        event.setInputs(hookContext.getInputs());
        event.setOutputs(hookContext.getOutputs());
        event.setHookType(hookContext.getHookType());

        final UserGroupInformation ugi = hookContext.getUgi() == null ? Utils.getUGI() : hookContext.getUgi();

        event.setUgi(ugi);
        event.setUser(getUser(hookContext.getUserName(), hookContext.getUgi()));
        event.setOperation(OPERATION_MAP.get(hookContext.getOperationName()));
        event.setQueryId(hookContext.getQueryPlan().getQueryId());
        event.setQueryStr(hookContext.getQueryPlan().getQueryStr());
        event.setQueryStartTime(hookContext.getQueryPlan().getQueryStartTime());
        event.setQueryType(hookContext.getQueryPlan().getQueryPlan().getQueryType());
        event.setLineageInfo(hookContext.getLinfo());

        if (executor == null) {
            collect(event);
            notifyAsPrivilegedAction(event);
        } else {
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        ugi.doAs(new PrivilegedExceptionAction<Object>() {
                            @Override
                            public Object run() throws Exception {
                                collect(event);
                                return event;
                            }
                        });

                        notifyAsPrivilegedAction(event);
                    } catch (Throwable e) {
                        LOG.error("Atlas hook failed due to error ", e);
                    }
                }
            });
        }
    } catch (Throwable t) {
        LOG.error("Submitting to thread pool failed due to error ", t);
    }
}
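The asynchronous branch above wraps collect(event) in ugi.doAs(...) so that the work runs with the hook user's credentials rather than the JVM process user's. Stripped of the Atlas specifics, the pattern reduces to the following sketch, where the printed message stands in for real work such as collect(event):

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;

public class DoAsSketch {
    public static void main(String[] args) throws Exception {
        UserGroupInformation ugi = Utils.getUGI();

        // Execute the action with ugi's credentials rather than the
        // JVM process user's.
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                System.out.println("Working as " + UserGroupInformation.getCurrentUser());
                return null;
            }
        });
    }
}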