org.apache.hadoop.hive.ql.plan.HiveOperation Java Examples
The following examples show how to use
org.apache.hadoop.hive.ql.plan.HiveOperation.
Each example is taken from an open-source project; the source file, project, and license are noted above the code.
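Before the project-specific examples, here is a minimal standalone sketch of working with the enum itself. It relies only on enum constants (CREATETABLE, CREATEVIEW, CREATETABLE_AS_SELECT) and the getOperationName() accessor that also appear in the examples below; the class name HiveOperationSketch and the isCreateLike helper are made up for illustration.

import org.apache.hadoop.hive.ql.plan.HiveOperation;

public class HiveOperationSketch {

    // Illustrative grouping only: treat a few DDL operations as "create-like",
    // similar in spirit to the isCreateOp helpers in the Atlas examples below.
    static boolean isCreateLike(HiveOperation op) {
        switch (op) {
            case CREATETABLE:
            case CREATEVIEW:
            case CREATETABLE_AS_SELECT:
                return true;
            default:
                return false;
        }
    }

    public static void main(String[] args) {
        HiveOperation op = HiveOperation.CREATEVIEW;
        // getOperationName() returns the operation's name string, which the
        // Atlas hooks below use when building process qualified names.
        System.out.println(op.getOperationName() + " is create-like: " + isCreateLike(op));
    }
}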
Example #1
Source File: HiveHookIT.java From atlas with Apache License 2.0

@Test
public void testCreateView() throws Exception {
    String tableName = createTable();
    String viewName = tableName();
    String query = "create view " + viewName + " as select * from " + tableName;

    runCommand(query);

    HiveEventContext hiveEventContext = constructEvent(query, HiveOperation.CREATEVIEW,
            getInputs(tableName, Entity.Type.TABLE), getOutputs(viewName, Entity.Type.TABLE));
    AtlasEntity processEntity1 = validateProcess(hiveEventContext);
    AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, hiveEventContext);
    AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute(
            BaseHiveEvent.ATTRIBUTE_PROCESS));

    Assert.assertEquals(process1.getGuid(), processEntity1.getGuid());
    Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1);

    assertTableIsRegistered(DEFAULT_DB, viewName);

    String viewId = assertTableIsRegistered(DEFAULT_DB, viewName);
    AtlasEntity viewEntity = atlasClientV2.getEntityByGuid(viewId).getEntity();
    List ddlQueries = (List) viewEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES);

    Assert.assertNotNull(ddlQueries);
    Assert.assertEquals(ddlQueries.size(), 1);
}
Example #2
Source File: SentryOnFailureHookContextImpl.java From incubator-sentry with Apache License 2.0

public SentryOnFailureHookContextImpl(String command, Set<ReadEntity> inputs, Set<WriteEntity> outputs,
        HiveOperation hiveOp, Database db, Table tab, AccessURI udfURI, AccessURI partitionURI,
        String userName, String ipAddress, AuthorizationException e, Configuration conf) {
    this.command = command;
    this.inputs = inputs;
    this.outputs = outputs;
    this.hiveOp = hiveOp;
    this.userName = userName;
    this.ipAddress = ipAddress;
    this.database = db;
    this.table = tab;
    this.udfURI = udfURI;
    this.partitionURI = partitionURI;
    this.authException = e;
    this.conf = conf;
}
Example #3
Source File: TestPrivilegeWithGrantOption.java From incubator-sentry with Apache License 2.0

private void verifyFailureHook(HiveOperation expectedOp, String dbName, String tableName,
        boolean checkSentryAccessDeniedException) throws Exception {
    if (!isInternalServer) {
        return;
    }

    Assert.assertTrue(DummySentryOnFailureHook.invoked);
    if (expectedOp != null) {
        Assert.assertNotNull("Hive op is null for op: " + expectedOp, DummySentryOnFailureHook.hiveOp);
        Assert.assertTrue(expectedOp.equals(DummySentryOnFailureHook.hiveOp));
    }
    if (checkSentryAccessDeniedException) {
        Assert.assertTrue("Expected SentryDeniedException for op: " + expectedOp,
                DummySentryOnFailureHook.exception.getCause() instanceof SentryAccessDeniedException);
    }
    if (tableName != null) {
        Assert.assertNotNull("Table object is null for op: " + expectedOp, DummySentryOnFailureHook.table);
        Assert.assertTrue(tableName.equalsIgnoreCase(DummySentryOnFailureHook.table.getName()));
    }
    if (dbName != null) {
        Assert.assertNotNull("Database object is null for op: " + expectedOp, DummySentryOnFailureHook.db);
        Assert.assertTrue(dbName.equalsIgnoreCase(DummySentryOnFailureHook.db.getName()));
    }
}
Example #4
Source File: HiveHookIT.java From atlas with Apache License 2.0

@Test
public void testLoadLocalPath() throws Exception {
    String tableName = createTable(false);
    String loadFile = file("load");
    String query = "load data local inpath 'file://" + loadFile + "' into table " + tableName;
    String tblId = assertTableIsRegistered(DEFAULT_DB, tableName);

    runCommand(query);

    AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity();
    List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES);

    Assert.assertNotNull(ddlQueries);
    Assert.assertEquals(ddlQueries.size(), 1);

    assertProcessIsRegistered(constructEvent(query, HiveOperation.LOAD, null,
            getOutputs(tableName, Entity.Type.TABLE)));
}
Example #5
Source File: HiveHookIT.java From atlas with Apache License 2.0

@Test
public void testLoadLocalPathIntoPartition() throws Exception {
    String tableName = createTable(true);
    String loadFile = file("load");
    String query = "load data local inpath 'file://" + loadFile + "' into table " + tableName
            + " partition(dt = '" + PART_FILE + "')";
    String tblId = assertTableIsRegistered(DEFAULT_DB, tableName);

    runCommand(query);

    AtlasEntity tblEntity = atlasClientV2.getEntityByGuid(tblId).getEntity();
    List ddlQueries = (List) tblEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES);

    Assert.assertNotNull(ddlQueries);
    Assert.assertEquals(ddlQueries.size(), 1);

    assertProcessIsRegistered(constructEvent(query, HiveOperation.LOAD, null,
            getOutputs(tableName, Entity.Type.TABLE)));
}
Example #6
Source File: HiveITBase.java From atlas with Apache License 2.0

protected static boolean addQueryType(HiveOperation op, WriteEntity entity) {
    if (entity.getWriteType() != null && HiveOperation.QUERY.equals(op)) {
        switch (entity.getWriteType()) {
            case INSERT:
            case INSERT_OVERWRITE:
            case UPDATE:
            case DELETE:
                return true;
            case PATH_WRITE:
                //Add query type only for DFS paths and ignore local paths since they are not added as outputs
                if (!Entity.Type.LOCAL_DIR.equals(entity.getType())) {
                    return true;
                }
                break;
            default:
        }
    }
    return false;
}
Example #7
Source File: TestHiveAuthzBindings.java From incubator-sentry with Apache License 2.0

/**
 * Positive test case for MSCK REPAIR TABLE. User has privileges to execute the
 * operation.
 */
@Test
public void testMsckRepairTable() throws Exception {
    outputTabHierarcyList.add(buildObjectHierarchy(SERVER1, JUNIOR_ANALYST_DB, PURCHASES_TAB));
    testAuth.authorize(HiveOperation.MSCK, alterTabPrivileges, MANAGER_SUBJECT,
            inputTabHierarcyList, outputTabHierarcyList);

    // Should also succeed for the admin.
    testAuth.authorize(HiveOperation.MSCK, alterTabPrivileges, ADMIN_SUBJECT,
            inputTabHierarcyList, outputTabHierarcyList);

    // Admin can also run this against tables in the ANALYST_DB.
    inputTabHierarcyList.add(buildObjectHierarchy(SERVER1, ANALYST_DB, PURCHASES_TAB));
    testAuth.authorize(HiveOperation.MSCK, alterTabPrivileges, ADMIN_SUBJECT,
            inputTabHierarcyList, outputTabHierarcyList);
}
Example #8
Source File: HiveHookIT.java From incubator-atlas with Apache License 2.0

@Test(enabled = false)
public void testInsertIntoTempTable() throws Exception {
    String tableName = createTable();
    String insertTableName = createTable(false, false, true);

    assertTableIsRegistered(DEFAULT_DB, tableName);
    assertTableIsNotRegistered(DEFAULT_DB, insertTableName, true);

    String query = "insert into " + insertTableName + " select id, name from " + tableName;

    runCommand(query);

    Set<ReadEntity> inputs = getInputs(tableName, Entity.Type.TABLE);
    Set<WriteEntity> outputs = getOutputs(insertTableName, Entity.Type.TABLE);

    outputs.iterator().next().setName(getQualifiedTblName(insertTableName
            + HiveMetaStoreBridge.TEMP_TABLE_PREFIX + SessionState.get().getSessionId()));
    outputs.iterator().next().setWriteType(WriteEntity.WriteType.INSERT);

    validateProcess(constructEvent(query, HiveOperation.QUERY, inputs, outputs));

    assertTableIsRegistered(DEFAULT_DB, tableName);
    assertTableIsRegistered(DEFAULT_DB, insertTableName, null, true);
}
Example #9
Source File: HiveAuthzBindingHookV2.java From incubator-sentry with Apache License 2.0

/**
 * Post analyze hook that invokes hive auth bindings
 */
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
        List<Task<? extends Serializable>> rootTasks) throws SemanticException {
    HiveOperation stmtOperation = getCurrentHiveStmtOp();
    Subject subject = new Subject(context.getUserName());

    for (int i = 0; i < rootTasks.size(); i++) {
        Task<? extends Serializable> task = rootTasks.get(i);
        if (task instanceof DDLTask) {
            SentryFilterDDLTask filterTask =
                    new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
            filterTask.setWork((DDLWork) task.getWork());
            rootTasks.set(i, filterTask);
        }
    }
}
Example #10
Source File: TestPrivilegeWithHAGrantOption.java From incubator-sentry with Apache License 2.0

private void verifyFailureHook(Statement statement, String sqlStr, HiveOperation expectedOp,
        String dbName, String tableName, boolean checkSentryAccessDeniedException) throws Exception {
    // negative test case: non admin user can't create role
    Assert.assertFalse(DummySentryOnFailureHook.invoked);
    try {
        statement.execute(sqlStr);
        Assert.fail("Expected SQL exception for " + sqlStr);
    } catch (SQLException e) {
        Assert.assertTrue(DummySentryOnFailureHook.invoked);
    } finally {
        DummySentryOnFailureHook.invoked = false;
    }
    if (expectedOp != null) {
        Assert.assertNotNull("Hive op is null for op: " + expectedOp, DummySentryOnFailureHook.hiveOp);
        Assert.assertTrue(expectedOp.equals(DummySentryOnFailureHook.hiveOp));
    }
    if (checkSentryAccessDeniedException) {
        Assert.assertTrue("Expected SentryDeniedException for op: " + expectedOp,
                DummySentryOnFailureHook.exception.getCause() instanceof SentryAccessDeniedException);
    }
    if (tableName != null) {
        Assert.assertNotNull("Table object is null for op: " + expectedOp, DummySentryOnFailureHook.table);
        Assert.assertTrue(tableName.equalsIgnoreCase(DummySentryOnFailureHook.table.getName()));
    }
    if (dbName != null) {
        Assert.assertNotNull("Database object is null for op: " + expectedOp, DummySentryOnFailureHook.db);
        Assert.assertTrue(dbName.equalsIgnoreCase(DummySentryOnFailureHook.db.getName()));
    }
}
Example #11
Source File: HiveHookIT.java From incubator-atlas with Apache License 2.0

@Test
public void testLoadDFSPathPartitioned() throws Exception {
    String tableName = createTable(true, true, false);

    assertTableIsRegistered(DEFAULT_DB, tableName);

    final String loadFile = createTestDFSFile("loadDFSFile");
    String query = "load data inpath '" + loadFile + "' into table " + tableName
            + " partition(dt = '" + PART_FILE + "')";

    runCommand(query);

    final Set<WriteEntity> outputs = getOutputs(tableName, Entity.Type.TABLE);
    final Set<ReadEntity> inputs = getInputs(loadFile, Entity.Type.DFS_DIR);

    final Set<WriteEntity> partitionOps = new LinkedHashSet<>(outputs);
    partitionOps.addAll(getOutputs(DEFAULT_DB + "@" + tableName + "@dt=" + PART_FILE, Entity.Type.PARTITION));

    Referenceable processReference = validateProcess(
            constructEvent(query, HiveOperation.LOAD, inputs, partitionOps), inputs, outputs);

    validateHDFSPaths(processReference, INPUTS, loadFile);
    validateOutputTables(processReference, outputs);

    final String loadFile2 = createTestDFSFile("loadDFSFile1");
    query = "load data inpath '" + loadFile2 + "' into table " + tableName
            + " partition(dt = '" + PART_FILE + "')";

    runCommand(query);

    Set<ReadEntity> process2Inputs = getInputs(loadFile2, Entity.Type.DFS_DIR);
    Set<ReadEntity> expectedInputs = new LinkedHashSet<>();
    expectedInputs.addAll(process2Inputs);
    expectedInputs.addAll(inputs);

    validateProcess(constructEvent(query, HiveOperation.LOAD, expectedInputs, partitionOps),
            expectedInputs, outputs);
}
Example #12
Source File: SentryFilterDDLTask.java From incubator-sentry with Apache License 2.0

public SentryFilterDDLTask(HiveAuthzBinding hiveAuthzBinding, Subject subject,
        HiveOperation stmtOperation) {
    Preconditions.checkNotNull(hiveAuthzBinding);
    Preconditions.checkNotNull(subject);
    Preconditions.checkNotNull(stmtOperation);

    this.hiveAuthzBinding = hiveAuthzBinding;
    this.subject = subject;
    this.stmtOperation = stmtOperation;
}
Example #13
Source File: TestDbSentryOnFailureHookLoading.java From incubator-sentry with Apache License 2.0

private void verifyFailureHook(Statement statement, String sqlStr, HiveOperation expectedOp,
        String dbName, String tableName, boolean checkSentryAccessDeniedException) throws Exception {
    // negative test case: non admin user can't create role
    Assert.assertFalse(DummySentryOnFailureHook.invoked);
    try {
        statement.execute(sqlStr);
        Assert.fail("Expected SQL exception for " + sqlStr);
    } catch (SQLException e) {
        Assert.assertTrue("FailureHook is not ran : " + e.getMessage(), DummySentryOnFailureHook.invoked);
    } finally {
        DummySentryOnFailureHook.invoked = false;
    }
    if (expectedOp != null) {
        Assert.assertNotNull("Hive op is null for op: " + expectedOp, DummySentryOnFailureHook.hiveOp);
        Assert.assertTrue(expectedOp.equals(DummySentryOnFailureHook.hiveOp));
    }
    if (checkSentryAccessDeniedException) {
        Assert.assertTrue("Expected SentryDeniedException for op: " + expectedOp,
                DummySentryOnFailureHook.exception.getCause() instanceof SentryAccessDeniedException);
    }
    if (tableName != null) {
        Assert.assertNotNull("Table object is null for op: " + expectedOp, DummySentryOnFailureHook.table);
        Assert.assertTrue(tableName.equalsIgnoreCase(DummySentryOnFailureHook.table.getName()));
    }
    if (dbName != null) {
        Assert.assertNotNull("Database object is null for op: " + expectedOp, DummySentryOnFailureHook.db);
        Assert.assertTrue(dbName.equalsIgnoreCase(DummySentryOnFailureHook.db.getName()));
    }
}
Example #14
Source File: TestPrivilegeWithGrantOption.java From incubator-sentry with Apache License 2.0

private void runSQLWithError(Statement statement, String sqlStr, HiveOperation expectedOp,
        String dbName, String tableName, boolean checkSentryAccessDeniedException) throws Exception {
    // negative test case: non admin user can't create role
    Assert.assertFalse(DummySentryOnFailureHook.invoked);
    try {
        statement.execute(sqlStr);
        Assert.fail("Expected SQL exception for " + sqlStr);
    } catch (SQLException e) {
        verifyFailureHook(expectedOp, dbName, tableName, checkSentryAccessDeniedException);
    } finally {
        DummySentryOnFailureHook.invoked = false;
    }
}
Example #15
Source File: HiveAuthzBindingHook.java From incubator-sentry with Apache License 2.0

public static List<FieldSchema> filterShowColumns(
        HiveAuthzBinding hiveAuthzBinding, List<FieldSchema> cols, HiveOperation operation,
        String userName, String tableName, String dbName) throws SemanticException {
    List<FieldSchema> filteredResult = new ArrayList<FieldSchema>();
    Subject subject = new Subject(userName);
    HiveAuthzPrivileges columnMetaDataPrivilege =
            HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
    HiveAuthzBinding hiveBindingWithPrivilegeCache =
            getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);

    Database database = new Database(dbName);
    Table table = new Table(tableName);
    for (FieldSchema col : cols) {
        // if user has privileges on column, add to filtered list, else discard
        List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
        List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
        externalAuthorizableHierarchy.add(database);
        externalAuthorizableHierarchy.add(table);
        externalAuthorizableHierarchy.add(new Column(col.getName()));
        inputHierarchy.add(externalAuthorizableHierarchy);

        try {
            // do the authorization by new HiveAuthzBinding with PrivilegeCache
            hiveBindingWithPrivilegeCache.authorize(operation, columnMetaDataPrivilege, subject,
                    inputHierarchy, outputHierarchy);
            filteredResult.add(col);
        } catch (AuthorizationException e) {
            // squash the exception, user doesn't have privileges, so the column is
            // not added to filtered list.
        }
    }
    return filteredResult;
}
Example #16
Source File: TestHiveAuthzBindings.java From incubator-sentry with Apache License 2.0

/**
 * Validate create function permission for admin (server level privilege)
 */
@Test
public void testValidateCreateFunctionForAdmin() throws Exception {
    inputTabHierarcyList.add(buildObjectHierarchy(SERVER1, CUSTOMER_DB, PURCHASES_TAB, AGE_COL));
    inputTabHierarcyList.add(Arrays.asList(new DBModelAuthorizable[] {
            new Server(SERVER1), new AccessURI("file:///some/path/to/a/jar") }));
    testAuth.authorize(HiveOperation.CREATEFUNCTION, createFuncPrivileges, ADMIN_SUBJECT,
            inputTabHierarcyList, outputTabHierarcyList);
}
Example #17
Source File: AuthorizingObjectStoreV2.java From incubator-sentry with Apache License 2.0

/**
 * Invoke Hive table filtering that removes the entries which the user has no
 * privileges to access
 * @param dbName
 * @param tabList
 * @return
 * @throws MetaException
 */
protected List<String> filterTables(String dbName, List<String> tabList) throws MetaException {
    if (needsAuthorization(getUserName())) {
        try {
            return HiveAuthzBindingHook.filterShowTables(getHiveAuthzBinding(), tabList,
                    HiveOperation.SHOWTABLES, getUserName(), dbName);
        } catch (SemanticException e) {
            throw new MetaException("Error getting Table list " + e.getMessage());
        }
    } else {
        return tabList;
    }
}
Example #18
Source File: TestHiveAuthzBindings.java From incubator-sentry with Apache License 2.0

/**
 * validate create table permissions denied to junior_analyst in analyst sandbox db
 */
@Test(expected=AuthorizationException.class)
public void testValidateCreateTabPrivilegesRejectionForUser2() throws Exception {
    outputTabHierarcyList.add(buildObjectHierarchy(SERVER1, ANALYST_DB, null));
    testAuth.authorize(HiveOperation.CREATETABLE, createTabPrivileges, JUNIOR_ANALYST_SUBJECT,
            inputTabHierarcyList, outputTabHierarcyList);
}
Example #19
Source File: AuthorizingObjectStoreV2.java From incubator-sentry with Apache License 2.0

/**
 * Invoke Hive database filtering that removes the entries which the user has no
 * privileges to access
 * @param dbList
 * @return
 * @throws MetaException
 */
private List<String> filterDatabases(List<String> dbList) throws MetaException {
    if (needsAuthorization(getUserName())) {
        try {
            return HiveAuthzBindingHook.filterShowDatabases(getHiveAuthzBinding(), dbList,
                    HiveOperation.SHOWDATABASES, getUserName());
        } catch (SemanticException e) {
            throw new MetaException("Error getting DB list " + e.getMessage());
        }
    } else {
        return dbList;
    }
}
Example #20
Source File: MetastoreAuthzBinding.java From incubator-sentry with Apache License 2.0

private void authorizeDropDatabase(PreDropDatabaseEvent context)
        throws InvalidOperationException, MetaException {
    authorizeMetastoreAccess(HiveOperation.DROPDATABASE,
            new HierarcyBuilder()
                    .addDbToOutput(getAuthServer(), context.getDatabase().getName()).build(),
            new HierarcyBuilder()
                    .addDbToOutput(getAuthServer(), context.getDatabase().getName()).build());
}
Example #21
Source File: SentryMetaStoreFilterHook.java From incubator-sentry with Apache License 2.0

/**
 * Invoke Hive table filtering that removes the entries which the user has no
 * privileges to access
 * @param tabList
 * @return
 * @throws MetaException
 */
private List<String> filterTab(String dbName, List<String> tabList) {
    try {
        return HiveAuthzBindingHook.filterShowTables(getHiveAuthzBinding(), tabList,
                HiveOperation.SHOWTABLES, getUserName(), dbName);
    } catch (Exception e) {
        LOG.warn("Error getting Table list ", e);
        return new ArrayList<String>();
    } finally {
        close();
    }
}
Example #22
Source File: HiveHook.java From incubator-atlas with Apache License 2.0

private static void addOutputs(HiveMetaStoreBridge hiveBridge, HiveOperation op,
        SortedSet<WriteEntity> sortedOutputs, StringBuilder buffer,
        final Map<WriteEntity, Referenceable> refs, final boolean ignoreHDFSPathsInQFName) throws HiveException {
    if (refs != null) {
        Set<String> dataSetsProcessed = new LinkedHashSet<>();
        if (sortedOutputs != null) {
            for (WriteEntity output : sortedOutputs) {
                final Entity entity = output;
                if (!dataSetsProcessed.contains(output.getName().toLowerCase())) {
                    //HiveOperation.QUERY type encompasses INSERT, INSERT_OVERWRITE, UPDATE, DELETE, PATH_WRITE operations
                    if (addQueryType(op, (WriteEntity) entity)) {
                        buffer.append(SEP);
                        buffer.append(((WriteEntity) entity).getWriteType().name());
                    }
                    if (ignoreHDFSPathsInQFName &&
                            (Type.DFS_DIR.equals(output.getType()) || Type.LOCAL_DIR.equals(output.getType()))) {
                        LOG.debug("Skipping dfs dir output addition to process qualified name {} ", output.getName());
                    } else if (refs.containsKey(output)) {
                        if (output.getType() == Type.PARTITION || output.getType() == Type.TABLE) {
                            final Date createTime = HiveMetaStoreBridge.getTableCreatedTime(
                                    hiveBridge.hiveClient.getTable(output.getTable().getDbName(),
                                            output.getTable().getTableName()));
                            addDataset(buffer, refs.get(output), createTime.getTime());
                        } else {
                            addDataset(buffer, refs.get(output));
                        }
                    }
                    dataSetsProcessed.add(output.getName().toLowerCase());
                }
            }
        }
    }
}
Example #23
Source File: TestHiveAuthzBindings.java From incubator-sentry with Apache License 2.0

/**
 * validate create table permissions for manager in junior_analyst sandbox db
 */
@Test
public void testValidateCreateTabPrivilegesForUser() throws Exception {
    outputTabHierarcyList.add(buildObjectHierarchy(SERVER1, JUNIOR_ANALYST_DB, null));
    testAuth.authorize(HiveOperation.CREATETABLE, createTabPrivileges, MANAGER_SUBJECT,
            inputTabHierarcyList, outputTabHierarcyList);
}
Example #24
Source File: HiveHook.java From incubator-atlas with Apache License 2.0

private static boolean ignoreHDFSPathsinQFName(final HiveOperation op, final Set<ReadEntity> inputs,
        final Set<WriteEntity> outputs) {
    switch (op) {
        case LOAD:
        case IMPORT:
            return isPartitionBasedQuery(outputs);
        case EXPORT:
            return isPartitionBasedQuery(inputs);
        case QUERY:
            return true;
    }
    return false;
}
Example #25
Source File: HiveHook.java From incubator-atlas with Apache License 2.0

@VisibleForTesting
static String getProcessQualifiedName(HiveMetaStoreBridge dgiBridge, HiveEventContext eventContext,
        final SortedSet<ReadEntity> sortedHiveInputs, final SortedSet<WriteEntity> sortedHiveOutputs,
        SortedMap<ReadEntity, Referenceable> hiveInputsMap,
        SortedMap<WriteEntity, Referenceable> hiveOutputsMap) throws HiveException {
    HiveOperation op = eventContext.getOperation();
    if (isCreateOp(eventContext)) {
        Entity entity = getEntityByType(sortedHiveOutputs, Type.TABLE);

        if (entity != null) {
            Table outTable = entity.getTable();
            //refresh table
            outTable = dgiBridge.hiveClient.getTable(outTable.getDbName(), outTable.getTableName());
            return HiveMetaStoreBridge.getTableProcessQualifiedName(dgiBridge.getClusterName(), outTable);
        }
    }

    StringBuilder buffer = new StringBuilder(op.getOperationName());

    boolean ignoreHDFSPathsinQFName = ignoreHDFSPathsinQFName(op, sortedHiveInputs, sortedHiveOutputs);
    if (ignoreHDFSPathsinQFName && LOG.isDebugEnabled()) {
        LOG.debug("Ignoring HDFS paths in qualifiedName for {} {} ", op, eventContext.getQueryStr());
    }

    addInputs(dgiBridge, op, sortedHiveInputs, buffer, hiveInputsMap, ignoreHDFSPathsinQFName);
    buffer.append(IO_SEP);
    addOutputs(dgiBridge, op, sortedHiveOutputs, buffer, hiveOutputsMap, ignoreHDFSPathsinQFName);
    LOG.info("Setting process qualified name to {}", buffer);
    return buffer.toString();
}
Example #26
Source File: HiveHook.java From incubator-atlas with Apache License 2.0

private static boolean isCreateOp(HiveEventContext hiveEvent) {
    return HiveOperation.CREATETABLE.equals(hiveEvent.getOperation())
            || HiveOperation.CREATEVIEW.equals(hiveEvent.getOperation())
            || HiveOperation.ALTERVIEW_AS.equals(hiveEvent.getOperation())
            || HiveOperation.ALTERTABLE_LOCATION.equals(hiveEvent.getOperation())
            || HiveOperation.CREATETABLE_AS_SELECT.equals(hiveEvent.getOperation());
}
Example #27
Source File: TestHiveAuthzBindings.java From incubator-sentry with Apache License 2.0

/**
 * validate create table permissions denied to junior_analyst in customer db
 */
@Test(expected=AuthorizationException.class)
public void testValidateCreateTabPrivilegesRejectionForUser() throws Exception {
    outputTabHierarcyList.add(buildObjectHierarchy(SERVER1, CUSTOMER_DB, null));
    testAuth.authorize(HiveOperation.CREATETABLE, createTabPrivileges, JUNIOR_ANALYST_SUBJECT,
            inputTabHierarcyList, outputTabHierarcyList);
}
Example #28
Source File: HiveITBase.java From atlas with Apache License 2.0

protected static boolean isCreateOp(HiveEventContext hiveEvent) {
    return HiveOperation.CREATETABLE.equals(hiveEvent.getOperation())
            || HiveOperation.CREATEVIEW.equals(hiveEvent.getOperation())
            || HiveOperation.ALTERVIEW_AS.equals(hiveEvent.getOperation())
            || HiveOperation.ALTERTABLE_LOCATION.equals(hiveEvent.getOperation())
            || HiveOperation.CREATETABLE_AS_SELECT.equals(hiveEvent.getOperation());
}
Example #29
Source File: TestHiveAuthzBindings.java From incubator-sentry with Apache License 2.0

/**
 * validate read permission for admin on customer:purchase
 */
@Test
public void testValidateSelectPrivilegesForAdmin() throws Exception {
    inputTabHierarcyList.add(buildObjectHierarchy(SERVER1, CUSTOMER_DB, PURCHASES_TAB));
    testAuth.authorize(HiveOperation.QUERY, queryPrivileges, ADMIN_SUBJECT,
            inputTabHierarcyList, outputTabHierarcyList);
}
Example #30
Source File: HiveITBase.java From atlas with Apache License 2.0

@VisibleForTesting
protected static String getProcessQualifiedName(HiveMetaStoreBridge dgiBridge, HiveEventContext eventContext,
        final SortedSet<ReadEntity> sortedHiveInputs, final SortedSet<WriteEntity> sortedHiveOutputs,
        SortedMap<ReadEntity, AtlasEntity> hiveInputsMap,
        SortedMap<WriteEntity, AtlasEntity> hiveOutputsMap) throws HiveException {
    HiveOperation op = eventContext.getOperation();
    if (isCreateOp(eventContext)) {
        Entity entity = getEntityByType(sortedHiveOutputs, Entity.Type.TABLE);

        if (entity != null) {
            Table outTable = entity.getTable();
            //refresh table
            outTable = dgiBridge.getHiveClient().getTable(outTable.getDbName(), outTable.getTableName());
            return HiveMetaStoreBridge.getTableProcessQualifiedName(dgiBridge.getMetadataNamespace(), outTable);
        }
    }

    StringBuilder buffer = new StringBuilder(op.getOperationName());

    boolean ignoreHDFSPathsinQFName = ignoreHDFSPathsinQFName(op, sortedHiveInputs, sortedHiveOutputs);
    if (ignoreHDFSPathsinQFName && LOG.isDebugEnabled()) {
        LOG.debug("Ignoring HDFS paths in qualifiedName for {} {} ", op, eventContext.getQueryStr());
    }

    addInputs(dgiBridge, op, sortedHiveInputs, buffer, hiveInputsMap, ignoreHDFSPathsinQFName);
    buffer.append(IO_SEP);
    addOutputs(dgiBridge, op, sortedHiveOutputs, buffer, hiveOutputsMap, ignoreHDFSPathsinQFName);
    LOG.info("Setting process qualified name to {}", buffer);
    return buffer.toString();
}