Java Code Examples for org.apache.hadoop.hive.ql.parse.ASTNode#getChildCount()
The following examples show how to use
org.apache.hadoop.hive.ql.parse.ASTNode#getChildCount() .
You can vote up the ones you like or vote down the ones you don't like,
and follow the links above each example to visit the original project or source file. Related API usage is listed on the sidebar.
Example 1
Source File: Parser.java From Eagle with Apache License 2.0 | 7 votes |
private void visitSubtree(ASTNode ast) { int len = ast.getChildCount(); if (len > 0) { for (Node n : ast.getChildren()) { ASTNode asn = (ASTNode)n; switch (asn.getToken().getType()) { case HiveParser.TOK_TABNAME: tableSet.add(ast.getChild(0).getChild(0).getText()); break; case HiveParser.TOK_SET_COLUMNS_CLAUSE: for (int i = 0; i < asn.getChildCount(); i++) { addToColumnSet((ASTNode)asn.getChild(i).getChild(0)); } case HiveParser.TOK_FROM: parseFromClause((ASTNode)asn.getChild(0)); break; case HiveParser.TOK_INSERT: for (int i = 0; i < asn.getChildCount(); i++) { parseInsertClause((ASTNode)asn.getChild(i)); } break; case HiveParser.TOK_UNIONTYPE: int childcount = asn.getChildCount(); for (int i = 0; i < childcount; i++) { parseQL((ASTNode)asn.getChild(i)); } break; } } // Add tableSet and columnSet to tableColumnMap addTablesColumnsToMap(tableSet, columnSet); } }
Example 2
Source File: Parser.java From Eagle with Apache License 2.0 | 6 votes |
/**
 * Recursively parses a sub-query node: a plain query is visited directly,
 * while a union is expanded by parsing each of its branches.
 *
 * @param subQuery the sub-query AST node; other token types are ignored
 */
private void parseSubQuery(ASTNode subQuery) {
    switch (subQuery.getToken().getType()) {
    case HiveParser.TOK_QUERY:
        visitSubtree(subQuery);
        break;
    case HiveParser.TOK_UNIONTYPE:
        // A union node's children are the individual query branches.
        for (int branch = 0; branch < subQuery.getChildCount(); branch++) {
            parseSubQuery((ASTNode) subQuery.getChild(branch));
        }
        break;
    }
}
Example 3
Source File: Parser.java From eagle with Apache License 2.0 | 6 votes |
/**
 * Parses a query node's children, delegating FROM clauses and INSERT
 * clauses to their dedicated handlers. Other child types are ignored.
 *
 * @param ast the query AST node; a node with no children is a no-op
 */
private void parseQueryClause(ASTNode ast) {
    if (ast.getChildCount() <= 0) {
        return;
    }
    for (Node child : ast.getChildren()) {
        ASTNode node = (ASTNode) child;
        int tokenType = node.getToken().getType();
        if (tokenType == HiveParser.TOK_FROM) {
            parseFromClause((ASTNode) node.getChild(0));
        } else if (tokenType == HiveParser.TOK_INSERT) {
            for (int i = 0; i < node.getChildCount(); i++) {
                parseInsertClause((ASTNode) node.getChild(i));
            }
        }
    }
}
Example 4
Source File: Parser.java From eagle with Apache License 2.0 | 6 votes |
/**
 * Collects column names referenced by a function expression. A column
 * reference is resolved through the alias map and added to {@code set};
 * a function node is recursed into, child by child.
 *
 * @param ast the expression AST node
 * @param set output set receiving resolved column names
 */
private void parseTokFunction(ASTNode ast, Set<String> set) {
    int type = ast.getType();
    if (type == HiveParser.TOK_TABLE_OR_COL) {
        // Resolve a possible column alias to its real name before recording it.
        set.add(convAliasToReal(columnAliasMap, ast.getChild(0).getText()));
    } else if (type == HiveParser.TOK_FUNCTION) {
        for (int i = 0; i < ast.getChildCount(); i++) {
            ASTNode child = (ASTNode) ast.getChild(i);
            if (child != null) {
                parseTokFunction(child, set);
            }
        }
    }
}
Example 5
Source File: SentryHiveAuthorizationTaskFactoryImpl.java From incubator-sentry with Apache License 2.0 | 6 votes |
/**
 * Builds the DDL task for a REVOKE statement. Sentry restricts revokes to
 * ROLE principals and does not support partition-level privileges.
 *
 * @throws SemanticException if a partition spec is present or a non-role
 *         principal is named
 */
@Override
public Task<? extends Serializable> createRevokeTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privileges = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  List<PrincipalDesc> principals = analyzePrincipalListDef((ASTNode) ast.getChild(1));
  // Optional third child names the privilege object being revoked.
  PrivilegeObjectDesc privilegeObj = null;
  if (ast.getChildCount() > 2) {
    privilegeObj = analyzePrivilegeObject((ASTNode) ast.getChild(2));
  }
  // Sentry does not support partition-scoped privileges.
  if (privilegeObj != null && privilegeObj.getPartSpec() != null) {
    throw new SemanticException(SentryHiveConstants.PARTITION_PRIVS_NOT_SUPPORTED);
  }
  // Only ROLE principals may appear in a revoke.
  for (PrincipalDesc principal : principals) {
    if (principal.getType() != PrincipalType.ROLE) {
      throw new SemanticException(
          SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType());
    }
  }
  RevokeDesc revokeDesc = new RevokeDesc(privileges, principals, privilegeObj);
  return createTask(new DDLWork(inputs, outputs, revokeDesc));
}
Example 6
Source File: SentryHiveAuthorizationTaskFactoryImpl.java From incubator-sentry with Apache License 2.0 | 6 votes |
/**
 * Builds the DDL task for GRANT ROLE / REVOKE ROLE. Sentry only supports
 * granting and revoking roles to/from GROUP principals.
 *
 * @param isGrant true for GRANT ROLE, false for REVOKE ROLE
 * @throws SemanticException if any principal is not a group
 */
private Task<? extends Serializable> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrincipalDesc> principalDesc = analyzePrincipalListDef(
      (ASTNode) ast.getChild(0));
  // Children 1..n-1 are the role names being granted/revoked.
  List<String> roles = new ArrayList<String>();
  for (int i = 1; i < ast.getChildCount(); i++) {
    roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
  }
  String roleOwnerName = "";
  if (SessionState.get() != null && SessionState.get().getAuthenticator() != null) {
    roleOwnerName = SessionState.get().getAuthenticator().getUserName();
  }
  for (PrincipalDesc princ : principalDesc) {
    if (princ.getType() != PrincipalType.GROUP) {
      // CONSISTENCY FIX: this is a principal-type error, so report it with
      // the FOR_PRINCIPAL message used by the sibling grant/revoke methods;
      // the original concatenated the ON_OBJECT message here by mistake.
      String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + princ.getType();
      throw new SemanticException(msg);
    }
  }
  GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant, roles,
      principalDesc, roleOwnerName, PrincipalType.USER, false);
  return createTask(new DDLWork(inputs, outputs, grantRevokeRoleDDL));
}
Example 7
Source File: SentryHiveAuthorizationTaskFactoryImpl.java From incubator-sentry with Apache License 2.0 | 6 votes |
@Override public Task<? extends Serializable> createShowRolePrincipalsTask(ASTNode ast, Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { String roleName; if (ast.getChildCount() == 1) { roleName = ast.getChild(0).getText(); } else { // the parser should not allow this throw new AssertionError("Unexpected Tokens in SHOW ROLE PRINCIPALS"); } RoleDDLDesc roleDDLDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.SHOW_ROLE_PRINCIPALS, null); roleDDLDesc.setResFile(resFile.toString()); return createTask(new DDLWork(inputs, outputs, roleDDLDesc)); //return TaskFactory.get(new DDLWork(inputs, outputs, roleDDLDesc), conf); }
Example 8
Source File: Parser.java From Eagle with Apache License 2.0 | 5 votes |
/**
 * Collects column names referenced anywhere under this expression node.
 * A column reference is resolved through the alias map and recorded;
 * unlike a type-gated visitor, this recurses into every child regardless
 * of the current node's type.
 *
 * @param ast the expression AST node
 * @param set output set receiving resolved column names
 */
private void parseTokFunction(ASTNode ast, Set<String> set) {
  if (ast.getType() == HiveParser.TOK_TABLE_OR_COL) {
    // Resolve a possible column alias to its real name before recording it.
    set.add(convAliasToReal(columnAliasMap, ast.getChild(0).getText()));
  }
  int count = ast.getChildCount();
  for (int i = 0; i < count; i++) {
    ASTNode child = (ASTNode) ast.getChild(i);
    if (child != null) {
      parseTokFunction(child, set);
    }
  }
}
Example 9
Source File: TestParser.java From Eagle with Apache License 2.0 | 5 votes |
/**
 * Recursively prints the AST to stdout, one node per line, indenting each
 * level by one extra space.
 *
 * @param root   subtree to print; null is a no-op
 * @param indent current depth, used as the number of leading spaces
 */
private void printTree(ASTNode root, int indent) {
  if (root == null) {
    return;
  }
  // StringBuilder instead of the original StringBuffer: the buffer is
  // method-local, so the synchronized StringBuffer buys nothing.
  StringBuilder pad = new StringBuilder(indent);
  for (int i = 0; i < indent; i++) {
    pad.append(' ');
  }
  for (int i = 0; i < root.getChildCount(); i++) {
    System.out.println(pad.toString() + root.getChild(i).getText());
    printTree((ASTNode) root.getChild(i), indent + 1);
  }
}
Example 10
Source File: Parser.java From eagle with Apache License 2.0 | 5 votes |
private void visitSubtree(ASTNode ast) { int len = ast.getChildCount(); if (len > 0) { for (Node n : ast.getChildren()) { ASTNode asn = (ASTNode)n; switch (asn.getToken().getType()) { case HiveParser.TOK_TABNAME: //tableSet.add(ast.getChild(0).getChild(0).getText()); parserContent.getTableColumnMap().put(ast.getChild(0).getChild(0).getText(), new HashSet<>(columnSet)); break; case HiveParser.TOK_SET_COLUMNS_CLAUSE: for (int i = 0; i < asn.getChildCount(); i++) { addToColumnSet((ASTNode) asn.getChild(i).getChild(0)); } break; case HiveParser.TOK_QUERY: parseQueryClause(asn); break; case HiveParser.TOK_UNIONTYPE: case HiveParser.TOK_UNIONALL: case HiveParser.TOK_UNIONDISTINCT: visitSubtree(asn); break; } } // Add tableSet and columnSet to tableColumnMap addTablesColumnsToMap(tableSet, columnSet); } }
Example 11
Source File: Parser.java From eagle with Apache License 2.0 | 5 votes |
/**
 * Parses a SELECT clause by delegating every child of each select
 * expression to {@code parseSelectExpr}.
 *
 * @param ast the SELECT clause AST node
 */
private void parseSelectClause(ASTNode ast) {
  int exprCount = ast.getChildCount();
  for (int i = 0; i < exprCount; i++) {
    ASTNode selectExpr = (ASTNode) ast.getChild(i);
    int partCount = selectExpr.getChildCount();
    for (int j = 0; j < partCount; j++) {
      parseSelectExpr((ASTNode) selectExpr.getChild(j));
    }
  }
}
Example 12
Source File: TestParser.java From eagle with Apache License 2.0 | 5 votes |
/**
 * Recursively prints the AST to stdout, one node per line, indenting each
 * level by one extra space.
 *
 * @param root   subtree to print; null is a no-op
 * @param indent current depth, used as the number of leading spaces
 */
private void printTree(ASTNode root, int indent) {
  if (root == null) {
    return;
  }
  // StringBuilder instead of the original StringBuffer: the buffer is
  // method-local, so the synchronized StringBuffer buys nothing.
  StringBuilder pad = new StringBuilder(indent);
  for (int i = 0; i < indent; i++) {
    pad.append(' ');
  }
  for (int i = 0; i < root.getChildCount(); i++) {
    System.out.println(pad.toString() + root.getChild(i).getText());
    printTree((ASTNode) root.getChild(i), indent + 1);
  }
}
Example 13
Source File: SentryHiveAuthorizationTaskFactoryImpl.java From incubator-sentry with Apache License 2.0 | 5 votes |
/**
 * Builds the DDL task for a GRANT statement. Optional trailing children may
 * carry WITH GRANT OPTION and the privilege object. Sentry restricts grants
 * to ROLE principals and does not support partition-level privileges.
 *
 * @throws SemanticException if a partition spec is present or a non-role
 *         principal is named
 */
@Override
public Task<? extends Serializable> createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privileges = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  List<PrincipalDesc> principals = analyzePrincipalListDef((ASTNode) ast.getChild(1));
  SentryHivePrivilegeObjectDesc privilegeObj = null;
  boolean grantOption = false;
  // Children beyond the first two are optional: WITH GRANT OPTION and/or
  // the privilege object.
  for (int i = 2; i < ast.getChildCount(); i++) {
    ASTNode optionalChild = (ASTNode) ast.getChild(i);
    if (optionalChild.getType() == HiveParser.TOK_GRANT_WITH_OPTION) {
      grantOption = true;
    } else if (optionalChild.getType() == HiveParser.TOK_PRIV_OBJECT) {
      privilegeObj = analyzePrivilegeObject(optionalChild);
    }
  }
  String userName = null;
  if (SessionState.get() != null && SessionState.get().getAuthenticator() != null) {
    userName = SessionState.get().getAuthenticator().getUserName();
  }
  Preconditions.checkNotNull(privilegeObj, "privilegeObj is null for " + ast.dump());
  // Sentry does not support partition-scoped privileges.
  if (privilegeObj.getPartSpec() != null) {
    throw new SemanticException(SentryHiveConstants.PARTITION_PRIVS_NOT_SUPPORTED);
  }
  // Only ROLE principals may appear in a grant.
  for (PrincipalDesc principal : principals) {
    if (principal.getType() != PrincipalType.ROLE) {
      throw new SemanticException(
          SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType());
    }
  }
  GrantDesc grantDesc = new GrantDesc(privilegeObj, privileges, principals, userName,
      PrincipalType.USER, grantOption);
  return createTask(new DDLWork(inputs, outputs, grantDesc));
}
Example 14
Source File: SentryHiveAuthorizationTaskFactoryImpl.java From incubator-sentry with Apache License 2.0 | 5 votes |
@Override public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { SentryHivePrivilegeObjectDesc privHiveObj = null; ASTNode principal = (ASTNode) ast.getChild(0); PrincipalType type = PrincipalType.USER; switch (principal.getType()) { case HiveParser.TOK_USER: type = PrincipalType.USER; break; case HiveParser.TOK_GROUP: type = PrincipalType.GROUP; break; case HiveParser.TOK_ROLE: type = PrincipalType.ROLE; break; } if (type != PrincipalType.ROLE) { String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + type; throw new SemanticException(msg); } String principalName = BaseSemanticAnalyzer.unescapeIdentifier(principal.getChild(0).getText()); PrincipalDesc principalDesc = new PrincipalDesc(principalName, type); // Partition privileges are not supported by Sentry if (ast.getChildCount() > 1) { ASTNode child = (ASTNode) ast.getChild(1); if (child.getToken().getType() == HiveParser.TOK_PRIV_OBJECT_COL) { privHiveObj = analyzePrivilegeObject(child); } else { throw new SemanticException("Unrecognized Token: " + child.getToken().getType()); } } ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj); return createTask(new DDLWork(inputs, outputs, showGrant)); }
Example 15
Source File: SentryHiveAuthorizationTaskFactoryImpl.java From incubator-sentry with Apache License 2.0 | 5 votes |
/**
 * Converts a principal-list AST node into {@code PrincipalDesc} objects,
 * one per child, mapping USER/GROUP/ROLE tokens to the corresponding
 * principal type.
 *
 * @param node the principal-list AST node
 * @return the parsed principals, in child order
 */
private List<PrincipalDesc> analyzePrincipalListDef(ASTNode node) {
  List<PrincipalDesc> principalList = new ArrayList<PrincipalDesc>();
  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode child = (ASTNode) node.getChild(i);
    PrincipalType type = null;
    // NOTE(review): the bare numeric cases (880/685/782) look like
    // hard-coded token ids from a different Hive parser version that mirror
    // TOK_USER/TOK_GROUP/TOK_ROLE — confirm against the HiveParser
    // vocabulary before removing them.
    switch (child.getType()) {
    case 880:
    case HiveParser.TOK_USER:
      type = PrincipalType.USER;
      break;
    case 685:
    case HiveParser.TOK_GROUP:
      type = PrincipalType.GROUP;
      break;
    case 782:
    case HiveParser.TOK_ROLE:
      type = PrincipalType.ROLE;
      break;
    }
    String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
    PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);
    LOG.debug("## Principal : [ " + principalName + ", " + type + "]");
    principalList.add(principalDesc);
  }
  return principalList;
}
Example 16
Source File: HiveAuthzBindingHook.java From incubator-sentry with Apache License 2.0 | 5 votes |
/**
 * Scans the children of the given AST node for a partition-location token
 * with exactly one child (the location string) and returns its parsed URI.
 *
 * @param ast the ALTER/ADD-partition AST node
 * @return the parsed partition location URI, or null when none is present
 * @throws SemanticException if the location string cannot be parsed as a URI
 */
@VisibleForTesting
protected static AccessURI extractPartition(ASTNode ast) throws SemanticException {
  for (int i = 0; i < ast.getChildCount(); i++) {
    ASTNode child = (ASTNode) ast.getChild(i);
    boolean isLocation = child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION;
    if (isLocation && child.getChildCount() == 1) {
      return parseURI(BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText()));
    }
  }
  return null;
}