Java Code Examples for com.alibaba.druid.sql.parser.Token#INDEX
The following examples show how to use
com.alibaba.druid.sql.parser.Token#INDEX.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: ElasticSqlSelectParser.java From elasticsearch-sql with Apache License 2.0 | 5 votes |
private void parseIndexHint(MySqlIndexHintImpl hint) { if (lexer.token() == Token.INDEX) { lexer.nextToken(); } else { accept(Token.KEY); } if (lexer.token() == Token.FOR) { lexer.nextToken(); if (lexer.token() == Token.JOIN) { lexer.nextToken(); hint.setOption(MySqlIndexHint.Option.JOIN); } else if (lexer.token() == Token.ORDER) { lexer.nextToken(); accept(Token.BY); hint.setOption(MySqlIndexHint.Option.ORDER_BY); } else { accept(Token.GROUP); accept(Token.BY); hint.setOption(MySqlIndexHint.Option.GROUP_BY); } } accept(Token.LPAREN); if (lexer.token() == Token.PRIMARY) { lexer.nextToken(); hint.getIndexList().add(new SQLIdentifierExpr("PRIMARY")); } else { this.exprParser.names(hint.getIndexList()); } accept(Token.RPAREN); }
Example 2
Source File: ElasticSqlExprParser.java From elasticsearch-sql with Apache License 2.0 | 4 votes |
public SQLPartition parsePartition() { accept(Token.PARTITION); SQLPartition partitionDef = new SQLPartition(); partitionDef.setName(this.name()); SQLPartitionValue values = this.parsePartitionValues(); if (values != null) { partitionDef.setValues(values); } for (;;) { boolean storage = false; if (lexer.identifierEquals(FnvHash.Constants.DATA)) { lexer.nextToken(); acceptIdentifier("DIRECTORY"); if (lexer.token() == Token.EQ) { lexer.nextToken(); } partitionDef.setDataDirectory(this.expr()); } else if (lexer.token() == Token.TABLESPACE) { lexer.nextToken(); if (lexer.token() == Token.EQ) { lexer.nextToken(); } SQLName tableSpace = this.name(); partitionDef.setTablespace(tableSpace); } else if (lexer.token() == Token.INDEX) { lexer.nextToken(); acceptIdentifier("DIRECTORY"); if (lexer.token() == Token.EQ) { lexer.nextToken(); } partitionDef.setIndexDirectory(this.expr()); } else if (lexer.identifierEquals(FnvHash.Constants.MAX_ROWS)) { lexer.nextToken(); if (lexer.token() == Token.EQ) { lexer.nextToken(); } SQLExpr maxRows = this.primary(); partitionDef.setMaxRows(maxRows); } else if (lexer.identifierEquals(FnvHash.Constants.MIN_ROWS)) { lexer.nextToken(); if (lexer.token() == Token.EQ) { lexer.nextToken(); } SQLExpr minRows = this.primary(); partitionDef.setMaxRows(minRows); } else if (lexer.identifierEquals(FnvHash.Constants.ENGINE) || // (storage = (lexer.token() == Token.STORAGE || lexer.identifierEquals(FnvHash.Constants.STORAGE)))) { if (storage) { lexer.nextToken(); } acceptIdentifier("ENGINE"); if (lexer.token() == Token.EQ) { lexer.nextToken(); } SQLName engine = this.name(); partitionDef.setEngine(engine); } else if (lexer.token() == Token.COMMENT) { lexer.nextToken(); if (lexer.token() == Token.EQ) { lexer.nextToken(); } SQLExpr comment = this.primary(); partitionDef.setComment(comment); } else { break; } } if (lexer.token() == Token.LPAREN) { lexer.nextToken(); for (;;) { acceptIdentifier("SUBPARTITION"); SQLName subPartitionName = this.name(); 
SQLSubPartition subPartition = new SQLSubPartition(); subPartition.setName(subPartitionName); partitionDef.addSubPartition(subPartition); if (lexer.token() == Token.COMMA) { lexer.nextToken(); continue; } break; } accept(Token.RPAREN); } return partitionDef; }