Java Code Examples for com.alibaba.druid.sql.parser.Token#COMMA
The following examples show how to use
`com.alibaba.druid.sql.parser.Token#COMMA`.
You can vote up the examples you find useful or vote down those you don't,
and follow the link above each example to visit the original project or source file. You can also check out the related API usage on the sidebar.
Example 1
Source File: ElasticSqlSelectParser.java From elasticsearch-sql with Apache License 2.0 | 4 votes |
protected MySqlUpdateStatement parseUpdateStatment() { MySqlUpdateStatement update = new MySqlUpdateStatement(); lexer.nextToken(); if (lexer.identifierEquals(FnvHash.Constants.LOW_PRIORITY)) { lexer.nextToken(); update.setLowPriority(true); } if (lexer.identifierEquals(FnvHash.Constants.IGNORE)) { lexer.nextToken(); update.setIgnore(true); } if (lexer.identifierEquals(FnvHash.Constants.COMMIT_ON_SUCCESS)) { lexer.nextToken(); update.setCommitOnSuccess(true); } if (lexer.identifierEquals(FnvHash.Constants.ROLLBACK_ON_FAIL)) { lexer.nextToken(); update.setRollBackOnFail(true); } if (lexer.identifierEquals(FnvHash.Constants.QUEUE_ON_PK)) { lexer.nextToken(); update.setQueryOnPk(true); } if (lexer.identifierEquals(FnvHash.Constants.TARGET_AFFECT_ROW)) { lexer.nextToken(); SQLExpr targetAffectRow = this.exprParser.expr(); update.setTargetAffectRow(targetAffectRow); } if (lexer.identifierEquals(FnvHash.Constants.FORCE)) { lexer.nextToken(); if (lexer.token() == Token.ALL) { lexer.nextToken(); acceptIdentifier("PARTITIONS"); update.setForceAllPartitions(true); } else if (lexer.identifierEquals(FnvHash.Constants.PARTITIONS)){ lexer.nextToken(); update.setForceAllPartitions(true); } else if (lexer.token() == Token.PARTITION) { lexer.nextToken(); SQLName partition = this.exprParser.name(); update.setForcePartition(partition); } else { throw new ParserException("TODO. 
" + lexer.info()); } } while (lexer.token() == Token.HINT) { this.exprParser.parseHints(update.getHints()); } SQLSelectParser selectParser = this.exprParser.createSelectParser(); SQLTableSource updateTableSource = selectParser.parseTableSource(); update.setTableSource(updateTableSource); accept(Token.SET); for (;;) { SQLUpdateSetItem item = this.exprParser.parseUpdateSetItem(); update.addItem(item); if (lexer.token() != Token.COMMA) { break; } lexer.nextToken(); } if (lexer.token() == (Token.WHERE)) { lexer.nextToken(); update.setWhere(this.exprParser.expr()); } update.setOrderBy(this.exprParser.parseOrderBy()); update.setLimit(this.exprParser.parseLimit()); return update; }
Example 2
Source File: ElasticSqlExprParser.java From elasticsearch-sql with Apache License 2.0 | 4 votes |
/**
 * Parses a PRIMARY KEY constraint definition:
 * {@code PRIMARY KEY [USING indexType] [name] ( col [, col]... ) [USING indexType]}.
 *
 * @return the populated {@link MySqlPrimaryKey}
 */
@Override
public MySqlPrimaryKey parsePrimaryKey() {
    accept(Token.PRIMARY);
    accept(Token.KEY);

    MySqlPrimaryKey key = new MySqlPrimaryKey();

    // Optional "USING <index type>" before the column list.
    if (lexer.identifierEquals(FnvHash.Constants.USING)) {
        lexer.nextToken();
        key.setIndexType(lexer.stringVal());
        lexer.nextToken();
    }

    // Optional constraint name: anything appearing before the opening paren.
    if (lexer.token() != Token.LPAREN) {
        key.setName(this.name());
    }

    accept(Token.LPAREN);
    while (true) {
        // A quoted alias token is read as a plain name; anything else is a full expression.
        SQLExpr column = (lexer.token() == Token.LITERAL_ALIAS) ? this.name() : this.expr();
        key.addColumn(column);
        if (lexer.token() != Token.COMMA) {
            break;
        }
        lexer.nextToken();
    }
    accept(Token.RPAREN);

    // Optional "USING <index type>" after the column list.
    if (lexer.identifierEquals(FnvHash.Constants.USING)) {
        lexer.nextToken();
        key.setIndexType(lexer.stringVal());
        lexer.nextToken();
    }
    return key;
}
Example 3
Source File: ElasticSqlExprParser.java From elasticsearch-sql with Apache License 2.0 | 4 votes |
public SQLPartition parsePartition() { accept(Token.PARTITION); SQLPartition partitionDef = new SQLPartition(); partitionDef.setName(this.name()); SQLPartitionValue values = this.parsePartitionValues(); if (values != null) { partitionDef.setValues(values); } for (;;) { boolean storage = false; if (lexer.identifierEquals(FnvHash.Constants.DATA)) { lexer.nextToken(); acceptIdentifier("DIRECTORY"); if (lexer.token() == Token.EQ) { lexer.nextToken(); } partitionDef.setDataDirectory(this.expr()); } else if (lexer.token() == Token.TABLESPACE) { lexer.nextToken(); if (lexer.token() == Token.EQ) { lexer.nextToken(); } SQLName tableSpace = this.name(); partitionDef.setTablespace(tableSpace); } else if (lexer.token() == Token.INDEX) { lexer.nextToken(); acceptIdentifier("DIRECTORY"); if (lexer.token() == Token.EQ) { lexer.nextToken(); } partitionDef.setIndexDirectory(this.expr()); } else if (lexer.identifierEquals(FnvHash.Constants.MAX_ROWS)) { lexer.nextToken(); if (lexer.token() == Token.EQ) { lexer.nextToken(); } SQLExpr maxRows = this.primary(); partitionDef.setMaxRows(maxRows); } else if (lexer.identifierEquals(FnvHash.Constants.MIN_ROWS)) { lexer.nextToken(); if (lexer.token() == Token.EQ) { lexer.nextToken(); } SQLExpr minRows = this.primary(); partitionDef.setMaxRows(minRows); } else if (lexer.identifierEquals(FnvHash.Constants.ENGINE) || // (storage = (lexer.token() == Token.STORAGE || lexer.identifierEquals(FnvHash.Constants.STORAGE)))) { if (storage) { lexer.nextToken(); } acceptIdentifier("ENGINE"); if (lexer.token() == Token.EQ) { lexer.nextToken(); } SQLName engine = this.name(); partitionDef.setEngine(engine); } else if (lexer.token() == Token.COMMENT) { lexer.nextToken(); if (lexer.token() == Token.EQ) { lexer.nextToken(); } SQLExpr comment = this.primary(); partitionDef.setComment(comment); } else { break; } } if (lexer.token() == Token.LPAREN) { lexer.nextToken(); for (;;) { acceptIdentifier("SUBPARTITION"); SQLName subPartitionName = this.name(); 
SQLSubPartition subPartition = new SQLSubPartition(); subPartition.setName(subPartitionName); partitionDef.addSubPartition(subPartition); if (lexer.token() == Token.COMMA) { lexer.nextToken(); continue; } break; } accept(Token.RPAREN); } return partitionDef; }