Java Code Examples for org.antlr.v4.runtime.CommonTokenStream#seek()
The following examples show how to use org.antlr.v4.runtime.CommonTokenStream#seek().
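All of the examples below follow the same two-stage parsing pattern: parse once in the faster SLL prediction mode and, if that attempt fails, call CommonTokenStream#seek(0) to rewind the token stream, reset the parser, and retry in full LL mode. The following is a minimal, self-contained sketch of that pattern; MyGrammarLexer, MyGrammarParser, and startRule() are hypothetical stand-ins for your own ANTLR-generated classes, and a BailErrorStrategy is installed so that an SLL failure surfaces as a ParseCancellationException.

import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.misc.ParseCancellationException;

public class TwoStageParse
{
    public static ParserRuleContext parse(String input)
    {
        // MyGrammarLexer and MyGrammarParser are placeholders for any ANTLR-generated lexer/parser pair
        MyGrammarLexer lexer = new MyGrammarLexer(CharStreams.fromString(input));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        MyGrammarParser parser = new MyGrammarParser(tokenStream);

        // bail out on the first syntax error so an SLL failure is thrown as ParseCancellationException
        parser.setErrorHandler(new BailErrorStrategy());

        try {
            // first, try parsing with potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            return parser.startRule(); // startRule() is the hypothetical entry rule of the grammar
        }
        catch (ParseCancellationException ex) {
            // if we fail, rewind the token stream, reset the parser and retry with full LL mode
            tokenStream.seek(0);
            parser.reset();
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            return parser.startRule();
        }
    }
}

Real implementations, as in the examples below, typically also replace the default error listeners so that genuine syntax errors from the second (LL) attempt are reported in the application's own way.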
Example 1
Source File: TypeCalculation.java From presto with Apache License 2.0
private static ParserRuleContext parseTypeCalculation(String calculation)
{
    TypeCalculationLexer lexer = new TypeCalculationLexer(new CaseInsensitiveStream(CharStreams.fromString(calculation)));
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    TypeCalculationParser parser = new TypeCalculationParser(tokenStream);

    lexer.removeErrorListeners();
    lexer.addErrorListener(ERROR_LISTENER);

    parser.removeErrorListeners();
    parser.addErrorListener(ERROR_LISTENER);

    ParserRuleContext tree;
    try {
        // first, try parsing with potentially faster SLL mode
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        tree = parser.typeCalculation();
    }
    catch (ParseCancellationException ex) {
        // if we fail, parse with LL mode
        tokenStream.seek(0); // rewind input stream
        parser.reset();

        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        tree = parser.typeCalculation();
    }

    return tree;
}
Example 2
Source File: AntlrSqlParser.java From sylph with Apache License 2.0
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction)
{
    try {
        SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        SqlBaseParser parser = new SqlBaseParser(tokenStream);
        //parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));

        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);

        parser.removeErrorListeners();
        parser.addErrorListener(LEXER_ERROR_LISTENER);

        ParserRuleContext tree;
        try {
            // first, try parsing with potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            tree = parseFunction.apply(parser);
        }
        catch (ParseCancellationException ex) {
            // if we fail, parse with LL mode
            tokenStream.seek(0); // rewind input stream
            parser.reset();

            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            tree = parseFunction.apply(parser);
        }

        return new AstBuilder().visit(tree);
    }
    catch (StackOverflowError e) {
        throw new ParsingException(name + " is too large (stack overflow while parsing)");
    }
}
Example 3
Source File: SqlParser.java From macrobase with Apache License 2.0
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction)
{
    try {
        SqlBaseLexer lexer = new SqlBaseLexer(
                new CaseInsensitiveStream(new ANTLRInputStream(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        SqlBaseParser parser = new SqlBaseParser(tokenStream);

        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));

        lexer.removeErrorListeners();
        lexer.addErrorListener(ERROR_LISTENER);

        parser.removeErrorListeners();
        parser.addErrorListener(ERROR_LISTENER);

        ParserRuleContext tree;
        try {
            // first, try parsing with potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            tree = parseFunction.apply(parser);
        }
        catch (ParseCancellationException ex) {
            // if we fail, parse with LL mode
            tokenStream.seek(0); // rewind input stream
            parser.reset();

            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            tree = parseFunction.apply(parser);
        }

        return new AstBuilder().visit(tree);
    }
    catch (StackOverflowError e) {
        throw new ParsingException(name + " is too large (stack overflow while parsing)");
    }
}
Example 4
Source File: SqlParser.java From crate with Apache License 2.0
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction)
{
    try {
        SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(CharStreams.fromString(sql, name)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        SqlBaseParser parser = new SqlBaseParser(tokenStream);

        parser.addParseListener(new PostProcessor());

        lexer.removeErrorListeners();
        lexer.addErrorListener(ERROR_LISTENER);

        parser.removeErrorListeners();
        parser.addErrorListener(ERROR_LISTENER);

        ParserRuleContext tree;
        try {
            // first, try parsing with potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            tree = parseFunction.apply(parser);
        }
        catch (ParseCancellationException ex) {
            // if we fail, parse with LL mode
            tokenStream.seek(0); // rewind input stream
            parser.reset();

            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            tree = parseFunction.apply(parser);
        }

        return new AstBuilder().visit(tree);
    }
    catch (StackOverflowError e) {
        throw new ParsingException(name + " is too large (stack overflow while parsing)");
    }
}
Example 5
Source File: SqlParser.java From presto with Apache License 2.0
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions)
{
    try {
        SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        SqlBaseParser parser = new SqlBaseParser(tokenStream);
        initializer.accept(lexer, parser);

        // Override the default error strategy to not attempt inserting or deleting a token.
        // Otherwise, it messes up error reporting
        parser.setErrorHandler(new DefaultErrorStrategy()
        {
            @Override
            public Token recoverInline(Parser recognizer)
                    throws RecognitionException
            {
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                }
                else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });

        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames()), parser));

        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);

        parser.removeErrorListeners();

        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        }
        else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }

        ParserRuleContext tree;
        try {
            // first, try parsing with potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            tree = parseFunction.apply(parser);
        }
        catch (ParseCancellationException ex) {
            // if we fail, parse with LL mode
            tokenStream.seek(0); // rewind input stream
            parser.reset();

            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            tree = parseFunction.apply(parser);
        }

        return new AstBuilder(parsingOptions).visit(tree);
    }
    catch (StackOverflowError e) {
        throw new ParsingException(name + " is too large (stack overflow while parsing)");
    }
}