org.antlr.v4.runtime.atn.PredictionMode Java Examples
The following examples show how to use
org.antlr.v4.runtime.atn.PredictionMode.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: ParserUtils.java From ethereumj with MIT License | 6 votes |
/**
 * Reflectively constructs a parser of type {@code P} over {@code source},
 * configured for strict diagnostics: exact-ambiguity detection plus a
 * diagnostic listener and a failure listener.
 *
 * @param lexerClass  lexer class handed to {@code getLexer} to tokenize the source
 * @param parserClass parser class; must expose a {@code (TokenStream)} constructor
 * @param source      the text to parse
 * @return the configured parser instance
 * @throws IllegalArgumentException if the parser constructor cannot be invoked
 */
public static <P extends Parser> P getParser(Class<? extends Lexer> lexerClass, Class<P> parserClass, String source) {
    Lexer lexer = getLexer(lexerClass, source);
    TokenStream tokens = new CommonTokenStream(lexer);
    P parser;
    try {
        // Instantiate via the conventional (TokenStream) constructor generated by ANTLR.
        parser = parserClass.getConstructor(TokenStream.class).newInstance(tokens);
    } catch (Exception e) {
        throw new IllegalArgumentException("couldn't invoke parser constructor", e);
    }
    // Most expensive prediction mode: reports all exact ambiguities (used here for testing).
    parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
    parser.removeErrorListeners(); // don't spit to stderr
    parser.addErrorListener(new DiagnosticErrorListener());
    parser.addErrorListener(new AntlrFailureListener());
    return parser;
}
Example #2
Source File: BoaCompiler.java From compiler with Apache License 2.0 | 6 votes |
/**
 * Parses the token stream using ANTLR's two-stage strategy: try the fast SLL
 * prediction mode first; if it bails out with a cancellation, rewind the
 * stream and reparse in full LL mode with the real error listener attached.
 *
 * @param tokens              token stream shared with the parser (rewound on retry)
 * @param parser              the Boa parser to drive
 * @param parserErrorListener listener installed only for the second (LL) pass
 * @return the AST produced by the start rule
 */
private static Start parse(final CommonTokenStream tokens, final BoaParser parser, final BoaErrorListener parserErrorListener) {
    parser.setBuildParseTree(false);
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    try {
        return parser.start().ast;
    } catch (final ParseCancellationException e) {
        // fall-back to LL mode parsing if SLL fails
        tokens.reset();
        parser.reset();
        parser.removeErrorListeners();
        parser.addErrorListener(parserErrorListener);
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        return parser.start().ast;
    }
}
Example #3
Source File: Utils.java From Concurnas with MIT License | 6 votes |
/**
 * Builds a Concurnas parser over {@code text} in fast SLL mode, routing all
 * lexer and parser errors into the supplied capturer instead of the console.
 *
 * @param name           source name attached to the char stream
 * @param startingLine   if &gt; -1, token locations are re-pointed to this
 *                       line/column (for fragments embedded in a larger file)
 * @param errors         receives both lexer and parser errors
 * @return the configured parser (not yet run)
 */
public static ConcurnasParser miniParse(String text, String name, int startingLine, int startingColumn, LexParseErrorCapturer errors) {
    CharStream input = CharStreams.fromString(text, name);
    ConcurnasLexer lexer = new ConcurnasLexer(input);
    if(startingLine > -1) {
        // Shift reported token positions so errors map back to the enclosing source.
        lexer.setTokenFactory(new LocationRepointCommonTokenFactory(startingLine,startingColumn));
    }
    lexer.permitDollarPrefixRefName = true;
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ConcurnasParser parser = new ConcurnasParser(tokens);
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.removeErrorListeners(); // remove ConsoleErrorListener
    parser.addErrorListener(errors); // add ours
    lexer.removeErrorListeners(); // remove ConsoleErrorListener
    lexer.addErrorListener(errors); // add ours
    return parser;
}
Example #4
Source File: DescriptiveErrorStrategy.java From groovy with Apache License 2.0 | 6 votes |
/**
 * Records the exception on every rule context up the invocation stack,
 * reports a descriptive message only when running in full LL mode, then
 * aborts the parse by throwing {@link ParseCancellationException}.
 */
@Override
public void recover(Parser recognizer, RecognitionException e) {
    // Attach the exception to each context so callers can inspect where it failed.
    for (ParserRuleContext context = recognizer.getContext(); context != null; context = context.getParent()) {
        context.exception = e;
    }
    // Only report in LL mode — an SLL failure is presumably retried in LL by the
    // caller before anything is shown to the user (TODO confirm against caller).
    if (PredictionMode.LL.equals(recognizer.getInterpreter().getPredictionMode())) {
        if (e instanceof NoViableAltException) {
            this.reportNoViableAlternative(recognizer, (NoViableAltException) e);
        } else if (e instanceof InputMismatchException) {
            this.reportInputMismatch(recognizer, (InputMismatchException) e);
        } else if (e instanceof FailedPredicateException) {
            this.reportFailedPredicate(recognizer, (FailedPredicateException) e);
        }
    }
    // Never attempt in-band recovery; always cancel.
    throw new ParseCancellationException(e);
}
Example #5
Source File: AstBuilder.java From kalang with MIT License | 6 votes |
/**
 * Parses the compilation unit with the fast SLL mode and a bail-style error
 * strategy; on any failure, rewinds the input and reparses in full LL mode.
 * The parser's original error handler and listeners are restored on both paths.
 *
 * @return the parsed compilation unit context
 */
private CompilationUnitContext buildCompilationUnitContext() {
    // Save current configuration so it can be restored after the attempt(s).
    ANTLRErrorStrategy oldErrorHandler = parser.getErrorHandler();
    List<? extends ANTLRErrorListener> oldErrorListeners = parser.getErrorListeners();
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.removeErrorListeners();
    parser.setErrorHandler(new SLLErrorStrategy(compilationUnit));
    try {
        CompilationUnitContext cu = parser.compilationUnit();
        oldErrorListeners.forEach(parser::addErrorListener);
        parser.setErrorHandler(oldErrorHandler);
        return cu;
    } catch (Throwable throwable) {
        // SLL pass failed: rewind and retry with full LL and the original handlers.
        parser.getInputStream().seek(0);
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        oldErrorListeners.forEach(parser::addErrorListener);
        parser.setErrorHandler(oldErrorHandler);
        return parser.compilationUnit();
    }
}
Example #6
Source File: TypeCalculation.java From rainbow with Apache License 2.0 | 5 votes |
/**
 * Parses a type-calculation expression case-insensitively using the standard
 * two-stage ANTLR strategy (fast SLL first, full LL on failure).
 *
 * @param calculation the expression text
 * @return the parse tree for the {@code typeCalculation} rule
 */
private static ParserRuleContext parseTypeCalculation(String calculation) {
    TypeCalculationLexer lexer = new TypeCalculationLexer(new CaseInsensitiveStream(new ANTLRInputStream(calculation)));
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    TypeCalculationParser parser = new TypeCalculationParser(tokenStream);
    lexer.removeErrorListeners();
    lexer.addErrorListener(ERROR_LISTENER);
    parser.removeErrorListeners();
    parser.addErrorListener(ERROR_LISTENER);
    ParserRuleContext tree;
    try {
        // first, try parsing with potentially faster SLL mode
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        tree = parser.typeCalculation();
    } catch (ParseCancellationException ex) {
        // if we fail, parse with LL mode
        tokenStream.reset(); // rewind input stream
        parser.reset();
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        tree = parser.typeCalculation();
    }
    return tree;
}
Example #7
Source File: SqlParser.java From crate with Apache License 2.0 | 5 votes |
/**
 * Runs {@code parseFunction} over {@code sql} with the two-stage SLL/LL
 * strategy and converts the resulting parse tree to an AST node.
 *
 * @param name          source name (used in error reporting)
 * @param sql           SQL text, wrapped in a case-insensitive stream
 * @param parseFunction entry rule to invoke on the parser
 * @return the AST produced by {@code AstBuilder}
 * @throws ParsingException if parsing overflows the stack (input too large)
 */
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction) {
    try {
        SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(CharStreams.fromString(sql, name)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        SqlBaseParser parser = new SqlBaseParser(tokenStream);
        parser.addParseListener(new PostProcessor());
        lexer.removeErrorListeners();
        lexer.addErrorListener(ERROR_LISTENER);
        parser.removeErrorListeners();
        parser.addErrorListener(ERROR_LISTENER);
        ParserRuleContext tree;
        try {
            // first, try parsing with potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            tree = parseFunction.apply(parser);
        } catch (ParseCancellationException ex) {
            // if we fail, parse with LL mode
            tokenStream.seek(0); // rewind input stream
            parser.reset();
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            tree = parseFunction.apply(parser);
        }
        return new AstBuilder().visit(tree);
    } catch (StackOverflowError e) {
        throw new ParsingException(name + " is too large (stack overflow while parsing)");
    }
}
Example #8
Source File: SqlParser.java From macrobase with Apache License 2.0 | 5 votes |
/**
 * Runs {@code parseFunction} over {@code sql} with the two-stage SLL/LL
 * strategy and converts the resulting parse tree to an AST node.
 *
 * @param name          source name (used in the stack-overflow error message)
 * @param sql           SQL text, wrapped in a case-insensitive stream
 * @param parseFunction entry rule to invoke on the parser
 * @return the AST produced by {@code AstBuilder}
 * @throws ParsingException if parsing overflows the stack (input too large)
 */
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction) {
    try {
        SqlBaseLexer lexer = new SqlBaseLexer( new CaseInsensitiveStream(new ANTLRInputStream(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        SqlBaseParser parser = new SqlBaseParser(tokenStream);
        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));
        lexer.removeErrorListeners();
        lexer.addErrorListener(ERROR_LISTENER);
        parser.removeErrorListeners();
        parser.addErrorListener(ERROR_LISTENER);
        ParserRuleContext tree;
        try {
            // first, try parsing with potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            tree = parseFunction.apply(parser);
        } catch (ParseCancellationException ex) {
            // if we fail, parse with LL mode
            tokenStream.seek(0); // rewind input stream
            parser.reset();
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            tree = parseFunction.apply(parser);
        }
        return new AstBuilder().visit(tree);
    } catch (StackOverflowError e) {
        throw new ParsingException(name + " is too large (stack overflow while parsing)");
    }
}
Example #9
Source File: ProgramParser.java From vespa with Apache License 2.0 | 5 votes |
private ProgramContext parseProgram(yqlplusParser parser) throws RecognitionException { try { return parser.program(); } catch (RecognitionException e) { //Retry parsing using full LL mode parser.reset(); parser.getInterpreter().setPredictionMode(PredictionMode.LL); return parser.program(); } }
Example #10
Source File: SQLParserExecutor.java From shardingsphere with Apache License 2.0 | 5 votes |
/**
 * Two-phase parse: first attempt with SLL prediction and a bail-fast error
 * strategy; if that is cancelled, reset and reparse with full LL prediction
 * and the default (recovering) error strategy.
 *
 * @return the parsed AST node
 */
private ParseASTNode twoPhaseParse() {
    SQLParser sqlParser = SQLParserFactory.newInstance(databaseTypeName, sql);
    try {
        // Fast path: bail on the first error instead of attempting recovery.
        ((Parser) sqlParser).setErrorHandler(new BailErrorStrategy());
        ((Parser) sqlParser).getInterpreter().setPredictionMode(PredictionMode.SLL);
        return (ParseASTNode) sqlParser.parse();
    } catch (final ParseCancellationException ex) {
        // Slow path: full LL with standard error recovery.
        ((Parser) sqlParser).reset();
        ((Parser) sqlParser).setErrorHandler(new DefaultErrorStrategy());
        ((Parser) sqlParser).getInterpreter().setPredictionMode(PredictionMode.LL);
        return (ParseASTNode) sqlParser.parse();
    }
}
Example #11
Source File: GrammarParserInterpreter.java From codebuff with BSD 2-Clause "Simplified" License | 5 votes |
/**
 * Derive a new parser from an old one that has knowledge of the grammar.
 * The Grammar object is used to correctly compute outer alternative
 * numbers for parse tree nodes. A parser of the same type is created
 * for subclasses of {@link ParserInterpreter}.
 *
 * @param g              the grammar providing rule/alternative metadata
 * @param originalParser parser to derive from (interpreter or generated)
 * @param tokens         token stream for the new parser
 * @return a silent ParserInterpreter in exact-ambiguity-detection mode
 * @throws IllegalArgumentException if a matching interpreter constructor cannot be invoked
 */
public static ParserInterpreter deriveTempParserInterpreter(Grammar g, Parser originalParser, TokenStream tokens) {
    ParserInterpreter parser;
    if (originalParser instanceof ParserInterpreter) {
        // Preserve the concrete interpreter subclass via its (Grammar, ATN, TokenStream) ctor.
        Class<? extends ParserInterpreter> c = originalParser.getClass().asSubclass(ParserInterpreter.class);
        try {
            Constructor<? extends ParserInterpreter> ctor = c.getConstructor(Grammar.class, ATN.class, TokenStream.class);
            parser = ctor.newInstance(g, originalParser.getATN(), originalParser.getTokenStream());
        } catch (Exception e) {
            throw new IllegalArgumentException("can't create parser to match incoming "+originalParser.getClass().getSimpleName(), e);
        }
    }
    else { // must've been a generated parser
        // Round-trip the ATN through serialization to get an independent copy.
        char[] serializedAtn = ATNSerializer.getSerializedAsChars(originalParser.getATN());
        ATN deserialized = new ATNDeserializer().deserialize(serializedAtn);
        parser = new ParserInterpreter(originalParser.getGrammarFileName(),
                                       originalParser.getVocabulary(),
                                       Arrays.asList(originalParser.getRuleNames()),
                                       deserialized,
                                       tokens);
    }
    parser.setInputStream(tokens);
    // Make sure that we don't get any error messages from using this temporary parser
    parser.setErrorHandler(new BailErrorStrategy());
    parser.removeErrorListeners();
    parser.removeParseListeners();
    parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
    return parser;
}
Example #12
Source File: ProgramParser.java From yql-plus with Apache License 2.0 | 5 votes |
private ProgramContext parseProgram(yqlplusParser parser) throws RecognitionException { try { return parser.program(); } catch (RecognitionException e) { //Retry parsing using full LL mode parser.reset(); parser.getInterpreter().setPredictionMode(PredictionMode.LL); return parser.program(); } }
Example #13
Source File: SqlParser.java From rainbow with Apache License 2.0 | 5 votes |
/**
 * Runs {@code parseFunction} over {@code sql} with the two-stage SLL/LL
 * strategy and builds an AST with the given parsing options.
 *
 * @param name           source name (used in the stack-overflow error message)
 * @param sql            SQL text, wrapped in a case-insensitive stream
 * @param parseFunction  entry rule to invoke on the parser
 * @param parsingOptions options forwarded to {@code AstBuilder}
 * @return the AST produced by {@code AstBuilder}
 * @throws ParsingException if parsing overflows the stack (input too large)
 */
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions) {
    try {
        SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(new ANTLRInputStream(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        SqlBaseParser parser = new SqlBaseParser(tokenStream);
        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));
        lexer.removeErrorListeners();
        lexer.addErrorListener(ERROR_LISTENER);
        parser.removeErrorListeners();
        parser.addErrorListener(ERROR_LISTENER);
        ParserRuleContext tree;
        try {
            // first, try parsing with potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            tree = parseFunction.apply(parser);
        } catch (ParseCancellationException ex) {
            // if we fail, parse with LL mode
            tokenStream.reset(); // rewind input stream
            parser.reset();
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            tree = parseFunction.apply(parser);
        }
        return new AstBuilder(parsingOptions).visit(tree);
    } catch (StackOverflowError e) {
        throw new ParsingException(name + " is too large (stack overflow while parsing)");
    }
}
Example #14
Source File: AntlrSqlParser.java From sylph with Apache License 2.0 | 5 votes |
/**
 * Runs {@code parseFunction} over {@code sql} with the two-stage SLL/LL
 * strategy and converts the resulting parse tree to an AST node.
 * Note: the same listener ({@code LEXER_ERROR_LISTENER}) is installed on
 * both the lexer and the parser.
 *
 * @param name          source name (used in the stack-overflow error message)
 * @param sql           SQL text, wrapped in a case-insensitive stream
 * @param parseFunction entry rule to invoke on the parser
 * @return the AST produced by {@code AstBuilder}
 * @throws ParsingException if parsing overflows the stack (input too large)
 */
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction) {
    try {
        SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        SqlBaseParser parser = new SqlBaseParser(tokenStream);
        //parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));
        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);
        parser.removeErrorListeners();
        parser.addErrorListener(LEXER_ERROR_LISTENER);
        ParserRuleContext tree;
        try {
            // first, try parsing with potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            tree = parseFunction.apply(parser);
        } catch (ParseCancellationException ex) {
            // if we fail, parse with LL mode
            tokenStream.seek(0); // rewind input stream
            parser.reset();
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            tree = parseFunction.apply(parser);
        }
        return new AstBuilder().visit(tree);
    } catch (StackOverflowError e) {
        throw new ParsingException(name + " is too large (stack overflow while parsing)");
    }
}
Example #15
Source File: KsqlParser.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
/**
 * Parses {@code sql} into a parse tree for the {@code statements} rule using
 * the two-stage strategy: fast SLL first, full LL on cancellation.
 *
 * @param sql SQL text, wrapped in a case-insensitive stream
 * @return the parse tree for the statements rule
 */
private ParserRuleContext getParseTree(String sql) {
    SqlBaseLexer sqlBaseLexer = new SqlBaseLexer(new CaseInsensitiveStream(new ANTLRInputStream(sql)));
    CommonTokenStream tokenStream = new CommonTokenStream(sqlBaseLexer);
    SqlBaseParser sqlBaseParser = new SqlBaseParser(tokenStream);
    sqlBaseLexer.removeErrorListeners();
    sqlBaseLexer.addErrorListener(ERROR_LISTENER);
    sqlBaseParser.removeErrorListeners();
    sqlBaseParser.addErrorListener(ERROR_LISTENER);
    Function<SqlBaseParser, ParserRuleContext> parseFunction = SqlBaseParser::statements;
    ParserRuleContext tree;
    try {
        // first, try parsing with potentially faster SLL mode
        sqlBaseParser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        tree = parseFunction.apply(sqlBaseParser);
    } catch (ParseCancellationException ex) {
        // if we fail, parse with LL mode
        tokenStream.reset(); // rewind input stream
        sqlBaseParser.reset();
        sqlBaseParser.getInterpreter().setPredictionMode(PredictionMode.LL);
        tree = parseFunction.apply(sqlBaseParser);
    }
    return tree;
}
Example #16
Source File: ParseHelper.java From graphicsfuzz with Apache License 2.0 | 5 votes |
/**
 * Attempts a fast parse of a translation unit: SLL prediction plus a
 * bail-fast error strategy, clearing the interpreter's DFA cache afterwards.
 */
private static Translation_unitContext tryFastParse(
    InputStream inputStream,
    ParseTreeListener listener) throws IOException {
    final GLSLParser glslParser = getParser(inputStream, listener);
    // Fail immediately on the first syntax error rather than recovering.
    glslParser.setErrorHandler(new BailErrorStrategy());
    glslParser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    final Translation_unitContext translationUnit = glslParser.translation_unit();
    // Release the DFA cache built during this parse to bound memory use.
    glslParser.getInterpreter().clearDFA();
    return translationUnit;
}
Example #17
Source File: ParseDriver.java From incubator-iotdb with Apache License 2.0 | 5 votes |
/**
 * Parses {@code sql} into a logical-plan Operator. Stage 1 parses with a
 * fresh lexer/parser in SLL mode; if that throws, stage 2 rebuilds the whole
 * pipeline from scratch and parses in full LL mode.
 *
 * @param sql    the statement text
 * @param zoneId time zone handed to the logical generator
 * @return the logical plan produced by walking the parse tree
 * @throws ParseCancellationException on unrecoverable parse errors
 */
public Operator parse(String sql, ZoneId zoneId) throws ParseCancellationException {
    LogicalGenerator logicalGenerator = new LogicalGenerator(zoneId);
    CharStream charStream1 = CharStreams.fromString(sql);
    SqlBaseLexer lexer1 = new SqlBaseLexer(charStream1);
    CommonTokenStream tokens1 = new CommonTokenStream(lexer1);
    SqlBaseParser parser1 = new SqlBaseParser(tokens1);
    parser1.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser1.removeErrorListeners();
    parser1.addErrorListener(LogicalGeneratorError.INSTANCE);
    ParseTree tree;
    try {
        tree = parser1.singleStatement();  // STAGE 1
    }
    catch (Exception ex) {
        // NOTE(review): catches Exception broadly, not just ParseCancellationException,
        // so any stage-1 failure triggers a full LL reparse with fresh objects.
        CharStream charStream2 = CharStreams.fromString(sql);
        SqlBaseLexer lexer2 = new SqlBaseLexer(charStream2);
        CommonTokenStream tokens2 = new CommonTokenStream(lexer2);
        SqlBaseParser parser2 = new SqlBaseParser(tokens2);
        parser2.getInterpreter().setPredictionMode(PredictionMode.LL);
        parser2.removeErrorListeners();
        parser2.addErrorListener(LogicalGeneratorError.INSTANCE);
        tree = parser2.singleStatement();  // STAGE 2
        // if we parse ok, it's LL not SLL
    }
    walker.walk(logicalGenerator, tree);
    return logicalGenerator.getLogicalPlan();
}
Example #18
Source File: SyntaxParserTestUtil.java From Concurnas with MIT License | 5 votes |
/**
 * Test-utility constructor: wires a lexer (input set later) and parser in
 * SLL mode, routing errors into the capturers instead of the console.
 */
public SyntaxParserTestUtil(){
    lexer = new ConcurnasLexer(null); // input stream supplied later by callers
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    parser = new ConcurnasParser(tokens);
    // NOTE(review): a second CommonTokenStream over the same lexer replaces the
    // one passed to the constructor above — verify this duplication is intended.
    parser.setInputStream(new CommonTokenStream(lexer));
    parser.removeErrorListeners(); // remove ConsoleErrorListener
    parser.addErrorListener(lexerErrors); // add ours
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    lexer.removeErrorListeners(); // remove ConsoleErrorListener
    lexer.addErrorListener(parserErrors); // add ours
}
Example #19
Source File: Concc.java From Concurnas with MIT License | 5 votes |
/**
 * Parses {@code inputString} as a concc command line and visits it with a
 * builder. Any parse error — thrown or captured — yields {@code null}.
 *
 * @return the populated builder, or {@code null} on any error
 */
public ConccBuilder createBuilder() {
    ErrorCap errors = new ErrorCap();
    ConccBuilder builder;
    try {
        CharStream input = CharStreams.fromString(inputString, "cmd");
        ConccLexer lexer = new ConccLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        ConccParser parser = new ConccParser(tokens);
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        parser.removeErrorListeners(); // remove ConsoleErrorListener
        parser.addErrorListener(errors); // add ours
        lexer.removeErrorListeners(); // remove ConsoleErrorListener
        lexer.addErrorListener(errors); // add ours
        builder = new ConccBuilder();
        parser.concc().accept(builder);
    }catch(Throwable e) {
        // Best-effort parse: any failure is signalled via the null return.
        return null;
    }
    if(errors.hasErrors) {
        return null;
    }
    return builder;
}
Example #20
Source File: Conc.java From Concurnas with MIT License | 5 votes |
/**
 * Parses {@code inputString} as a conc command line and visits it with a
 * builder. Any parse error — thrown or captured — yields {@code null}.
 *
 * @return the populated builder, or {@code null} on any error
 */
public ConcBuilder createBuilder() {
    ErrorCap errors = new ErrorCap();
    ConcBuilder builder;
    try {
        CharStream input = CharStreams.fromString(inputString, "cmd");
        ConcLexer lexer = new ConcLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        ConcParser parser = new ConcParser(tokens);
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        parser.removeErrorListeners(); // remove ConsoleErrorListener
        parser.addErrorListener(errors); // add ours
        lexer.removeErrorListeners(); // remove ConsoleErrorListener
        lexer.addErrorListener(errors); // add ours
        builder = new ConcBuilder();
        parser.conc().accept(builder);
    }catch(Throwable e) {
        // Best-effort parse: any failure is signalled via the null return.
        return null;
    }
    if(errors.hasErrors) {
        return null;
    }
    return builder;
}
Example #21
Source File: TypeCalculation.java From presto with Apache License 2.0 | 5 votes |
/**
 * Parses a type-calculation expression case-insensitively using the standard
 * two-stage ANTLR strategy (fast SLL first, full LL on cancellation).
 *
 * @param calculation the expression text
 * @return the parse tree for the {@code typeCalculation} rule
 */
private static ParserRuleContext parseTypeCalculation(String calculation) {
    TypeCalculationLexer lexer = new TypeCalculationLexer(new CaseInsensitiveStream(CharStreams.fromString(calculation)));
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    TypeCalculationParser parser = new TypeCalculationParser(tokenStream);
    lexer.removeErrorListeners();
    lexer.addErrorListener(ERROR_LISTENER);
    parser.removeErrorListeners();
    parser.addErrorListener(ERROR_LISTENER);
    ParserRuleContext tree;
    try {
        // first, try parsing with potentially faster SLL mode
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        tree = parser.typeCalculation();
    } catch (ParseCancellationException ex) {
        // if we fail, parse with LL mode
        tokenStream.seek(0); // rewind input stream
        parser.reset();
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        tree = parser.typeCalculation();
    }
    return tree;
}
Example #22
Source File: SqlParser.java From presto with Apache License 2.0 | 4 votes |
/**
 * Runs {@code parseFunction} over {@code sql} with the two-stage SLL/LL
 * strategy. Installs a custom error strategy that refuses in-line token
 * insertion/deletion so error positions stay accurate, then builds the AST.
 *
 * @param name           source name (used in the stack-overflow error message)
 * @param sql            SQL text, wrapped in a case-insensitive stream
 * @param parseFunction  entry rule to invoke on the parser
 * @param parsingOptions options forwarded to {@code AstBuilder}
 * @return the AST produced by {@code AstBuilder}
 * @throws ParsingException if parsing overflows the stack (input too large)
 */
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions) {
    try {
        SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        SqlBaseParser parser = new SqlBaseParser(tokenStream);
        initializer.accept(lexer, parser);
        // Override the default error strategy to not attempt inserting or deleting a token.
        // Otherwise, it messes up error reporting
        parser.setErrorHandler(new DefaultErrorStrategy() {
            @Override
            public Token recoverInline(Parser recognizer) throws RecognitionException {
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                }
                else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });
        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames()), parser));
        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);
        parser.removeErrorListeners();
        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        }
        else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }
        ParserRuleContext tree;
        try {
            // first, try parsing with potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            tree = parseFunction.apply(parser);
        }
        catch (ParseCancellationException ex) {
            // if we fail, parse with LL mode
            tokenStream.seek(0); // rewind input stream
            parser.reset();
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            tree = parseFunction.apply(parser);
        }
        return new AstBuilder(parsingOptions).visit(tree);
    }
    catch (StackOverflowError e) {
        throw new ParsingException(name + " is too large (stack overflow while parsing)");
    }
}
Example #23
Source File: TestRig.java From codebuff with BSD 2-Clause "Simplified" License | 4 votes |
/**
 * TestRig driver: tokenizes the input, optionally prints tokens, then parses
 * the configured start rule and prints/inspects/saves the tree according to
 * the rig's flags. Closes both the Reader and InputStream when done.
 *
 * @throws IOException on input failure; reflection exceptions when the start
 *         rule cannot be invoked
 */
protected void process(Lexer lexer, Class<? extends Parser> parserClass, Parser parser, InputStream is, Reader r) throws IOException, IllegalAccessException, InvocationTargetException, PrintException {
    try {
        ANTLRInputStream input = new ANTLRInputStream(r);
        lexer.setInputStream(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.fill();
        if ( showTokens ) {
            for (Object tok : tokens.getTokens()) {
                System.out.println(tok);
            }
        }
        // Lexer-only mode: nothing to parse.
        if ( startRuleName.equals(LEXER_START_RULE_NAME) ) return;
        if ( diagnostics ) {
            parser.addErrorListener(new DiagnosticErrorListener());
            parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
        }
        if ( printTree || gui || psFile!=null ) {
            parser.setBuildParseTree(true);
        }
        if ( SLL ) { // overrides diagnostics
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        }
        parser.setTokenStream(tokens);
        parser.setTrace(trace);
        try {
            // Invoke the user-named start rule reflectively (must be no-arg).
            Method startRule = parserClass.getMethod(startRuleName);
            ParserRuleContext tree = (ParserRuleContext)startRule.invoke(parser, (Object[])null);
            if ( printTree ) {
                System.out.println(tree.toStringTree(parser));
            }
            if ( gui ) {
                Trees.inspect(tree, parser);
            }
            if ( psFile!=null ) {
                Trees.save(tree, parser, psFile); // Generate postscript
            }
        }
        catch (NoSuchMethodException nsme) {
            System.err.println("No method for rule "+startRuleName+" or it has arguments");
        }
    }
    finally {
        if ( r!=null ) r.close();
        if ( is!=null ) is.close();
    }
}
Example #24
Source File: GraphQLUtil.java From js-graphql-intellij-plugin with MIT License | 4 votes |
/**
 * Parses GraphQL string input into a graphql-java Document, shifting the source locations in the specified document with the specified line delta.
 * Shifting of the sourceLocation is required for proper error reporting locations for GraphQL language injections, e.g. GraphQL in a JavaScript file.
 * @param input a GraphQL document represented as a string to be parsed
 * @param sourceName the file name of the source
 * @param lineDelta the delta line to apply to the document and all child nodes
 * @param firstLineColumnDelta the column delta for the first line
 */
public static Document parseDocument(String input, String sourceName, int lineDelta, int firstLineColumnDelta) {
    CharStream charStream;
    if(sourceName == null) {
        charStream = CharStreams.fromString(input);
    } else{
        charStream = CharStreams.fromString(input, sourceName);
    }
    GraphqlLexer lexer = new GraphqlLexer(charStream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    GraphqlParser parser = new GraphqlParser(tokens);
    parser.removeErrorListeners();
    // Fast, strict parse: SLL prediction and bail on the first error.
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.setErrorHandler(new BailErrorStrategy());
    GraphqlParser.DocumentContext documentContext = parser.document();
    MultiSourceReader multiSourceReader = MultiSourceReader.newMultiSourceReader()
        .string(input, sourceName)
        .trackData(true)
        .build();
    // Override location computation so every node's location is shifted by the deltas.
    GraphqlAntlrToLanguage antlrToLanguage = new GraphqlAntlrToLanguage(tokens, multiSourceReader) {
        @Override
        protected SourceLocation getSourceLocation(ParserRuleContext parserRuleContext) {
            return createSourceLocationFromDelta(parserRuleContext.getStart(), lineDelta, firstLineColumnDelta);
        }
        @Override
        protected SourceLocation getSourceLocation(Token token) {
            return createSourceLocationFromDelta(token, lineDelta, firstLineColumnDelta);
        }
    };
    Document doc = antlrToLanguage.createDocument(documentContext);
    Token stop = documentContext.getStop();
    List<Token> allTokens = tokens.getTokens();
    if (stop != null && allTokens != null && !allTokens.isEmpty()) {
        Token last = allTokens.get(allTokens.size() - 1);
        //
        // do we have more tokens in the stream than we consumed in the parse?
        // if yes then its invalid. We make sure its the same channel
        boolean notEOF = last.getType() != Token.EOF;
        boolean lastGreaterThanDocument = last.getTokenIndex() > stop.getTokenIndex();
        boolean sameChannel = last.getChannel() == stop.getChannel();
        if (notEOF && lastGreaterThanDocument && sameChannel) {
            throw new ParseCancellationException("There are more tokens in the query that have not been consumed");
        }
    }
    return doc;
}
Example #25
Source File: Parser.java From graql with GNU Affero General Public License v3.0 | 4 votes |
/**
 * Parses a Graql query string with a fast strict pass (BailErrorStrategy +
 * SLL); on failure, reparses with the slow diagnostic configuration
 * (DefaultErrorStrategy + LL_EXACT_AMBIG_DETECTION) purely to collect a
 * detailed error message, then throws.
 *
 * @param queryString  the query text; must be non-null and non-empty
 * @param parserMethod entry rule to invoke on the parser
 * @param visitor      converts the parse context into the result value
 * @return the visitor's result for a successfully parsed query
 * @throws GraqlException on empty input or any parse error
 */
private <CONTEXT extends ParserRuleContext, RETURN> RETURN parseQuery(
        String queryString, Function<GraqlParser, CONTEXT> parserMethod, Function<CONTEXT, RETURN> visitor
) {
    if (queryString == null || queryString.isEmpty()) {
        throw GraqlException.create("Query String is NULL or Empty");
    }
    ErrorListener errorListener = ErrorListener.of(queryString);
    CharStream charStream = CharStreams.fromString(queryString);
    GraqlLexer lexer = new GraqlLexer(charStream);
    lexer.removeErrorListeners();
    lexer.addErrorListener(errorListener);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    GraqlParser parser = new GraqlParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);
    // BailErrorStrategy + SLL is a very fast parsing strategy for queries
    // that are expected to be correct. However, it may not be able to
    // provide detailed/useful error message, if at all.
    parser.setErrorHandler(new BailErrorStrategy());
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    CONTEXT queryContext;
    try {
        queryContext = parserMethod.apply(parser);
    } catch (ParseCancellationException e) {
        // We parse the query one more time, with "strict strategy" :
        // DefaultErrorStrategy + LL_EXACT_AMBIG_DETECTION
        // This was not set to default parsing strategy, but it is useful
        // to produce detailed/useful error message
        parser.setErrorHandler(new DefaultErrorStrategy());
        parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
        queryContext = parserMethod.apply(parser);
        // Second pass populated errorListener with details; always fail here.
        throw GraqlException.create(errorListener.toString());
    }
    return visitor.apply(queryContext);
}
Example #26
Source File: ParsingUtils.java From intellij-plugin-v4 with BSD 3-Clause "New" or "Revised" License | 4 votes |
/**
 * Parses preview text with a PreviewParser over the given grammars, in
 * exact-ambiguity-detection mode with profiling enabled. Returns null when
 * grammars are missing/bad, the start rule is unknown, or no tree results.
 *
 * @param startIndex position to seek the token stream to before parsing
 * @return the parsing result, or {@code null} on any precondition failure
 */
public static ParsingResult parseText(Grammar g,
                                      LexerGrammar lg,
                                      String startRuleName,
                                      final VirtualFile grammarFile,
                                      SyntaxErrorListener syntaxErrorListener,
                                      TokenStream tokens,
                                      int startIndex) {
    if ( g==null || lg==null ) {
        ANTLRv4PluginController.LOG.info("parseText can't parse: missing lexer or parser no Grammar object for " +
                                         (grammarFile != null ? grammarFile.getName() : "<unknown file>"));
        return null;
    }
    String grammarFileName = g.fileName;
    if (!new File(grammarFileName).exists()) {
        ANTLRv4PluginController.LOG.info("parseText grammar doesn't exist "+grammarFileName);
        return null;
    }
    // Sentinel grammars indicate an earlier load failure; bail quietly.
    if ( g==BAD_PARSER_GRAMMAR || lg==BAD_LEXER_GRAMMAR ) {
        return null;
    }
    tokens.seek(startIndex);
    PreviewParser parser = new PreviewParser(g, tokens);
    // Exact ambiguity detection + profiling: expensive, but this is a preview/debug path.
    parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
    parser.setProfile(true);
    parser.removeErrorListeners();
    parser.addErrorListener(syntaxErrorListener);
    Rule start = g.getRule(startRuleName);
    if ( start==null ) {
        return null; // can't find start rule
    }
//	System.out.println("parse test ----------------------------");
    ParseTree t = parser.parse(start.index);
    if ( t!=null ) {
        return new ParsingResult(parser, t, syntaxErrorListener);
    }
    return null;
}
Example #27
Source File: SyntaxTests.java From Concurnas with MIT License | 3 votes |
/**
 * Test helper: parses {@code input} in SLL mode with captured errors,
 * fails fast on lexer/parser errors, builds the AST, and checks again for
 * errors raised during AST creation.
 *
 * @param filename name used for error reporting by the capturers
 * @return the root Block of the AST
 * @throws Exception if any lexer, parser, or AST-creation error was captured
 */
public Block runTest( CharStream input, String filename) throws Exception{
    LexParseErrorCapturer lexerErrors = new LexParseErrorCapturer(filename);
    LexParseErrorCapturer parserErrors = new LexParseErrorCapturer(filename);
    ConcurnasLexer lexer = new ConcurnasLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ConcurnasParser parser = new ConcurnasParser(tokens);
    // NOTE(review): a second CommonTokenStream over the same lexer replaces
    // `tokens` — verify this duplication is intended.
    parser.setInputStream(new CommonTokenStream(lexer));
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.removeErrorListeners(); // remove ConsoleErrorListener
    parser.addErrorListener(lexerErrors); // add ours
    lexer.removeErrorListeners(); // remove ConsoleErrorListener
    lexer.addErrorListener(parserErrors); // add ours
    ParseTree tree = parser.code();
    ArrayList<ErrorHolder> lexerErrorsAR = lexerErrors.errors;
    ArrayList<ErrorHolder> parserErrorsAR = parserErrors.errors;
    checkForErrors(lexerErrorsAR, parserErrorsAR);
    //no errors, create AST
    Block ret = (Block)new ASTCreator(filename, parserErrors).visit(tree);
    // Re-check: ASTCreator may have appended further errors while visiting.
    checkForErrors(lexerErrorsAR, parserErrorsAR);
    return ret;
}