1 package antlr; 2 3 9 10 13 import java.util.Enumeration ; 14 import java.util.Hashtable ; 15 import java.util.HashSet ; 16 import antlr.collections.impl.BitSet; 17 import antlr.collections.impl.Vector; 18 import java.io.PrintWriter ; import java.io.IOException ; 20 import java.io.FileWriter ; 21 22 23 public class CppCodeGenerator extends CodeGenerator { 24 protected int syntacticPredLevel = 0; 26 27 protected boolean genAST = false; 29 30 protected boolean saveText = false; 32 33 protected boolean genHashLines = true; 35 protected int outputLine; 37 protected String outputFile; 38 39 boolean usingCustomAST = false; 42 String labeledElementType; 43 String labeledElementASTType; String labeledElementASTInit; 45 String labeledElementInit; 46 String commonExtraArgs; 47 String commonExtraParams; 48 String commonLocalVars; 49 String lt1Value; 50 String exceptionThrown; 51 String throwNoViable; 52 53 RuleBlock currentRule; 55 String currentASTResult; 57 Hashtable treeVariableMap = new Hashtable (); 60 61 64 HashSet declaredASTVariables = new HashSet (); 65 66 int astVarNumber = 1; 68 protected static final String NONUNIQUE = new String (); 70 71 public static final int caseSizeThreshold = 127; 73 private Vector semPreds; 74 75 private Vector astTypes; 78 79 private static String namespaceStd = "ANTLR_USE_NAMESPACE(std)"; 80 private static String namespaceAntlr = "ANTLR_USE_NAMESPACE(antlr)"; 81 private static NameSpace nameSpace = null; 82 83 private static final String preIncludeCpp = "pre_include_cpp"; 84 private static final String preIncludeHpp = "pre_include_hpp"; 85 private static final String postIncludeCpp = "post_include_cpp"; 86 private static final String postIncludeHpp = "post_include_hpp"; 87 88 92 public CppCodeGenerator() { 93 super(); 94 charFormatter = new CppCharFormatter(); 95 } 96 101 protected int addSemPred(String predicate) { 102 semPreds.appendElement(predicate); 103 return semPreds.size()-1; 104 } 105 public void exitIfError() 106 { 107 if (antlrTool.hasError()) 108 { 109 System.out.println("Exiting due to errors."); 110 System.exit(1); 111 } 112 } 113 protected int countLines( String s ) 114 { 115 int lines = 0; 116 for( int i = 0; i < s.length(); i++ ) 117 { 118 if( s.charAt(i) == '\n' ) 119 lines++; 120 } 121 return lines; 122 } 123 127 protected void _print(String s) 128 { 129 if (s != null) 130 { 131 outputLine += countLines(s); 132 currentOutput.print(s); 133 } 134 } 135 140 protected void _printAction(String s) 141 { 142 if (s != null) 143 { 144 outputLine += countLines(s)+1; 145 super._printAction(s); 146 } 147 } 148 149 public void printAction(Token t) 150 { 151 if (t != null) 152 { 153 genLineNo(t.getLine()); 154 printTabs(); 155 _printAction(processActionForTreeSpecifiers(t.getText(), t.getLine(), 156 null, null) ); 157 genLineNo2(); 158 } 159 } 160 163 public void printHeaderAction(String name) 164 { 165 Token a = (antlr.Token)behavior.headerActions.get(name); 166 if (a != null) 167 { 168 genLineNo(a.getLine()); 169 println(processActionForTreeSpecifiers(a.getText(), a.getLine(), 170 null, null) ); 171 genLineNo2(); 172 } 173 } 174 178 protected void _println(String s) { 179 if (s != null) { 180 outputLine += countLines(s)+1; 181 currentOutput.println(s); 182 } 183 } 184 188 protected void println(String s) { 189 if (s != null) { 190 printTabs(); 191 outputLine += countLines(s)+1; 192 currentOutput.println(s); 193 } 194 } 195 196 197 public void genLineNo(int line) { 198 if ( line == 0 ) { 199 line++; 200 } 201 if( genHashLines ) 202 _println("#line 
"+line+" \""+antlrTool.fileMinusPath(antlrTool.grammarFile)+"\""); 203 } 204 205 206 public void genLineNo(GrammarElement el) 207 { 208 if( el != null ) 209 genLineNo(el.getLine()); 210 } 211 212 public void genLineNo(Token t) 213 { 214 if (t != null) 215 genLineNo(t.getLine()); 216 } 217 218 public void genLineNo2() 219 { 220 if( genHashLines ) 221 { 222 _println("#line "+(outputLine+1)+" \""+outputFile+"\""); 223 } 224 } 225 226 public void gen() { 227 try { 229 Enumeration grammarIter = behavior.grammars.elements(); 231 while (grammarIter.hasMoreElements()) { 232 Grammar g = (Grammar)grammarIter.nextElement(); 233 g.setGrammarAnalyzer(analyzer); 235 g.setCodeGenerator(this); 236 analyzer.setGrammar(g); 237 setupGrammarParameters(g); 239 g.generate(); 240 exitIfError(); 241 } 242 243 Enumeration tmIter = behavior.tokenManagers.elements(); 245 while (tmIter.hasMoreElements()) { 246 TokenManager tm = (TokenManager)tmIter.nextElement(); 247 if (!tm.isReadOnly()) { 248 genTokenTypes(tm); 252 genTokenInterchange(tm); 254 } 255 exitIfError(); 256 } 257 } 258 catch (IOException e) { 259 System.out.println(e.getMessage()); 260 } 261 } 262 265 public void gen(ActionElement action) { 266 if ( DEBUG_CODE_GENERATOR ) System.out.println("genAction("+action+")"); 267 if ( action.isSemPred ) { 268 genSemPred(action.actionText, action.line); 269 } 270 else { 271 if ( grammar.hasSyntacticPredicate ) { 272 println("if ( inputState->guessing==0 ) {"); 273 tabs++; 274 } 275 276 ActionTransInfo tInfo = new ActionTransInfo(); 277 String actionStr = processActionForTreeSpecifiers(action.actionText, 278 action.getLine(), 279 currentRule, tInfo); 280 281 if ( tInfo.refRuleRoot!=null ) { 282 println(tInfo.refRuleRoot + " = "+labeledElementASTType+"(currentAST.root);"); 287 } 288 289 genLineNo(action); 291 printAction(actionStr); 292 genLineNo2(); 293 294 if ( tInfo.assignToRoot ) { 295 println("currentAST.root = "+tInfo.refRuleRoot+";"); 297 println("if ( "+tInfo.refRuleRoot+"!="+labeledElementASTInit+" &&"); 300 tabs++; 301 println(tInfo.refRuleRoot+"->getFirstChild() != "+labeledElementASTInit+" )"); 302 println(" currentAST.child = "+tInfo.refRuleRoot+"->getFirstChild();"); 303 tabs--; 304 println("else"); 305 tabs++; 306 println("currentAST.child = "+tInfo.refRuleRoot+";"); 307 tabs--; 308 println("currentAST.advanceChildToEnd();"); 309 } 310 311 if ( grammar.hasSyntacticPredicate ) { 312 tabs--; 313 println("}"); 314 } 315 } 316 } 317 318 321 public void gen(AlternativeBlock blk) { 322 if ( DEBUG_CODE_GENERATOR ) System.out.println("gen("+blk+")"); 323 println("{"); 324 genBlockPreamble(blk); 325 genBlockInitAction(blk); 326 327 String saveCurrentASTResult = currentASTResult; 329 if (blk.getLabel() != null) { 330 currentASTResult = blk.getLabel(); 331 } 332 333 boolean ok = grammar.theLLkAnalyzer.deterministic(blk); 334 335 CppBlockFinishingInfo howToFinish = genCommonBlock(blk, true); 336 genBlockFinish(howToFinish, throwNoViable); 337 338 println("}"); 339 340 currentASTResult = saveCurrentASTResult; 342 } 343 348 public void gen(BlockEndElement end) { 349 if ( DEBUG_CODE_GENERATOR ) System.out.println("genRuleEnd("+end+")"); 350 } 351 354 public void gen(CharLiteralElement atom) { 355 if ( DEBUG_CODE_GENERATOR ) System.out.println("genChar("+atom+")"); 356 357 if ( atom.getLabel()!=null ) { 358 println(atom.getLabel() + " = " + lt1Value + ";"); 359 } 360 361 boolean oldsaveText = saveText; 362 saveText = saveText && atom.getAutoGenType()==GrammarElement.AUTO_GEN_NONE; 363 genMatch(atom); 364 saveText = 
oldsaveText; 365 } 366 369 public void gen(CharRangeElement r) { 370 if ( r.getLabel()!=null && syntacticPredLevel == 0) { 371 println(r.getLabel() + " = " + lt1Value + ";"); 372 } 373 boolean save = ( grammar instanceof LexerGrammar && 375 ( !saveText || 376 r.getAutoGenType() == GrammarElement.AUTO_GEN_BANG ) 377 ); 378 if (save) 379 println("_saveIndex=text.length();"); 380 381 println("matchRange("+textOrChar(r.beginText)+","+textOrChar(r.endText)+");"); 382 383 if (save) 384 println("text.setLength(_saveIndex);"); 385 } 386 387 public void gen(LexerGrammar g) throws IOException { 388 if (g.debuggingOutput) 390 semPreds = new Vector(); 391 392 setGrammar(g); 393 if (!(grammar instanceof LexerGrammar)) { 394 antlrTool.panic("Internal error generating lexer"); 395 } 396 397 genBody(g); 398 genInclude(g); 399 } 400 403 public void gen(OneOrMoreBlock blk) { 404 if ( DEBUG_CODE_GENERATOR ) System.out.println("gen+("+blk+")"); 405 String label; 406 String cnt; 407 println("{ // ( ... )+"); 408 genBlockPreamble(blk); 409 if ( blk.getLabel() != null ) { 410 cnt = "_cnt_"+blk.getLabel(); 411 } 412 else { 413 cnt = "_cnt" + blk.ID; 414 } 415 println("int "+cnt+"=0;"); 416 if ( blk.getLabel() != null ) { 417 label = blk.getLabel(); 418 } 419 else { 420 label = "_loop" + blk.ID; 421 } 422 423 println("for (;;) {"); 424 tabs++; 425 genBlockInitAction(blk); 428 429 String saveCurrentASTResult = currentASTResult; 431 if (blk.getLabel() != null) { 432 currentASTResult = blk.getLabel(); 433 } 434 435 boolean ok = grammar.theLLkAnalyzer.deterministic(blk); 436 437 boolean generateNonGreedyExitPath = false; 448 int nonGreedyExitDepth = grammar.maxk; 449 450 if ( !blk.greedy && 451 blk.exitLookaheadDepth<=grammar.maxk && 452 blk.exitCache[blk.exitLookaheadDepth].containsEpsilon() ) 453 { 454 generateNonGreedyExitPath = true; 455 nonGreedyExitDepth = blk.exitLookaheadDepth; 456 } 457 else if ( !blk.greedy && 458 blk.exitLookaheadDepth==LLkGrammarAnalyzer.NONDETERMINISTIC ) 459 { 460 generateNonGreedyExitPath = true; 461 } 462 463 if ( generateNonGreedyExitPath ) { 466 if ( DEBUG_CODE_GENERATOR ) { 467 System.out.println("nongreedy (...)+ loop; exit depth is "+ 468 blk.exitLookaheadDepth); 469 } 470 String predictExit = 471 getLookaheadTestExpression(blk.exitCache, 472 nonGreedyExitDepth); 473 println("// nongreedy exit test"); 474 println("if ( "+cnt+">=1 && "+predictExit+") goto "+label+";"); 475 } 476 477 CppBlockFinishingInfo howToFinish = genCommonBlock(blk, false); 478 genBlockFinish( 479 howToFinish, 480 "if ( "+cnt+">=1 ) { goto "+label+"; } else {" + throwNoViable + "}" 481 ); 482 483 println(cnt+"++;"); 484 tabs--; 485 println("}"); 486 println(label+":;"); 487 println("} // ( ... 
)+"); 488 489 currentASTResult = saveCurrentASTResult; 491 } 492 493 public void gen(ParserGrammar g) throws IOException { 494 495 if (g.debuggingOutput) 498 semPreds = new Vector(); 499 500 setGrammar(g); 501 if (!(grammar instanceof ParserGrammar)) { 502 antlrTool.panic("Internal error generating parser"); 503 } 504 505 genBody(g); 506 genInclude(g); 507 } 508 511 public void gen(RuleRefElement rr) 512 { 513 if ( DEBUG_CODE_GENERATOR ) System.out.println("genRR("+rr+")"); 514 RuleSymbol rs = (RuleSymbol)grammar.getSymbol(rr.targetRule); 515 if (rs == null || !rs.isDefined()) 516 { 517 antlrTool.error("Rule '" + rr.targetRule + "' is not defined", grammar.getFilename(), rr.getLine(), rr.getColumn()); 519 return; 520 } 521 if (!(rs instanceof RuleSymbol)) 522 { 523 antlrTool.error("'" + rr.targetRule + "' does not name a grammar rule", grammar.getFilename(), rr.getLine(), rr.getColumn()); 525 return; 526 } 527 528 genErrorTryForElement(rr); 529 530 if ( grammar instanceof TreeWalkerGrammar && 533 rr.getLabel() != null && 534 syntacticPredLevel == 0 ) 535 { 536 println(rr.getLabel() + " = (_t == ASTNULL) ? "+labeledElementASTInit+" : "+lt1Value+";"); 537 } 538 539 if ( grammar instanceof LexerGrammar && (!saveText||rr.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) 542 { 543 println("_saveIndex = text.length();"); 544 } 545 546 printTabs(); 548 if (rr.idAssign != null) 549 { 550 if (rs.block.returnAction == null) 552 { 553 antlrTool.warning("Rule '" + rr.targetRule + "' has no return type", grammar.getFilename(), rr.getLine(), rr.getColumn()); 554 } 555 _print(rr.idAssign + "="); 556 } else { 557 if ( !(grammar instanceof LexerGrammar) && syntacticPredLevel == 0 && rs.block.returnAction != null) 559 { 560 antlrTool.warning("Rule '" + rr.targetRule + "' returns a value", grammar.getFilename(), rr.getLine(), rr.getColumn()); 561 } 562 } 563 564 GenRuleInvocation(rr); 566 567 if ( grammar instanceof LexerGrammar && (!saveText||rr.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) { 569 println("text.erase(_saveIndex);"); 570 } 571 572 if (syntacticPredLevel == 0) 574 { 575 boolean doNoGuessTest = ( 576 grammar.hasSyntacticPredicate && 577 ( 578 grammar.buildAST && rr.getLabel() != null || 579 (genAST && rr.getAutoGenType() == GrammarElement.AUTO_GEN_NONE) 580 ) 581 ); 582 583 if (doNoGuessTest) { 584 println("if (inputState->guessing==0) {"); 585 tabs++; 586 } 587 588 if (grammar.buildAST && rr.getLabel() != null) 589 { 590 println(rr.getLabel() + "_AST = returnAST;"); 592 } 593 594 if (genAST) 595 { 596 switch (rr.getAutoGenType()) 597 { 598 case GrammarElement.AUTO_GEN_NONE: 599 if( usingCustomAST ) 600 println("astFactory->addASTChild(currentAST, static_cast<"+namespaceAntlr+"RefAST>(returnAST));"); 601 else 602 println("astFactory->addASTChild( currentAST, returnAST );"); 603 break; 604 case GrammarElement.AUTO_GEN_CARET: 605 antlrTool.error("Internal: encountered ^ after rule reference"); 608 break; 609 default: 610 break; 611 } 612 } 613 614 if ( grammar instanceof LexerGrammar && rr.getLabel() != null ) 616 { 617 println(rr.getLabel()+"=_returnToken;"); 618 } 619 620 if (doNoGuessTest) 621 { 622 tabs--; 623 println("}"); 624 } 625 } 626 genErrorCatchForElement(rr); 627 } 628 631 public void gen(StringLiteralElement atom) { 632 if ( DEBUG_CODE_GENERATOR ) System.out.println("genString("+atom+")"); 633 634 if (atom.getLabel()!=null && syntacticPredLevel == 0) { 636 println(atom.getLabel() + " = " + lt1Value + ";"); 637 } 638 639 genElementAST(atom); 641 642 boolean oldsaveText = 
saveText; 644 saveText = saveText && atom.getAutoGenType()==GrammarElement.AUTO_GEN_NONE; 645 646 genMatch(atom); 648 649 saveText = oldsaveText; 650 651 if (grammar instanceof TreeWalkerGrammar) { 653 println("_t = _t->getNextSibling();"); 654 } 655 } 656 659 public void gen(TokenRangeElement r) { 660 genErrorTryForElement(r); 661 if ( r.getLabel()!=null && syntacticPredLevel == 0) { 662 println(r.getLabel() + " = " + lt1Value + ";"); 663 } 664 665 genElementAST(r); 667 668 println("matchRange("+r.beginText+","+r.endText+");"); 670 genErrorCatchForElement(r); 671 } 672 675 public void gen(TokenRefElement atom) { 676 if ( DEBUG_CODE_GENERATOR ) System.out.println("genTokenRef("+atom+")"); 677 if ( grammar instanceof LexerGrammar ) { 678 antlrTool.panic("Token reference found in lexer"); 679 } 680 genErrorTryForElement(atom); 681 if ( atom.getLabel()!=null && syntacticPredLevel == 0) { 683 println(atom.getLabel() + " = " + lt1Value + ";"); 684 } 685 686 genElementAST(atom); 688 genMatch(atom); 690 genErrorCatchForElement(atom); 691 692 if (grammar instanceof TreeWalkerGrammar) { 694 println("_t = _t->getNextSibling();"); 695 } 696 } 697 public void gen(TreeElement t) { 698 println(labeledElementType+" __t" + t.ID + " = _t;"); 700 701 if (t.root.getLabel() != null) { 703 println(t.root.getLabel() + " = (_t == ASTNULL) ? "+labeledElementASTInit+" : _t;"); 704 } 705 706 if ( t.root.getAutoGenType() == GrammarElement.AUTO_GEN_BANG ) { 708 antlrTool.error("Suffixing a root node with '!' is not implemented", 709 grammar.getFilename(), t.getLine(), t.getColumn()); 710 t.root.setAutoGenType(GrammarElement.AUTO_GEN_NONE); 711 } 712 if ( t.root.getAutoGenType() == GrammarElement.AUTO_GEN_CARET ) { 713 antlrTool.warning("Suffixing a root node with '^' is redundant; already a root", 714 grammar.getFilename(), t.getLine(), t.getColumn()); 715 t.root.setAutoGenType(GrammarElement.AUTO_GEN_NONE); 716 } 717 718 genElementAST(t.root); 720 if (grammar.buildAST) { 721 println(namespaceAntlr+"ASTPair __currentAST" + t.ID + " = currentAST;"); 723 println("currentAST.root = currentAST.child;"); 725 println("currentAST.child = "+labeledElementASTInit+";"); 726 } 727 728 if ( t.root instanceof WildcardElement ) { 730 println("if ( _t == ASTNULL ) throw MismatchedTokenException();"); 731 } 732 else { 733 genMatch(t.root); 734 } 735 println("_t = _t->getFirstChild();"); 737 738 for (int i=0; i<t.getAlternatives().size(); i++) { 740 Alternative a = t.getAlternativeAt(i); 741 AlternativeElement e = a.head; 742 while ( e != null ) { 743 e.generate(); 744 e = e.next; 745 } 746 } 747 748 if (grammar.buildAST) { 749 println("currentAST = __currentAST" + t.ID + ";"); 752 } 753 println("_t = __t" + t.ID + ";"); 755 println("_t = _t->getNextSibling();"); 757 } 758 759 public void gen(TreeWalkerGrammar g) throws IOException { 760 setGrammar(g); 761 if (!(grammar instanceof TreeWalkerGrammar)) { 762 antlrTool.panic("Internal error generating tree-walker"); 763 } 764 765 genBody(g); 766 genInclude(g); 767 } 768 771 public void gen(WildcardElement wc) { 772 if (wc.getLabel()!=null && syntacticPredLevel == 0) { 774 println(wc.getLabel() + " = " + lt1Value + ";"); 775 } 776 777 genElementAST(wc); 779 if (grammar instanceof TreeWalkerGrammar) { 781 println("if ( _t == "+labeledElementASTInit+" ) throw "+namespaceAntlr+"MismatchedTokenException();"); 782 } 783 else if (grammar instanceof LexerGrammar) { 784 if ( grammar instanceof LexerGrammar && 785 (!saveText||wc.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) { 786 
println("_saveIndex = text.length();"); 787 } 788 println("matchNot(EOF/*_CHAR*/);"); 789 if ( grammar instanceof LexerGrammar && 790 (!saveText||wc.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) { 791 println("text.erase(_saveIndex);"); } 793 } 794 else { 795 println("matchNot(" + getValueString(Token.EOF_TYPE) + ");"); 796 } 797 798 if (grammar instanceof TreeWalkerGrammar) { 800 println("_t = _t->getNextSibling();"); 801 } 802 } 803 806 public void gen(ZeroOrMoreBlock blk) { 807 if ( DEBUG_CODE_GENERATOR ) System.out.println("gen*("+blk+")"); 808 println("{ // ( ... )*"); 809 genBlockPreamble(blk); 810 String label; 811 if ( blk.getLabel() != null ) { 812 label = blk.getLabel(); 813 } 814 else { 815 label = "_loop" + blk.ID; 816 } 817 println("for (;;) {"); 818 tabs++; 819 genBlockInitAction(blk); 822 823 String saveCurrentASTResult = currentASTResult; 825 if (blk.getLabel() != null) { 826 currentASTResult = blk.getLabel(); 827 } 828 829 boolean ok = grammar.theLLkAnalyzer.deterministic(blk); 830 831 boolean generateNonGreedyExitPath = false; 842 int nonGreedyExitDepth = grammar.maxk; 843 844 if ( !blk.greedy && 845 blk.exitLookaheadDepth<=grammar.maxk && 846 blk.exitCache[blk.exitLookaheadDepth].containsEpsilon() ) 847 { 848 generateNonGreedyExitPath = true; 849 nonGreedyExitDepth = blk.exitLookaheadDepth; 850 } 851 else if ( !blk.greedy && 852 blk.exitLookaheadDepth==LLkGrammarAnalyzer.NONDETERMINISTIC ) 853 { 854 generateNonGreedyExitPath = true; 855 } 856 if ( generateNonGreedyExitPath ) { 857 if ( DEBUG_CODE_GENERATOR ) { 858 System.out.println("nongreedy (...)* loop; exit depth is "+ 859 blk.exitLookaheadDepth); 860 } 861 String predictExit = 862 getLookaheadTestExpression(blk.exitCache, 863 nonGreedyExitDepth); 864 println("// nongreedy exit test"); 865 println("if ("+predictExit+") goto "+label+";"); 866 } 867 868 CppBlockFinishingInfo howToFinish = genCommonBlock(blk, false); 869 genBlockFinish(howToFinish, "goto " + label + ";"); 870 871 tabs--; 872 println("}"); 873 println(label+":;"); 874 println("} // ( ... 
)*"); 875 876 currentASTResult = saveCurrentASTResult; 878 } 879 883 protected void genAlt(Alternative alt, AlternativeBlock blk) 884 { 885 boolean savegenAST = genAST; 887 genAST = genAST && alt.getAutoGen(); 888 889 boolean oldsaveTest = saveText; 890 saveText = saveText && alt.getAutoGen(); 891 892 Hashtable saveMap = treeVariableMap; 894 treeVariableMap = new Hashtable (); 895 896 if (alt.exceptionSpec != null) { 898 println("try { // for error handling"); 899 tabs++; 900 } 901 902 AlternativeElement elem = alt.head; 903 while ( !(elem instanceof BlockEndElement) ) { 904 elem.generate(); elem = elem.next; 906 } 907 908 if ( genAST) 909 { 910 if (blk instanceof RuleBlock) 911 { 912 RuleBlock rblk = (RuleBlock)blk; 914 if( usingCustomAST ) 915 println(rblk.getRuleName() + "_AST = static_cast<"+labeledElementASTType+">(currentAST.root);"); 916 else 917 println(rblk.getRuleName() + "_AST = currentAST.root;"); 918 } 919 else if (blk.getLabel() != null) { 920 antlrTool.warning("Labeled subrules are not implemented", grammar.getFilename(), blk.getLine(), blk.getColumn()); 923 } 924 } 925 926 if (alt.exceptionSpec != null) 927 { 928 tabs--; 930 println("}"); 931 genErrorHandler(alt.exceptionSpec); 932 } 933 934 genAST = savegenAST; 935 saveText = oldsaveTest; 936 937 treeVariableMap = saveMap; 938 } 939 951 protected void genBitsets( 952 Vector bitsetList, 953 int maxVocabulary, 954 String prefix 955 ) 956 { 957 TokenManager tm = grammar.tokenManager; 958 959 println(""); 960 961 for (int i = 0; i < bitsetList.size(); i++) 962 { 963 BitSet p = (BitSet)bitsetList.elementAt(i); 964 p.growToInclude(maxVocabulary); 966 967 println( 969 "const unsigned long " + prefix + getBitsetName(i) + "_data_" + "[] = { " + 970 p.toStringOfHalfWords() + 971 " };" 972 ); 973 974 String t = "// "; 976 for( int j = 0; j < tm.getVocabulary().size(); j++ ) 977 { 978 if ( p.member( j ) ) 979 { 980 if ( (grammar instanceof LexerGrammar) ) 981 t += tm.getVocabulary().elementAt(j)+" "; 982 else 983 t += tm.getTokenStringAt(j)+" "; 984 985 if( t.length() > 70 ) 986 { 987 println(t); 988 t = "// "; 989 } 990 } 991 } 992 if ( t != "// " ) 993 println(t); 994 995 println( 997 "const "+namespaceAntlr+"BitSet " + prefix + getBitsetName(i) + "(" + 998 getBitsetName(i) + "_data_," + p.size()/32 + 999 ");" 1000 ); 1001 } 1002 } 1003 protected void genBitsetsHeader( 1004 Vector bitsetList, 1005 int maxVocabulary 1006 ) { 1007 println(""); 1008 for (int i = 0; i < bitsetList.size(); i++) 1009 { 1010 BitSet p = (BitSet)bitsetList.elementAt(i); 1011 p.growToInclude(maxVocabulary); 1013 println("static const unsigned long " + getBitsetName(i) + "_data_" + "[];"); 1015 println("static const "+namespaceAntlr+"BitSet " + getBitsetName(i) + ";"); 1017 } 1018 } 1019 1025 private void genBlockFinish(CppBlockFinishingInfo howToFinish, String noViableAction) 1026 { 1027 if (howToFinish.needAnErrorClause && 1028 (howToFinish.generatedAnIf || howToFinish.generatedSwitch)) { 1029 if ( howToFinish.generatedAnIf ) { 1030 println("else {"); 1031 } 1032 else { 1033 println("{"); 1034 } 1035 tabs++; 1036 println(noViableAction); 1037 tabs--; 1038 println("}"); 1039 } 1040 1041 if ( howToFinish.postscript!=null ) { 1042 println(howToFinish.postscript); 1043 } 1044 } 1045 1049 protected void genBlockInitAction( AlternativeBlock blk ) 1050 { 1051 if ( blk.initAction!=null ) { 1053 genLineNo(blk); 1054 printAction(processActionForTreeSpecifiers(blk.initAction, blk.line, 1055 currentRule, null) ); 1056 genLineNo2(); 1057 } 1058 } 1059 1064 protected void 
genBlockPreamble(AlternativeBlock blk) { 1065 if ( blk instanceof RuleBlock ) { 1067 RuleBlock rblk = (RuleBlock)blk; 1068 if ( rblk.labeledElements!=null ) { 1069 for (int i=0; i<rblk.labeledElements.size(); i++) { 1070 1071 AlternativeElement a = (AlternativeElement)rblk.labeledElements.elementAt(i); 1072 if ( 1078 a instanceof RuleRefElement || 1079 a instanceof AlternativeBlock && 1080 !(a instanceof RuleBlock) && 1081 !(a instanceof SynPredBlock) 1082 ) { 1083 1084 if ( 1085 !(a instanceof RuleRefElement) && 1086 ((AlternativeBlock)a).not && 1087 analyzer.subruleCanBeInverted(((AlternativeBlock)a), grammar instanceof LexerGrammar) 1088 ) { 1089 println(labeledElementType + " " + a.getLabel() + " = " + labeledElementInit + ";"); 1093 if (grammar.buildAST) { 1094 genASTDeclaration( a ); 1095 } 1096 } 1097 else { 1098 if (grammar.buildAST) { 1099 genASTDeclaration( a ); 1102 } 1103 if ( grammar instanceof LexerGrammar ) { 1104 println(namespaceAntlr+"RefToken "+a.getLabel()+";"); 1105 } 1106 if (grammar instanceof TreeWalkerGrammar) { 1107 println(labeledElementType + " " + a.getLabel() + " = " + labeledElementInit + ";"); 1109 } 1110 } 1111 } 1112 else { 1113 println(labeledElementType + " " + a.getLabel() + " = " + labeledElementInit + ";"); 1116 if (grammar.buildAST) { 1118 if (a instanceof GrammarAtom && 1123 ((GrammarAtom)a).getASTNodeType()!=null ) { 1124 GrammarAtom ga = (GrammarAtom)a; 1125 genASTDeclaration( a, ga.getASTNodeType() ); 1126 } 1127 else { 1128 genASTDeclaration( a ); 1129 } 1130 } 1131 } 1132 } 1133 } 1134 } 1135 } 1136 public void genBody(LexerGrammar g) throws IOException  1137 { 1138 outputFile = grammar.getClassName() + ".cpp"; 1139 outputLine = 1; 1140 currentOutput = antlrTool.openOutputFile(outputFile); 1141 1143 genAST = false; saveText = true; 1146 tabs=0; 1147 1148 genHeader(outputFile); 1150 1151 printHeaderAction(preIncludeCpp); 1152 println("#include \"" + grammar.getClassName() + ".hpp\""); 1154 println("#include <antlr/CharBuffer.hpp>"); 1155 println("#include <antlr/TokenStreamException.hpp>"); 1156 println("#include <antlr/TokenStreamIOException.hpp>"); 1157 println("#include <antlr/TokenStreamRecognitionException.hpp>"); 1158 println("#include <antlr/CharStreamException.hpp>"); 1159 println("#include <antlr/CharStreamIOException.hpp>"); 1160 println("#include <antlr/NoViableAltForCharException.hpp>"); 1161 if (grammar.debuggingOutput) 1162 println("#include <antlr/DebuggingInputBuffer.hpp>"); 1163 println(""); 1164 printHeaderAction(postIncludeCpp); 1165 1166 if (nameSpace != null) 1167 nameSpace.emitDeclarations(currentOutput); 1168 1169 printAction(grammar.preambleAction); 1171 1172 String sup=null; 1174 if ( grammar.superClass!=null ) { 1175 sup = grammar.superClass; 1176 } 1177 else { 1178 sup = grammar.getSuperClass(); 1179 if (sup.lastIndexOf('.') != -1) 1180 sup = sup.substring(sup.lastIndexOf('.')+1); 1181 sup = namespaceAntlr + sup; 1182 } 1183 1184 println(grammar.getClassName() + "::" + grammar.getClassName() + "(" + namespaceStd + "istream& in)"); 1188 tabs++; 1189 if (grammar.debuggingOutput) 1191 println(": " + sup + "(new "+namespaceAntlr+"DebuggingInputBuffer(new "+namespaceAntlr+"CharBuffer(in)))"); 1192 else 1193 println(": " + sup + "(new "+namespaceAntlr+"CharBuffer(in))"); 1194 tabs--; 1195 println("{"); 1196 tabs++; 1197 1198 if ( grammar.debuggingOutput ) { 1201 println("setRuleNames(_ruleNames);"); 1202 println("setSemPredNames(_semPredNames);"); 1203 println("setupDebugging();"); 1204 } 1205 1206 
println("setCaseSensitive("+g.caseSensitive+");"); 1207 println("initLiterals();"); 1208 tabs--; 1209 println("}"); 1210 println(""); 1211 1212 println(grammar.getClassName() + "::" + grammar.getClassName() + "("+namespaceAntlr+"InputBuffer& ib)"); 1214 tabs++; 1215 if (grammar.debuggingOutput) 1217 println(": " + sup + "(new "+namespaceAntlr+"DebuggingInputBuffer(ib))"); 1218 else 1219 println(": " + sup + "(ib)"); 1220 tabs--; 1221 println("{"); 1222 tabs++; 1223 1224 if ( grammar.debuggingOutput ) { 1227 println("setRuleNames(_ruleNames);"); 1228 println("setSemPredNames(_semPredNames);"); 1229 println("setupDebugging();"); 1230 } 1231 1232 println("setCaseSensitive("+g.caseSensitive+");"); 1233 println("initLiterals();"); 1234 tabs--; 1235 println("}"); 1236 println(""); 1237 1238 println(grammar.getClassName() + "::" + grammar.getClassName() + "(const "+namespaceAntlr+"LexerSharedInputState& state)"); 1240 tabs++; 1241 println(": " + sup + "(state)"); 1242 tabs--; 1243 println("{"); 1244 tabs++; 1245 1246 if ( grammar.debuggingOutput ) { 1249 println("setRuleNames(_ruleNames);"); 1250 println("setSemPredNames(_semPredNames);"); 1251 println("setupDebugging();"); 1252 } 1253 1254 println("setCaseSensitive("+g.caseSensitive+");"); 1255 println("initLiterals();"); 1256 tabs--; 1257 println("}"); 1258 println(""); 1259 1260 println("void " + grammar.getClassName() + "::initLiterals()"); 1261 println("{"); 1262 tabs++; 1263 1279 Enumeration keys = grammar.tokenManager.getTokenSymbolKeys(); 1281 while ( keys.hasMoreElements() ) { 1282 String key = (String )keys.nextElement(); 1283 if ( key.charAt(0) != '"' ) { 1284 continue; 1285 } 1286 TokenSymbol sym = grammar.tokenManager.getTokenSymbol(key); 1287 if ( sym instanceof StringLiteralSymbol ) { 1288 StringLiteralSymbol s = (StringLiteralSymbol)sym; 1289 println("literals["+s.getId()+"] = "+s.getTokenType()+";"); 1290 } 1291 } 1292 1293 tabs--; 1295 println("}"); 1296 1297 println("bool " + grammar.getClassName() + "::getCaseSensitiveLiterals() const"); 1299 println("{"); 1300 tabs++; 1301 println("return "+g.caseSensitiveLiterals + ";"); 1302 tabs--; 1303 println("}"); 1304 1305 Enumeration ids; 1306 if (grammar.debuggingOutput) { 1308 println("const char* "+grammar.getClassName()+"::_ruleNames[] = {"); 1309 tabs++; 1310 1311 ids = grammar.rules.elements(); 1312 int ruleNum=0; 1313 while ( ids.hasMoreElements() ) { 1314 GrammarSymbol sym = (GrammarSymbol) ids.nextElement(); 1315 if ( sym instanceof RuleSymbol) 1316 println("\""+((RuleSymbol)sym).getId()+"\","); 1317 } 1318 println("0"); 1319 tabs--; 1320 println("};"); 1321 } 1322 1323 genNextToken(); 1327 1328 ids = grammar.rules.elements(); 1330 int ruleNum=0; 1331 while ( ids.hasMoreElements() ) { 1332 RuleSymbol sym = (RuleSymbol) ids.nextElement(); 1333 if (!sym.getId().equals("mnextToken")) { 1335 genRule(sym, false, ruleNum++, grammar.getClassName() + "::"); 1336 } 1337 exitIfError(); 1338 } 1339 1340 if (grammar.debuggingOutput) 1342 genSemPredMap(grammar.getClassName() + "::"); 1343 1344 genBitsets(bitsetsUsed, ((LexerGrammar)grammar).charVocabulary.size(), grammar.getClassName() + "::" ); 1346 1347 println(""); 1348 if (nameSpace != null) 1349 nameSpace.emitClosures(currentOutput); 1350 1351 currentOutput.close(); 1353 currentOutput = null; 1354 } 1355 public void genBody(ParserGrammar g) throws IOException  1356 { 1357 outputFile = grammar.getClassName() + ".cpp"; 1359 outputLine = 1; 1360 currentOutput = antlrTool.openOutputFile(outputFile); 1361 1362 genAST = grammar.buildAST; 
1363 1364 tabs = 0; 1365 1366 genHeader(outputFile); 1368 1369 printHeaderAction(preIncludeCpp); 1370 1371 println("#include \"" + grammar.getClassName() + ".hpp\""); 1373 println("#include <antlr/NoViableAltException.hpp>"); 1374 println("#include <antlr/SemanticException.hpp>"); 1375 println("#include <antlr/ASTFactory.hpp>"); 1376 1377 printHeaderAction(postIncludeCpp); 1378 1379 if (nameSpace != null) 1380 nameSpace.emitDeclarations(currentOutput); 1381 1382 printAction(grammar.preambleAction); 1384 1385 String sup=null; 1386 if ( grammar.superClass!=null ) 1387 sup = grammar.superClass; 1388 else { 1389 sup = grammar.getSuperClass(); 1390 if (sup.lastIndexOf('.') != -1) 1391 sup = sup.substring(sup.lastIndexOf('.')+1); 1392 sup = namespaceAntlr + sup; 1393 } 1394 1395 if (grammar.debuggingOutput) { 1398 println("const char* "+grammar.getClassName()+"::_ruleNames[] = {"); 1399 tabs++; 1400 1401 Enumeration ids = grammar.rules.elements(); 1402 int ruleNum=0; 1403 while ( ids.hasMoreElements() ) { 1404 GrammarSymbol sym = (GrammarSymbol) ids.nextElement(); 1405 if ( sym instanceof RuleSymbol) 1406 println("\""+((RuleSymbol)sym).getId()+"\","); 1407 } 1408 println("0"); 1409 tabs--; 1410 println("};"); 1411 } 1412 1413 println("void " + grammar.getClassName() + "::_initialize(void)"); 1415 println("{"); 1416 tabs++; 1417 1418 1420 if( grammar.buildAST ) 1421 println("initializeFactory();"); 1422 1423 if ( grammar.debuggingOutput ) { 1426 println("setRuleNames(_ruleNames);"); 1427 println("setSemPredNames(_semPredNames);"); 1428 println("setupDebugging();"); 1429 } 1430 tabs--; 1431 println("}"); 1432 1433 print(grammar.getClassName() + "::" + grammar.getClassName()); 1435 println("("+namespaceAntlr+"TokenBuffer& tokenBuf, int k)"); 1436 println(": " + sup + "(tokenBuf,k)"); 1437 println("{"); 1438 tabs++; 1439 println("_initialize();"); 1440 tabs--; 1441 println("}"); 1442 println(""); 1443 1444 print(grammar.getClassName() + "::" + grammar.getClassName()); 1445 println("("+namespaceAntlr+"TokenBuffer& tokenBuf)"); 1446 println(": " + sup + "(tokenBuf," + grammar.maxk + ")"); 1447 println("{"); 1448 tabs++; 1449 println("_initialize();"); 1450 tabs--; 1451 println("}"); 1452 println(""); 1453 1454 print(grammar.getClassName() + "::" + grammar.getClassName()); 1456 println("("+namespaceAntlr+"TokenStream& lexer, int k)"); 1457 println(": " + sup + "(lexer,k)"); 1458 println("{"); 1459 tabs++; 1460 println("_initialize();"); 1461 tabs--; 1462 println("}"); 1463 println(""); 1464 1465 print(grammar.getClassName() + "::" + grammar.getClassName()); 1466 println("("+namespaceAntlr+"TokenStream& lexer)"); 1467 println(": " + sup + "(lexer," + grammar.maxk + ")"); 1468 println("{"); 1469 tabs++; 1470 println("_initialize();"); 1471 tabs--; 1472 println("}"); 1473 println(""); 1474 1475 print(grammar.getClassName() + "::" + grammar.getClassName()); 1476 println("(const "+namespaceAntlr+"ParserSharedInputState& state)"); 1477 println(": " + sup + "(state," + grammar.maxk + ")"); 1478 println("{"); 1479 tabs++; 1480 println("_initialize();"); 1481 tabs--; 1482 println("}"); 1483 println(""); 1484 1485 astTypes = new Vector(); 1486 1487 Enumeration ids = grammar.rules.elements(); 1489 int ruleNum=0; 1490 while ( ids.hasMoreElements() ) { 1491 GrammarSymbol sym = (GrammarSymbol) ids.nextElement(); 1492 if ( sym instanceof RuleSymbol) { 1493 RuleSymbol rs = (RuleSymbol)sym; 1494 genRule(rs, rs.references.size()==0, ruleNum++, grammar.getClassName() + "::"); 1495 } 1496 exitIfError(); 1497 } 1498 if ( 
usingCustomAST ) 1499 { 1500 1505 println(labeledElementASTType+" "+grammar.getClassName()+"::getAST()"); 1508 println("{"); 1509 println("\treturn returnAST;"); 1510 println("}"); 1511 println(""); 1512 } 1513 1514 println("void "+ grammar.getClassName() + "::initializeFactory( void )"); 1517 println("{"); 1518 tabs++; 1519 if( grammar.buildAST ) 1520 { 1521 println("if( ! astFactory )"); 1522 tabs++; 1523 if( usingCustomAST ) 1524 { 1525 String nodetype = labeledElementASTType.substring(3, labeledElementASTType.length()); 1527 println("astFactory = new "+namespaceAntlr+"ASTFactory(\""+nodetype+"\", "+nodetype+"::factory);"); 1528 } 1529 else 1530 println("astFactory = new "+namespaceAntlr+"ASTFactory();"); 1531 tabs--; 1532 Enumeration e = astTypes.elements(); 1533 while( e.hasMoreElements() ) 1534 println((String )e.nextElement()); 1535 println("astFactory->setMaxNodeType("+grammar.tokenManager.maxTokenType()+");"); 1536 } 1537 tabs--; 1538 println("}"); 1539 1540 genTokenStrings(grammar.getClassName() + "::"); 1542 1543 genBitsets(bitsetsUsed, grammar.tokenManager.maxTokenType(), grammar.getClassName() + "::" ); 1545 1546 if (grammar.debuggingOutput) 1548 genSemPredMap(grammar.getClassName() + "::"); 1549 1550 println(""); 1552 println(""); 1553 if (nameSpace != null) 1554 nameSpace.emitClosures(currentOutput); 1555 1556 currentOutput.close(); 1558 currentOutput = null; 1559 } 1560 public void genBody(TreeWalkerGrammar g) throws IOException  1561 { 1562 outputFile = grammar.getClassName() + ".cpp"; 1564 outputLine = 1; 1565 currentOutput = antlrTool.openOutputFile(outputFile); 1566 1568 genAST = grammar.buildAST; 1569 tabs = 0; 1570 1571 genHeader(outputFile); 1573 1574 printHeaderAction(preIncludeCpp); 1575 1576 println("#include \"" + grammar.getClassName() + ".hpp\""); 1578 println("#include <antlr/Token.hpp>"); 1579 println("#include <antlr/AST.hpp>"); 1580 println("#include <antlr/NoViableAltException.hpp>"); 1581 println("#include <antlr/MismatchedTokenException.hpp>"); 1582 println("#include <antlr/SemanticException.hpp>"); 1583 println("#include <antlr/BitSet.hpp>"); 1584 1585 printHeaderAction(postIncludeCpp); 1586 1587 if (nameSpace != null) 1588 nameSpace.emitDeclarations(currentOutput); 1589 1590 printAction(grammar.preambleAction); 1592 1593 String sup = null; 1595 if ( grammar.superClass!=null ) { 1596 sup = grammar.superClass; 1597 } 1598 else { 1599 sup = grammar.getSuperClass(); 1600 if (sup.lastIndexOf('.') != -1) 1601 sup = sup.substring(sup.lastIndexOf('.')+1); 1602 sup = namespaceAntlr + sup; 1603 } 1604 1605 println(grammar.getClassName() + "::" + grammar.getClassName() + "()"); 1607 println("\t: "+namespaceAntlr+"TreeParser() {"); 1608 tabs++; 1609 tabs--; 1611 println("}"); 1612 println(""); 1613 1614 Enumeration ids = grammar.rules.elements(); 1616 int ruleNum=0; 1617 String ruleNameInits = ""; 1618 while ( ids.hasMoreElements() ) { 1619 GrammarSymbol sym = (GrammarSymbol) ids.nextElement(); 1620 if ( sym instanceof RuleSymbol) { 1621 RuleSymbol rs = (RuleSymbol)sym; 1622 genRule(rs, rs.references.size()==0, ruleNum++, grammar.getClassName() + "::"); 1623 } 1624 exitIfError(); 1625 } 1626 1627 if ( usingCustomAST ) 1628 { 1629 println(labeledElementASTType+" "+grammar.getClassName()+"::getAST()"); 1632 println("{"); 1633 println("\treturn returnAST;"); 1634 println("}"); 1635 println(""); 1636 } 1637 genTokenStrings(grammar.getClassName() + "::"); 1639 1640 genBitsets(bitsetsUsed, grammar.tokenManager.maxTokenType(), grammar.getClassName() + "::" ); 1642 1643 
println(""); 1645 println(""); 1646 1647 if (nameSpace != null) 1648 nameSpace.emitClosures(currentOutput); 1649 1650 currentOutput.close(); 1652 currentOutput = null; 1653 } 1654 1657 protected void genCases(BitSet p) { 1658 if ( DEBUG_CODE_GENERATOR ) System.out.println("genCases("+p+")"); 1659 int[] elems; 1660 1661 elems = p.toArray(); 1662 int wrap = 1; int j=1; 1665 boolean startOfLine = true; 1666 for (int i = 0; i < elems.length; i++) { 1667 if (j==1) { 1668 print(""); 1669 } else { 1670 _print(" "); 1671 } 1672 _print("case " + getValueString(elems[i]) + ":"); 1673 1674 if (j==wrap) { 1675 _println(""); 1676 startOfLine = true; 1677 j=1; 1678 } 1679 else { 1680 j++; 1681 startOfLine = false; 1682 } 1683 } 1684 if (!startOfLine) { 1685 _println(""); 1686 } 1687 } 1688 1698 public CppBlockFinishingInfo genCommonBlock( 1699 AlternativeBlock blk, 1700 boolean noTestForSingle ) 1701 { 1702 int nIF=0; 1703 boolean createdLL1Switch = false; 1704 int closingBracesOfIFSequence = 0; 1705 CppBlockFinishingInfo finishingInfo = new CppBlockFinishingInfo(); 1706 if ( DEBUG_CODE_GENERATOR ) System.out.println("genCommonBlk("+blk+")"); 1707 1708 boolean savegenAST = genAST; 1710 genAST = genAST && blk.getAutoGen(); 1711 1712 boolean oldsaveTest = saveText; 1713 saveText = saveText && blk.getAutoGen(); 1714 1715 if ( blk.not && 1717 analyzer.subruleCanBeInverted(blk, grammar instanceof LexerGrammar) ) 1718 { 1719 Lookahead p = analyzer.look(1, blk); 1720 if (blk.getLabel() != null && syntacticPredLevel == 0) { 1722 println(blk.getLabel() + " = " + lt1Value + ";"); 1723 } 1724 1725 genElementAST(blk); 1727 1728 String astArgs=""; 1729 if (grammar instanceof TreeWalkerGrammar) { 1730 if( usingCustomAST ) 1731 astArgs="static_cast<"+namespaceAntlr+"RefAST"+">(_t),"; 1732 else 1733 astArgs="_t,"; 1734 } 1735 1736 println("match(" + astArgs + getBitsetName(markBitsetForGen(p.fset)) + ");"); 1738 1739 if (grammar instanceof TreeWalkerGrammar) 1741 { 1742 println("_t = _t->getNextSibling();"); 1743 } 1744 return finishingInfo; 1745 } 1746 1747 if (blk.getAlternatives().size() == 1) 1749 { 1750 Alternative alt = blk.getAlternativeAt(0); 1751 if (alt.synPred != null) 1753 { 1754 antlrTool.warning( 1755 "Syntactic predicate superfluous for single alternative", 1756 grammar.getFilename(), 1757 blk.getAlternativeAt(0).synPred.getLine(), 1758 blk.getAlternativeAt(0).synPred.getColumn() 1759 ); 1760 } 1761 if (noTestForSingle) 1762 { 1763 if (alt.semPred != null) 1764 { 1765 genSemPred(alt.semPred, blk.line); 1767 } 1768 genAlt(alt, blk); 1769 return finishingInfo; 1770 } 1771 } 1772 1773 1783 int nLL1 = 0; 1784 for (int i=0; i<blk.getAlternatives().size(); i++) 1785 { 1786 Alternative a = blk.getAlternativeAt(i); 1787 if ( suitableForCaseExpression(a) ) 1788 nLL1++; 1789 } 1790 1791 if ( nLL1 >= makeSwitchThreshold ) 1793 { 1794 String testExpr = lookaheadString(1); 1796 createdLL1Switch = true; 1797 if ( grammar instanceof TreeWalkerGrammar ) 1799 { 1800 println("if (_t == "+labeledElementASTInit+" )"); 1801 tabs++; 1802 println("_t = ASTNULL;"); 1803 tabs--; 1804 } 1805 println("switch ( "+testExpr+") {"); 1806 for (int i=0; i<blk.alternatives.size(); i++) 1807 { 1808 Alternative alt = blk.getAlternativeAt(i); 1809 if ( !suitableForCaseExpression(alt) ) 1812 { 1813 continue; 1814 } 1815 Lookahead p = alt.cache[1]; 1816 if (p.fset.degree() == 0 && !p.containsEpsilon()) 1817 { 1818 antlrTool.warning("Alternate omitted due to empty prediction set", 1819 grammar.getFilename(), 1820 alt.head.getLine(), 
alt.head.getColumn()); 1821 } 1822 else 1823 { 1824 genCases(p.fset); 1825 println("{"); 1826 tabs++; 1827 genAlt(alt, blk); 1828 println("break;"); 1829 tabs--; 1830 println("}"); 1831 } 1832 } 1833 println("default:"); 1834 tabs++; 1835 } 1836 1837 int startDepth = (grammar instanceof LexerGrammar) ? grammar.maxk : 0; 1852 for (int altDepth = startDepth; altDepth >= 0; altDepth--) { 1853 if ( DEBUG_CODE_GENERATOR ) System.out.println("checking depth "+altDepth); 1854 for (int i=0; i<blk.alternatives.size(); i++) { 1855 Alternative alt = blk.getAlternativeAt(i); 1856 if ( DEBUG_CODE_GENERATOR ) System.out.println("genAlt: "+i); 1857 if ( createdLL1Switch && 1861 suitableForCaseExpression(alt) ) 1862 { 1863 if ( DEBUG_CODE_GENERATOR ) 1864 System.out.println("ignoring alt because it was in the switch"); 1865 continue; 1866 } 1867 String e; 1868 1869 boolean unpredicted = false; 1870 1871 if (grammar instanceof LexerGrammar) { 1872 int effectiveDepth = alt.lookaheadDepth; 1875 if (effectiveDepth == GrammarAnalyzer.NONDETERMINISTIC) 1876 { 1877 effectiveDepth = grammar.maxk; 1879 } 1880 while ( effectiveDepth >= 1 && 1881 alt.cache[effectiveDepth].containsEpsilon() ) 1882 { 1883 effectiveDepth--; 1884 } 1885 if (effectiveDepth != altDepth) 1888 { 1889 if ( DEBUG_CODE_GENERATOR ) 1890 System.out.println("ignoring alt because effectiveDepth!=altDepth;"+effectiveDepth+"!="+altDepth); 1891 continue; 1892 } 1893 unpredicted = lookaheadIsEmpty(alt, effectiveDepth); 1894 e = getLookaheadTestExpression(alt, effectiveDepth); 1895 } 1896 else 1897 { 1898 unpredicted = lookaheadIsEmpty(alt, grammar.maxk); 1899 e = getLookaheadTestExpression(alt, grammar.maxk); 1900 } 1901 1902 if ( alt.cache[1].fset.degree() > caseSizeThreshold && 1905 suitableForCaseExpression(alt)) 1906 { 1907 if ( nIF==0 ) 1908 { 1909 if ( grammar instanceof TreeWalkerGrammar ) { 1912 println("if (_t == "+labeledElementASTInit+" )"); 1913 tabs++; 1914 println("_t = ASTNULL;"); 1915 tabs--; 1916 } 1917 println("if " + e + " {"); 1918 } 1919 else 1920 println("else if " + e + " {"); 1921 } 1922 else if (unpredicted && 1923 alt.semPred==null && 1924 alt.synPred==null) 1925 { 1926 if ( nIF==0 ) { 1931 println("{"); 1932 } 1933 else { 1934 println("else {"); 1935 } 1936 finishingInfo.needAnErrorClause = false; 1937 } 1938 else 1939 { 1940 if ( alt.semPred != null ) { 1943 ActionTransInfo tInfo = new ActionTransInfo(); 1947 String actionStr = processActionForTreeSpecifiers(alt.semPred, 1948 blk.line, 1949 currentRule, 1950 tInfo); 1951 1953 if (((grammar instanceof ParserGrammar) || (grammar instanceof LexerGrammar)) && grammar.debuggingOutput) 1955 e = "("+e+"&& fireSemanticPredicateEvaluated(antlr.debug.SemanticPredicateEvent.PREDICTING,"+ addSemPred(charFormatter.escapeString(actionStr))+","+actionStr+"))"; 1957 else 1958 e = "("+e+"&&("+actionStr +"))"; 1959 } 1960 1961 if ( nIF>0 ) { 1963 if ( alt.synPred != null ) { 1964 println("else {"); 1965 tabs++; 1966 genSynPred( alt.synPred, e ); 1967 closingBracesOfIFSequence++; 1968 } 1969 else { 1970 println("else if " + e + " {"); 1971 } 1972 } 1973 else { 1974 if ( alt.synPred != null ) { 1975 genSynPred( alt.synPred, e ); 1976 } 1977 else { 1978 if ( grammar instanceof TreeWalkerGrammar ) { 1981 println("if (_t == "+labeledElementASTInit+" )"); 1982 tabs++; 1983 println("_t = ASTNULL;"); 1984 tabs--; 1985 } 1986 println("if " + e + " {"); 1987 } 1988 } 1989 1990 } 1991 1992 nIF++; 1993 tabs++; 1994 genAlt(alt, blk); 1995 tabs--; 1996 println("}"); 1997 } 1998 } 1999 String ps = ""; 2000 
for (int i=1; i<=closingBracesOfIFSequence; i++) { 2001 tabs--; ps+="}"; 2003 } 2004 2005 genAST = savegenAST; 2007 2008 saveText=oldsaveTest; 2010 2011 if ( createdLL1Switch ) { 2013 tabs--; 2014 finishingInfo.postscript = ps+"}"; 2015 finishingInfo.generatedSwitch = true; 2016 finishingInfo.generatedAnIf = nIF>0; 2017 2019 } 2020 else { 2021 finishingInfo.postscript = ps; 2022 finishingInfo.generatedSwitch = false; 2023 finishingInfo.generatedAnIf = nIF>0; 2024 } 2026 return finishingInfo; 2027 } 2028 2029 private static boolean suitableForCaseExpression(Alternative a) { 2030 return a.lookaheadDepth == 1 && 2031 a.semPred == null && 2032 !a.cache[1].containsEpsilon() && 2033 a.cache[1].fset.degree()<=caseSizeThreshold; 2034 } 2035 2036 2038 private void genElementAST(AlternativeElement el) { 2039 2040 if ( grammar instanceof TreeWalkerGrammar && !grammar.buildAST ) 2043 { 2044 String elementRef; 2045 String astName; 2046 2047 if (el.getLabel() == null) 2049 { 2050 elementRef = lt1Value; 2051 astName = "tmp" + astVarNumber + "_AST"; 2053 astVarNumber++; 2054 mapTreeVariable(el, astName); 2056 println(labeledElementASTType+" "+astName+"_in = "+elementRef+";"); 2058 } 2059 return; 2060 } 2061 2062 if (grammar.buildAST && syntacticPredLevel == 0) 2063 { 2064 boolean needASTDecl = 2065 ( genAST && (el.getLabel() != null || 2066 el.getAutoGenType() != GrammarElement.AUTO_GEN_BANG )); 2067 2068 if( el.getAutoGenType() != GrammarElement.AUTO_GEN_BANG && 2073 (el instanceof TokenRefElement) ) 2074 needASTDecl = true; 2075 2076 boolean doNoGuessTest = 2077 ( grammar.hasSyntacticPredicate && needASTDecl ); 2078 2079 String elementRef; 2080 String astNameBase; 2081 2082 if (el.getLabel() != null) 2084 { 2085 elementRef = el.getLabel(); 2087 astNameBase = el.getLabel(); 2088 } 2089 else 2090 { 2091 elementRef = lt1Value; 2093 astNameBase = "tmp" + astVarNumber; 2095 astVarNumber++; 2096 } 2097 2098 if ( needASTDecl ) 2100 { 2101 if ( el instanceof GrammarAtom ) 2102 { 2103 GrammarAtom ga = (GrammarAtom)el; 2104 if ( ga.getASTNodeType()!=null ) 2105 { 2106 genASTDeclaration( el, astNameBase, "Ref"+ga.getASTNodeType() ); 2107 } 2109 else 2110 { 2111 genASTDeclaration( el, astNameBase, labeledElementASTType ); 2112 } 2114 } 2115 else 2116 { 2117 genASTDeclaration( el, astNameBase, labeledElementASTType ); 2118 } 2120 } 2121 2122 String astName = astNameBase + "_AST"; 2124 2125 mapTreeVariable(el, astName); 2127 if (grammar instanceof TreeWalkerGrammar) 2128 { 2129 println(labeledElementASTType+" " + astName + "_in = "+labeledElementASTInit+";"); 2131 } 2132 2133 if (doNoGuessTest) { 2135 println("if ( inputState->guessing == 0 ) {"); 2136 tabs++; 2137 } 2138 2139 if (el.getLabel() != null) 2142 { 2143 if ( el instanceof GrammarAtom ) 2144 { 2145 println(astName + " = "+ 2146 getASTCreateString((GrammarAtom)el,elementRef) + ";"); 2147 } 2148 else 2149 { 2150 println(astName + " = "+ 2151 getASTCreateString(elementRef) + ";"); 2152 } 2153 } 2154 2155 if( el.getLabel() == null && needASTDecl ) 2157 { 2158 elementRef = lt1Value; 2159 if ( el instanceof GrammarAtom ) 2160 { 2161 println(astName + " = "+ 2162 getASTCreateString((GrammarAtom)el,elementRef) + ";"); 2163 } 2164 else 2165 { 2166 println(astName + " = "+ 2167 getASTCreateString(elementRef) + ";"); 2168 } 2169 if (grammar instanceof TreeWalkerGrammar) 2171 { 2172 println(astName + "_in = " + elementRef + ";"); 2174 } 2175 } 2176 2177 if (genAST) 2178 { 2179 switch (el.getAutoGenType()) 2180 { 2181 case GrammarElement.AUTO_GEN_NONE: 2182 if( 
usingCustomAST || 2183 (el instanceof GrammarAtom && 2184 ((GrammarAtom)el).getASTNodeType() != null) ) 2185 println("astFactory->addASTChild(currentAST, static_cast<"+namespaceAntlr+"RefAST>("+ astName + "));"); 2186 else 2187 println("astFactory->addASTChild(currentAST, "+ astName + ");"); 2188 break; 2190 case GrammarElement.AUTO_GEN_CARET: 2191 if( usingCustomAST || 2192 (el instanceof GrammarAtom && 2193 ((GrammarAtom)el).getASTNodeType() != null) ) 2194 println("astFactory->makeASTRoot(currentAST, static_cast<"+namespaceAntlr+"RefAST>(" + astName + "));"); 2195 else 2196 println("astFactory->makeASTRoot(currentAST, " + astName + ");"); 2197 break; 2198 default: 2199 break; 2200 } 2201 } 2202 if (doNoGuessTest) 2203 { 2204 tabs--; 2205 println("}"); 2206 } 2207 } 2208 } 2209 2212 private void genErrorCatchForElement(AlternativeElement el) { 2213 if (el.getLabel() == null) return; 2214 String r = el.enclosingRuleName; 2215 if ( grammar instanceof LexerGrammar ) { 2216 r = CodeGenerator.encodeLexerRuleName(el.enclosingRuleName); 2217 } 2218 RuleSymbol rs = (RuleSymbol)grammar.getSymbol(r); 2219 if (rs == null) { 2220 antlrTool.panic("Enclosing rule not found!"); 2221 } 2222 ExceptionSpec ex = rs.block.findExceptionSpec(el.getLabel()); 2223 if (ex != null) { 2224 tabs--; 2225 println("}"); 2226 genErrorHandler(ex); 2227 } 2228 } 2229 2230 private void genErrorHandler(ExceptionSpec ex) 2231 { 2232 for (int i = 0; i < ex.handlers.size(); i++) 2234 { 2235 ExceptionHandler handler = (ExceptionHandler)ex.handlers.elementAt(i); 2236 println("catch (" + handler.exceptionTypeAndName.getText() + ") {"); 2238 tabs++; 2239 if (grammar.hasSyntacticPredicate) { 2240 println("if (inputState->guessing==0) {"); 2241 tabs++; 2242 } 2243 2244 ActionTransInfo tInfo=null; 2247 if (currentRule != null && (grammar instanceof ParserGrammar) ) { 2249 tInfo = new ActionTransInfo(); 2250 Lookahead follow = grammar.theLLkAnalyzer.FOLLOW(1, currentRule.endNode); 2251 String followSetName = getBitsetName(markBitsetForGen(follow.fset)); 2252 tInfo.lookaheadSetName = followSetName; 2253 } 2254 2255 genLineNo(handler.action); 2257 printAction( 2258 processActionForTreeSpecifiers( handler.action.getText(), 2259 handler.action.getLine(), 2260 currentRule, tInfo ) 2261 ); 2262 genLineNo2(); 2263 2264 if (grammar.hasSyntacticPredicate) 2265 { 2266 tabs--; 2267 println("} else {"); 2268 tabs++; 2269 println("throw "+extractIdOfAction(handler.exceptionTypeAndName)+";"); 2271 tabs--; 2272 println("}"); 2273 } 2274 tabs--; 2276 println("}"); 2277 } 2278 } 2279 2280 private void genErrorTryForElement(AlternativeElement el) { 2281 if (el.getLabel() == null) return; 2282 String r = el.enclosingRuleName; 2283 if ( grammar instanceof LexerGrammar ) { 2284 r = CodeGenerator.encodeLexerRuleName(el.enclosingRuleName); 2285 } 2286 RuleSymbol rs = (RuleSymbol)grammar.getSymbol(r); 2287 if (rs == null) { 2288 antlrTool.panic("Enclosing rule not found!"); 2289 } 2290 ExceptionSpec ex = rs.block.findExceptionSpec(el.getLabel()); 2291 if (ex != null) { 2292 println("try { // for error handling"); 2293 tabs++; 2294 } 2295 } 2296 2297 protected void genHeader(String fileName) 2298 { 2299 println("/* $ANTLR "+antlrTool.version+": "+ 2300 "\""+antlrTool.fileMinusPath(antlrTool.grammarFile)+"\""+ 2301 " -> "+ 2302 "\""+fileName+"\"$ */"); 2303 } 2304 2305 public void genInclude(LexerGrammar g) throws IOException  2307 { 2308 outputFile = grammar.getClassName() + ".hpp"; 2309 outputLine = 1; 2310 currentOutput = 
antlrTool.openOutputFile(outputFile); 2311 2313 genAST = false; saveText = true; 2316 tabs=0; 2317 2318 println("#ifndef INC_"+grammar.getClassName()+"_hpp_"); 2320 println("#define INC_"+grammar.getClassName()+"_hpp_"); 2321 println(""); 2322 2323 printHeaderAction(preIncludeHpp); 2324 2325 println("#include <antlr/config.hpp>"); 2326 2327 genHeader(outputFile); 2329 2330 println("#include <antlr/CommonToken.hpp>"); 2332 println("#include <antlr/InputBuffer.hpp>"); 2333 println("#include <antlr/BitSet.hpp>"); 2334 println("#include \"" + grammar.tokenManager.getName() + TokenTypesFileSuffix+".hpp\""); 2335 2336 String sup=null; 2338 if ( grammar.superClass!=null ) { 2339 sup = grammar.superClass; 2340 println("#include \""+sup+".hpp\""); 2341 } 2342 else { 2343 sup = grammar.getSuperClass(); 2344 if (sup.lastIndexOf('.') != -1) 2345 sup = sup.substring(sup.lastIndexOf('.')+1); 2346 println("#include <antlr/"+sup+".hpp>"); 2347 sup = namespaceAntlr + sup; 2348 } 2349 2350 printHeaderAction(postIncludeHpp); 2352 2353 if (nameSpace != null) 2354 nameSpace.emitDeclarations(currentOutput); 2355 2356 printHeaderAction(""); 2357 2358 if ( grammar.comment!=null ) { 2360 _println(grammar.comment); 2361 } 2362 2363 print("class " + grammar.getClassName() + " : public " + sup); 2365 println(", public " + grammar.tokenManager.getName() + TokenTypesFileSuffix); 2366 2367 Token tsuffix = (Token)grammar.options.get("classHeaderSuffix"); 2368 if ( tsuffix != null ) { 2369 String suffix = StringUtils.stripFrontBack(tsuffix.getText(),"\"","\""); 2370 if ( suffix != null ) { 2371 print(", "+suffix); } 2373 } 2374 println("{"); 2375 2376 if (grammar.classMemberAction != null) { 2378 genLineNo(grammar.classMemberAction); 2379 print( 2380 processActionForTreeSpecifiers(grammar.classMemberAction.getText(), 2381 grammar.classMemberAction.getLine(), 2382 currentRule, null) 2383 ); 2384 genLineNo2(); 2385 } 2386 2387 tabs=0; 2389 println("private:"); 2390 tabs=1; 2391 println("void initLiterals();"); 2392 2393 tabs=0; 2395 println("public:"); 2396 tabs=1; 2397 println("bool getCaseSensitiveLiterals() const;"); 2398 2399 tabs=0; 2401 println("public:"); 2402 tabs=1; 2403 2404 println(grammar.getClassName() + "(" + namespaceStd + "istream& in);"); 2406 2407 println(grammar.getClassName() + "("+namespaceAntlr+"InputBuffer& ib);"); 2409 2410 println(grammar.getClassName() + "(const "+namespaceAntlr+"LexerSharedInputState& state);"); 2411 2412 println(namespaceAntlr+"RefToken nextToken();"); 2416 2417 Enumeration ids = grammar.rules.elements(); 2419 while ( ids.hasMoreElements() ) { 2420 RuleSymbol sym = (RuleSymbol) ids.nextElement(); 2421 if (!sym.getId().equals("mnextToken")) { 2423 genRuleHeader(sym, false); 2424 } 2425 exitIfError(); 2426 } 2427 2428 tabs=0; 2430 println("private:"); 2431 tabs=1; 2432 2433 if ( grammar.debuggingOutput ) { 2435 println("static const char* _ruleNames[];"); 2436 } 2437 2438 if (grammar.debuggingOutput) 2440 println("static const char* _semPredNames[];"); 2441 2442 genBitsetsHeader(bitsetsUsed, ((LexerGrammar)grammar).charVocabulary.size()); 2444 2445 tabs=0; 2446 println("};"); 2447 println(""); 2448 if (nameSpace != null) 2449 nameSpace.emitClosures(currentOutput); 2450 2451 println("#endif /*INC_"+grammar.getClassName()+"_hpp_*/"); 2453 2454 currentOutput.close(); 2456 currentOutput = null; 2457 } 2458 public void genInclude(ParserGrammar g) throws IOException  2459 { 2460 outputFile = grammar.getClassName() + ".hpp"; 2462 outputLine = 1; 2463 currentOutput = 
antlrTool.openOutputFile(outputFile); 2464 2466 genAST = grammar.buildAST; 2467 2468 tabs = 0; 2469 2470 println("#ifndef INC_"+grammar.getClassName()+"_hpp_"); 2472 println("#define INC_"+grammar.getClassName()+"_hpp_"); 2473 println(""); 2474 printHeaderAction(preIncludeHpp); 2475 println("#include <antlr/config.hpp>"); 2476 2477 genHeader(outputFile); 2479 2480 println("#include <antlr/TokenStream.hpp>"); 2482 println("#include <antlr/TokenBuffer.hpp>"); 2483 println("#include \"" + grammar.tokenManager.getName() + TokenTypesFileSuffix+".hpp\""); 2484 2485 String sup=null; 2487 if ( grammar.superClass!=null ) { 2488 sup = grammar.superClass; 2489 println("#include \""+sup+".hpp\""); 2490 } 2491 else { 2492 sup = grammar.getSuperClass(); 2493 if (sup.lastIndexOf('.') != -1) 2494 sup = sup.substring(sup.lastIndexOf('.')+1); 2495 println("#include <antlr/"+sup+".hpp>"); 2496 sup = namespaceAntlr + sup; 2497 } 2498 println(""); 2499 2500 printHeaderAction(postIncludeHpp); 2502 2503 if (nameSpace != null) 2504 nameSpace.emitDeclarations(currentOutput); 2505 2506 printHeaderAction(""); 2507 2508 if ( grammar.comment!=null ) { 2510 _println(grammar.comment); 2511 } 2512 2513 print("class " + grammar.getClassName() + " : public " + sup); 2515 println(", public " + grammar.tokenManager.getName() + TokenTypesFileSuffix); 2516 2517 Token tsuffix = (Token)grammar.options.get("classHeaderSuffix"); 2518 if ( tsuffix != null ) { 2519 String suffix = StringUtils.stripFrontBack(tsuffix.getText(),"\"","\""); 2520 if ( suffix != null ) 2521 print(", "+suffix); } 2523 println("{"); 2524 2525 if (grammar.debuggingOutput) { 2528 println("public: static const char* _ruleNames[];"); 2529 } 2530 if (grammar.classMemberAction != null) { 2532 genLineNo(grammar.classMemberAction.getLine()); 2533 print( 2534 processActionForTreeSpecifiers(grammar.classMemberAction.getText(), 2535 grammar.classMemberAction.getLine(), 2536 currentRule, null) 2537 ); 2538 genLineNo2(); 2539 } 2540 println("public:"); 2541 tabs = 1; 2542 println("void initializeFactory( void );"); 2543 println("// called from constructors"); 2544 println("void _initialize( void );"); 2545 2546 tabs=0; 2548 println("protected:"); 2549 tabs=1; 2550 println(grammar.getClassName() + "("+namespaceAntlr+"TokenBuffer& tokenBuf, int k);"); 2551 tabs=0; 2552 println("public:"); 2553 tabs=1; 2554 println(grammar.getClassName() + "("+namespaceAntlr+"TokenBuffer& tokenBuf);"); 2555 2556 tabs=0; 2558 println("protected:"); 2559 tabs=1; 2560 println(grammar.getClassName()+"("+namespaceAntlr+"TokenStream& lexer, int k);"); 2561 tabs=0; 2562 println("public:"); 2563 tabs=1; 2564 println(grammar.getClassName()+"("+namespaceAntlr+"TokenStream& lexer);"); 2565 2566 println(grammar.getClassName()+"(const "+namespaceAntlr+"ParserSharedInputState& state);"); 2567 2568 println("int getNumTokens() const"); 2569 println("{"); tabs++; 2570 println("return "+grammar.getClassName()+"::NUM_TOKENS;"); 2571 tabs--; println("}"); 2572 println("const char* getTokenName( int type ) const"); 2573 println("{"); tabs++; 2574 println("if( type > getNumTokens() ) return 0;"); 2575 println("return "+grammar.getClassName()+"::tokenNames[type];"); 2576 tabs--; println("}"); 2577 println("const char* const* getTokenNames() const"); 2578 println("{"); tabs++; 2579 println("return "+grammar.getClassName()+"::tokenNames;"); 2580 tabs--; println("}"); 2581 2582 Enumeration ids = grammar.rules.elements(); 2584 while ( ids.hasMoreElements() ) { 2585 GrammarSymbol sym = (GrammarSymbol) 
			if ( sym instanceof RuleSymbol ) {
				RuleSymbol rs = (RuleSymbol)sym;
				genRuleHeader(rs, rs.references.size()==0);
			}
			exitIfError();
		}

		if ( usingCustomAST )
		{
			tabs = 0; println("public:"); tabs = 1;
			println(labeledElementASTType+" getAST();");
			println("");
			tabs = 0; println("protected:"); tabs = 1;
			println(labeledElementASTType+" returnAST;");
		}

		tabs = 0;
		println("private:");
		tabs = 1;

		println("static const char* tokenNames[];");
		_println("#ifndef NO_STATIC_CONSTS");
		println("static const int NUM_TOKENS = "+grammar.tokenManager.getVocabulary().size()+";");
		_println("#else");
		println("enum {");
		println("\tNUM_TOKENS = "+grammar.tokenManager.getVocabulary().size());
		println("};");
		_println("#endif");

		genBitsetsHeader(bitsetsUsed, grammar.tokenManager.maxTokenType());

		if (grammar.debuggingOutput)
			println("static const char* _semPredNames[];");

		tabs = 0;
		println("};");
		println("");
		if (nameSpace != null)
			nameSpace.emitClosures(currentOutput);

		println("#endif /*INC_"+grammar.getClassName()+"_hpp_*/");

		currentOutput.close();
		currentOutput = null;
	}

	public void genInclude(TreeWalkerGrammar g) throws IOException
	{
		outputFile = grammar.getClassName() + ".hpp";
		outputLine = 1;
		currentOutput = antlrTool.openOutputFile(outputFile);

		genAST = grammar.buildAST;
		tabs = 0;

		println("#ifndef INC_"+grammar.getClassName()+"_hpp_");
		println("#define INC_"+grammar.getClassName()+"_hpp_");
		println("");
		printHeaderAction(preIncludeHpp);
		println("#include <antlr/config.hpp>");
		println("#include \"" + grammar.tokenManager.getName() + TokenTypesFileSuffix+".hpp\"");

		genHeader(outputFile);

		String sup = null;
		if ( grammar.superClass != null ) {
			sup = grammar.superClass;
			println("#include \""+sup+".hpp\"");
		}
		else {
			sup = grammar.getSuperClass();
			if (sup.lastIndexOf('.') != -1)
				sup = sup.substring(sup.lastIndexOf('.')+1);
			println("#include <antlr/"+sup+".hpp>");
			sup = namespaceAntlr + sup;
		}
		println("");

		printHeaderAction(postIncludeHpp);

		if (nameSpace != null)
			nameSpace.emitDeclarations(currentOutput);

		printHeaderAction("");

		if ( grammar.comment != null ) {
			_println(grammar.comment);
		}

		print("class " + grammar.getClassName() + " : public "+sup);
		println(", public " + grammar.tokenManager.getName() + TokenTypesFileSuffix);

		Token tsuffix = (Token)grammar.options.get("classHeaderSuffix");
		if ( tsuffix != null ) {
			String suffix = StringUtils.stripFrontBack(tsuffix.getText(),"\"","\"");
			if ( suffix != null ) {
				print(", "+suffix);
			}
		}
		println("{");

		if (grammar.classMemberAction != null) {
			genLineNo(grammar.classMemberAction.getLine());
			print(
				processActionForTreeSpecifiers(grammar.classMemberAction.getText(),
					grammar.classMemberAction.getLine(),
					currentRule, null)
			);
			genLineNo2();
		}

		tabs = 0;
		println("public:");
		tabs = 1;
		println(grammar.getClassName() + "();");

		println("int getNumTokens() const");
		println("{"); tabs++;
		println("return "+grammar.getClassName()+"::NUM_TOKENS;");
		tabs--; println("}");
		println("const char* getTokenName( int type ) const");
		println("{"); tabs++;
		println("if( type > getNumTokens() ) return 0;");
		println("return "+grammar.getClassName()+"::tokenNames[type];");
		tabs--; println("}");

		Enumeration ids = grammar.rules.elements();
		String ruleNameInits = "";
		while ( ids.hasMoreElements() ) {
			GrammarSymbol sym = (GrammarSymbol)ids.nextElement();
			if ( sym instanceof RuleSymbol ) {
				RuleSymbol rs = (RuleSymbol)sym;
				genRuleHeader(rs, rs.references.size()==0);
			}
			exitIfError();
		}

		if ( usingCustomAST )
		{
			tabs = 0; println("public:"); tabs = 1;
			println(labeledElementASTType+" getAST();");
			println("");
			tabs = 0; println("protected:"); tabs = 1;
			println(labeledElementASTType+" returnAST;");
			println(labeledElementASTType+" _retTree;");
		}

		tabs = 0;
		println("private:");
		tabs = 1;

		println("static const char* tokenNames[];");
		_println("#ifndef NO_STATIC_CONSTS");
		println("static const int NUM_TOKENS = "+grammar.tokenManager.getVocabulary().size()+";");
		_println("#else");
		println("enum {");
		println("\tNUM_TOKENS = "+grammar.tokenManager.getVocabulary().size());
		println("};");
		_println("#endif");

		genBitsetsHeader(bitsetsUsed, grammar.tokenManager.maxTokenType());

		tabs = 0;
		println("};");
		println("");
		if (nameSpace != null)
			nameSpace.emitClosures(currentOutput);

		println("#endif /*INC_"+grammar.getClassName()+"_hpp_*/");

		currentOutput.close();
		currentOutput = null;
	}

	protected void genASTDeclaration( AlternativeElement el ) {
		genASTDeclaration( el, labeledElementASTType );
	}

	protected void genASTDeclaration( AlternativeElement el, String node_type ) {
		genASTDeclaration( el, el.getLabel(), node_type );
	}

	protected void genASTDeclaration( AlternativeElement el, String var_name, String node_type ) {
		if( declaredASTVariables.contains(el) )
			return;

		String init = labeledElementASTInit;

		if (el instanceof GrammarAtom &&
			((GrammarAtom)el).getASTNodeType() != null )
			init = "static_cast<Ref"+((GrammarAtom)el).getASTNodeType()+">("+labeledElementASTInit+")";

		println(node_type+" " + var_name + "_AST = "+init+";");

		declaredASTVariables.add( el );
	}

	private void genLiteralsTest() {
		println("_ttype = testLiteralsTable(_ttype);");
	}

	private void genLiteralsTestForPartialToken() {
		println("_ttype = testLiteralsTable(text.substr(_begin, text.length()-_begin),_ttype);");
	}

	protected void genMatch(BitSet b) {
	}

	protected void genMatch(GrammarAtom atom) {
		if ( atom instanceof StringLiteralElement ) {
			if ( grammar instanceof LexerGrammar ) {
				genMatchUsingAtomText(atom);
			}
			else {
				genMatchUsingAtomTokenType(atom);
			}
		}
		else if ( atom instanceof CharLiteralElement ) {
			if ( grammar instanceof LexerGrammar ) {
				genMatchUsingAtomText(atom);
			}
			else {
				antlrTool.error("cannot ref character literals in grammar: "+atom);
			}
		}
		else if ( atom instanceof TokenRefElement ) {
			genMatchUsingAtomText(atom);
		}
		else if (atom instanceof WildcardElement) {
			gen((WildcardElement)atom);
		}
	}
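	/** Generate a match() (or matchNot()) call using the text of the atom;
	 *  used for char and string literals in lexers and for token references.
	 */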
	protected void genMatchUsingAtomText(GrammarAtom atom) {
		String astArgs = "";
		if (grammar instanceof TreeWalkerGrammar) {
			if( usingCustomAST )
				astArgs = "static_cast<"+namespaceAntlr+"RefAST"+">(_t),";
			else
				astArgs = "_t,";
		}

		if ( grammar instanceof LexerGrammar && (!saveText||atom.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) {
			println("_saveIndex=text.length();");
		}

		print(atom.not ? "matchNot(" : "match(");
		_print(astArgs);

		if (atom.atomText.equals("EOF")) {
			_print(namespaceAntlr+"Token::EOF_TYPE");
		}
		else {
			_print(textOrChar(atom.atomText));
		}
		_println(");");

		if ( grammar instanceof LexerGrammar && (!saveText||atom.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) {
			println("text.erase(_saveIndex);");
		}
	}

	protected void genMatchUsingAtomTokenType(GrammarAtom atom) {
		String astArgs = "";
		if (grammar instanceof TreeWalkerGrammar) {
			if( usingCustomAST )
				astArgs = "static_cast<"+namespaceAntlr+"RefAST"+">(_t),";
			else
				astArgs = "_t,";
		}

		String mangledName = null;
		String s = astArgs + getValueString(atom.getType());

		println( (atom.not ? "matchNot(" : "match(") + s + ");");
	}

	/** Generate nextToken(), the synthesized lexer entry rule that dispatches
	 *  among all public lexer rules (or simply returns an EOF token when the
	 *  grammar defines no public rules).
	 */
	public void genNextToken() {
		boolean hasPublicRules = false;
		for (int i = 0; i < grammar.rules.size(); i++) {
			RuleSymbol rs = (RuleSymbol)grammar.rules.elementAt(i);
			if ( rs.isDefined() && rs.access.equals("public") ) {
				hasPublicRules = true;
				break;
			}
		}
		if (!hasPublicRules) {
			println("");
			println(namespaceAntlr+"RefToken "+grammar.getClassName()+"::nextToken() { return "+namespaceAntlr+"RefToken(new "+namespaceAntlr+"CommonToken("+namespaceAntlr+"Token::EOF_TYPE, \"\")); }");
			println("");
			return;
		}

		RuleBlock nextTokenBlk = MakeGrammar.createNextTokenRule(grammar, grammar.rules, "nextToken");
		RuleSymbol nextTokenRs = new RuleSymbol("mnextToken");
		nextTokenRs.setDefined();
		nextTokenRs.setBlock(nextTokenBlk);
		nextTokenRs.access = "private";
		grammar.define(nextTokenRs);
		boolean ok = grammar.theLLkAnalyzer.deterministic(nextTokenBlk);

		String filterRule = null;
		if ( ((LexerGrammar)grammar).filterMode ) {
			filterRule = ((LexerGrammar)grammar).filterRule;
		}

		println("");
		println(namespaceAntlr+"RefToken "+grammar.getClassName()+"::nextToken()");
		println("{");
		tabs++;
		println(namespaceAntlr+"RefToken theRetToken;");
		println("for (;;) {");
		tabs++;
		println(namespaceAntlr+"RefToken theRetToken;");
		println("int _ttype = "+namespaceAntlr+"Token::INVALID_TYPE;");
		if ( ((LexerGrammar)grammar).filterMode ) {
			println("setCommitToPath(false);");
			if ( filterRule != null ) {
				// check that the filter rule exists and is not public
				if ( !grammar.isDefined(CodeGenerator.encodeLexerRuleName(filterRule)) ) {
					grammar.antlrTool.error("Filter rule "+filterRule+" does not exist in this lexer");
				}
				else {
					RuleSymbol rs = (RuleSymbol)grammar.getSymbol(CodeGenerator.encodeLexerRuleName(filterRule));
					if ( !rs.isDefined() ) {
						grammar.antlrTool.error("Filter rule "+filterRule+" does not exist in this lexer");
					}
					else if ( rs.access.equals("public") ) {
						grammar.antlrTool.error("Filter rule "+filterRule+" must be protected");
					}
				}
				println("int _m;");
				println("_m = mark();");
			}
		}
		println("resetText();");

		println("try { // for lexical and char stream error handling");
		tabs++;

		for (int i = 0; i < nextTokenBlk.getAlternatives().size(); i++) {
			Alternative a = nextTokenBlk.getAlternativeAt(i);
			if ( a.cache[1].containsEpsilon() ) {
				antlrTool.warning("found optional path in nextToken()");
			}
		}

		String newline = System.getProperty("line.separator");
		CppBlockFinishingInfo howToFinish = genCommonBlock(nextTokenBlk, false);
		String errFinish = "if (LA(1)==EOF_CHAR)"+newline+
			"\t\t\t\t{"+newline+"\t\t\t\t\tuponEOF();"+newline+
			"\t\t\t\t\t_returnToken = makeToken("+namespaceAntlr+"Token::EOF_TYPE);"+
			newline+"\t\t\t\t}";
		errFinish += newline+"\t\t\t\t";
		if ( ((LexerGrammar)grammar).filterMode ) {
			if ( filterRule == null ) {
				errFinish += "else {consume(); goto tryAgain;}";
			}
			else {
				errFinish += "else {"+newline+
					"\t\t\t\t\tcommit();"+newline+
					"\t\t\t\t\ttry {m"+filterRule+"(false);}"+newline+
					"\t\t\t\t\tcatch("+namespaceAntlr+"RecognitionException& e) {"+newline+
					"\t\t\t\t\t // catastrophic failure"+newline+
					"\t\t\t\t\t reportError(e);"+newline+
					"\t\t\t\t\t consume();"+newline+
					"\t\t\t\t\t}"+newline+
					"\t\t\t\t\tgoto tryAgain;"+newline+
					"\t\t\t\t}";
			}
		}
		else {
			errFinish += "else {"+throwNoViable+"}";
		}
		genBlockFinish(howToFinish, errFinish);

		if ( ((LexerGrammar)grammar).filterMode && filterRule != null ) {
			println("commit();");
		}

		println("if ( !_returnToken )"+newline+
			"\t\t\t\tgoto tryAgain; // found SKIP token"+newline);
		println("_ttype = _returnToken->getType();");
		if ( ((LexerGrammar)grammar).getTestLiterals()) {
			genLiteralsTest();
		}

		println("_returnToken->setType(_ttype);");
		println("return _returnToken;");

		tabs--;
		println("}");
		println("catch ("+namespaceAntlr+"RecognitionException& e) {");
		tabs++;
		if ( ((LexerGrammar)grammar).filterMode ) {
			if ( filterRule == null ) {
				println("if ( !getCommitToPath() ) {");
				tabs++;
				println("consume();");
				println("goto tryAgain;");
				tabs--;
				println("}");
			}
			else {
				println("if ( !getCommitToPath() ) {");
				tabs++;
				println("rewind(_m);");
				println("resetText();");
				println("try {m"+filterRule+"(false);}");
				println("catch("+namespaceAntlr+"RecognitionException& ee) {");
				println(" // horrendous failure: error in filter rule");
				println(" reportError(ee);");
				println(" consume();");
				println("}");
				println("goto tryAgain;");
				tabs--;
				println("}");
			}
		}
		if ( nextTokenBlk.getDefaultErrorHandler() ) {
			println("reportError(e);");
			println("consume();");
		}
		else {
			println("throw "+namespaceAntlr+"TokenStreamRecognitionException(e);");
		}

		tabs--;
		println("}");
		println("catch ("+namespaceAntlr+"CharStreamIOException& csie) {");
		println("\tthrow "+namespaceAntlr+"TokenStreamIOException(csie.io);");
		println("}");
		println("catch ("+namespaceAntlr+"CharStreamException& cse) {");
		println("\tthrow "+namespaceAntlr+"TokenStreamException(cse.getMessage());");
		println("}");

		_println("tryAgain:;");
		tabs--;
		println("}");

		tabs--;
		println("}");
		println("");
	}
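	/** Generate code for the given rule: the method signature, trace code,
	 *  common local variables, AST bookkeeping, the rule body and the
	 *  default exception handler.
	 */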
	public void genRule(RuleSymbol s, boolean startSymbol, int ruleNum, String prefix) {
		if ( DEBUG_CODE_GENERATOR ) System.out.println("genRule("+ s.getId() +")");
		if ( !s.isDefined() ) {
			antlrTool.error("undefined rule: "+ s.getId());
			return;
		}

		RuleBlock rblk = s.getBlock();

		currentRule = rblk;
		currentASTResult = s.getId();

		declaredASTVariables.clear();

		boolean savegenAST = genAST;
		genAST = genAST && rblk.getAutoGen();

		saveText = rblk.getAutoGen();

		if ( s.comment != null ) {
			_println(s.comment);
		}

		if (rblk.returnAction != null)
		{
			_print(extractTypeOfAction(rblk.returnAction, rblk.getLine(), rblk.getColumn()) + " ");
		}
		else {
			_print("void ");
		}

		_print(prefix + s.getId() + "(");

		_print(commonExtraParams);
		if (commonExtraParams.length() != 0 && rblk.argAction != null ) {
			_print(",");
		}

		if (rblk.argAction != null)
		{
			_println("");
			tabs++;
			println(rblk.argAction);
			tabs--;
			print(") ");
		}
		else {
			_print(") ");
		}

		_println("{");
		tabs++;

		if (grammar.traceRules) {
			if ( grammar instanceof TreeWalkerGrammar ) {
				if ( usingCustomAST )
					println("Tracer traceInOut(this,\""+ s.getId() +"\",static_cast<"+namespaceAntlr+"RefAST"+">(_t));");
				else
					println("Tracer traceInOut(this,\""+ s.getId() +"\",_t);");
			}
			else {
				println("Tracer traceInOut(this, \""+ s.getId() +"\");");
			}
		}

		if (rblk.returnAction != null)
		{
			genLineNo(rblk);
			println(rblk.returnAction + ";");
			genLineNo2();
		}

		if (!commonLocalVars.equals(""))
			println(commonLocalVars);

		if ( grammar instanceof LexerGrammar ) {
			if (s.getId().equals("mEOF"))
				println("_ttype = "+namespaceAntlr+"Token::EOF_TYPE;");
			else
				println("_ttype = "+ s.getId().substring(1)+";");
			println("int _saveIndex;");
		}

		if ( grammar.debuggingOutput)
			if (grammar instanceof ParserGrammar)
				println("fireEnterRule(" + ruleNum + ",0);");
			else if (grammar instanceof LexerGrammar)
				println("fireEnterRule(" + ruleNum + ",_ttype);");

		if (grammar instanceof TreeWalkerGrammar) {
			println(labeledElementASTType+" " + s.getId() + "_AST_in = _t;");
		}
		if (grammar.buildAST) {
			println("returnAST = "+labeledElementASTInit+";");
			println(namespaceAntlr+"ASTPair currentAST;");
			println(labeledElementASTType+" " + s.getId() + "_AST = "+labeledElementASTInit+";");
		}

		genBlockPreamble(rblk);
		genBlockInitAction(rblk);
		println("");

		ExceptionSpec unlabeledUserSpec = rblk.findExceptionSpec("");

		if (unlabeledUserSpec != null || rblk.getDefaultErrorHandler() ) {
			println("try { // for error handling");
			tabs++;
		}

		if ( rblk.alternatives.size()==1 )
		{
			Alternative alt = rblk.getAlternativeAt(0);
			String pred = alt.semPred;
			if ( pred != null )
				genSemPred(pred, currentRule.line);
			if (alt.synPred != null) {
				antlrTool.warning(
					"Syntactic predicate ignored for single alternative",
					grammar.getFilename(),
					alt.synPred.getLine(),
					alt.synPred.getColumn()
				);
			}
			genAlt(alt, rblk);
		}
		else
		{
			boolean ok = grammar.theLLkAnalyzer.deterministic(rblk);

			CppBlockFinishingInfo howToFinish = genCommonBlock(rblk, false);
			genBlockFinish(howToFinish, throwNoViable);
		}

		if (unlabeledUserSpec != null || rblk.getDefaultErrorHandler() ) {
			tabs--;
			println("}");
		}

		if (unlabeledUserSpec != null)
		{
			genErrorHandler(unlabeledUserSpec);
		}
		else if (rblk.getDefaultErrorHandler())
		{
			println("catch (" + exceptionThrown + "& ex) {");
			tabs++;
			if (grammar.hasSyntacticPredicate) {
				println("if( inputState->guessing == 0 ) {");
				tabs++;
			}
			println("reportError(ex);");
			if ( !(grammar instanceof TreeWalkerGrammar) )
			{
				Lookahead follow = grammar.theLLkAnalyzer.FOLLOW(1, rblk.endNode);
				String followSetName = getBitsetName(markBitsetForGen(follow.fset));
				println("consume();");
				println("consumeUntil(" + followSetName + ");");
			}
			else
			{
				println("if ( _t != "+labeledElementASTInit+" )");
				tabs++;
				println("_t = _t->getNextSibling();");
				tabs--;
			}
			if (grammar.hasSyntacticPredicate)
			{
				tabs--;
				println("} else {");
				tabs++;
				println("throw ex;");
				tabs--;
				println("}");
			}
			tabs--;
			println("}");
		}

		if (grammar.buildAST) {
			println("returnAST = " + s.getId() + "_AST;");
		}

		if ( grammar instanceof TreeWalkerGrammar ) {
			println("_retTree = _t;");
		}

		if (rblk.getTestLiterals()) {
			if ( s.access.equals("protected") ) {
				genLiteralsTestForPartialToken();
			}
			else {
				genLiteralsTest();
			}
		}

		if ( grammar instanceof LexerGrammar ) {
			println("if ( _createToken && _token=="+namespaceAntlr+"nullToken && _ttype!="+namespaceAntlr+"Token::SKIP ) {");
			println(" _token = makeToken(_ttype);");
			println(" _token->setText(text.substr(_begin, text.length()-_begin));");
			println("}");
			println("_returnToken = _token;");
			println("_saveIndex=0;");
		}

		if (rblk.returnAction != null) {
			println("return " + extractIdOfAction(rblk.returnAction, rblk.getLine(), rblk.getColumn()) + ";");
		}

		tabs--;
		println("}");
		println("");

		genAST = savegenAST;
	}

	public void genRuleHeader(RuleSymbol s, boolean startSymbol) {
		tabs = 1;
		if ( DEBUG_CODE_GENERATOR ) System.out.println("genRuleHeader("+ s.getId() +")");
		if ( !s.isDefined() ) {
			antlrTool.error("undefined rule: "+ s.getId());
			return;
		}

		RuleBlock rblk = s.getBlock();
		currentRule = rblk;
		currentASTResult = s.getId();

		boolean savegenAST = genAST;
		genAST = genAST && rblk.getAutoGen();

		saveText = rblk.getAutoGen();

		print(s.access + ": ");

		if (rblk.returnAction != null)
		{
			_print(extractTypeOfAction(rblk.returnAction, rblk.getLine(), rblk.getColumn()) + " ");
		}
		else {
			_print("void ");
		}

		_print(s.getId() + "(");

		_print(commonExtraParams);
		if (commonExtraParams.length() != 0 && rblk.argAction != null ) {
			_print(",");
		}

		if (rblk.argAction != null)
		{
			_println("");
			tabs++;
			println(rblk.argAction);
			tabs--;
			print(")");
		}
		else {
			_print(")");
		}
		_println(";");

		tabs--;

		genAST = savegenAST;
	}

	private void GenRuleInvocation(RuleRefElement rr) {
		_print(rr.targetRule + "(");

		if ( grammar instanceof LexerGrammar ) {
			if ( rr.getLabel() != null ) {
				_print("true");
			}
			else {
				_print("false");
			}
			if (commonExtraArgs.length() != 0 || rr.args != null ) {
				_print(",");
			}
		}

		_print(commonExtraArgs);
		if (commonExtraArgs.length() != 0 && rr.args != null ) {
			_print(",");
		}

		RuleSymbol rs = (RuleSymbol)grammar.getSymbol(rr.targetRule);
		if (rr.args != null)
		{
			ActionTransInfo tInfo = new ActionTransInfo();
			String args = processActionForTreeSpecifiers(rr.args, rr.line,
				currentRule, tInfo);
			if ( tInfo.assignToRoot || tInfo.refRuleRoot != null )
			{
				antlrTool.error("Arguments of rule reference '" + rr.targetRule + "' cannot set or ref #"+
					currentRule.getRuleName()+" on line "+rr.getLine());
			}
			_print(args);

			if (rs.block.argAction == null)
			{
				antlrTool.warning("Rule '" + rr.targetRule + "' accepts no arguments",
					grammar.getFilename(),
					rr.getLine(), rr.getColumn());
			}
		}
		else
		{
		}
		_println(");");

		if ( grammar instanceof TreeWalkerGrammar ) {
			println("_t = _retTree;");
		}
	}

	protected void genSemPred(String pred, int line) {
		ActionTransInfo tInfo = new ActionTransInfo();
		pred = processActionForTreeSpecifiers(pred, line, currentRule, tInfo);
		String escapedPred = charFormatter.escapeString(pred);

		if (grammar.debuggingOutput && ((grammar instanceof ParserGrammar) ||
			(grammar instanceof LexerGrammar)))
			pred = "fireSemanticPredicateEvaluated(antlr.debug.SemanticPredicateEvent.VALIDATING," + addSemPred(escapedPred) + "," + pred + ")";

		println("if (!(" + pred + "))");
		tabs++;
		println("throw "+namespaceAntlr+"SemanticException(\"" + escapedPred + "\");");
		tabs--;
	}

	protected void genSemPredMap(String prefix) {
		Enumeration e = semPreds.elements();
		println("const char* " + prefix + "_semPredNames[] = {");
		tabs++;
		while(e.hasMoreElements())
			println("\""+e.nextElement()+"\",");
		println("0");
		tabs--;
		println("};");
	}

	protected void genSynPred(SynPredBlock blk, String lookaheadExpr) {
		if ( DEBUG_CODE_GENERATOR ) System.out.println("gen=>("+blk+")");

		println("bool synPredMatched" + blk.ID + " = false;");
		println("if (" + lookaheadExpr + ") {");
		tabs++;

		if ( grammar instanceof TreeWalkerGrammar ) {
			println(labeledElementType + " __t" + blk.ID + " = _t;");
		}
		else {
			println("int _m" + blk.ID + " = mark();");
		}

		println("synPredMatched" + blk.ID + " = true;");
		println("inputState->guessing++;");

		if (grammar.debuggingOutput && ((grammar instanceof ParserGrammar) ||
			(grammar instanceof LexerGrammar))) {
			println("fireSyntacticPredicateStarted();");
		}

		syntacticPredLevel++;
		println("try {");
		tabs++;
		gen((AlternativeBlock)blk);
		tabs--;
		println("}");
		println("catch (" + exceptionThrown + "& pe) {");
		tabs++;
		println("synPredMatched"+blk.ID+" = false;");
		tabs--;
		println("}");

		if ( grammar instanceof TreeWalkerGrammar ) {
			println("_t = __t"+blk.ID+";");
		}
		else {
			println("rewind(_m"+blk.ID+");");
		}

		println("inputState->guessing--;");

		if (grammar.debuggingOutput && ((grammar instanceof ParserGrammar) ||
			(grammar instanceof LexerGrammar))) {
			println("if (synPredMatched" + blk.ID +")");
			println(" fireSyntacticPredicateSucceeded();");
			println("else");
			println(" fireSyntacticPredicateFailed();");
		}

		syntacticPredLevel--;
		tabs--;

		println("}");

		println("if ( synPredMatched"+blk.ID+" ) {");
	}
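	/** Generate the static array of token names for this grammar,
	 *  indexed by token type.
	 */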
	public void genTokenStrings(String prefix) {
		println("const char* " + prefix + "tokenNames[] = {");
		tabs++;

		Vector v = grammar.tokenManager.getVocabulary();
		for (int i = 0; i < v.size(); i++)
		{
			String s = (String)v.elementAt(i);
			if (s == null)
			{
				s = "<"+String.valueOf(i)+">";
			}
			if ( !s.startsWith("\"") && !s.startsWith("<") ) {
				TokenSymbol ts = (TokenSymbol)grammar.tokenManager.getTokenSymbol(s);
				if ( ts != null && ts.getParaphrase() != null ) {
					s = StringUtils.stripFrontBack(ts.getParaphrase(), "\"", "\"");
				}
			}
			print(charFormatter.literalString(s));
			_println(",");
		}
		println("0");

		tabs--;
		println("};");
	}

	/** Generate the token types header file from the given TokenManager. */
	protected void genTokenTypes(TokenManager tm) throws IOException {
		outputFile = tm.getName() + TokenTypesFileSuffix+".hpp";
		outputLine = 1;
		currentOutput = antlrTool.openOutputFile(outputFile);

		tabs = 0;

		println("#ifndef INC_"+tm.getName()+TokenTypesFileSuffix+"_hpp_");
		println("#define INC_"+tm.getName()+TokenTypesFileSuffix+"_hpp_");
		println("");

		if (nameSpace != null)
			nameSpace.emitDeclarations(currentOutput);

		genHeader(outputFile);

		println("struct " + tm.getName() + TokenTypesFileSuffix+" {");
		tabs++;
		println("enum {");
		tabs++;

		Vector v = tm.getVocabulary();

		println("EOF_ = " + Token.EOF_TYPE + ",");

		for (int i = Token.MIN_USER_TYPE; i < v.size(); i++) {
			String s = (String)v.elementAt(i);
			if (s != null) {
				if ( s.startsWith("\"") ) {
					StringLiteralSymbol sl = (StringLiteralSymbol)tm.getTokenSymbol(s);
					if ( sl == null ) {
						antlrTool.panic("String literal "+s+" not in symbol table");
					}
					else if ( sl.label != null ) {
						println(sl.label + " = " + i + ",");
					}
					else {
						String mangledName = mangleLiteral(s);
						if (mangledName != null) {
							println(mangledName + " = " + i + ",");
							sl.label = mangledName;
						}
						else {
							println("// " + s + " = " + i);
						}
					}
				}
				else if ( !s.startsWith("<") ) {
					println(s + " = " + i + ",");
				}
			}
		}

		println("NULL_TREE_LOOKAHEAD = " + Token.NULL_TREE_LOOKAHEAD);

		tabs--;
		println("};");

		tabs--;
		println("};");

		if (nameSpace != null)
			nameSpace.emitClosures(currentOutput);

		println("#endif /*INC_"+tm.getName()+TokenTypesFileSuffix+"_hpp_*/");

		currentOutput.close();
		currentOutput = null;
		exitIfError();
	}

	public String processStringForASTConstructor( String str )
	{
		if( usingCustomAST &&
			((grammar instanceof TreeWalkerGrammar) ||
			 (grammar instanceof ParserGrammar)) &&
			!(grammar.tokenManager.tokenDefined(str) ) )
		{
			return "static_cast<"+namespaceAntlr+"RefAST>("+str+")";
		}
		else
		{
			return str;
		}
	}

	public String getASTCreateString(Vector v) {
		if (v.size() == 0) {
			return "";
		}
		StringBuffer buf = new StringBuffer();
		buf.append(labeledElementASTType+
			"(astFactory->make((new "+namespaceAntlr+
			"ASTArray("+v.size()+"))");
		for (int i = 0; i < v.size(); i++) {
			buf.append("->add("+ v.elementAt(i) + ")");
		}
		buf.append("))");
		return buf.toString();
	}

	public String getASTCreateString(GrammarAtom atom, String str) {
		if ( atom != null && atom.getASTNodeType() != null ) {
			// this atom has a custom AST node type; register its factory
			astTypes.appendElement("astFactory->registerFactory("+
				atom.getType() + ", \""+atom.getASTNodeType()+
				"\", "+atom.getASTNodeType()+"::factory);");

			return "astFactory->create("+str+")";
		}
		else
		{
			// is str a constructor-style call (a defined token id followed by a comma)?
			boolean is_constructor = false;
			if( str.indexOf(',') != -1 )
				is_constructor = grammar.tokenManager.tokenDefined(str.substring(0,str.indexOf(',')));

			if( usingCustomAST &&
				(grammar instanceof TreeWalkerGrammar) &&
				!(grammar.tokenManager.tokenDefined(str) ) &&
				! is_constructor )
				return "astFactory->create(static_cast<"+namespaceAntlr+"RefAST>("+str+"))";
			else
				return "astFactory->create("+str+")";
		}
	}

	public String getASTCreateString(String str) {
		if( usingCustomAST )
			return "static_cast<"+labeledElementASTType+">(astFactory->create(static_cast<"+namespaceAntlr+"RefAST>("+str+")))";
		else
			return "astFactory->create("+str+")";
	}

	protected String getLookaheadTestExpression(Lookahead[] look, int k) {
		StringBuffer e = new StringBuffer(100);
		boolean first = true;

		e.append("(");
		for (int i = 1; i <= k; i++) {
			BitSet p = look[i].fset;
			if (!first) {
				e.append(") && (");
			}
			first = false;

			if (look[i].containsEpsilon()) {
				e.append("true");
			}
			else {
				e.append(getLookaheadTestTerm(i, p));
			}
		}
		e.append(")");

		return e.toString();
	}

	protected String getLookaheadTestExpression(Alternative alt, int maxDepth) {
		int depth = alt.lookaheadDepth;
		if ( depth == GrammarAnalyzer.NONDETERMINISTIC ) {
			depth = grammar.maxk;
		}

		if ( maxDepth == 0 ) {
			return "true";
		}

		return "(" + getLookaheadTestExpression(alt.cache,depth) + ")";
	}

	protected String getLookaheadTestTerm(int k, BitSet p) {
		String ts = lookaheadString(k);

		int[] elems = p.toArray();
		if (elementsAreRange(elems)) {
			return getRangeExpression(k, elems);
		}

		StringBuffer e;
		int degree = p.degree();
		if ( degree == 0 ) {
			return "true";
		}

		if (degree >= bitsetTestThreshold) {
			int bitsetIdx = markBitsetForGen(p);
			return getBitsetName(bitsetIdx) + ".member(" + ts + ")";
		}

		e = new StringBuffer();
		for (int i = 0; i < elems.length; i++) {
			String cs = getValueString(elems[i]);

			if( i > 0 ) e.append(" || ");
			e.append(ts);
			e.append(" == ");
			e.append(cs);
		}
		return e.toString();
	}

	public String getRangeExpression(int k, int[] elems) {
		if (!elementsAreRange(elems)) {
			antlrTool.panic("getRangeExpression called with non-range");
		}
		int begin = elems[0];
		int end = elems[elems.length-1];
		return
			"(" + lookaheadString(k) + " >= " + getValueString(begin) + " && " +
			lookaheadString(k) + " <= " + getValueString(end) + ")";
	}

	private String getValueString(int value) {
		String cs;
		if ( grammar instanceof LexerGrammar ) {
			cs = charFormatter.literalChar(value);
		}
		else
		{
			TokenSymbol ts = grammar.tokenManager.getTokenSymbolAt(value);
			if ( ts == null ) {
				return ""+value;
			}
			String tId = ts.getId();
			if ( ts instanceof StringLiteralSymbol ) {
				StringLiteralSymbol sl = (StringLiteralSymbol)ts;
				String label = sl.getLabel();
				if ( label != null ) {
					cs = label;
				}
				else {
					cs = mangleLiteral(tId);
					if (cs == null) {
						cs = String.valueOf(value);
					}
				}
			}
			else {
				if ( tId.equals("EOF") )
					cs = namespaceAntlr+"Token::EOF_TYPE";
				else
					cs = tId;
			}
		}
		return cs;
	}

	protected boolean lookaheadIsEmpty(Alternative alt, int maxDepth) {
		int depth = alt.lookaheadDepth;
		if ( depth == GrammarAnalyzer.NONDETERMINISTIC ) {
			depth = grammar.maxk;
		}
		for (int i = 1; i <= depth && i <= maxDepth; i++) {
			BitSet p = alt.cache[i].fset;
			if (p.degree() != 0) {
				return false;
			}
		}
		return true;
	}

	private String lookaheadString(int k) {
		if (grammar instanceof TreeWalkerGrammar) {
			return "_t->getType()";
		}
		return "LA(" + k + ")";
	}

	private String mangleLiteral(String s) {
		String mangled = antlrTool.literalsPrefix;
		for (int i = 1; i < s.length()-1; i++) {
			if (!Character.isLetter(s.charAt(i)) &&
				s.charAt(i) != '_') {
				return null;
			}
			mangled += s.charAt(i);
		}
		if ( antlrTool.upperCaseMangledLiterals ) {
			mangled = mangled.toUpperCase();
		}
		return mangled;
	}

	public String mapTreeId(String idParam, ActionTransInfo transInfo) {
		if ( currentRule == null ) return idParam;

		boolean in_var = false;
		String id = idParam;
		if (grammar instanceof TreeWalkerGrammar)
		{
			if ( !grammar.buildAST )
			{
				in_var = true;
			}
			if (id.length() > 3 && id.lastIndexOf("_in") == id.length()-3)
			{
				id = id.substring(0, id.length()-3);
				in_var = true;
			}
		}

		for (int i = 0; i < currentRule.labeledElements.size(); i++)
		{
			AlternativeElement elt = (AlternativeElement)currentRule.labeledElements.elementAt(i);
			if (elt.getLabel().equals(id))
			{
				return in_var ? id : id + "_AST";
			}
		}

		String s = (String)treeVariableMap.get(id);
		if (s != null)
		{
			if (s == NONUNIQUE)
			{
				return null;
			}
			else if (s.equals(currentRule.getRuleName()))
			{
				return null;
			}
			else
			{
				return in_var ? s + "_in" : s;
			}
		}

		if( id.equals(currentRule.getRuleName()) )
		{
			String r = in_var ? id + "_AST_in" : id + "_AST";
			if ( transInfo != null ) {
				if ( !in_var ) {
					transInfo.refRuleRoot = r;
				}
			}
			return r;
		}
		else
		{
			return id;
		}
	}

	private void mapTreeVariable(AlternativeElement e, String name)
	{
		if (e instanceof TreeElement) {
			mapTreeVariable( ((TreeElement)e).root, name);
			return;
		}

		String elName = null;

		if (e.getLabel() == null) {
			if (e instanceof TokenRefElement) {
				elName = ((TokenRefElement)e).atomText;
			}
			else if (e instanceof RuleRefElement) {
				elName = ((RuleRefElement)e).targetRule;
			}
		}
		if (elName != null) {
			if (treeVariableMap.get(elName) != null) {
				// name is not unique; remember that so references to it are rejected
				treeVariableMap.remove(elName);
				treeVariableMap.put(elName, NONUNIQUE);
			}
			else {
				treeVariableMap.put(elName, name);
			}
		}
	}

	/** Run a user action through the C++ action lexer when it may contain
	 *  '#' tree specifiers or '$' references that must be translated.
	 */
	protected String processActionForTreeSpecifiers(String actionStr,
	                                                int line,
	                                                RuleBlock currentRule,
	                                                ActionTransInfo tInfo)
	{
		if ( actionStr == null || actionStr.length() == 0 )
			return null;

		if (grammar == null)
			return actionStr;

		if ((grammar.buildAST && actionStr.indexOf('#') != -1) ||
			grammar instanceof TreeWalkerGrammar ||
			((grammar instanceof LexerGrammar ||
			  grammar instanceof ParserGrammar)
			 && actionStr.indexOf('$') != -1) )
		{
			antlr.actions.cpp.ActionLexer lexer =
				new antlr.actions.cpp.ActionLexer(actionStr, currentRule, this, tInfo);
			lexer.setLineOffset(line);
			lexer.setFilename(grammar.getFilename());
			lexer.setTool(antlrTool);

			try {
				lexer.mACTION(true);
				actionStr = lexer.getTokenObject().getText();
			}
			catch (RecognitionException ex) {
				lexer.reportError(ex);
				return actionStr;
			}
			catch (TokenStreamException tex) {
				antlrTool.panic("Error reading action:"+actionStr);
				return actionStr;
			}
			catch (CharStreamException io) {
				antlrTool.panic("Error reading action:"+actionStr);
				return actionStr;
			}
		}
		return actionStr;
	}

	private String fixNameSpaceOption( String ns )
	{
		ns = StringUtils.stripFrontBack(ns,"\"","\"");
		if( ns.length() > 2 &&
			!ns.substring(ns.length()-2, ns.length()).equals("::") )
			ns += "::";
		return ns;
	}

	private void setupGrammarParameters(Grammar g) {
		if (g instanceof ParserGrammar ||
			g instanceof LexerGrammar ||
			g instanceof TreeWalkerGrammar
			)
		{
			if( antlrTool.nameSpace != null )
				nameSpace = antlrTool.nameSpace;

			if( antlrTool.namespaceStd != null )
				namespaceStd = fixNameSpaceOption(antlrTool.namespaceStd);

			if( antlrTool.namespaceAntlr != null )
				namespaceAntlr = fixNameSpaceOption(antlrTool.namespaceAntlr);

			genHashLines = antlrTool.genHashLines;

			// grammar-level options override the command-line settings
			if( g.hasOption("namespace") ) {
				Token t = g.getOption("namespace");
				if( t != null ) {
					nameSpace = new NameSpace(t.getText());
				}
			}
			if( g.hasOption("namespaceAntlr") ) {
				Token t = g.getOption("namespaceAntlr");
				if( t != null ) {
					String ns = StringUtils.stripFrontBack(t.getText(),"\"","\"");
					if ( ns != null ) {
						if( ns.length() > 2 &&
							!ns.substring(ns.length()-2, ns.length()).equals("::") )
							ns += "::";
						namespaceAntlr = ns;
					}
				}
			}
			if( g.hasOption("namespaceStd") ) {
				Token t = g.getOption("namespaceStd");
				if( t != null )
				{
					String ns = StringUtils.stripFrontBack(t.getText(),"\"","\"");
					if ( ns != null ) {
						if( ns.length() > 2 &&
							!ns.substring(ns.length()-2, ns.length()).equals("::") )
							ns += "::";
						namespaceStd = ns;
					}
				}
			}
			if( g.hasOption("genHashLines") ) {
				Token t = g.getOption("genHashLines");
				if( t != null ) {
					String val = StringUtils.stripFrontBack(t.getText(),"\"","\"");
					genHashLines = val.equals("true");
				}
			}
		}

		if (g instanceof ParserGrammar) {
			labeledElementASTType = namespaceAntlr+"RefAST";
			labeledElementASTInit = namespaceAntlr+"nullAST";
			if ( g.hasOption("ASTLabelType") ) {
				Token tsuffix = g.getOption("ASTLabelType");
				if ( tsuffix != null ) {
					String suffix = StringUtils.stripFrontBack(tsuffix.getText(),"\"","\"");
					if ( suffix != null ) {
						usingCustomAST = true;
						labeledElementASTType = suffix;
						labeledElementASTInit = "static_cast<"+suffix+">("+namespaceAntlr+"nullAST)";
					}
				}
			}
			labeledElementType = namespaceAntlr+"RefToken ";
			labeledElementInit = namespaceAntlr+"nullToken";
			commonExtraArgs = "";
			commonExtraParams = "";
			commonLocalVars = "";
			lt1Value = "LT(1)";
			exceptionThrown = namespaceAntlr+"RecognitionException";
			throwNoViable = "throw "+namespaceAntlr+"NoViableAltException(LT(1), getFilename());";
		}
		else if (g instanceof LexerGrammar) {
			labeledElementType = "char ";
			labeledElementInit = "'\\0'";
			commonExtraArgs = "";
			commonExtraParams = "bool _createToken";
			commonLocalVars = "int _ttype; "+namespaceAntlr+"RefToken _token; int _begin=text.length();";
			lt1Value = "LA(1)";
			exceptionThrown = namespaceAntlr+"RecognitionException";
			throwNoViable = "throw "+namespaceAntlr+"NoViableAltForCharException(LA(1), getFilename(), getLine(), getColumn());";
		}
		else if (g instanceof TreeWalkerGrammar) {
			labeledElementInit = namespaceAntlr+"nullAST";
			labeledElementASTInit = namespaceAntlr+"nullAST";
			labeledElementASTType = namespaceAntlr+"RefAST";
			labeledElementType = namespaceAntlr+"RefAST";
			commonExtraParams = namespaceAntlr+"RefAST _t";
			throwNoViable = "throw "+namespaceAntlr+"NoViableAltException(_t);";
			lt1Value = "_t";
			if ( g.hasOption("ASTLabelType") ) {
				Token tsuffix = g.getOption("ASTLabelType");
				if ( tsuffix != null ) {
					String suffix = StringUtils.stripFrontBack(tsuffix.getText(),"\"","\"");
					if ( suffix != null ) {
						usingCustomAST = true;
						labeledElementASTType = suffix;
						labeledElementType = suffix;
						labeledElementInit = "static_cast<"+suffix+">("+namespaceAntlr+"nullAST)";
						labeledElementASTInit = labeledElementInit;
						commonExtraParams = suffix+" _t";
						throwNoViable = "throw "+namespaceAntlr+"NoViableAltException(static_cast<"+namespaceAntlr+"RefAST>(_t));";
						lt1Value = "_t";
					}
				}
			}
			if ( !g.hasOption("ASTLabelType") ) {
				g.setOption("ASTLabelType", new Token(ANTLRTokenTypes.STRING_LITERAL,namespaceAntlr+"RefAST"));
			}
			commonExtraArgs = "_t";
			commonLocalVars = "";
			exceptionThrown = namespaceAntlr+"RecognitionException";
		}
		else {
			antlrTool.panic("Unknown grammar type");
		}
	}

	private String textOrChar(String text) {
		if (text.startsWith("'")) {
			return charFormatter.literalChar(ANTLRLexer.tokenTypeForCharLiteral(text));
		}
		else
			return text;
	}
}