package persistence.antlr;

import java.util.Enumeration;
import java.util.Hashtable;
import persistence.antlr.collections.impl.BitSet;
import persistence.antlr.collections.impl.Vector;
import java.io.PrintWriter;
import java.io.IOException;
import java.io.FileWriter;

/** Generator of C# (CSharp) code from an ANTLR grammar. */
public class CSharpCodeGenerator extends CodeGenerator {
    // non-zero while generating code inside a syntactic predicate
    protected int syntacticPredLevel = 0;

    // Are we generating ASTs (for parsers and tree parsers) right now?
    protected boolean genAST = false;

    // Are we saving the text consumed (for lexers) right now?
    protected boolean saveText = false;

    // Grammar parameters set up by setupGrammarParameters()
    boolean usingCustomAST = false;
    String labeledElementType;
    String labeledElementASTType;
    String labeledElementInit;
    String commonExtraArgs;
    String commonExtraParams;
    String commonLocalVars;
    String lt1Value;
    String exceptionThrown;
    String throwNoViable;

    // The rule being generated
    RuleBlock currentRule;

    // The name of the current AST result
    String currentASTResult;

    // Mapping between the ids used in the current alt and the names of the
    // variables used to represent their AST values.
    Hashtable treeVariableMap = new Hashtable();

    // AST variables already declared, so they are not declared twice.
    Hashtable declaredASTVariables = new Hashtable();

    // Count of unnamed generated AST variables
    int astVarNumber = 1;

    // Marker for non-unique entries in treeVariableMap
    protected static final String NONUNIQUE = new String();

    public static final int caseSizeThreshold = 127;

    private Vector semPreds;

    private java.util.Vector astTypes;

    private static CSharpNameSpace nameSpace = null;

    // true once _saveIndex has been declared in the current scope
    boolean bSaveIndexCreated = false;

    /** Create a C# code generator. */
    public CSharpCodeGenerator() {
        super();
        charFormatter = new CSharpCharFormatter();
    }

    /** Record a semantic predicate string for the debugging parser's
     *  sem-pred name table; returns its index.
     */
    protected int addSemPred(String predicate) {
        semPreds.appendElement(predicate);
        return semPreds.size()-1;
    }

    public void exitIfError()
    {
        if (antlrTool.hasError())
        {
            antlrTool.fatalError("Exiting due to errors.");
        }
    }

    /** Generate the parser, lexer, tree-parser, and token types in CSharp. */
    public void gen() {
        try {
            // Iterate over all grammars
            Enumeration grammarIter = behavior.grammars.elements();
            while (grammarIter.hasMoreElements()) {
                Grammar g = (Grammar)grammarIter.nextElement();
                g.setGrammarAnalyzer(analyzer);
                g.setCodeGenerator(this);
                analyzer.setGrammar(g);
                setupGrammarParameters(g);
                g.generate();
                exitIfError();
            }

            // Iterate over all token managers (some of which are lexers)
            Enumeration tmIter = behavior.tokenManagers.elements();
            while (tmIter.hasMoreElements()) {
                TokenManager tm = (TokenManager)tmIter.nextElement();
                if (!tm.isReadOnly()) {
                    genTokenTypes(tm);
                    genTokenInterchange(tm);
                }
                exitIfError();
            }
        }
        catch (IOException e) {
            antlrTool.reportException(e, null);
        }
    }

    public void gen(ActionElement action) {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("genAction("+action+")");
        if ( action.isSemPred ) {
            genSemPred(action.actionText, action.line);
        }
        else {
            if ( grammar.hasSyntacticPredicate ) {
                println("if (0==inputState.guessing)");
                println("{");
                tabs++;
            }

            ActionTransInfo tInfo = new ActionTransInfo();
            String actionStr = processActionForSpecialSymbols(action.actionText, action.getLine(), currentRule, tInfo);

            if ( tInfo.refRuleRoot!=null ) {
                // somebody referenced "#rule"; make sure the translated variable is valid
                println(tInfo.refRuleRoot + " = ("+labeledElementASTType+")currentAST.root;");
            }

            printAction(actionStr);

            if ( tInfo.assignToRoot ) {
                // somebody did a "#rule="; reset the internal currentAST
                println("currentAST.root = "+tInfo.refRuleRoot+";");
                println("if ( (null != "+tInfo.refRuleRoot+") && (null != "+tInfo.refRuleRoot+".getFirstChild()) )");
                tabs++;
                println("currentAST.child = "+tInfo.refRuleRoot+".getFirstChild();");
                tabs--;
                println("else");
                tabs++;
                println("currentAST.child = "+tInfo.refRuleRoot+";");
                tabs--;
                println("currentAST.advanceChildToEnd();");
            }

            if ( grammar.hasSyntacticPredicate ) {
                tabs--;
                println("}");
            }
        }
    }

    public void gen(AlternativeBlock blk) {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("gen("+blk+")");
        println("{");
        tabs++;

        genBlockPreamble(blk);
        genBlockInitAction(blk);

        String saveCurrentASTResult = currentASTResult;
        if (blk.getLabel() != null) {
            currentASTResult = blk.getLabel();
        }

        boolean ok = grammar.theLLkAnalyzer.deterministic(blk);

        CSharpBlockFinishingInfo howToFinish = genCommonBlock(blk, true);
        genBlockFinish(howToFinish, throwNoViable);

        tabs--;
        println("}");

        currentASTResult = saveCurrentASTResult;
    }

    public void gen(BlockEndElement end) {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("genRuleEnd("+end+")");
    }

    public void gen(CharLiteralElement atom) {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("genChar("+atom+")");

        if ( atom.getLabel()!=null ) {
            println(atom.getLabel() + " = " + lt1Value + ";");
        }

        boolean oldsaveText = saveText;
        saveText = saveText && atom.getAutoGenType()==GrammarElement.AUTO_GEN_NONE;
        genMatch(atom);
        saveText = oldsaveText;
    }

    public void gen(CharRangeElement r) {
        if ( r.getLabel()!=null && syntacticPredLevel == 0) {
            println(r.getLabel() + " = " + lt1Value + ";");
        }
        boolean flag = ( grammar instanceof LexerGrammar &&
            (!saveText || (r.getAutoGenType() == GrammarElement.AUTO_GEN_BANG)) );
        if (flag)
            println("_saveIndex = text.Length;");

        println("matchRange("+OctalToUnicode(r.beginText)+","+OctalToUnicode(r.endText)+");");

        if (flag)
            println("text.Length = _saveIndex;");
    }

    public void gen(LexerGrammar g) throws IOException {
        if (g.debuggingOutput)
            semPreds = new Vector();

        setGrammar(g);
        if (!(grammar instanceof LexerGrammar)) {
            antlrTool.panic("Internal error generating lexer");
        }
        genBody(g);
    }
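    /* Illustrative sketch (not taken from any real grammar): for a lexer rule
     * containing the char range 'a'..'z' with a '!' suffix, gen(CharRangeElement)
     * above emits roughly the following C#, where the _saveIndex bracketing only
     * appears when the matched text is being discarded:
     *
     *     _saveIndex = text.Length;
     *     matchRange('a','z');
     *     text.Length = _saveIndex;
     */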
    public void gen(OneOrMoreBlock blk) {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("gen+("+blk+")");
        String label;
        String cnt;
        println("{ // ( ... )+");
        genBlockPreamble(blk);
        if ( blk.getLabel() != null ) {
            cnt = "_cnt_"+blk.getLabel();
        }
        else {
            cnt = "_cnt" + blk.ID;
        }
        println("int "+cnt+"=0;");
        if ( blk.getLabel() != null ) {
            label = blk.getLabel();
        }
        else {
            label = "_loop" + blk.ID;
        }

        println("for (;;)");
        println("{");
        tabs++;
        genBlockInitAction(blk);

        String saveCurrentASTResult = currentASTResult;
        if (blk.getLabel() != null) {
            currentASTResult = blk.getLabel();
        }

        boolean ok = grammar.theLLkAnalyzer.deterministic(blk);

        boolean generateNonGreedyExitPath = false;
        int nonGreedyExitDepth = grammar.maxk;

        if ( !blk.greedy &&
            blk.exitLookaheadDepth<=grammar.maxk &&
            blk.exitCache[blk.exitLookaheadDepth].containsEpsilon() )
        {
            generateNonGreedyExitPath = true;
            nonGreedyExitDepth = blk.exitLookaheadDepth;
        }
        else if ( !blk.greedy &&
            blk.exitLookaheadDepth==LLkGrammarAnalyzer.NONDETERMINISTIC )
        {
            generateNonGreedyExitPath = true;
        }

        if ( generateNonGreedyExitPath ) {
            if ( DEBUG_CODE_GENERATOR ) {
                System.out.println("nongreedy (...)+ loop; exit depth is "+ blk.exitLookaheadDepth);
            }
            String predictExit = getLookaheadTestExpression(blk.exitCache, nonGreedyExitDepth);
            println("// nongreedy exit test");
            println("if (("+cnt+" >= 1) && "+predictExit+") goto "+label+"_breakloop;");
        }

        CSharpBlockFinishingInfo howToFinish = genCommonBlock(blk, false);
        genBlockFinish(
            howToFinish,
            "if ("+cnt+" >= 1) { goto "+label+"_breakloop; } else { " + throwNoViable + "; }"
        );

        println(cnt+"++;");
        tabs--;
        println("}");
        _print(label + "_breakloop:");
        println(";");
        println("} // ( ... )+");

        currentASTResult = saveCurrentASTResult;
    }
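    /* Illustrative sketch of the C# loop skeleton that gen(OneOrMoreBlock) above
     * emits for a (...)+ subrule; the "3" in _cnt3/_loop3 stands in for blk.ID and
     * <throwNoViable> for the grammar-specific no-viable-alt throw:
     *
     *     { // ( ... )+
     *     int _cnt3=0;
     *     for (;;)
     *     {
     *         // alternatives produced by genCommonBlock(), then:
     *         if (_cnt3 >= 1) { goto _loop3_breakloop; } else { <throwNoViable>; }
     *         _cnt3++;
     *     }
     *     _loop3_breakloop:
     *     ;
     *     } // ( ... )+
     */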
    public void gen(ParserGrammar g) throws IOException {

        if (g.debuggingOutput)
            semPreds = new Vector();

        setGrammar(g);
        if (!(grammar instanceof ParserGrammar)) {
            antlrTool.panic("Internal error generating parser");
        }
        genBody(g);
    }

    public void gen(RuleRefElement rr)
    {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("genRR("+rr+")");
        RuleSymbol rs = (RuleSymbol)grammar.getSymbol(rr.targetRule);
        if (rs == null || !rs.isDefined())
        {
            antlrTool.error("Rule '" + rr.targetRule + "' is not defined", grammar.getFilename(), rr.getLine(), rr.getColumn());
            return;
        }
        if (!(rs instanceof RuleSymbol))
        {
            antlrTool.error("'" + rr.targetRule + "' does not name a grammar rule", grammar.getFilename(), rr.getLine(), rr.getColumn());
            return;
        }

        genErrorTryForElement(rr);

        if ( grammar instanceof TreeWalkerGrammar &&
            rr.getLabel() != null &&
            syntacticPredLevel == 0 )
        {
            println(rr.getLabel() + " = _t==ASTNULL ? null : "+lt1Value+";");
        }

        if (grammar instanceof LexerGrammar && (!saveText || rr.getAutoGenType() == GrammarElement.AUTO_GEN_BANG))
        {
            declareSaveIndexVariableIfNeeded();
            println("_saveIndex = text.Length;");
        }

        printTabs();
        if (rr.idAssign != null)
        {
            if (rs.block.returnAction == null)
            {
                antlrTool.warning("Rule '" + rr.targetRule + "' has no return type", grammar.getFilename(), rr.getLine(), rr.getColumn());
            }
            _print(rr.idAssign + "=");
        } else {
            if ( !(grammar instanceof LexerGrammar) && syntacticPredLevel == 0 && rs.block.returnAction != null)
            {
                antlrTool.warning("Rule '" + rr.targetRule + "' returns a value", grammar.getFilename(), rr.getLine(), rr.getColumn());
            }
        }

        GenRuleInvocation(rr);

        if ( grammar instanceof LexerGrammar && (!saveText||rr.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) {
            declareSaveIndexVariableIfNeeded();
            println("text.Length = _saveIndex;");
        }

        if (syntacticPredLevel == 0)
        {
            boolean doNoGuessTest = (
                grammar.hasSyntacticPredicate &&
                (
                    grammar.buildAST && rr.getLabel() != null ||
                    (genAST && rr.getAutoGenType() == GrammarElement.AUTO_GEN_NONE)
                )
            );
            if (doNoGuessTest) {
                println("if (0 == inputState.guessing)");
                println("{");
                tabs++;
            }

            if (grammar.buildAST && rr.getLabel() != null)
            {
                println(rr.getLabel() + "_AST = ("+labeledElementASTType+")returnAST;");
            }
            if (genAST)
            {
                switch (rr.getAutoGenType())
                {
                case GrammarElement.AUTO_GEN_NONE:
                    if( usingCustomAST )
                        println("astFactory.addASTChild(currentAST, (AST)returnAST);");
                    else
                        println("astFactory.addASTChild(currentAST, returnAST);");
                    break;
                case GrammarElement.AUTO_GEN_CARET:
                    antlrTool.error("Internal: encountered ^ after rule reference");
                    break;
                default:
                    break;
                }
            }

            if ( grammar instanceof LexerGrammar && rr.getLabel() != null )
            {
                println(rr.getLabel()+" = returnToken_;");
            }

            if (doNoGuessTest)
            {
                tabs--;
                println("}");
            }
        }
        genErrorCatchForElement(rr);
    }

    public void gen(StringLiteralElement atom) {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("genString("+atom+")");

        if (atom.getLabel()!=null && syntacticPredLevel == 0) {
            println(atom.getLabel() + " = " + lt1Value + ";");
        }

        genElementAST(atom);

        boolean oldsaveText = saveText;
        saveText = saveText && atom.getAutoGenType()==GrammarElement.AUTO_GEN_NONE;

        genMatch(atom);

        saveText = oldsaveText;

        if (grammar instanceof TreeWalkerGrammar) {
            println("_t = _t.getNextSibling();");
        }
    }

    public void gen(TokenRangeElement r) {
        genErrorTryForElement(r);
        if ( r.getLabel()!=null && syntacticPredLevel == 0) {
            println(r.getLabel() + " = " + lt1Value + ";");
        }

        genElementAST(r);

        println("matchRange("+OctalToUnicode(r.beginText)+","+OctalToUnicode(r.endText)+");");
        genErrorCatchForElement(r);
    }

    public void gen(TokenRefElement atom) {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("genTokenRef("+atom+")");
        if ( grammar instanceof LexerGrammar ) {
            antlrTool.panic("Token reference found in lexer");
        }
        genErrorTryForElement(atom);
        if ( atom.getLabel()!=null && syntacticPredLevel == 0) {
            println(atom.getLabel() + " = " + lt1Value + ";");
        }

        genElementAST(atom);
        genMatch(atom);
        genErrorCatchForElement(atom);

        if (grammar instanceof TreeWalkerGrammar) {
            println("_t = _t.getNextSibling();");
        }
    }

    public void gen(TreeElement t) {
        println("AST __t" + t.ID + " = _t;");

        if (t.root.getLabel() != null) {
            println(t.root.getLabel() + " = (ASTNULL == _t) ? null : ("+labeledElementASTType +")_t;");
        }

        if ( t.root.getAutoGenType() == GrammarElement.AUTO_GEN_BANG ) {
            antlrTool.error("Suffixing a root node with '!' is not implemented",
                grammar.getFilename(), t.getLine(), t.getColumn());
            t.root.setAutoGenType(GrammarElement.AUTO_GEN_NONE);
        }
        if ( t.root.getAutoGenType() == GrammarElement.AUTO_GEN_CARET ) {
            antlrTool.warning("Suffixing a root node with '^' is redundant; already a root",
                grammar.getFilename(), t.getLine(), t.getColumn());
            t.root.setAutoGenType(GrammarElement.AUTO_GEN_NONE);
        }

        genElementAST(t.root);
        if (grammar.buildAST) {
            println("ASTPair __currentAST" + t.ID + " = currentAST.copy();");
            println("currentAST.root = currentAST.child;");
            println("currentAST.child = null;");
        }

        if ( t.root instanceof WildcardElement ) {
            println("if (null == _t) throw new MismatchedTokenException();");
        }
        else {
            genMatch(t.root);
        }
        println("_t = _t.getFirstChild();");

        for (int i=0; i<t.getAlternatives().size(); i++) {
            Alternative a = t.getAlternativeAt(i);
            AlternativeElement e = a.head;
            while ( e != null ) {
                e.generate();
                e = e.next;
            }
        }

        if (grammar.buildAST) {
            println("currentAST = __currentAST" + t.ID + ";");
        }
        println("_t = __t" + t.ID + ";");
        println("_t = _t.getNextSibling();");
    }

    public void gen(TreeWalkerGrammar g) throws IOException {
        setGrammar(g);
        if (!(grammar instanceof TreeWalkerGrammar)) {
            antlrTool.panic("Internal error generating tree-walker");
        }
        genBody(g);
    }

    public void gen(WildcardElement wc) {
        if (wc.getLabel()!=null && syntacticPredLevel == 0) {
            println(wc.getLabel() + " = " + lt1Value + ";");
        }

        genElementAST(wc);
        if (grammar instanceof TreeWalkerGrammar) {
            println("if (null == _t) throw new MismatchedTokenException();");
        }
        else if (grammar instanceof LexerGrammar) {
            if ( grammar instanceof LexerGrammar &&
                (!saveText||wc.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) {
                declareSaveIndexVariableIfNeeded();
                println("_saveIndex = text.Length;");
            }
            println("matchNot(EOF/*_CHAR*/);");
            if ( grammar instanceof LexerGrammar &&
                (!saveText||wc.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) {
                declareSaveIndexVariableIfNeeded();
                println("text.Length = _saveIndex;");
            }
        }
        else {
            println("matchNot(" + getValueString(Token.EOF_TYPE) + ");");
        }

        if (grammar instanceof TreeWalkerGrammar) {
            println("_t = _t.getNextSibling();");
        }
    }

    public void gen(ZeroOrMoreBlock blk) {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("gen*("+blk+")");
        println("{ // ( ... )*");
        tabs++;
        genBlockPreamble(blk);
        String label;
        if ( blk.getLabel() != null ) {
            label = blk.getLabel();
        }
        else {
            label = "_loop" + blk.ID;
        }
        println("for (;;)");
        println("{");
        tabs++;
        genBlockInitAction(blk);

        String saveCurrentASTResult = currentASTResult;
        if (blk.getLabel() != null) {
            currentASTResult = blk.getLabel();
        }

        boolean ok = grammar.theLLkAnalyzer.deterministic(blk);

        boolean generateNonGreedyExitPath = false;
        int nonGreedyExitDepth = grammar.maxk;

        if ( !blk.greedy &&
            blk.exitLookaheadDepth<=grammar.maxk &&
            blk.exitCache[blk.exitLookaheadDepth].containsEpsilon() )
        {
            generateNonGreedyExitPath = true;
            nonGreedyExitDepth = blk.exitLookaheadDepth;
        }
        else if ( !blk.greedy &&
            blk.exitLookaheadDepth==LLkGrammarAnalyzer.NONDETERMINISTIC )
        {
            generateNonGreedyExitPath = true;
        }
        if ( generateNonGreedyExitPath ) {
            if ( DEBUG_CODE_GENERATOR ) {
                System.out.println("nongreedy (...)* loop; exit depth is "+ blk.exitLookaheadDepth);
            }
            String predictExit = getLookaheadTestExpression(blk.exitCache, nonGreedyExitDepth);
            println("// nongreedy exit test");
            println("if ("+predictExit+") goto "+label+"_breakloop;");
        }

        CSharpBlockFinishingInfo howToFinish = genCommonBlock(blk, false);
        genBlockFinish(howToFinish, "goto " + label + "_breakloop;");

        tabs--;
        println("}");
        _print(label+"_breakloop:");
        println(";");
        tabs--;
        println("} // ( ... )*");

        currentASTResult = saveCurrentASTResult;
    }

    /** Generate a single alternative: its elements, the rule AST assignment,
     *  and any exception handler attached to the alternative.
     */
    protected void genAlt(Alternative alt, AlternativeBlock blk)
    {
        boolean savegenAST = genAST;
        genAST = genAST && alt.getAutoGen();

        boolean oldsaveTest = saveText;
        saveText = saveText && alt.getAutoGen();

        Hashtable saveMap = treeVariableMap;
        treeVariableMap = new Hashtable();

        if (alt.exceptionSpec != null) {
            println("try // for error handling");
            println("{");
            tabs++;
        }

        AlternativeElement elem = alt.head;
        while ( !(elem instanceof BlockEndElement) ) {
            elem.generate();
            elem = elem.next;
        }

        if ( genAST)
        {
            if (blk instanceof RuleBlock)
            {
                RuleBlock rblk = (RuleBlock)blk;
                if( usingCustomAST )
                {
                    println(rblk.getRuleName() + "_AST = ("+labeledElementASTType+")currentAST.root;");
                }
                else
                {
                    println(rblk.getRuleName() + "_AST = currentAST.root;");
                }
            }
            else if (blk.getLabel() != null) {
                antlrTool.warning("Labeled subrules not yet supported", grammar.getFilename(), blk.getLine(), blk.getColumn());
            }
        }

        if (alt.exceptionSpec != null)
        {
            tabs--;
            println("}");
            genErrorHandler(alt.exceptionSpec);
        }

        genAST = savegenAST;
        saveText = oldsaveTest;

        treeVariableMap = saveMap;
    }

    /** Generate all bitsets used by the parser or lexer, ensuring each can
     *  hold at least maxVocabulary.
     */
    protected void genBitsets( Vector bitsetList, int maxVocabulary ) {
        println("");
        for (int i = 0; i < bitsetList.size(); i++)
        {
            BitSet p = (BitSet)bitsetList.elementAt(i);
            p.growToInclude(maxVocabulary);
            genBitSet(p, i);
        }
    }
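    /* Illustrative sketch of what genBitSet() below emits for one bitset
     * (the data values here are made up):
     *
     *     private static long[] mk_tokenSet_0_()
     *     {
     *         long[] data = { 4503599627370498L, 0L };
     *         return data;
     *     }
     *     public static readonly BitSet tokenSet_0_ = new BitSet(mk_tokenSet_0_());
     *
     * For large, mostly-uniform sets the initializer is emitted as element
     * assignments and small for-loops instead of a single array literal.
     */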
    private void genBitSet(BitSet p, int id) {
        println("private static long[] mk_" + getBitsetName(id) + "()");
        println("{");
        tabs++;
        int n = p.lengthInLongWords();
        if ( n<BITSET_OPTIMIZE_INIT_THRESHOLD ) {
            println("long[] data = { " + p.toStringOfWords() + "};");
        }
        else {
            println("long[] data = new long["+n+"];");
            long[] elems = p.toPackedArray();
            for (int i = 0; i < elems.length;) {
                if ( (i+1)==elems.length || elems[i]!=elems[i+1] ) {
                    // last element or no run of equal values: plain assignment
                    println("data["+i+"]="+elems[i]+"L;");
                    i++;
                }
                else
                {
                    // scan to find the end of the run of equal values
                    int j;
                    for (j = i + 1; j < elems.length && elems[j]==elems[i]; j++)
                    {
                        ;
                    }
                    println("for (int i = "+i+"; i<="+(j-1)+"; i++) { data[i]="+ elems[i]+"L; }");
                    i = j;
                }
            }
        }

        println("return data;");
        tabs--;
        println("}");
        println("public static readonly BitSet " + getBitsetName(id) + " = new BitSet(" +
            "mk_" + getBitsetName(id) + "()" + ");");
    }

    /** Name of the i-th generated bitset: tokenSet_<i>_. */
    protected String getBitsetName(int index) {
        return "tokenSet_" + index + "_";
    }

    /** Emit the error clause and postscript that finish a block generated by
     *  genCommonBlock().
     */
    private void genBlockFinish(CSharpBlockFinishingInfo howToFinish, String noViableAction)
    {

        if (howToFinish.needAnErrorClause &&
            (howToFinish.generatedAnIf || howToFinish.generatedSwitch))
        {
            if ( howToFinish.generatedAnIf ) {
                println("else");
                println("{");
            }
            else {
                println("{");
            }
            tabs++;
            println(noViableAction);
            tabs--;
            println("}");
        }

        if ( howToFinish.postscript!=null ) {
            if (howToFinish.needAnErrorClause && howToFinish.generatedSwitch &&
                !howToFinish.generatedAnIf && noViableAction != null)
            {
                // only adjust the postscript when noViableAction is a plain throw or goto
                if (noViableAction.indexOf("throw") == 0 || noViableAction.indexOf("goto") == 0) {
                    // remove the break statement generated by genCommonBlock()
                    int endOfBreak = howToFinish.postscript.indexOf("break;") + 6;
                    String newPostScript = howToFinish.postscript.substring(endOfBreak);
                    println(newPostScript);
                }
                else {
                    println(howToFinish.postscript);
                }
            }
            else {
                println(howToFinish.postscript);
            }
        }
    }

    protected void genBlockInitAction(AlternativeBlock blk)
    {
        if (blk.initAction != null) {
            printAction(processActionForSpecialSymbols(blk.initAction, blk.getLine(), currentRule, null));
        }
    }

    /** Declare the label, Token and *_AST variables needed by a rule block's
     *  labeled elements.
     */
    protected void genBlockPreamble(AlternativeBlock blk) {
        if ( blk instanceof RuleBlock ) {
            RuleBlock rblk = (RuleBlock)blk;
            if ( rblk.labeledElements!=null ) {
                for (int i=0; i<rblk.labeledElements.size(); i++) {

                    AlternativeElement a = (AlternativeElement)rblk.labeledElements.elementAt(i);
                    // Variables for labeled rule refs and subrules are different from
                    // variables for grammar atoms: take rule refs and ebnf blocks, but
                    // not rule blocks or syntactic predicates.
                    if (
                        a instanceof RuleRefElement ||
                        a instanceof AlternativeBlock &&
                        !(a instanceof RuleBlock) &&
                        !(a instanceof SynPredBlock)
                    ) {

                        if (
                            !(a instanceof RuleRefElement) &&
                            ((AlternativeBlock)a).not &&
                            analyzer.subruleCanBeInverted(((AlternativeBlock)a), grammar instanceof LexerGrammar)
                        ) {
                            // Inverted subrules that will be inlined are treated
                            // like token or char literal references.
                            println(labeledElementType + " " + a.getLabel() + " = " + labeledElementInit + ";");
                            if (grammar.buildAST) {
                                genASTDeclaration(a);
                            }
                        }
                        else {
                            if (grammar.buildAST) {
                                genASTDeclaration(a);
                            }
                            if ( grammar instanceof LexerGrammar ) {
                                println("Token "+a.getLabel()+" = null;");
                            }
                            if (grammar instanceof TreeWalkerGrammar) {
                                println(labeledElementType + " " + a.getLabel() + " = " + labeledElementInit + ";");
                            }
                        }
                    }
                    else {
                        // A token or literal reference: generate the correct
                        // variable type for this grammar, plus *_AST if building ASTs.
                        println(labeledElementType + " " + a.getLabel() + " = " + labeledElementInit + ";");
                        if (grammar.buildAST) {
                            if (a instanceof GrammarAtom &&
                                ((GrammarAtom)a).getASTNodeType()!=null ) {
                                GrammarAtom ga = (GrammarAtom)a;
                                genASTDeclaration(a, ga.getASTNodeType());
                            }
                            else {
                                genASTDeclaration(a);
                            }
                        }
                    }
                }
            }
        }
    }

    public void genBody(LexerGrammar g)
        throws IOException
    {
        setupOutput(grammar.getClassName());

        genAST = false;
        saveText = true;

        tabs=0;

        genHeader();
        println(behavior.getHeaderAction(""));

        if (nameSpace != null)
            nameSpace.emitDeclarations(currentOutput);
        tabs++;

        println("// Generate header specific to lexer CSharp file");
        println("using System;");
        println("using Stream = System.IO.Stream;");
        println("using TextReader = System.IO.TextReader;");
        println("using Hashtable = System.Collections.Hashtable;");
        println("using Comparer = System.Collections.Comparer;");
        if ( !(g.caseSensitiveLiterals) )
        {
            println("using CaseInsensitiveHashCodeProvider = System.Collections.CaseInsensitiveHashCodeProvider;");
            println("using CaseInsensitiveComparer = System.Collections.CaseInsensitiveComparer;");
        }
        println("");
        println("using TokenStreamException = persistence.antlr.TokenStreamException;");
        println("using TokenStreamIOException = persistence.antlr.TokenStreamIOException;");
        println("using TokenStreamRecognitionException = persistence.antlr.TokenStreamRecognitionException;");
        println("using CharStreamException = persistence.antlr.CharStreamException;");
        println("using CharStreamIOException = persistence.antlr.CharStreamIOException;");
        println("using ANTLRException = persistence.antlr.ANTLRException;");
        println("using CharScanner = persistence.antlr.CharScanner;");
        println("using InputBuffer = persistence.antlr.InputBuffer;");
        println("using ByteBuffer = persistence.antlr.ByteBuffer;");
        println("using CharBuffer = persistence.antlr.CharBuffer;");
        println("using Token = persistence.antlr.Token;");
        println("using CommonToken = persistence.antlr.CommonToken;");
        println("using SemanticException = persistence.antlr.SemanticException;");
        println("using RecognitionException = persistence.antlr.RecognitionException;");
        println("using NoViableAltForCharException = persistence.antlr.NoViableAltForCharException;");
        println("using MismatchedCharException = persistence.antlr.MismatchedCharException;");
        println("using TokenStream = persistence.antlr.TokenStream;");
        println("using LexerSharedInputState = persistence.antlr.LexerSharedInputState;");
        println("using BitSet = persistence.antlr.collections.impl.BitSet;");

        println(grammar.preambleAction.getText());

        String sup=null;
        if ( grammar.superClass!=null ) {
            sup = grammar.superClass;
        }
        else {
            sup = "persistence.antlr."
                + grammar.getSuperClass();
        }

        if ( grammar.comment!=null )
        {
            _println(grammar.comment);
        }

        Token tprefix = (Token)grammar.options.get("classHeaderPrefix");
        if (tprefix == null) {
            print("public ");
        }
        else {
            String p = StringUtils.stripFrontBack(tprefix.getText(), "\"", "\"");
            if (p == null) {
                print("public ");
            }
            else {
                print(p+" ");
            }
        }

        print("class " + grammar.getClassName() + " : "+sup);
        println(", TokenStream");
        Token tsuffix = (Token)grammar.options.get("classHeaderSuffix");
        if ( tsuffix != null )
        {
            String suffix = StringUtils.stripFrontBack(tsuffix.getText(),"\"","\"");
            if ( suffix != null )
            {
                print(", "+suffix);
            }
        }
        println(" {");
        tabs++;

        genTokenDefinitions(grammar.tokenManager);

        print(
            processActionForSpecialSymbols(grammar.classMemberAction.getText(), grammar.classMemberAction.getLine(), currentRule, null)
        );

        println("public " + grammar.getClassName() + "(Stream ins) : this(new ByteBuffer(ins))");
        println("{");
        println("}");
        println("");

        println("public " + grammar.getClassName() + "(TextReader r) : this(new CharBuffer(r))");
        println("{");
        println("}");
        println("");

        print("public " + grammar.getClassName() + "(InputBuffer ib)");
        if (grammar.debuggingOutput)
            println(" : this(new LexerSharedInputState(new persistence.antlr.debug.DebuggingInputBuffer(ib)))");
        else
            println(" : this(new LexerSharedInputState(ib))");
        println("{");
        println("}");
        println("");

        println("public " + grammar.getClassName() + "(LexerSharedInputState state) : base(state)");
        println("{");
        tabs++;
        println("initialize();");
        tabs--;
        println("}");

        println("private void initialize()");
        println("{");
        tabs++;

        if ( grammar.debuggingOutput ) {
            println("ruleNames = _ruleNames;");
            println("semPredNames = _semPredNames;");
            println("setupDebugging();");
        }

        println("caseSensitiveLiterals = " + g.caseSensitiveLiterals + ";");
        println("setCaseSensitive(" + g.caseSensitive + ");");

        if (g.caseSensitiveLiterals)
            println("literals = new Hashtable(null, Comparer.Default);");
        else
            println("literals = new Hashtable(CaseInsensitiveHashCodeProvider.Default, CaseInsensitiveComparer.Default);");
        Enumeration keys = grammar.tokenManager.getTokenSymbolKeys();
        while ( keys.hasMoreElements() ) {
            String key = (String)keys.nextElement();
            if ( key.charAt(0) != '"' ) {
                continue;
            }
            TokenSymbol sym = grammar.tokenManager.getTokenSymbol(key);
            if ( sym instanceof StringLiteralSymbol ) {
                StringLiteralSymbol s = (StringLiteralSymbol)sym;
                println("literals.Add(" + s.getId() + ", " + s.getTokenType() + ");");
            }
        }

        Enumeration ids;
        tabs--;
        println("}");

        if (grammar.debuggingOutput) {
            println("private const string[] _ruleNames = {");

            ids = grammar.rules.elements();
            int ruleNum=0;
            while ( ids.hasMoreElements() ) {
                GrammarSymbol sym = (GrammarSymbol) ids.nextElement();
                if ( sym instanceof RuleSymbol)
                    println(" \""+((RuleSymbol)sym).getId()+"\",");
            }
            println("};");
        }

        genNextToken();

        ids = grammar.rules.elements();
        int ruleNum=0;
        while ( ids.hasMoreElements() ) {
            RuleSymbol sym =
                (RuleSymbol) ids.nextElement();
            if (!sym.getId().equals("mnextToken")) {
                genRule(sym, false, ruleNum++, grammar.tokenManager);
            }
            exitIfError();
        }

        if (grammar.debuggingOutput)
            genSemPredMap();

        genBitsets(bitsetsUsed, ((LexerGrammar)grammar).charVocabulary.size());

        println("");
        tabs--;
        println("}");

        tabs--;
        if (nameSpace != null)
            nameSpace.emitClosures(currentOutput);

        currentOutput.close();
        currentOutput = null;
    }

    public void genInitFactory( Grammar g ) {
        if( g.buildAST )
        {
            println("static public void initializeASTFactory( ASTFactory factory )");
            println("{");
            tabs++;

            println("factory.setMaxNodeType("+g.tokenManager.maxTokenType()+");");

            Vector v = g.tokenManager.getVocabulary();
            for (int i = 0; i < v.size(); i++) {
                String s = (String)v.elementAt(i);
                if (s != null) {
                    TokenSymbol ts = g.tokenManager.getTokenSymbol(s);
                    if (ts != null && ts.getASTNodeType() != null) {
                        println("factory.setTokenTypeASTNodeType(" + s + ", \"" + ts.getASTNodeType() + "\");");
                    }
                }
            }

            tabs--;
            println("}");
        }
    }

    public void genBody(ParserGrammar g) throws IOException
    {
        setupOutput(grammar.getClassName());

        genAST = grammar.buildAST;

        tabs = 0;

        genHeader();
        println(behavior.getHeaderAction(""));

        if (nameSpace != null)
            nameSpace.emitDeclarations(currentOutput);
        tabs++;

        println("// Generate the header common to all output files.");
        println("using System;");
        println("");
        println("using TokenBuffer = persistence.antlr.TokenBuffer;");
        println("using TokenStreamException = persistence.antlr.TokenStreamException;");
        println("using TokenStreamIOException = persistence.antlr.TokenStreamIOException;");
        println("using ANTLRException = persistence.antlr.ANTLRException;");
        println("using " + grammar.getSuperClass() + " = persistence.antlr." + grammar.getSuperClass() + ";");
        println("using Token = persistence.antlr.Token;");
        println("using TokenStream = persistence.antlr.TokenStream;");
        println("using RecognitionException = persistence.antlr.RecognitionException;");
        println("using NoViableAltException = persistence.antlr.NoViableAltException;");
        println("using MismatchedTokenException = persistence.antlr.MismatchedTokenException;");
        println("using SemanticException = persistence.antlr.SemanticException;");
        println("using ParserSharedInputState = persistence.antlr.ParserSharedInputState;");
        println("using BitSet = persistence.antlr.collections.impl.BitSet;");
        if ( genAST ) {
            println("using AST = persistence.antlr.collections.AST;");
            println("using ASTPair = persistence.antlr.ASTPair;");
            println("using ASTFactory = persistence.antlr.ASTFactory;");
            println("using ASTArray = persistence.antlr.collections.impl.ASTArray;");
        }

        println(grammar.preambleAction.getText());

        String sup=null;
        if ( grammar.superClass != null )
            sup = grammar.superClass;
        else
            sup = "persistence.antlr."
                + grammar.getSuperClass();

        if ( grammar.comment!=null ) {
            _println(grammar.comment);
        }

        Token tprefix = (Token)grammar.options.get("classHeaderPrefix");
        if (tprefix == null) {
            print("public ");
        }
        else {
            String p = StringUtils.stripFrontBack(tprefix.getText(), "\"", "\"");
            if (p == null) {
                print("public ");
            }
            else {
                print(p+" ");
            }
        }

        println("class " + grammar.getClassName() + " : "+sup);

        Token tsuffix = (Token)grammar.options.get("classHeaderSuffix");
        if ( tsuffix != null ) {
            String suffix = StringUtils.stripFrontBack(tsuffix.getText(),"\"","\"");
            if ( suffix != null )
                print(" , "+suffix);
        }
        println("{");
        tabs++;

        genTokenDefinitions(grammar.tokenManager);

        if (grammar.debuggingOutput) {
            println("private const string[] _ruleNames = {");
            tabs++;

            Enumeration ids = grammar.rules.elements();
            int ruleNum=0;
            while ( ids.hasMoreElements() ) {
                GrammarSymbol sym = (GrammarSymbol) ids.nextElement();
                if ( sym instanceof RuleSymbol)
                    println(" \""+((RuleSymbol)sym).getId()+"\",");
            }
            tabs--;
            println("};");
        }

        print(
            processActionForSpecialSymbols(grammar.classMemberAction.getText(), grammar.classMemberAction.getLine(), currentRule, null)
        );

        println("");
        println("protected void initialize()");
        println("{");
        tabs++;
        println("tokenNames = tokenNames_;");

        if( grammar.buildAST )
            println("initializeFactory();");

        if ( grammar.debuggingOutput ) {
            println("ruleNames = _ruleNames;");
            println("semPredNames = _semPredNames;");
            println("setupDebugging(tokenBuf);");
        }
        tabs--;
        println("}");
        println("");

        println("");
        println("protected " + grammar.getClassName() + "(TokenBuffer tokenBuf, int k) : base(tokenBuf, k)");
        println("{");
        tabs++;
        println("initialize();");
        tabs--;
        println("}");
        println("");

        println("public " + grammar.getClassName() + "(TokenBuffer tokenBuf) : this(tokenBuf," + grammar.maxk + ")");
        println("{");
        println("}");
        println("");

        println("protected " + grammar.getClassName()+"(TokenStream lexer, int k) : base(lexer,k)");
        println("{");
        tabs++;
        println("initialize();");
        tabs--;
        println("}");
        println("");

        println("public " + grammar.getClassName()+"(TokenStream lexer) : this(lexer," + grammar.maxk + ")");
        println("{");
        println("}");
        println("");

        println("public " + grammar.getClassName()+"(ParserSharedInputState state) : base(state," + grammar.maxk + ")");
        println("{");
        tabs++;
        println("initialize();");
        tabs--;
        println("}");
        println("");

        astTypes = new java.util.Vector(100);

        Enumeration ids = grammar.rules.elements();
        int ruleNum=0;
        while ( ids.hasMoreElements() ) {
            GrammarSymbol sym = (GrammarSymbol) ids.nextElement();
            if ( sym instanceof RuleSymbol) {
                RuleSymbol rs = (RuleSymbol)sym;
                genRule(rs, rs.references.size()==0, ruleNum++, grammar.tokenManager);
            }
            exitIfError();
        }
        if ( usingCustomAST )
        {
            println("public new " + labeledElementASTType + " getAST()");
            println("{");
            tabs++;
            println("return (" + labeledElementASTType + ") returnAST;");
            tabs--;
            println("}");
            println("");
        }
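        /* The statements below emit roughly the following C# helper (when
         * buildAST is on); with a custom AST type the ASTFactory is constructed
         * with that type's name instead:
         *
         *     private void initializeFactory()
         *     {
         *         if (astFactory == null)
         *         {
         *             astFactory = new ASTFactory();
         *         }
         *         initializeASTFactory( astFactory );
         *     }
         */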
        println("private void initializeFactory()");
        println("{");
        tabs++;
        if( grammar.buildAST ) {
            println("if (astFactory == null)");
            println("{");
            tabs++;
            if( usingCustomAST )
            {
                println("astFactory = new ASTFactory(\"" + labeledElementASTType + "\");");
            }
            else
                println("astFactory = new ASTFactory();");
            tabs--;
            println("}");
            println("initializeASTFactory( astFactory );");
        }
        tabs--;
        println("}");
        genInitFactory( g );

        genTokenStrings();

        genBitsets(bitsetsUsed, grammar.tokenManager.maxTokenType());

        if (grammar.debuggingOutput)
            genSemPredMap();

        println("");
        tabs--;
        println("}");

        tabs--;
        if (nameSpace != null)
            nameSpace.emitClosures(currentOutput);

        currentOutput.close();
        currentOutput = null;
    }

    public void genBody(TreeWalkerGrammar g) throws IOException
    {
        setupOutput(grammar.getClassName());

        genAST = grammar.buildAST;
        tabs = 0;

        genHeader();
        println(behavior.getHeaderAction(""));

        if (nameSpace != null)
            nameSpace.emitDeclarations(currentOutput);
        tabs++;

        println("// Generate header specific to the tree-parser CSharp file");
        println("using System;");
        println("");
        println("using " + grammar.getSuperClass() + " = persistence.antlr." + grammar.getSuperClass() + ";");
        println("using Token = persistence.antlr.Token;");
        println("using AST = persistence.antlr.collections.AST;");
        println("using RecognitionException = persistence.antlr.RecognitionException;");
        println("using ANTLRException = persistence.antlr.ANTLRException;");
        println("using NoViableAltException = persistence.antlr.NoViableAltException;");
        println("using MismatchedTokenException = persistence.antlr.MismatchedTokenException;");
        println("using SemanticException = persistence.antlr.SemanticException;");
        println("using BitSet = persistence.antlr.collections.impl.BitSet;");
        println("using ASTPair = persistence.antlr.ASTPair;");
        println("using ASTFactory = persistence.antlr.ASTFactory;");
        println("using ASTArray = persistence.antlr.collections.impl.ASTArray;");

        println(grammar.preambleAction.getText());

        String sup=null;
        if ( grammar.superClass!=null ) {
            sup = grammar.superClass;
        }
        else {
            sup = "persistence.antlr."
                + grammar.getSuperClass();
        }
        println("");

        if ( grammar.comment!=null ) {
            _println(grammar.comment);
        }

        Token tprefix = (Token)grammar.options.get("classHeaderPrefix");
        if (tprefix == null) {
            print("public ");
        }
        else {
            String p = StringUtils.stripFrontBack(tprefix.getText(), "\"", "\"");
            if (p == null) {
                print("public ");
            }
            else {
                print(p+" ");
            }
        }

        println("class " + grammar.getClassName() + " : "+sup);
        Token tsuffix = (Token)grammar.options.get("classHeaderSuffix");
        if ( tsuffix != null ) {
            String suffix = StringUtils.stripFrontBack(tsuffix.getText(),"\"","\"");
            if ( suffix != null ) {
                print(" , "+suffix);
            }
        }
        println("{");
        tabs++;

        genTokenDefinitions(grammar.tokenManager);

        print(
            processActionForSpecialSymbols(grammar.classMemberAction.getText(), grammar.classMemberAction.getLine(), currentRule, null)
        );

        println("public " + grammar.getClassName() + "()");
        println("{");
        tabs++;
        println("tokenNames = tokenNames_;");
        tabs--;
        println("}");
        println("");

        astTypes = new java.util.Vector();
        Enumeration ids = grammar.rules.elements();
        int ruleNum=0;
        String ruleNameInits = "";
        while ( ids.hasMoreElements() ) {
            GrammarSymbol sym = (GrammarSymbol) ids.nextElement();
            if ( sym instanceof RuleSymbol) {
                RuleSymbol rs = (RuleSymbol)sym;
                genRule(rs, rs.references.size()==0, ruleNum++, grammar.tokenManager);
            }
            exitIfError();
        }

        if ( usingCustomAST )
        {
            println("public new " + labeledElementASTType + " getAST()");
            println("{");
            tabs++;
            println("return (" + labeledElementASTType + ") returnAST;");
            tabs--;
            println("}");
            println("");
        }

        genInitFactory( grammar );

        genTokenStrings();

        genBitsets(bitsetsUsed, grammar.tokenManager.maxTokenType());

        tabs--;
        println("}");
        println("");

        tabs--;
        if (nameSpace != null)
            nameSpace.emitClosures(currentOutput);

        currentOutput.close();
        currentOutput = null;
    }

    /** Emit "case <value>:" labels for every element of the bitset, wrapped
     *  four per line for lexers and one per line otherwise.
     */
    protected void genCases(BitSet p) {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("genCases("+p+")");
        int[] elems;

        elems = p.toArray();
        int wrap = (grammar instanceof LexerGrammar) ?
            4 : 1;
        int j=1;
        boolean startOfLine = true;
        for (int i = 0; i < elems.length; i++) {
            if (j==1) {
                print("");
            } else {
                _print(" ");
            }
            _print("case " + getValueString(elems[i]) + ":");
            if (j==wrap) {
                _println("");
                startOfLine = true;
                j=1;
            }
            else {
                j++;
                startOfLine = false;
            }
        }
        if (!startOfLine) {
            _println("");
        }
    }

    /** Generate the common code for a block of alternatives: an LL(1) switch
     *  where possible, followed by an if/else-if chain for the remaining
     *  alternatives.  Returns the info the caller needs to finish the block.
     */
    public CSharpBlockFinishingInfo genCommonBlock(AlternativeBlock blk, boolean noTestForSingle)
    {
        int nIF=0;
        boolean createdLL1Switch = false;
        int closingBracesOfIFSequence = 0;
        CSharpBlockFinishingInfo finishingInfo = new CSharpBlockFinishingInfo();
        if ( DEBUG_CODE_GENERATOR ) System.out.println("genCommonBlock("+blk+")");

        // Save the AST generation state, and set it to that of the block
        boolean savegenAST = genAST;
        genAST = genAST && blk.getAutoGen();

        boolean oldsaveTest = saveText;
        saveText = saveText && blk.getAutoGen();

        // Is this block inverted?  If so, generate special-case code
        if ( blk.not &&
            analyzer.subruleCanBeInverted(blk, grammar instanceof LexerGrammar) )
        {
            if ( DEBUG_CODE_GENERATOR ) System.out.println("special case: ~(subrule)");
            Lookahead p = analyzer.look(1, blk);
            if (blk.getLabel() != null && syntacticPredLevel == 0) {
                println(blk.getLabel() + " = " + lt1Value + ";");
            }

            genElementAST(blk);

            String astArgs="";
            if (grammar instanceof TreeWalkerGrammar) {
                if ( usingCustomAST )
                    astArgs = "(AST)_t,";
                else
                    astArgs = "_t,";
            }

            println("match(" + astArgs + getBitsetName(markBitsetForGen(p.fset)) + ");");

            if (grammar instanceof TreeWalkerGrammar)
            {
                println("_t = _t.getNextSibling();");
            }
            return finishingInfo;
        }

        // Special handling for single alt
        if (blk.getAlternatives().size() == 1)
        {
            Alternative alt = blk.getAlternativeAt(0);
            if (alt.synPred != null)
            {
                antlrTool.warning(
                    "Syntactic predicate superfluous for single alternative",
                    grammar.getFilename(),
                    blk.getAlternativeAt(0).synPred.getLine(),
                    blk.getAlternativeAt(0).synPred.getColumn()
                );
            }
            if (noTestForSingle)
            {
                if (alt.semPred != null)
                {
                    genSemPred(alt.semPred, blk.line);
                }
                genAlt(alt, blk);
                return finishingInfo;
            }
        }

        // Count the alternatives that are simple LL(1) cases; only generate a
        // switch when there are enough of them.
        int nLL1 = 0;
        for (int i=0; i<blk.getAlternatives().size(); i++)
        {
            Alternative a = blk.getAlternativeAt(i);
            if ( suitableForCaseExpression(a) ) {
                nLL1++;
            }
        }

        // do LL(1) cases
        if ( nLL1 >= makeSwitchThreshold)
        {
            String testExpr = lookaheadString(1);
            createdLL1Switch = true;
            // when parsing trees, convert null to a valid tree node with NULL lookahead
            if ( grammar instanceof TreeWalkerGrammar )
            {
                println("if (null == _t)");
                tabs++;
                println("_t = ASTNULL;");
                tabs--;
            }
            println("switch ( " + testExpr+" )");
            println("{");
            for (int i=0; i<blk.alternatives.size(); i++)
            {
                Alternative alt = blk.getAlternativeAt(i);
                bSaveIndexCreated = false;
                if ( !suitableForCaseExpression(alt) )
                {
                    continue;
                }
                Lookahead p = alt.cache[1];
                if (p.fset.degree() == 0 && !p.containsEpsilon())
                {
                    antlrTool.warning("Alternate omitted due to empty prediction set",
                        grammar.getFilename(),
                        alt.head.getLine(), alt.head.getColumn());
                }
                else
                {
                    genCases(p.fset);
                    println("{");
                    tabs++;
                    genAlt(alt, blk);
                    println("break;");
                    tabs--;
                    println("}");
                }
            }
            println("default:");
            tabs++;
        }
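        /* Sketch of the switch emitted by the code above for the LL(1)
         * alternatives, assuming a parser whose k=1 test expression is LA(1)
         * and hypothetical token names ID and NUM:
         *
         *     switch ( LA(1) )
         *     {
         *     case ID:
         *     case NUM:
         *     {
         *         ...code for the alternative...
         *         break;
         *     }
         *     default:
         *         // non-LL(1) alternatives and/or the error clause follow here
         */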
        // Generate the non-LL(1), predicated and end-of-token alternatives as an
        // if/else-if chain.  In a lexer, alternatives needing deeper lookahead
        // are generated first.
        int startDepth = (grammar instanceof LexerGrammar) ? grammar.maxk : 0;
        for (int altDepth = startDepth; altDepth >= 0; altDepth--) {
            if ( DEBUG_CODE_GENERATOR ) System.out.println("checking depth "+altDepth);
            for (int i=0; i<blk.alternatives.size(); i++) {
                Alternative alt = blk.getAlternativeAt(i);
                if ( DEBUG_CODE_GENERATOR ) System.out.println("genAlt: "+i);
                // skip alternatives already handled by the switch above
                if ( createdLL1Switch && suitableForCaseExpression(alt) )
                {
                    if ( DEBUG_CODE_GENERATOR ) System.out.println("ignoring alt because it was in the switch");
                    continue;
                }
                String e;

                boolean unpredicted = false;

                if (grammar instanceof LexerGrammar) {
                    // Calculate the "effective depth" of the alt, which is the max
                    // depth at which cache[depth]!=end-of-token
                    int effectiveDepth = alt.lookaheadDepth;
                    if (effectiveDepth == GrammarAnalyzer.NONDETERMINISTIC)
                    {
                        // use maximum lookahead
                        effectiveDepth = grammar.maxk;
                    }
                    while ( effectiveDepth >= 1 &&
                        alt.cache[effectiveDepth].containsEpsilon() )
                    {
                        effectiveDepth--;
                    }
                    // Only generate alts whose effective depth matches this iteration.
                    if (effectiveDepth != altDepth)
                    {
                        if ( DEBUG_CODE_GENERATOR )
                            System.out.println("ignoring alt because effectiveDepth!=altDepth;"+effectiveDepth+"!="+altDepth);
                        continue;
                    }
                    unpredicted = lookaheadIsEmpty(alt, effectiveDepth);
                    e = getLookaheadTestExpression(alt, effectiveDepth);
                }
                else
                {
                    unpredicted = lookaheadIsEmpty(alt, grammar.maxk);
                    e = getLookaheadTestExpression(alt, grammar.maxk);
                }

                if (alt.cache[1].fset.degree() > caseSizeThreshold &&
                    suitableForCaseExpression(alt))
                {
                    if ( nIF==0 )
                    {
                        println("if " + e);
                        println("{");
                    }
                    else {
                        println("else if " + e);
                        println("{");
                    }
                }
                else if (unpredicted &&
                    alt.semPred==null &&
                    alt.synPred==null)
                {
                    // The alt has an empty prediction set and no predicate to help
                    // out.  If no previous if was generated, just wrap the
                    // end-of-token clause in braces.
                    if ( nIF==0 ) {
                        println("{");
                    }
                    else {
                        println("else {");
                    }
                    finishingInfo.needAnErrorClause = false;
                }
                else
                {
                    // Add any semantic predicate expression to the lookahead test
                    if ( alt.semPred != null ) {
                        // translate $ and # references
                        ActionTransInfo tInfo = new ActionTransInfo();
                        String actionStr = processActionForSpecialSymbols(alt.semPred, blk.line, currentRule, tInfo);
                        // if debugging, wrap the predicate evaluation in a call that
                        // informs SemanticPredicateListeners of the result
                        if (((grammar instanceof ParserGrammar) || (grammar instanceof LexerGrammar)) &&
                            grammar.debuggingOutput) {
                            e = "("+e+"&& fireSemanticPredicateEvaluated(persistence.antlr.debug.SemanticPredicateEvent.PREDICTING,"+ addSemPred(charFormatter.escapeString(actionStr))+","+actionStr+"))";
                        }
                        else {
                            e = "("+e+"&&("+actionStr +"))";
                        }
                    }

                    // Generate any syntactic predicates
                    if ( nIF>0 ) {
                        if ( alt.synPred != null ) {
                            println("else {");
                            tabs++;
                            genSynPred( alt.synPred, e );
                            closingBracesOfIFSequence++;
                        }
                        else {
                            println("else if " + e + " {");
                        }
                    }
                    else {
                        if ( alt.synPred != null ) {
                            genSynPred( alt.synPred, e );
                        }
                        else {
                            // when parsing trees, convert null to a valid tree node
                            // with NULL lookahead
                            if ( grammar instanceof TreeWalkerGrammar ) {
                                println("if (_t == null)");
                                tabs++;
                                println("_t = ASTNULL;");
                                tabs--;
                            }
                            println("if " + e);
                            println("{");
                        }
                    }

                }

                nIF++;
                tabs++;
                genAlt(alt, blk);
                tabs--;
                println("}");
            }
        }

        String ps = "";
        for (int i=1; i<=closingBracesOfIFSequence; i++) {
            ps+="}";
        }

        // Restore the AST generation state
        genAST = savegenAST;

        // restore save text state
        saveText=oldsaveTest;

        // Return the finishing info.
        if ( createdLL1Switch ) {
            tabs--;
            finishingInfo.postscript = ps+"break; }";
            finishingInfo.generatedSwitch = true;
            finishingInfo.generatedAnIf = nIF>0;
        }
        else {
            finishingInfo.postscript = ps;
            finishingInfo.generatedSwitch = false;
            finishingInfo.generatedAnIf = nIF>0;
        }
        return finishingInfo;
    }

    private static boolean suitableForCaseExpression(Alternative a) {
        return a.lookaheadDepth == 1 &&
            a.semPred == null &&
            !a.cache[1].containsEpsilon() &&
            a.cache[1].fset.degree()<=caseSizeThreshold;
    }

    /** Generate the AST variables (label_AST / tmpN_AST) and tree-construction
     *  calls for a grammar element.
     */
    private void genElementAST(AlternativeElement el) {
        // handle the case where you're not building trees, but are in a tree
        // walker: only labels need to be set up.
        if ( grammar instanceof TreeWalkerGrammar && !grammar.buildAST )
        {
            String elementRef;
            String astName;

            if (el.getLabel() == null)
            {
                elementRef = lt1Value;
                astName = "tmp" + astVarNumber + "_AST";
                astVarNumber++;
                mapTreeVariable(el, astName);
                println(labeledElementASTType+" "+astName+"_in = "+elementRef+";");
            }
            return;
        }

        if (grammar.buildAST && syntacticPredLevel == 0)
        {
            boolean needASTDecl =
                (genAST &&
                (el.getLabel() != null || (el.getAutoGenType() != GrammarElement.AUTO_GEN_BANG)));

            if (el.getAutoGenType() != GrammarElement.AUTO_GEN_BANG &&
                (el instanceof TokenRefElement))
                needASTDecl = true;

            boolean doNoGuessTest = (grammar.hasSyntacticPredicate && needASTDecl);

            String elementRef;
            String astNameBase;

            // Generate names and declarations of the AST variable(s)
            if (el.getLabel() != null)
            {
                elementRef = el.getLabel();
                astNameBase = el.getLabel();
            }
            else
            {
                // generate a temporary name for unlabeled elements
                elementRef = lt1Value;
                astNameBase = "tmp" + astVarNumber;
                astVarNumber++;
            }

            if (needASTDecl)
            {
                if ( el instanceof GrammarAtom )
                {
                    GrammarAtom ga = (GrammarAtom)el;
                    if ( ga.getASTNodeType()!=null )
                    {
                        genASTDeclaration(el, astNameBase, ga.getASTNodeType());
                    }
                    else
                    {
                        genASTDeclaration(el, astNameBase, labeledElementASTType);
                    }
                }
                else
                {
                    genASTDeclaration(el, astNameBase, labeledElementASTType);
                }
            }

            String astName = astNameBase + "_AST";

            mapTreeVariable(el, astName);
            if (grammar instanceof TreeWalkerGrammar)
            {
                // Generate an "input" AST variable also
                println(labeledElementASTType+" " + astName + "_in = null;");
            }


            if (doNoGuessTest) {
                // intentionally empty
            }

            // if something has a label assume it will be used, so initialize the AST ref
            if (el.getLabel() != null)
            {
                if ( el instanceof GrammarAtom )
                {
                    println(astName + " = "+ getASTCreateString((GrammarAtom)el, elementRef) + ";");
                }
                else
                {
                    println(astName + " = "+ getASTCreateString(elementRef) + ";");
                }
            }

            // if it has no label but a declaration exists, initialize it
            if (el.getLabel() == null && needASTDecl)
            {
                elementRef = lt1Value;
                if ( el instanceof GrammarAtom )
                {
                    println(astName + " = "+ getASTCreateString((GrammarAtom)el, elementRef) + ";");
                }
                else
                {
                    println(astName + " = "+ getASTCreateString(elementRef) + ";");
                }
                if (grammar instanceof TreeWalkerGrammar)
                {
                    // set the "input" AST variable also
                    println(astName + "_in = " + elementRef + ";");
                }
            }

            if (genAST)
            {
                switch (el.getAutoGenType())
                {
                case GrammarElement.AUTO_GEN_NONE:
                    if ( usingCustomAST ||
                        ( (el instanceof GrammarAtom) &&
                        (((GrammarAtom)el).getASTNodeType() != null) ) )
                        println("astFactory.addASTChild(currentAST, (AST)" + astName + ");");
                    else
                        println("astFactory.addASTChild(currentAST, " + astName + ");");
                    break;
                case GrammarElement.AUTO_GEN_CARET:
                    if ( usingCustomAST ||
                        ( (el instanceof GrammarAtom) &&
                        (((GrammarAtom)el).getASTNodeType() != null) ) )
                        println("astFactory.makeASTRoot(currentAST, (AST)" + astName + ");");
                    else
                        println("astFactory.makeASTRoot(currentAST, " + astName + ");");
                    break;
                default:
                    break;
                }
            }
            if (doNoGuessTest)
            {
                // intentionally empty
            }
        }
    }

    /** Close the try block and emit catch clauses if the element's label has
     *  an exception handler in the rule.
     */
    private void genErrorCatchForElement(AlternativeElement el) {
        if (el.getLabel() == null) return;
        String r = el.enclosingRuleName;
        if ( grammar instanceof LexerGrammar ) {
            r = CodeGenerator.encodeLexerRuleName(el.enclosingRuleName);
        }
        RuleSymbol rs = (RuleSymbol)grammar.getSymbol(r);
        if (rs == null) {
            antlrTool.panic("Enclosing rule not found!");
        }
        ExceptionSpec ex = rs.block.findExceptionSpec(el.getLabel());
        if (ex != null) {
            tabs--;
            println("}");
            genErrorHandler(ex);
        }
    }

    /** Emit catch clauses for a user-specified exception handler. */
    private void genErrorHandler(ExceptionSpec ex)
    {
        // Each ExceptionSpec contains a list of ExceptionHandlers
        for (int i = 0; i < ex.handlers.size(); i++)
        {
            ExceptionHandler handler = (ExceptionHandler)ex.handlers.elementAt(i);
            println("catch (" + handler.exceptionTypeAndName.getText() + ")");
            println("{");
            tabs++;
            if (grammar.hasSyntacticPredicate) {
                println("if (0 == inputState.guessing)");
                println("{");
                tabs++;
            }

            // When not guessing, execute the user handler action
            ActionTransInfo tInfo = new ActionTransInfo();
            printAction(processActionForSpecialSymbols(handler.action.getText(), handler.action.getLine(), currentRule, tInfo));

            if (grammar.hasSyntacticPredicate)
            {
                tabs--;
                println("}");
                println("else");
                println("{");
                tabs++;
                // When guessing, rethrow the exception
                println("throw;");
                tabs--;
                println("}");
            }
            tabs--;
            println("}");
        }
    }

    /** Open a try block if the element's label has an exception handler in the rule. */
    private void genErrorTryForElement(AlternativeElement el) {
        if (el.getLabel() == null) return;
        String r = el.enclosingRuleName;
        if ( grammar instanceof LexerGrammar ) {
            r = CodeGenerator.encodeLexerRuleName(el.enclosingRuleName);
        }
        RuleSymbol rs = (RuleSymbol)grammar.getSymbol(r);
        if (rs == null) {
            antlrTool.panic("Enclosing rule not found!");
        }
        ExceptionSpec ex = rs.block.findExceptionSpec(el.getLabel());
        if (ex != null) {
            println("try // for error handling");
            println("{");
            tabs++;
        }
    }

    protected void genASTDeclaration(AlternativeElement el)
    {
        genASTDeclaration(el, labeledElementASTType);
    }

    protected void genASTDeclaration(AlternativeElement el, String node_type)
    {
        genASTDeclaration(el, el.getLabel(), node_type);
    }

    protected void genASTDeclaration(AlternativeElement el, String var_name, String node_type)
    {
        // already declared?
        if (declaredASTVariables.contains(el))
            return;

        println(node_type + " " + var_name + "_AST = null;");

        declaredASTVariables.put(el, el);
    }

    /** Emit the "// $ANTLR ..." header common to all generated CSharp files. */
    protected void genHeader()
    {
        println("// $ANTLR "+Tool.version+": "+
            "\"" + antlrTool.fileMinusPath(antlrTool.grammarFile) + "\"" +
            " -> "+
            "\""+grammar.getClassName()+".cs\"$");
    }

    private void genLiteralsTest() {
        println("_ttype = testLiteralsTable(_ttype);");
    }

    private void genLiteralsTestForPartialToken() {
        println("_ttype = testLiteralsTable(text.ToString(_begin, text.Length-_begin), _ttype);");
    }

    protected void genMatch(BitSet b) {
    }
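    /* Illustrative output of the match generation below (token name LPAREN is
     * hypothetical).  A parser emits a plain match by token type; a lexer
     * matches by text and, when the matched text is being discarded, brackets
     * the match with the _saveIndex save/restore:
     *
     *     match(LPAREN);
     *
     *     _saveIndex = text.Length;
     *     match('(');
     *     text.Length = _saveIndex;
     */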
    protected void genMatch(GrammarAtom atom) {
        if ( atom instanceof StringLiteralElement ) {
            if ( grammar instanceof LexerGrammar ) {
                genMatchUsingAtomText(atom);
            }
            else {
                genMatchUsingAtomTokenType(atom);
            }
        }
        else if ( atom instanceof CharLiteralElement ) {
            if ( grammar instanceof LexerGrammar ) {
                genMatchUsingAtomText(atom);
            }
            else {
                antlrTool.error("cannot ref character literals in grammar: "+atom);
            }
        }
        else if ( atom instanceof TokenRefElement ) {
            genMatchUsingAtomText(atom);
        } else if (atom instanceof WildcardElement) {
            gen((WildcardElement)atom);
        }
    }

    protected void genMatchUsingAtomText(GrammarAtom atom) {
        // match() for trees needs the _t cursor
        String astArgs="";
        if (grammar instanceof TreeWalkerGrammar) {
            if ( usingCustomAST )
                astArgs="(AST)_t,";
            else
                astArgs="_t,";
        }

        // if in lexer and ! on element, save buffer index to kill later
        if ( grammar instanceof LexerGrammar && (!saveText||atom.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) {
            declareSaveIndexVariableIfNeeded();
            println("_saveIndex = text.Length;");
        }

        print(atom.not ? "matchNot(" : "match(");
        _print(astArgs);

        // print out what to match
        if (atom.atomText.equals("EOF")) {
            _print("Token.EOF_TYPE");
        }
        else {
            _print(atom.atomText);
        }
        _println(");");

        if ( grammar instanceof LexerGrammar && (!saveText||atom.getAutoGenType()==GrammarElement.AUTO_GEN_BANG) ) {
            declareSaveIndexVariableIfNeeded();
            println("text.Length = _saveIndex;");
        }
    }

    protected void genMatchUsingAtomTokenType(GrammarAtom atom) {
        // match() for trees needs the _t cursor
        String astArgs="";
        if (grammar instanceof TreeWalkerGrammar) {
            if( usingCustomAST )
                astArgs="(AST)_t,";
            else
                astArgs="_t,";
        }

        String mangledName = null;
        String s = astArgs + getValueString(atom.getType());

        println( (atom.not ? "matchNot(" : "match(") + s + ");");
    }

    /** Generate the synthetic nextToken() rule: the implicit OR of all public
     *  lexer rules.
     */
    public void genNextToken() {
        // Are there any public rules?  If not, generate a fake nextToken().
        boolean hasPublicRules = false;
        for (int i = 0; i < grammar.rules.size(); i++) {
            RuleSymbol rs = (RuleSymbol)grammar.rules.elementAt(i);
            if ( rs.isDefined() && rs.access.equals("public") ) {
                hasPublicRules = true;
                break;
            }
        }
        if (!hasPublicRules) {
            println("");
            println("override public Token nextToken()\t\t\t//throws TokenStreamException");
            println("{");
            tabs++;
            println("try");
            println("{");
            tabs++;
            println("uponEOF();");
            tabs--;
            println("}");
            println("catch(CharStreamIOException csioe)");
            println("{");
            tabs++;
            println("throw new TokenStreamIOException(csioe.io);");
            tabs--;
            println("}");
            println("catch(CharStreamException cse)");
            println("{");
            tabs++;
            println("throw new TokenStreamException(cse.Message);");
            tabs--;
            println("}");
            println("return new CommonToken(Token.EOF_TYPE, \"\");");
            tabs--;
            println("}");
            println("");
            return;
        }

        // Create and analyze the synthesized nextToken() rule
        RuleBlock nextTokenBlk = MakeGrammar.createNextTokenRule(grammar, grammar.rules, "nextToken");
        RuleSymbol nextTokenRs = new RuleSymbol("mnextToken");
        nextTokenRs.setDefined();
        nextTokenRs.setBlock(nextTokenBlk);
        nextTokenRs.access = "private";
        grammar.define(nextTokenRs);
        boolean ok = grammar.theLLkAnalyzer.deterministic(nextTokenBlk);

        String filterRule=null;
        if ( ((LexerGrammar)grammar).filterMode ) {
            filterRule = ((LexerGrammar)grammar).filterRule;
        }
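        /* The code below emits a nextToken() of roughly this shape (error
         * handling and filter-mode branches abbreviated):
         *
         *     override public Token nextToken()    //throws TokenStreamException
         *     {
         *         Token theRetToken = null;
         *     tryAgain:
         *         for (;;)
         *         {
         *             Token _token = null;
         *             int _ttype = Token.INVALID_TYPE;
         *             resetText();
         *             try {   // for char stream error handling
         *                 try {   // for lexical error handling
         *                     // one alternative per public lexer rule ...
         *                     if ( null==returnToken_ ) goto tryAgain; // found SKIP token
         *                     _ttype = returnToken_.Type;
         *                     returnToken_.Type = _ttype;
         *                     return returnToken_;
         *                 }
         *                 catch (RecognitionException e) { ... }
         *             }
         *             catch (CharStreamException cse) { ... }
         *         }
         *     }
         */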
        println("for (;;)");
        println("{");
        tabs++;
        println("Token _token = null;");
        println("int _ttype = Token.INVALID_TYPE;");
        if ( ((LexerGrammar)grammar).filterMode ) {
            println("setCommitToPath(false);");
            if ( filterRule!=null ) {
                if (!grammar.isDefined(CodeGenerator.encodeLexerRuleName(filterRule))) {
                    grammar.antlrTool.error("Filter rule " + filterRule + " does not exist in this lexer");
                }
                else {
                    RuleSymbol rs = (RuleSymbol)grammar.getSymbol(CodeGenerator.encodeLexerRuleName(filterRule));
                    if ( !rs.isDefined() ) {
                        grammar.antlrTool.error("Filter rule " + filterRule + " does not exist in this lexer");
                    }
                    else if ( rs.access.equals("public") ) {
                        grammar.antlrTool.error("Filter rule " + filterRule + " must be protected");
                    }
                }
                println("int _m;");
                println("_m = mark();");
            }
        }
        println("resetText();");

        println("try // for char stream error handling");
        println("{");
        tabs++;

        println("try // for lexical error handling");
        println("{");
        tabs++;

        for (int i=0; i<nextTokenBlk.getAlternatives().size(); i++) {
            Alternative a = nextTokenBlk.getAlternativeAt(i);
            if ( a.cache[1].containsEpsilon() ) {
                RuleRefElement rr = (RuleRefElement)a.head;
                String r = CodeGenerator.decodeLexerRuleName(rr.targetRule);
                antlrTool.warning("public lexical rule "+r+" is optional (can match \"nothing\")");
            }
        }

        String newline = System.getProperty("line.separator");
        CSharpBlockFinishingInfo howToFinish = genCommonBlock(nextTokenBlk, false);
        String errFinish = "if (LA(1)==EOF_CHAR) { uponEOF(); returnToken_ = makeToken(Token.EOF_TYPE); }";
        errFinish += newline+"\t\t\t\t";
        if ( ((LexerGrammar)grammar).filterMode ) {
            if ( filterRule==null ) {
                errFinish += "\t\t\t\telse";
                errFinish += "\t\t\t\t{";
                errFinish += "\t\t\t\t\tconsume();";
                errFinish += "\t\t\t\t\tgoto tryAgain;";
                errFinish += "\t\t\t\t}";
            }
            else {
                errFinish += "\t\t\t\t\telse"+newline+
                             "\t\t\t\t\t{"+newline+
                             "\t\t\t\t\tcommit();"+newline+
                             "\t\t\t\t\ttry {m"+filterRule+"(false);}"+newline+
                             "\t\t\t\t\tcatch(RecognitionException e)"+newline+
                             "\t\t\t\t\t{"+newline+
                             "\t\t\t\t\t // catastrophic failure"+newline+
                             "\t\t\t\t\t reportError(e);"+newline+
                             "\t\t\t\t\t consume();"+newline+
                             "\t\t\t\t\t}"+newline+
                             "\t\t\t\t\tgoto tryAgain;"+newline+
                             "\t\t\t\t}";
            }
        }
        else {
            errFinish += "else {"+throwNoViable+"}";
        }
        genBlockFinish(howToFinish, errFinish);

        if ( ((LexerGrammar)grammar).filterMode && filterRule!=null ) {
            println("commit();");
        }

        println("if ( null==returnToken_ ) goto tryAgain; // found SKIP token");
        println("_ttype = returnToken_.Type;");
        if ( ((LexerGrammar)grammar).getTestLiterals()) {
            genLiteralsTest();
        }

        println("returnToken_.Type = _ttype;");
        println("return returnToken_;");

        tabs--;
        println("}");
        println("catch (RecognitionException e) {");
        tabs++;
        if ( ((LexerGrammar)grammar).filterMode ) {
            if ( filterRule==null ) {
                println("if (!getCommitToPath())");
                println("{");
                tabs++;
                println("consume();");
                println("goto tryAgain;");
                tabs--;
                println("}");
            }
            else {
                println("if (!getCommitToPath())");
                println("{");
                tabs++;
                println("rewind(_m);");
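                // filter rule present: the generated handler rewinds the failed
                // speculative match and lets the filter rule consume the input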
                println("resetText();");
                println("try {m"+filterRule+"(false);}");
                println("catch(RecognitionException ee) {");
                println(" // horrendous failure: error in filter rule");
                println(" reportError(ee);");
                println(" consume();");
                println("}");
                tabs--;
                println("}");
                println("else");
            }
        }
        if ( nextTokenBlk.getDefaultErrorHandler() ) {
            println("{");
            tabs++;
            println("reportError(e);");
            println("consume();");
            tabs--;
            println("}");
        }
        else {
            tabs++;
            println("throw new TokenStreamRecognitionException(e);");
            tabs--;
        }
        tabs--;
        println("}");

        tabs--;
        println("}");
        println("catch (CharStreamException cse) {");
        println(" if ( cse is CharStreamIOException ) {");
        println(" throw new TokenStreamIOException(((CharStreamIOException)cse).io);");
        println(" }");
        println(" else {");
        println(" throw new TokenStreamException(cse.Message);");
        println(" }");
        println("}");

        tabs--;
        println("}");

        tabs--;
        println("}");
        println("");
    }

    // Generate the C# method for a single grammar rule.
    public void genRule(RuleSymbol s, boolean startSymbol, int ruleNum, TokenManager tm) {
        tabs=1;
        if ( DEBUG_CODE_GENERATOR ) System.out.println("genRule("+ s.getId() +")");
        if ( !s.isDefined() ) {
            antlrTool.error("undefined rule: "+ s.getId());
            return;
        }

        RuleBlock rblk = s.getBlock();
        currentRule = rblk;
        currentASTResult = s.getId();

        declaredASTVariables.clear();

        boolean savegenAST = genAST;
        genAST = genAST && rblk.getAutoGen();

        saveText = rblk.getAutoGen();

        if ( s.comment!=null ) {
            _println(s.comment);
        }

        print(s.access + " ");

        // the return type comes from the rule's return action, if any
        if (rblk.returnAction != null)
        {
            _print(extractTypeOfAction(rblk.returnAction, rblk.getLine(), rblk.getColumn()) + " ");
        } else {
            _print("void ");
        }

        _print(s.getId() + "(");

        _print(commonExtraParams);
        if (commonExtraParams.length() != 0 && rblk.argAction != null ) {
            _print(",");
        }

        if (rblk.argAction != null)
        {
            _println("");
            tabs++;
            println(rblk.argAction);
            tabs--;
            print(")");
        }
        else {
            _print(")");
        }

        // C# has no throws clause, so emit the exceptions as a trailing comment
        _print(" //throws " + exceptionThrown);
        if ( grammar instanceof ParserGrammar ) {
            _print(", TokenStreamException");
        }
        else if ( grammar instanceof LexerGrammar ) {
            _print(", CharStreamException, TokenStreamException");
        }
        if ( rblk.throwsSpec!=null ) {
            if ( grammar instanceof LexerGrammar ) {
                antlrTool.error("user-defined throws spec not allowed (yet) for lexer rule "+rblk.ruleName);
            }
            else {
                _print(", "+rblk.throwsSpec);
            }
        }

        _println("");
        _println("{");
        tabs++;

        if (rblk.returnAction != null)
            println(rblk.returnAction + ";");

        println(commonLocalVars);

        if (grammar.traceRules) {
            if ( grammar instanceof TreeWalkerGrammar ) {
                if ( usingCustomAST )
                    println("traceIn(\""+ s.getId() +"\",(AST)_t);");
                else
                    println("traceIn(\""+ s.getId() +"\",_t);");
            }
            else {
                println("traceIn(\""+ s.getId() +"\");");
            }
        }

        if ( grammar instanceof LexerGrammar ) {
            // a lexer rule presets _ttype to its own token type
            if (s.getId().equals("mEOF"))
                println("_ttype = Token.EOF_TYPE;");
            else
                println("_ttype = " + s.getId().substring(1)+";");

            bSaveIndexCreated = false;
        }

        // debugging hook: announce rule entry
        if ( grammar.debuggingOutput)
            if (grammar instanceof ParserGrammar)
                println("fireEnterRule(" + ruleNum + ",0);");
            else if (grammar instanceof LexerGrammar)
                println("fireEnterRule(" + ruleNum + ",_ttype);");

        if ( grammar.debuggingOutput || grammar.traceRules) {
            println("try { // debugging");
            tabs++;
        }

        if (grammar instanceof TreeWalkerGrammar) {
            println(labeledElementASTType+" " + s.getId() + "_AST_in = ("+labeledElementASTType+")_t;");
        }
        if (grammar.buildAST) {
            println("returnAST = null;");
            println("ASTPair currentAST = new ASTPair();");
            println(labeledElementASTType+" " + s.getId() + "_AST = null;");
        }

        genBlockPreamble(rblk);
        genBlockInitAction(rblk);
        println("");

        // look for an unlabeled exception handler attached to this rule
        ExceptionSpec unlabeledUserSpec = rblk.findExceptionSpec("");

        if (unlabeledUserSpec != null || rblk.getDefaultErrorHandler() ) {
            println("try { // for error handling");
            tabs++;
        }

        if ( rblk.alternatives.size()==1 )
        {
            // single alternative: generate it inline
            Alternative alt = rblk.getAlternativeAt(0);
            String pred = alt.semPred;
            if ( pred!=null )
                genSemPred(pred, currentRule.line);
            if (alt.synPred != null) {
                antlrTool.warning(
                    "Syntactic predicate ignored for single alternative",
                    grammar.getFilename(), alt.synPred.getLine(), alt.synPred.getColumn()
                );
            }
            genAlt(alt, rblk);
        }
        else
        {
            // multiple alternatives: generate the full prediction block
            boolean ok = grammar.theLLkAnalyzer.deterministic(rblk);

            CSharpBlockFinishingInfo howToFinish = genCommonBlock(rblk, false);
            genBlockFinish(howToFinish, throwNoViable);
        }

        if (unlabeledUserSpec != null || rblk.getDefaultErrorHandler() ) {
            tabs--;
            println("}");
        }

        if (unlabeledUserSpec != null)
        {
            genErrorHandler(unlabeledUserSpec);
        }
        else if (rblk.getDefaultErrorHandler())
        {
            // default error handling: report, then consume until something
            // in the rule's follow set
            println("catch (" + exceptionThrown + " ex)");
            println("{");
            tabs++;
            if (grammar.hasSyntacticPredicate) {
                println("if (0 == inputState.guessing)");
                println("{");
                tabs++;
            }
            println("reportError(ex);");
            if ( !(grammar instanceof TreeWalkerGrammar) )
            {
                Lookahead follow = grammar.theLLkAnalyzer.FOLLOW(1, rblk.endNode);
                String followSetName = getBitsetName(markBitsetForGen(follow.fset));
                println("consume();");
                println("consumeUntil(" + followSetName + ");");
            }
            else
            {
                println("if (null != _t)");
                println("{");
                tabs++;
                println("_t = _t.getNextSibling();");
                tabs--;
                println("}");
            }
            if (grammar.hasSyntacticPredicate)
            {
                tabs--;
                println("}");
                println("else");
                println("{");
                tabs++;
                println("throw;");
                tabs--;
                println("}");
            }
            tabs--;
            println("}");
        }

        if (grammar.buildAST) {
            println("returnAST = " + s.getId() + "_AST;");
        }

        if ( grammar instanceof TreeWalkerGrammar ) {
            println("retTree_ = _t;");
        }

        if (rblk.getTestLiterals()) {
            if ( s.access.equals("protected") ) {
                genLiteralsTestForPartialToken();
            }
            else {
                genLiteralsTest();
            }
        }

        if ( grammar instanceof LexerGrammar ) {
            println("if (_createToken && (null == _token) && (_ttype != Token.SKIP))");
            println("{");
            tabs++;
            println("_token = makeToken(_ttype);");
            println("_token.setText(text.ToString(_begin, text.Length-_begin));");
            tabs--;
            println("}");
            println("returnToken_ = _token;");
        }

        if (rblk.returnAction != null) {
            println("return " + extractIdOfAction(rblk.returnAction, rblk.getLine(), rblk.getColumn()) + ";");
        }

        if ( grammar.debuggingOutput || grammar.traceRules) {
            tabs--;
            println("}");
            println("finally");
            println("{ // debugging");
            tabs++;

            if ( grammar.debuggingOutput)
                if (grammar instanceof ParserGrammar)
                    println("fireExitRule(" + ruleNum + ",0);");
                else if (grammar instanceof LexerGrammar)
                    println("fireExitRule(" + ruleNum + ",_ttype);");

            if (grammar.traceRules) {
                if ( grammar instanceof TreeWalkerGrammar ) {
                    println("traceOut(\""+ s.getId() +"\",_t);");
                }
                else {
                    println("traceOut(\""+ s.getId() +"\");");
                }
            }

            tabs--;
            println("}");
        }

        tabs--;
        println("}");
        println("");

        // restore the AST generation state
        genAST = savegenAST;
    }

    // Generate the method call for a rule reference.
    private void GenRuleInvocation(RuleRefElement rr) {
        _print(rr.targetRule + "(");

        // lexer rules take a _createToken flag; pass true when the reference is labeled
        if ( grammar instanceof LexerGrammar ) {
            if ( rr.getLabel() != null ) {
                _print("true");
            }
            else {
                _print("false");
            }
            if (commonExtraArgs.length() != 0 || rr.args!=null ) {
                _print(",");
            }
        }

        _print(commonExtraArgs);
        if (commonExtraArgs.length() != 0 && rr.args!=null ) {
            _print(",");
        }

        RuleSymbol rs = (RuleSymbol)grammar.getSymbol(rr.targetRule);
        if (rr.args != null)
        {
            ActionTransInfo tInfo = new ActionTransInfo();
            String args = processActionForSpecialSymbols(rr.args, 0, currentRule, tInfo);
            if ( tInfo.assignToRoot || tInfo.refRuleRoot!=null )
            {
                antlrTool.error("Arguments of rule reference '" + rr.targetRule + "' cannot set or ref #" +
                                currentRule.getRuleName(), grammar.getFilename(), rr.getLine(), rr.getColumn());
            }
            _print(args);

            if (rs.block.argAction == null)
            {
                antlrTool.warning("Rule '" + rr.targetRule + "' accepts no arguments", grammar.getFilename(), rr.getLine(), rr.getColumn());
            }
        }
        else
        {
            if (rs.block.argAction != null)
            {
                antlrTool.warning("Missing parameters on reference to rule " + rr.targetRule, grammar.getFilename(), rr.getLine(), rr.getColumn());
            }
        }
        _println(");");

        if ( grammar instanceof TreeWalkerGrammar ) {
            println("_t = retTree_;");
        }
    }

    // Generate a validating semantic predicate check.
    protected void genSemPred(String pred, int line) {
        // translate $ and # references in the predicate text
        ActionTransInfo tInfo = new ActionTransInfo();
        pred = processActionForSpecialSymbols(pred, line, currentRule, tInfo);
        String escapedPred = charFormatter.escapeString(pred);

        // if debugging, wrap the evaluation so listeners are notified of the result
        if (grammar.debuggingOutput && ((grammar instanceof ParserGrammar) || (grammar instanceof LexerGrammar)))
            pred = "fireSemanticPredicateEvaluated(persistence.antlr.debug.SemanticPredicateEvent.VALIDATING,"
                + addSemPred(escapedPred) + "," + pred + ")";
        println("if (!(" + pred + "))");
        println(" throw new SemanticException(\"" + escapedPred + "\");");
    }

    // Emit the array of predicate strings used by the debugger.
    protected void genSemPredMap() {
        Enumeration e = semPreds.elements();
        println("private string[] _semPredNames = {");
        tabs++;
        while(e.hasMoreElements())
            println("\""+e.nextElement()+"\",");
        tabs--;
        println("};");
    }

    // Generate the guessing code for a syntactic predicate.
    protected void genSynPred(SynPredBlock blk, String lookaheadExpr) {
        if ( DEBUG_CODE_GENERATOR ) System.out.println("gen=>("+blk+")");

        println("bool synPredMatched" + blk.ID + " = false;");
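        // what follows generates the speculative match: mark()/rewind() for
        // parsers and lexers, or saving and restoring _t for tree walkers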
        println("if (" + lookaheadExpr + ")");
        println("{");
        tabs++;

        // save input state
        if ( grammar instanceof TreeWalkerGrammar ) {
            println("AST __t" + blk.ID + " = _t;");
        }
        else {
            println("int _m" + blk.ID + " = mark();");
        }

        println("synPredMatched" + blk.ID + " = true;");
        println("inputState.guessing++;");

        if (grammar.debuggingOutput && ((grammar instanceof ParserGrammar) ||
            (grammar instanceof LexerGrammar))) {
            println("fireSyntacticPredicateStarted();");
        }

        syntacticPredLevel++;
        println("try {");
        tabs++;
        gen((AlternativeBlock)blk);
        tabs--;
        println("}");
        println("catch (" + exceptionThrown + ")");
        println("{");
        tabs++;
        println("synPredMatched"+blk.ID+" = false;");
        tabs--;
        println("}");

        // restore input state
        if ( grammar instanceof TreeWalkerGrammar ) {
            println("_t = __t"+blk.ID+";");
        }
        else {
            println("rewind(_m"+blk.ID+");");
        }

        println("inputState.guessing--;");

        if (grammar.debuggingOutput && ((grammar instanceof ParserGrammar) ||
            (grammar instanceof LexerGrammar))) {
            println("if (synPredMatched" + blk.ID +")");
            println(" fireSyntacticPredicateSucceeded();");
            println("else");
            println(" fireSyntacticPredicateFailed();");
        }

        syntacticPredLevel--;
        tabs--;

        println("}");

        println("if ( synPredMatched"+blk.ID+" )");
        println("{");
    }

    // Generate the static tokenNames_ array used in error messages.
    public void genTokenStrings() {
        println("");
        println("public static readonly string[] tokenNames_ = new string[] {");
        tabs++;

        Vector v = grammar.tokenManager.getVocabulary();
        for (int i = 0; i < v.size(); i++)
        {
            String s = (String)v.elementAt(i);
            if (s == null)
            {
                s = "<"+String.valueOf(i)+">";
            }
            if ( !s.startsWith("\"") && !s.startsWith("<") ) {
                TokenSymbol ts = (TokenSymbol)grammar.tokenManager.getTokenSymbol(s);
                if ( ts!=null && ts.getParaphrase()!=null ) {
                    s = StringUtils.stripFrontBack(ts.getParaphrase(), "\"", "\"");
                }
            }
            else if (s.startsWith("\"")) {
                s = StringUtils.stripFrontBack(s, "\"", "\"");
            }
            print(charFormatter.literalString(s));
            if (i != v.size()-1) {
                _print(",");
            }
            _println("");
        }

        tabs--;
        println("};");
    }

    // Generate the token types class in its own output file.
    protected void genTokenTypes(TokenManager tm) throws IOException {
        setupOutput(tm.getName() + TokenTypesFileSuffix);

        tabs = 0;

        genHeader();
        println(behavior.getHeaderAction(""));

        if (nameSpace != null)
            nameSpace.emitDeclarations(currentOutput);
        tabs++;

        println("public class " + tm.getName() + TokenTypesFileSuffix);
        println("{");
        tabs++;

        genTokenDefinitions(tm);

        tabs--;
        println("}");

        tabs--;
        if (nameSpace != null)
            nameSpace.emitClosures(currentOutput);

        currentOutput.close();
        currentOutput = null;
        exitIfError();
    }

    // Emit a public const int declaration for each defined token type.
    protected void genTokenDefinitions(TokenManager tm) throws IOException {
        Vector v = tm.getVocabulary();

        println("public const int EOF = " + Token.EOF_TYPE + ";");
        println("public const int NULL_TREE_LOOKAHEAD = " + Token.NULL_TREE_LOOKAHEAD + ";");

        for (int i = Token.MIN_USER_TYPE; i < v.size(); i++) {
            String s = (String)v.elementAt(i);
            if (s != null) {
                if ( s.startsWith("\"") ) {
                    StringLiteralSymbol sl =
                        (StringLiteralSymbol)tm.getTokenSymbol(s);
                    if ( sl==null ) {
                        antlrTool.panic("String literal " + s + " not in symbol table");
                    }
                    else if ( sl.label != null ) {
                        println("public const int " + sl.label + " = " + i + ";");
                    }
                    else {
                        String mangledName = mangleLiteral(s);
                        if (mangledName != null) {
                            println("public const int " + mangledName + " = " + i + ";");
                            sl.label = mangledName;
                        }
                        else {
                            println("// " + s + " = " + i);
                        }
                    }
                }
                else if ( !s.startsWith("<") ) {
                    println("public const int " + s + " = " + i + ";");
                }
            }
        }
        println("");
    }

    public String processStringForASTConstructor( String str )
    {
        if( usingCustomAST &&
            ( (grammar instanceof TreeWalkerGrammar) ||
              (grammar instanceof ParserGrammar) ) &&
            !(grammar.tokenManager.tokenDefined(str)) )
        {
            return "(AST)"+str;
        }
        else
        {
            return str;
        }
    }

    public String getASTCreateString(Vector v) {
        if (v.size() == 0) {
            return "";
        }
        StringBuffer buf = new StringBuffer();
        buf.append("("+labeledElementASTType+
                   ")astFactory.make( (new ASTArray(" + v.size() +
                   "))");
        for (int i = 0; i < v.size(); i++) {
            buf.append(".add(" + v.elementAt(i) + ")");
        }
        buf.append(")");
        return buf.toString();
    }

    public String getASTCreateString(GrammarAtom atom, String astCtorArgs) {
        String astCreateString = "astFactory.create(" + astCtorArgs + ")";

        if (atom == null)
            return getASTCreateString(astCtorArgs);
        else {
            if ( atom.getASTNodeType() != null ) {
                TokenSymbol ts = grammar.tokenManager.getTokenSymbol(atom.getText());
                if ( (ts == null) || (ts.getASTNodeType() != atom.getASTNodeType()) )
                    astCreateString = "(" + atom.getASTNodeType() + ") astFactory.create(" + astCtorArgs + ", \"" + atom.getASTNodeType() + "\")";
                else if ( (ts != null) && (ts.getASTNodeType() != null) )
                    astCreateString = "(" + ts.getASTNodeType() + ") " + astCreateString;
            }
            else if ( usingCustomAST )
                astCreateString = "(" + labeledElementASTType + ") " + astCreateString;
        }
        return astCreateString;
    }

    public String getASTCreateString(String astCtorArgs) {
        if ( astCtorArgs==null ) {
            astCtorArgs = "";
        }
        String astCreateString = "astFactory.create(" + astCtorArgs + ")";
        String ctorID = astCtorArgs;
        String ctorText = null;
        int commaIndex;
        boolean ctorIncludesCustomType = false;

        commaIndex = astCtorArgs.indexOf(',');
        if ( commaIndex != -1 ) {
            ctorID = astCtorArgs.substring(0, commaIndex);
            ctorText = astCtorArgs.substring(commaIndex+1, astCtorArgs.length());
            commaIndex = ctorText.indexOf(',');
            if (commaIndex != -1 ) {
                ctorIncludesCustomType = true;
            }
        }
        TokenSymbol ts = grammar.tokenManager.getTokenSymbol(ctorID);
        if ( (null != ts) && (null != ts.getASTNodeType()) )
            astCreateString = "(" + ts.getASTNodeType() + ") " + astCreateString;
        else if ( usingCustomAST )
            astCreateString = "(" + labeledElementASTType + ") " + astCreateString;

        return astCreateString;
    }

    protected String getLookaheadTestExpression(Lookahead[] look, int k) {
        StringBuffer e = new StringBuffer(100);
        boolean first = true;

        e.append("(");
        for (int i = 1; i <= k; i++) {
            BitSet p = look[i].fset;
            if (!first) {
                e.append(") && (");
            }
            first = false;

            if (look[i].containsEpsilon()) {
                e.append("true");
            } else {
                e.append(getLookaheadTestTerm(i, p));
            }
        }
        e.append(")");

        return e.toString();
    }

    protected String getLookaheadTestExpression(Alternative alt, int maxDepth) {
        int depth = alt.lookaheadDepth;
        if ( depth == GrammarAnalyzer.NONDETERMINISTIC ) {
            depth = grammar.maxk;
        }

        if ( maxDepth==0 ) {
            return "( true )";
        }
        return "(" + getLookaheadTestExpression(alt.cache,depth) + ")";
    }

    protected String getLookaheadTestTerm(int k, BitSet p) {
        String ts = lookaheadString(k);

        int[] elems = p.toArray();
        if (elementsAreRange(elems)) {
            return getRangeExpression(k, elems);
        }

        StringBuffer e;
        int degree = p.degree();
        if ( degree == 0 ) {
            return "true";
        }

        if (degree >= bitsetTestThreshold) {
            int bitsetIdx = markBitsetForGen(p);
            return getBitsetName(bitsetIdx) + ".member(" + ts + ")";
        }

        e = new StringBuffer();
        for (int i = 0; i < elems.length; i++) {
            String cs = getValueString(elems[i]);

            if ( i>0 ) e.append("||");
            e.append(ts);
            e.append("==");
            e.append(cs);
        }
        return e.toString();
    }

    public String getRangeExpression(int k, int[] elems) {
        if (!elementsAreRange(elems)) {
            antlrTool.panic("getRangeExpression called with non-range");
        }
        int begin = elems[0];
        int end = elems[elems.length-1];

        return
            "(" + lookaheadString(k) + " >= " + getValueString(begin) + " && " +
            lookaheadString(k) + " <= " + getValueString(end) + ")";
    }

    private String getValueString(int value) {
        String cs;
        if ( grammar instanceof LexerGrammar ) {
            cs = charFormatter.literalChar(value);
        }
        else
        {
            TokenSymbol ts = grammar.tokenManager.getTokenSymbolAt(value);
            if ( ts == null ) {
                return ""+value;
            }
            String tId = ts.getId();
            if ( ts instanceof StringLiteralSymbol ) {
                StringLiteralSymbol sl = (StringLiteralSymbol)ts;
                String label = sl.getLabel();
                if ( label!=null ) {
                    cs = label;
                }
                else {
                    cs = mangleLiteral(tId);
                    if (cs == null) {
                        cs = String.valueOf(value);
                    }
                }
            }
            else {
                cs = tId;
            }
        }
        return cs;
    }

    protected boolean lookaheadIsEmpty(Alternative alt, int maxDepth) {
        int depth = alt.lookaheadDepth;
        if ( depth == GrammarAnalyzer.NONDETERMINISTIC ) {
            depth = grammar.maxk;
        }
        for (int i=1; i<=depth && i<=maxDepth; i++) {
            BitSet p = alt.cache[i].fset;
            if (p.degree() != 0) {
                return false;
            }
        }
        return true;
    }

    private String lookaheadString(int k) {
        if (grammar instanceof TreeWalkerGrammar) {
            return "_t.Type";
        }
        return "LA(" + k + ")";
    }

    private String mangleLiteral(String s) {
        String mangled = antlrTool.literalsPrefix;
        for (int i = 1; i < s.length()-1; i++) {
            if (!Character.isLetter(s.charAt(i)) &&
                s.charAt(i) != '_') {
                return null;
            }
            mangled += s.charAt(i);
        }
        if ( antlrTool.upperCaseMangledLiterals ) {
            mangled = mangled.toUpperCase();
        }
        return mangled;
    }

    public String mapTreeId(String idParam, ActionTransInfo transInfo) {
        if ( currentRule==null ) return idParam;

        boolean in_var = false;
        String id = idParam;
        if (grammar instanceof TreeWalkerGrammar)
        {
            if ( !grammar.buildAST )
            {
                in_var = true;
            }
            else if (id.length() > 3 && id.lastIndexOf("_in") == id.length()-3)
            {
                id = id.substring(0, id.length()-3);
                in_var = true;
            }
        }

        for (int i = 0; i < currentRule.labeledElements.size(); i++)
        {
            AlternativeElement elt = (AlternativeElement)currentRule.labeledElements.elementAt(i);
            if (elt.getLabel().equals(id))
            {
                return in_var ? id : id + "_AST";
            }
        }

        String s = (String)treeVariableMap.get(id);
        if (s != null)
        {
            if (s == NONUNIQUE)
            {
                antlrTool.error("Ambiguous reference to AST element "+id+
                                " in rule "+currentRule.getRuleName());
                return null;
            }
            else if (s.equals(currentRule.getRuleName()))
            {
                antlrTool.error("Ambiguous reference to AST element "+id+
                                " in rule "+currentRule.getRuleName());
                return null;
            }
            else
            {
                return in_var ? s + "_in" : s;
            }
        }

        if( id.equals(currentRule.getRuleName()) )
        {
            String r = in_var ? id + "_AST_in" : id + "_AST";
            if ( transInfo!=null ) {
                if ( !in_var ) {
                    transInfo.refRuleRoot = r;
                }
            }
            return r;
        }
        else
        {
            return id;
        }
    }

    private void mapTreeVariable(AlternativeElement e, String name)
    {
        if (e instanceof TreeElement) {
            mapTreeVariable( ((TreeElement)e).root, name);
            return;
        }

        String elName = null;

        if (e.getLabel() == null) {
            if (e instanceof TokenRefElement) {
                elName = ((TokenRefElement)e).atomText;
            }
            else if (e instanceof RuleRefElement) {
                elName = ((RuleRefElement)e).targetRule;
            }
        }
        if (elName != null) {
            if (treeVariableMap.get(elName) != null) {
                treeVariableMap.remove(elName);
                treeVariableMap.put(elName, NONUNIQUE);
            }
            else {
                treeVariableMap.put(elName, name);
            }
        }
    }

    protected String processActionForSpecialSymbols(String actionStr,
                                                    int line,
                                                    RuleBlock currentRule,
                                                    ActionTransInfo tInfo)
    {
        if ( actionStr==null || actionStr.length()==0 )
            return null;

        if (grammar==null)
            return actionStr;

        if ((grammar.buildAST && actionStr.indexOf('#') != -1) ||
            grammar instanceof TreeWalkerGrammar ||
            ((grammar instanceof LexerGrammar ||
              grammar instanceof ParserGrammar)
             && actionStr.indexOf('$') != -1) )
        {
            persistence.antlr.actions.csharp.ActionLexer lexer = new persistence.antlr.actions.csharp.ActionLexer(actionStr, currentRule, this, tInfo);

            lexer.setLineOffset(line);
            lexer.setFilename(grammar.getFilename());
            lexer.setTool(antlrTool);

            try {
                lexer.mACTION(true);
                actionStr = lexer.getTokenObject().getText();
            }
            catch (RecognitionException ex) {
                lexer.reportError(ex);
                return actionStr;
            }
            catch (TokenStreamException tex) {
                antlrTool.panic("Error reading action:"+actionStr);
                return actionStr;
            }
            catch (CharStreamException io) {
                antlrTool.panic("Error reading action:"+actionStr);
                return actionStr;
            }
        }
        return actionStr;
    }

    private void setupGrammarParameters(Grammar g) {
        if (g instanceof ParserGrammar ||
            g instanceof LexerGrammar ||
            g instanceof TreeWalkerGrammar
           )
        {
            if( antlrTool.nameSpace != null )
                nameSpace = new CSharpNameSpace( antlrTool.nameSpace.getName() );

            if( g.hasOption("namespace") ) {
                Token t = g.getOption("namespace");
                if( t != null ) {
                    nameSpace = new CSharpNameSpace(t.getText());
                }
            }
        }

        if (g instanceof ParserGrammar) {
            labeledElementASTType = "AST";
            if ( g.hasOption("ASTLabelType") ) {
                Token tsuffix = g.getOption("ASTLabelType");
                if ( tsuffix != null ) {
                    String suffix = StringUtils.stripFrontBack(tsuffix.getText(), "\"", "\"");
                    if ( suffix != null ) {
                        usingCustomAST = true;
                        labeledElementASTType = suffix;
                    }
                }
            }
            labeledElementType = "Token ";
            labeledElementInit = "null";
            commonExtraArgs = "";
            commonExtraParams = "";
            commonLocalVars = "";
            lt1Value = "LT(1)";
            exceptionThrown = "RecognitionException";
            throwNoViable = "throw new NoViableAltException(LT(1), getFilename());";
        }
        else if (g instanceof LexerGrammar) {
            labeledElementType = "char ";
            labeledElementInit = "'\\0'";
            commonExtraArgs = "";
            commonExtraParams = "bool _createToken";
            commonLocalVars = "int _ttype; Token _token=null; int _begin=text.Length;";
            lt1Value = "LA(1)";
            exceptionThrown = "RecognitionException";
            throwNoViable = "throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());";
        }
        else if (g instanceof TreeWalkerGrammar) {
            labeledElementASTType = "AST";
            labeledElementType = "AST";
            if ( g.hasOption("ASTLabelType") ) {
                Token tsuffix = g.getOption("ASTLabelType");
                if ( tsuffix != null ) {
                    String suffix = StringUtils.stripFrontBack(tsuffix.getText(), "\"", "\"");
                    if ( suffix != null ) {
                        usingCustomAST = true;
                        labeledElementASTType = suffix;
                        labeledElementType = suffix;
                    }
                }
            }
            if ( !g.hasOption("ASTLabelType") ) {
                g.setOption("ASTLabelType", new Token(ANTLRTokenTypes.STRING_LITERAL,"AST"));
            }
            labeledElementInit = "null";
            commonExtraArgs = "_t";
            commonExtraParams = "AST _t";
            commonLocalVars = "";
            if (usingCustomAST)
                lt1Value = "(_t==ASTNULL) ? null : (" + labeledElementASTType + ")_t";
            else
                lt1Value = "_t";
            exceptionThrown = "RecognitionException";
            throwNoViable = "throw new NoViableAltException(_t);";
        }
        else {
            antlrTool.panic("Unknown grammar type");
        }
    }

    public void setupOutput(String className) throws IOException
    {
        currentOutput = antlrTool.openOutputFile(className + ".cs");
    }

    private static String OctalToUnicode(String str)
    {
        if ( (4 <= str.length()) &&
             ('\'' == str.charAt(0)) &&
             ('\\' == str.charAt(1)) &&
             (('0' <= str.charAt(2)) && ('7' >= str.charAt(2))) &&
             ('\'' == str.charAt(str.length()-1)) )
        {
            Integer x = Integer.valueOf(str.substring(2, str.length()-1), 8);

            return "'\\x" + Integer.toHexString(x.intValue()) + "'";
        }
        else {
            return str;
        }
    }

    public String getTokenTypesClassName()
    {
        TokenManager tm = grammar.tokenManager;
        return new String(tm.getName() + TokenTypesFileSuffix);
    }

    private void declareSaveIndexVariableIfNeeded()
    {
        if (!bSaveIndexCreated)
        {
            println("int _saveIndex = 0;");
            bSaveIndexCreated = true;
        }
    }
}