package antlr;

import java.util.Hashtable;

import antlr.collections.impl.BitSet;

/**
 * First-pass parse behavior for ANTLR 2.x grammar files.
 *
 * <p>As the ANTLR grammar parser walks a grammar file it calls these
 * callbacks; this class records the grammars, rules, token symbols and
 * token vocabularies (import/export vocab token managers) that it sees.
 * Most structural callbacks (alts, subrules, trees, ...) are no-ops here;
 * a later behavior builds the actual grammar structures.</p>
 */
public class DefineGrammarSymbols implements ANTLRGrammarParseBehavior {
    // Map of grammar class name -> Grammar, for all grammars seen in this file.
    protected Hashtable grammars = new Hashtable();
    // Map of vocabulary name -> TokenManager (shared token vocabularies).
    protected Hashtable tokenManagers = new Hashtable();
    // The grammar currently being defined (null between grammars; see reset()).
    protected Grammar grammar;
    // The driving ANTLR tool; used for error/warning/panic reporting.
    protected Tool tool;
    // Grammar analyzer shared by all grammars in this run.
    LLkAnalyzer analyzer;
    // Command-line arguments, forwarded to each grammar via processArguments().
    String[] args;
    // Reserved key under which the file's default token manager is registered.
    static final String DEFAULT_TOKENMANAGER_NAME = "*default";
    // Map of header-action name ("" for the unnamed header) -> action Token.
    protected Hashtable headerActions = new Hashtable();
    // Pending file-preamble action; attached to the next grammar started,
    // then reset to an empty token.
    Token thePreambleAction = new CommonToken(Token.INVALID_TYPE, "");
    // Target language; overridden by the file-level "language" option.
    String language = "Java";

    // Counters enforcing at most one grammar of each kind per file.
    protected int numLexers = 0;
    protected int numParsers = 0;
    protected int numTreeParsers = 0;

    public DefineGrammarSymbols(Tool tool_, String[] args_, LLkAnalyzer analyzer_) {
        tool = tool_;
        args = args_;
        analyzer = analyzer_;
    }

    /**
     * Record a string literal referenced in a parser/tree-parser rule,
     * assigning it a fresh token type if it is not already defined.
     * Lexer grammars are skipped: there a literal is just characters to
     * match, not a token symbol.
     */
    public void _refStringLiteral(Token lit, Token label, int autoGenType, boolean lastInRule) {
        if (!(grammar instanceof LexerGrammar)) {
            String str = lit.getText();
            if (grammar.tokenManager.getTokenSymbol(str) != null) {
                // already defined; nothing to do
                return;
            }
            StringLiteralSymbol sl = new StringLiteralSymbol(str);
            int tt = grammar.tokenManager.nextTokenType();
            sl.setTokenType(tt);
            grammar.tokenManager.define(sl);
        }
    }

    /**
     * Record a token reference (e.g. ID) seen in a rule, defining the
     * token symbol with a fresh token type on first sight.
     */
    public void _refToken(Token assignId,
                          Token t,
                          Token label,
                          Token args,
                          boolean inverted,
                          int autoGenType,
                          boolean lastInRule) {
        String id = t.getText();
        if (!grammar.tokenManager.tokenDefined(id)) {
            int tt = grammar.tokenManager.nextTokenType();
            TokenSymbol ts = new TokenSymbol(id);
            ts.setTokenType(tt);
            grammar.tokenManager.define(ts);
        }
    }

    /** Abandon the current grammar (after an error) and forget it was defined. */
    public void abortGrammar() {
        if (grammar != null && grammar.getClassName() != null) {
            grammars.remove(grammar.getClassName());
        }
        grammar = null;
    }

    // --- structural callbacks: no-ops in this symbol-definition pass ---

    public void beginAlt(boolean doAST_) {
    }

    public void beginChildList() {
    }

    public void beginExceptionGroup() {
    }

    public void beginExceptionSpec(Token label) {
    }

    public void beginSubRule(Token label, Token start, boolean not) {
    }

    public void beginTree(Token tok) throws SemanticException {
    }

    /**
     * Define a rule. Lexer rules (TOKEN_REF, i.e. uppercase names) also
     * define a token symbol of the same name, and the rule itself is
     * stored under an encoded internal name. Redefinition is reported
     * as an error.
     */
    public void defineRuleName(Token r,
                               String access,
                               boolean ruleAutoGen,
                               String docComment)
        throws SemanticException {
        String id = r.getText();

        if (r.type == ANTLRTokenTypes.TOKEN_REF) {
            // lexer rule: encode the rule name and make sure the token exists
            id = CodeGenerator.encodeLexerRuleName(id);
            if (!grammar.tokenManager.tokenDefined(r.getText())) {
                int tt = grammar.tokenManager.nextTokenType();
                TokenSymbol ts = new TokenSymbol(r.getText());
                ts.setTokenType(tt);
                grammar.tokenManager.define(ts);
            }
        }

        RuleSymbol rs;
        if (grammar.isDefined(id)) {
            // symbol may exist from a forward reference (refRule); only a
            // second *definition* is an error
            rs = (RuleSymbol)grammar.getSymbol(id);
            if (rs.isDefined()) {
                tool.error("redefinition of rule " + id, grammar.getFilename(), r.getLine(), r.getColumn());
            }
        }
        else {
            rs = new RuleSymbol(id);
            grammar.define(rs);
        }
        rs.setDefined();
        rs.access = access;
        rs.comment = docComment;
    }

    /**
     * Define a token from the tokens {...} section: a name, a literal,
     * or a name with an attached literal. Handles relabeling an existing
     * literal, upgrading a plain token to a string literal (keeping its
     * token type), and plain new definitions; duplicates get warnings.
     */
    public void defineToken(Token tokname, Token tokliteral) {
        String name = null;
        String literal = null;
        if (tokname != null) {
            name = tokname.getText();
        }
        if (tokliteral != null) {
            literal = tokliteral.getText();
        }
        if (literal != null) {
            StringLiteralSymbol sl = (StringLiteralSymbol)grammar.tokenManager.getTokenSymbol(literal);
            if (sl != null) {
                // literal already known: either a true redefinition
                // (no new label, or it already has one) or we attach
                // our label to it
                if (name == null || sl.getLabel() != null) {
                    tool.warning("Redefinition of literal in tokens {...}: " + literal, grammar.getFilename(), tokliteral.getLine(), tokliteral.getColumn());
                    return;
                }
                else if (name != null) {
                    sl.setLabel(name);
                    grammar.tokenManager.mapToTokenSymbol(name, sl);
                }
            }
            if (name != null) {
                // name already exists: hook this literal onto the old token
                TokenSymbol ts = (TokenSymbol)grammar.tokenManager.getTokenSymbol(name);
                if (ts != null) {
                    if (ts instanceof StringLiteralSymbol) {
                        // name already bound to some literal: conflict
                        tool.warning("Redefinition of token in tokens {...}: " + name, grammar.getFilename(), tokliteral.getLine(), tokliteral.getColumn());
                        return;
                    }
                    // upgrade the plain token to a string literal symbol,
                    // keeping its existing token type
                    int ttype = ts.getTokenType();
                    sl = new StringLiteralSymbol(literal);
                    sl.setTokenType(ttype);
                    sl.setLabel(name);
                    grammar.tokenManager.define(sl);
                    // the label remains usable as well
                    grammar.tokenManager.mapToTokenSymbol(name, sl);
                    return;
                }
            }
            // brand-new literal (label, if any, is new too)
            sl = new StringLiteralSymbol(literal);
            int tt = grammar.tokenManager.nextTokenType();
            sl.setTokenType(tt);
            sl.setLabel(name);
            grammar.tokenManager.define(sl);
            if (name != null) {
                grammar.tokenManager.mapToTokenSymbol(name, sl);
            }
        }

        else {
            // name only, no literal
            if (grammar.tokenManager.tokenDefined(name)) {
                tool.warning("Redefinition of token in tokens {...}: " + name, grammar.getFilename(), tokname.getLine(), tokname.getColumn());
                return;
            }
            int tt = grammar.tokenManager.nextTokenType();
            TokenSymbol ts = new TokenSymbol(name);
            ts.setTokenType(tt);
            grammar.tokenManager.define(ts);
        }
    }

    public void endAlt() {
    }

    public void endChildList() {
    }

    public void endExceptionGroup() {
    }

    public void endExceptionSpec() {
    }

    public void endGrammar() {
    }

    /**
     * Called when the grammar's options section ends: decide which token
     * manager (vocabulary) this grammar reads from and writes to, based
     * on the four combinations of importVocab/exportVocab.
     */
    public void endOptions() {
        // Case 1: neither vocab specified -- export under the grammar's
        // class name, but share the file's default vocab if one exists.
        if (grammar.exportVocab == null && grammar.importVocab == null) {
            grammar.exportVocab = grammar.getClassName();
            if (tokenManagers.containsKey(DEFAULT_TOKENMANAGER_NAME)) {
                grammar.exportVocab = DEFAULT_TOKENMANAGER_NAME;
                TokenManager tm = (TokenManager)tokenManagers.get(DEFAULT_TOKENMANAGER_NAME);
                grammar.setTokenManager(tm);
                return;
            }
            // first grammar in the file: create the default vocab
            TokenManager tm = new SimpleTokenManager(grammar.exportVocab, tool);
            grammar.setTokenManager(tm);
            tokenManagers.put(grammar.exportVocab, tm);
            tokenManagers.put(DEFAULT_TOKENMANAGER_NAME, tm);
            return;
        }

        // Case 2: importVocab only -- export defaults to the class name.
        if (grammar.exportVocab == null && grammar.importVocab != null) {
            grammar.exportVocab = grammar.getClassName();
            if (grammar.importVocab.equals(grammar.exportVocab)) {
                tool.warning("Grammar " + grammar.getClassName() +
                             " cannot have importVocab same as default output vocab (grammar name); ignored.");
                // drop the import and re-run as case 1
                grammar.importVocab = null;
                endOptions();
                return;
            }
            if (tokenManagers.containsKey(grammar.importVocab)) {
                // vocab defined earlier in this file: clone it so our
                // additions don't mutate the source vocab
                TokenManager tm = (TokenManager)tokenManagers.get(grammar.importVocab);
                TokenManager dup = (TokenManager)tm.clone();
                dup.setName(grammar.exportVocab);
                dup.setReadOnly(false);
                grammar.setTokenManager(dup);
                tokenManagers.put(grammar.exportVocab, dup);
                return;
            }
            // vocab not seen in this file: read it from its TokenTypes file
            ImportVocabTokenManager tm =
                new ImportVocabTokenManager(grammar,
                                            grammar.importVocab + CodeGenerator.TokenTypesFileSuffix + CodeGenerator.TokenTypesFileExt,
                                            grammar.exportVocab,
                                            tool);
            tm.setReadOnly(false);
            tokenManagers.put(grammar.exportVocab, tm);
            grammar.setTokenManager(tm);

            if (!tokenManagers.containsKey(DEFAULT_TOKENMANAGER_NAME)) {
                tokenManagers.put(DEFAULT_TOKENMANAGER_NAME, tm);
            }

            return;
        }

        // Case 3: exportVocab only -- reuse an existing vocab of that
        // name or create a fresh one.
        if (grammar.exportVocab != null && grammar.importVocab == null) {
            if (tokenManagers.containsKey(grammar.exportVocab)) {
                TokenManager tm = (TokenManager)tokenManagers.get(grammar.exportVocab);
                grammar.setTokenManager(tm);
                return;
            }
            TokenManager tm = new SimpleTokenManager(grammar.exportVocab, tool);
            grammar.setTokenManager(tm);
            tokenManagers.put(grammar.exportVocab, tm);
            if (!tokenManagers.containsKey(DEFAULT_TOKENMANAGER_NAME)) {
                tokenManagers.put(DEFAULT_TOKENMANAGER_NAME, tm);
            }
            return;
        }

        // Case 4: both specified -- import (clone or file-load) and
        // re-export under the new name.
        if (grammar.exportVocab != null && grammar.importVocab != null) {
            if (grammar.importVocab.equals(grammar.exportVocab)) {
                tool.error("exportVocab of " + grammar.exportVocab + " same as importVocab; probably not what you want");
            }
            if (tokenManagers.containsKey(grammar.importVocab)) {
                // clone the in-file vocab so the original stays untouched
                TokenManager tm = (TokenManager)tokenManagers.get(grammar.importVocab);
                TokenManager dup = (TokenManager)tm.clone();
                dup.setName(grammar.exportVocab);
                dup.setReadOnly(false);
                grammar.setTokenManager(dup);
                tokenManagers.put(grammar.exportVocab, dup);
                return;
            }
            // load the imported vocab from its TokenTypes file
            ImportVocabTokenManager tm =
                new ImportVocabTokenManager(grammar,
                                            grammar.importVocab + CodeGenerator.TokenTypesFileSuffix + CodeGenerator.TokenTypesFileExt,
                                            grammar.exportVocab,
                                            tool);
            tm.setReadOnly(false);
            tokenManagers.put(grammar.exportVocab, tm);
            grammar.setTokenManager(tm);

            if (!tokenManagers.containsKey(DEFAULT_TOKENMANAGER_NAME)) {
                tokenManagers.put(DEFAULT_TOKENMANAGER_NAME, tm);
            }

            return;
        }
    }

    public void endRule(String r) {
    }

    public void endSubRule() {
    }

    public void endTree() {
    }

    public void hasError() {
    }

    public void noASTSubRule() {
    }

    public void oneOrMoreSubRule() {
    }

    public void optionalSubRule() {
    }

    public void setUserExceptions(String thr) {
    }

    public void refAction(Token action) {
    }

    public void refArgAction(Token action) {
    }

    public void refCharLiteral(Token lit, Token label, boolean inverted, int autoGenType, boolean lastInRule) {
    }

    public void refCharRange(Token t1, Token t2, Token label, int autoGenType, boolean lastInRule) {
    }

    public void refElementOption(Token option, Token value) {
    }

    public void refTokensSpecElementOption(Token tok, Token option, Token value) {
    }

    public void refExceptionHandler(Token exTypeAndName, Token action) {
    }

    /**
     * Record a header action. The key is the stripped header name, or
     * "" for the unnamed header; a duplicate key is an error, though the
     * action is still stored (overwriting the previous one).
     */
    public void refHeaderAction(Token name, Token act) {
        String key;

        if (name == null)
            key = "";
        else
            key = StringUtils.stripFrontBack(name.getText(), "\"", "\"");

        if (headerActions.containsKey(key)) {
            if (key.equals(""))
                tool.error(act.getLine() + ": header action already defined");
            else
                tool.error(act.getLine() + ": header action '" + key + "' already defined");
        }
        headerActions.put(key, act);
    }

    /** Return the text of the named header action, or "" if none exists. */
    public String getHeaderAction(String name) {
        Token t = (Token)headerActions.get(name);
        if (t == null) {
            return "";
        }
        return t.getText();
    }

    public void refInitAction(Token action) {
    }

    public void refMemberAction(Token act) {
    }

    /** Remember the file preamble action for the next grammar started. */
    public void refPreambleAction(Token act) {
        thePreambleAction = act;
    }

    public void refReturnAction(Token returnAction) {
    }

    /**
     * Record a rule reference, creating an (as yet undefined) RuleSymbol
     * on first sight so forward references resolve later.
     */
    public void refRule(Token idAssign,
                        Token r,
                        Token label,
                        Token args,
                        int autoGenType) {
        String id = r.getText();
        if (r.type == ANTLRTokenTypes.TOKEN_REF) {
            // lexer rules are stored under an encoded internal name
            id = CodeGenerator.encodeLexerRuleName(id);
        }
        if (!grammar.isDefined(id)) {
            grammar.define(new RuleSymbol(id));
        }
    }

    public void refSemPred(Token pred) {
    }

    public void refStringLiteral(Token lit,
                                 Token label,
                                 int autoGenType,
                                 boolean lastInRule) {
        _refStringLiteral(lit, label, autoGenType, lastInRule);
    }

    public void refToken(Token assignId, Token t, Token label, Token args,
                         boolean inverted, int autoGenType, boolean lastInRule) {
        _refToken(assignId, t, label, args, inverted, autoGenType, lastInRule);
    }

    /**
     * Record both endpoints of a token range; each endpoint may be a
     * string literal (starts with '"') or a plain token reference.
     */
    public void refTokenRange(Token t1, Token t2, Token label, int autoGenType, boolean lastInRule) {
        if (t1.getText().charAt(0) == '"') {
            refStringLiteral(t1, null, GrammarElement.AUTO_GEN_NONE, lastInRule);
        }
        else {
            _refToken(null, t1, null, null, false, GrammarElement.AUTO_GEN_NONE, lastInRule);
        }
        if (t2.getText().charAt(0) == '"') {
            _refStringLiteral(t2, null, GrammarElement.AUTO_GEN_NONE, lastInRule);
        }
        else {
            _refToken(null, t2, null, null, false, GrammarElement.AUTO_GEN_NONE, lastInRule);
        }
    }

    public void refTreeSpecifier(Token treeSpec) {
    }

    public void refWildcard(Token t, Token label, int autoGenType) {
    }

    /** Forget the current grammar so a new one can be started. */
    public void reset() {
        grammar = null;
    }

    public void setArgOfRuleRef(Token argaction) {
    }

    /** Set the character vocabulary of the current (lexer) grammar. */
    public void setCharVocabulary(BitSet b) {
        // only sensible for a lexer; caller guarantees grammar is a LexerGrammar
        ((LexerGrammar)grammar).setCharVocabulary(b);
    }

    /**
     * Handle a file-level option (one appearing before any grammar):
     * language, literal-mangling settings, and C++-only options.
     */
    public void setFileOption(Token key, Token value, String filename) {
        if (key.getText().equals("language")) {
            if (value.getType() == ANTLRParser.STRING_LITERAL) {
                // strip surrounding quotes
                language = StringUtils.stripBack(StringUtils.stripFront(value.getText(), '"'), '"');
            }
            else if (value.getType() == ANTLRParser.TOKEN_REF || value.getType() == ANTLRParser.RULE_REF) {
                language = value.getText();
            }
            else {
                tool.error("language option must be string or identifier", filename, value.getLine(), value.getColumn());
            }
        }
        else if (key.getText().equals("mangleLiteralPrefix")) {
            if (value.getType() == ANTLRParser.STRING_LITERAL) {
                tool.literalsPrefix = StringUtils.stripFrontBack(value.getText(), "\"", "\"");
            }
            else {
                tool.error("mangleLiteralPrefix option must be string", filename, value.getLine(), value.getColumn());
            }
        }
        else if (key.getText().equals("upperCaseMangledLiterals")) {
            if (value.getText().equals("true")) {
                tool.upperCaseMangledLiterals = true;
            }
            else if (value.getText().equals("false")) {
                tool.upperCaseMangledLiterals = false;
            }
            else {
                // NOTE(review): reports via grammar.antlrTool while the rest of
                // this method uses tool -- presumably equivalent; confirm.
                grammar.antlrTool.error("Value for upperCaseMangledLiterals must be true or false", filename, key.getLine(), key.getColumn());
            }
        }
        else if (key.getText().equals("namespace") ||
                 key.getText().equals("namespaceStd") ||
                 key.getText().equals("namespaceAntlr") ||
                 key.getText().equals("genHashLines")) {
            // these options only make sense for the C++ code generator
            if (!language.equals("Cpp")) {
                tool.error(key.getText() + " option only valid for C++", filename, key.getLine(), key.getColumn());
            }
            else {
                if (key.getText().equals("genHashLines")) {
                    if (!(value.getText().equals("true") || value.getText().equals("false")))
                        tool.error("genHashLines option must be true or false", filename, value.getLine(), value.getColumn());
                    tool.genHashLines = value.getText().equals("true");
                }
                else {
                    if (value.getType() != ANTLRParser.STRING_LITERAL) {
                        tool.error(key.getText() + " option must be a string", filename, value.getLine(), value.getColumn());
                    }
                    else {
                        if (key.getText().equals("namespace"))
                            tool.setNameSpace(value.getText());
                        else if (key.getText().equals("namespaceStd"))
                            tool.namespaceStd = value.getText();
                        else if (key.getText().equals("namespaceAntlr"))
                            tool.namespaceAntlr = value.getText();
                    }
                }
            }
        }
        else {
            // NOTE(review): mixes key.getLine() with value.getColumn() --
            // looks like it should be key.getColumn(); confirm before changing.
            tool.error("Invalid file-level option: " + key.getText(), filename, key.getLine(), value.getColumn());
        }
    }

    /**
     * Handle a grammar-level option: reject obsolete pre-2.6.0 options,
     * record importVocab/exportVocab, and pass anything else through to
     * the grammar itself.
     */
    public void setGrammarOption(Token key, Token value) {
        if (key.getText().equals("tokdef") || key.getText().equals("tokenVocabulary")) {
            tool.error("tokdef/tokenVocabulary options are invalid >= ANTLR 2.6.0.\n" +
                       "  Use importVocab/exportVocab instead.  Please see the documentation.\n" +
                       "  The previous options were so heinous that Terence changed the whole\n" +
                       "  vocabulary mechanism; it was better to change the names rather than\n" +
                       "  subtly change the functionality of the known options.  Sorry!", grammar.getFilename(), value.getLine(), value.getColumn());
        }
        else if (key.getText().equals("literal") &&
                 grammar instanceof LexerGrammar) {
            tool.error("the literal option is invalid >= ANTLR 2.6.0.\n" +
                       "  Use the \"tokens {...}\" mechanism instead.",
                       grammar.getFilename(), value.getLine(), value.getColumn());
        }
        else if (key.getText().equals("exportVocab")) {
            // must be an identifier, not a string literal
            if (value.getType() == ANTLRParser.RULE_REF || value.getType() == ANTLRParser.TOKEN_REF) {
                grammar.exportVocab = value.getText();
            }
            else {
                tool.error("exportVocab must be an identifier", grammar.getFilename(), value.getLine(), value.getColumn());
            }
        }
        else if (key.getText().equals("importVocab")) {
            if (value.getType() == ANTLRParser.RULE_REF || value.getType() == ANTLRParser.TOKEN_REF) {
                grammar.importVocab = value.getText();
            }
            else {
                tool.error("importVocab must be an identifier", grammar.getFilename(), value.getLine(), value.getColumn());
            }
        }
        else {
            // unrecognized here: let the grammar interpret it
            grammar.setOption(key.getText(), value);
        }
    }

    public void setRuleOption(Token key, Token value) {
    }

    public void setSubruleOption(Token key, Token value) {
    }

    /**
     * Begin a lexer grammar: enforce one lexer per file, create the
     * LexerGrammar, register it, and attach the pending preamble action.
     */
    public void startLexer(String file, Token name, String superClass, String doc) {
        if (numLexers > 0) {
            tool.panic("You may only have one lexer per grammar file: class " + name.getText());
        }
        numLexers++;
        reset();
        // NOTE(review): lookup key is the Token itself, but entries are stored
        // under String class names, so this get() presumably never matches --
        // verify; the redefinition branches below may be dead code.
        Grammar g = (Grammar)grammars.get(name);
        if (g != null) {
            if (!(g instanceof LexerGrammar)) {
                tool.panic("'" + name.getText() + "' is already defined as a non-lexer");
            }
            else {
                tool.panic("Lexer '" + name.getText() + "' is already defined");
            }
        }
        else {
            LexerGrammar lg = new LexerGrammar(name.getText(), tool, superClass);
            lg.comment = doc;
            lg.processArguments(args);
            lg.setFilename(file);
            grammars.put(lg.getClassName(), lg);
            // attach and then clear the pending preamble action
            lg.preambleAction = thePreambleAction;
            thePreambleAction = new CommonToken(Token.INVALID_TYPE, "");
            grammar = lg;
        }
    }

    /**
     * Begin a parser grammar: enforce one parser per file, create the
     * ParserGrammar, register it, and attach the pending preamble action.
     */
    public void startParser(String file, Token name, String superClass, String doc) {
        if (numParsers > 0) {
            tool.panic("You may only have one parser per grammar file: class " + name.getText());
        }
        numParsers++;
        reset();
        // NOTE(review): same Token-vs-String key mismatch as in startLexer.
        Grammar g = (Grammar)grammars.get(name);
        if (g != null) {
            if (!(g instanceof ParserGrammar)) {
                tool.panic("'" + name.getText() + "' is already defined as a non-parser");
            }
            else {
                tool.panic("Parser '" + name.getText() + "' is already defined");
            }
        }
        else {
            grammar = new ParserGrammar(name.getText(), tool, superClass);
            grammar.comment = doc;
            grammar.processArguments(args);
            grammar.setFilename(file);
            grammars.put(grammar.getClassName(), grammar);
            // attach and then clear the pending preamble action
            grammar.preambleAction = thePreambleAction;
            thePreambleAction = new CommonToken(Token.INVALID_TYPE, "");
        }
    }

    /**
     * Begin a tree-walker grammar: enforce one tree parser per file,
     * create the TreeWalkerGrammar, register it, and attach the pending
     * preamble action.
     */
    public void startTreeWalker(String file, Token name, String superClass, String doc) {
        if (numTreeParsers > 0) {
            tool.panic("You may only have one tree parser per grammar file: class " + name.getText());
        }
        numTreeParsers++;
        reset();
        // NOTE(review): same Token-vs-String key mismatch as in startLexer.
        Grammar g = (Grammar)grammars.get(name);
        if (g != null) {
            if (!(g instanceof TreeWalkerGrammar)) {
                tool.panic("'" + name.getText() + "' is already defined as a non-tree-walker");
            }
            else {
                tool.panic("Tree-walker '" + name.getText() + "' is already defined");
            }
        }
        else {
            grammar = new TreeWalkerGrammar(name.getText(), tool, superClass);
            grammar.comment = doc;
            grammar.processArguments(args);
            grammar.setFilename(file);
            grammars.put(grammar.getClassName(), grammar);
            // attach and then clear the pending preamble action
            grammar.preambleAction = thePreambleAction;
            thePreambleAction = new CommonToken(Token.INVALID_TYPE, "");
        }
    }

    public void synPred() {
    }

    public void zeroOrMoreSubRule() {
    }
}