1 43 44 package de.susebox.jtopas; 45 46 import java.util.Iterator ; 50 import java.text.MessageFormat ; 51 52 import java.io.File ; 53 import java.io.FileReader ; 54 import java.io.FileOutputStream ; 55 import java.io.FileInputStream ; 56 import java.io.OutputStreamWriter ; 57 import java.io.InputStreamReader ; 58 import java.io.IOException ; 59 60 import junit.framework.Test; 61 import junit.framework.TestCase; 62 import junit.framework.TestSuite; 63 import junit.framework.Assert; 64 65 import de.susebox.jtopas.Token; 66 import de.susebox.jtopas.Tokenizer; 67 import de.susebox.jtopas.StandardTokenizer; 68 import de.susebox.jtopas.StandardTokenizerProperties; 69 import de.susebox.jtopas.TokenizerProperty; 70 import de.susebox.jtopas.TokenizerException; 71 72 import de.susebox.TestUtilities; 73 74 75 79 87 public class TestLargeSource extends TestCase { 88 89 93 94 98 101 public static void main(String [] args) { 102 String [] tests = { TestLargeSource.class.getName() }; 103 104 TestUtilities.run(tests, args); 105 } 106 107 108 112 118 public static Test suite() { 119 boolean charSequenceAvailable; 121 122 try { 123 Class.forName("java.lang.CharSequence"); charSequenceAvailable = true; 125 } catch (Throwable throwable) { 126 charSequenceAvailable = false; 127 } 128 129 TestSuite suite = new TestSuite(TestLargeSource.class.getName()); 131 int[] flags = { Flags.F_RETURN_WHITESPACES | Flags.F_TOKEN_POS_ONLY | Flags.F_COUNT_LINES, 132 Flags.F_RETURN_WHITESPACES | Flags.F_TOKEN_POS_ONLY, 133 Flags.F_RETURN_BLOCK_COMMENTS | Flags.F_RETURN_LINE_COMMENTS | Flags.F_TOKEN_POS_ONLY, 134 Flags.F_RETURN_BLOCK_COMMENTS | Flags.F_RETURN_LINE_COMMENTS, 135 Flags.F_RETURN_WHITESPACES, 136 Flags.F_RETURN_WHITESPACES | Flags.F_COUNT_LINES, 137 Flags.F_RETURN_WHITESPACES | Flags.F_TOKEN_POS_ONLY | Flags.F_KEEP_DATA, 138 Flags.F_RETURN_WHITESPACES | Flags.F_KEEP_DATA, 139 Flags.F_RETURN_BLOCK_COMMENTS | Flags.F_RETURN_LINE_COMMENTS | Flags.F_KEEP_DATA }; 140 141 long[] types = { (1 << 
Token.PATTERN) + (1 << Token.KEYWORD) + (1 << Token.SPECIAL_SEQUENCE) + (1 << Token.BLOCK_COMMENT) + (1 << Token.LINE_COMMENT) + (1 << Token.STRING), 142 (1 << Token.KEYWORD) + (1 << Token.SPECIAL_SEQUENCE) + (1 << Token.BLOCK_COMMENT) + (1 << Token.LINE_COMMENT) + (1 << Token.STRING), 143 (1 << Token.SPECIAL_SEQUENCE) + (1 << Token.BLOCK_COMMENT) + (1 << Token.LINE_COMMENT) + (1 << Token.STRING) }; 144 145 for (int flagsIndex = 0; flagsIndex < flags.length; ++flagsIndex) { 146 for (int typesIndex = 0; typesIndex < types.length; ++typesIndex) { 147 suite.addTest(new TestLargeSource("parseFile", flags[flagsIndex], types[typesIndex])); 148 if (charSequenceAvailable) { 149 suite.addTest(new TestLargeSource("parseCharSequence", flags[flagsIndex], types[typesIndex])); 150 } 151 } 152 } 153 154 return suite; 155 } 156 157 158 162 165 public TestLargeSource(String test, int flags, long typeMask) { 166 super(test); 167 _flags = flags; 168 _typeMask = typeMask; 169 } 170 171 172 176 180 protected void setUp() throws Exception { 181 OutputStreamWriter writer; 183 184 _smallFile = File.createTempFile("jtopas", null); 185 _smallFile.deleteOnExit(); 186 _largeFile = File.createTempFile("jtopas", null); 187 _largeFile.deleteOnExit(); 188 189 writer = new OutputStreamWriter (new FileOutputStream (_smallFile)); 190 for (int count = 0; count < SMALL_LOOPS; ++count) { 191 writer.write(CODE_PIECE); 192 } 193 writer.close(); 194 195 writer = new OutputStreamWriter (new FileOutputStream (_largeFile)); 196 for (int count = 0; count < SMALL_LOOPS * LARGE_SMALL_RATIO; ++count) { 197 writer.write(CODE_PIECE); 198 } 199 writer.close(); 200 201 _properties = new StandardTokenizerProperties(_flags); 203 204 for (int index = 0; index < _javaProperties.length; ++index) { 205 if ((_typeMask & (1 << _javaProperties[index].getType())) != 0) { 206 _properties.addProperty(_javaProperties[index]); 207 } 208 } 209 } 210 211 212 216 protected void tearDown() throws Exception {} 217 218 219 223 226 
  /**
   * Tokenizes the small and the large temporary file through a file-backed
   * {@code ReaderSource} and compares the two run times.
   *
   * @throws Throwable on tokenizing errors or failed assertions
   */
  public void parseFile() throws Throwable {
    long smallTime = tokenizeFile(MessageFormat.format( TEST_MESSAGE,
                                  new Object[] { "parseFile", flags2String(), types2String(), Long.toString(_smallFile.length()) } ),
                                  _smallFile, true);
    long largeTime = tokenizeFile(MessageFormat.format( TEST_MESSAGE,
                                  new Object[] { "parseFile", flags2String(), types2String(), Long.toString(_largeFile.length()) } ),
                                  _largeFile, true);

    compareTime(smallTime, largeTime);
  }

  /**
   * Like {@link #parseFile}, but feeds the file contents through a
   * {@code CharArraySource} (CharSequence-style access) instead of a reader.
   *
   * @throws Throwable on tokenizing errors or failed assertions
   */
  public void parseCharSequence() throws Throwable {
    long smallTime = tokenizeFile(MessageFormat.format( TEST_MESSAGE,
                                  new Object[] { "parseCharSequence", flags2String(), types2String(), Long.toString(_smallFile.length()) } ),
                                  _smallFile, false);
    long largeTime = tokenizeFile(MessageFormat.format( TEST_MESSAGE,
                                  new Object[] { "parseCharSequence", flags2String(), types2String(), Long.toString(_largeFile.length()) } ),
                                  _largeFile, false);

    compareTime(smallTime, largeTime);
  }


  /**
   * Removes all properties of the given token type from {@code _properties}.
   * Uses Iterator.remove so removal is safe during iteration.
   * NOTE(review): not called from any code visible in this file — possibly a
   * leftover helper; confirm before deleting.
   *
   * @param propertyType a {@link Token} type constant
   */
  private void removeProperties(int propertyType) {
    Iterator iter = _properties.getProperties();

    while (iter.hasNext()) {
      TokenizerProperty prop = (TokenizerProperty)iter.next();

      if (prop.getType() == propertyType) {
        iter.remove();
      }
    }
  }

  /**
   * Tokenizes one file and returns the elapsed time in milliseconds.
   *
   * @param message         progress message printed before the run
   * @param file            file to tokenize
   * @param useReaderSource true: stream the file via ReaderSource;
   *                        false: load it fully and use a CharArraySource
   * @return elapsed wall-clock time in milliseconds
   * @throws Throwable on I/O or tokenizing errors
   */
  private long tokenizeFile(String message, File file, boolean useReaderSource) throws Throwable {
    System.out.println(message);

    TokenizerSource source = null;
    StandardTokenizer tokenizer = new StandardTokenizer(_properties);

    try {
      if (useReaderSource) {
        source = new ReaderSource(file);
      } else {
        source = new CharArraySource(readFile(file));
      }
      tokenizer.setSource(source);
      return tokenize(tokenizer);
    } finally {
      // Only ReaderSource holds an open file handle; CharArraySource wraps
      // an in-memory array and needs no close.
      if (useReaderSource && source != null) {
        ((ReaderSource)source).close();
      }
      tokenizer.close();
    }
  }

  /**
   * Drains the tokenizer, checking each non-whitespace token against the
   * expected sequence for one CODE_PIECE repetition, and collects min/avg/max
   * times per repetition.
   *
   * @param tokenizer the tokenizer to drain
   * @return total elapsed wall-clock time in milliseconds
   * @throws Throwable on tokenizing errors or failed assertions
   */
  private long tokenize(Tokenizer tokenizer) throws Throwable {
    // times[0]=min, times[1]=sum (later: average), times[2]=max; -1 = unset
    double[] times = { -1, 0, -1 };
    int index = 0;
    int count = 0;
    // Pick the expected sequence matching the properties registered in setUp.
    boolean hasPattern = tokenizer.getTokenizerProperties().getPatterns().hasNext();
    boolean hasKeywords = tokenizer.getTokenizerProperties().getKeywords().hasNext();
    int[] expected = hasPattern ? EXPECTED_TOKEN
                   : hasKeywords ? EXPECTED_TOKEN_WITHOUT_PATTERN
                   : EXPECTED_TOKEN_WITHOUT_PATTERN_AND_KEYWORDS;

    long diffTime = 0;
    long start = System.currentTimeMillis();
    long localStart = start;

    while (tokenizer.hasMoreToken()) {
      Token token = tokenizer.nextToken();
      int type = token.getType();

      switch (type) {
      case Token.WHITESPACE:
      case Token.EOF:
        // whitespace and EOF are not part of the expected sequence
        break;
      default:
        if (expected[index] != type) {
          assertTrue("Line/Column " + token.getStartLine() + "/" + token.getStartColumn() + ": Expected " + Token.getTypeName(expected[index]) + ", got " + Token.getTypeName(type) + ".\n" + tokenizer.currentImage(), false);
        }
        if (++index >= expected.length) {
          // One full CODE_PIECE repetition consumed: record its duration.
          long localEnd = System.currentTimeMillis();

          diffTime = localEnd - localStart;
          localStart = localEnd;

          if (times[0] < 0 || times[0] > diffTime) {
            times[0] = diffTime;
          }

          if (times[2] < 0 || times[2] < diffTime) {
            times[2] = diffTime;
          }

          times[1] += diffTime;
          count++;

          index = 0;
        }
      }
    }

    diffTime = System.currentTimeMillis() - start;
    // NOTE(review): when no full repetition completed, count is 0 and the
    // average becomes NaN — only affects the printed statistics.
    times[1] = times[1] / (double)count;
    System.out.println(" Finished after " + diffTime + " milliseconds.");
    System.out.println(" Min/avg/max tokenize time for " + expected.length + " token: " + times[0] + "/" + times[1] + "/" + times[2] + " milliseconds.");

    return diffTime;
  }


  /**
   * Warns (does not fail) when the large file took disproportionally longer
   * than the small one, i.e. tokenizing time grew worse than linearly.
   * The "+ 1" guards against division by zero for very fast small runs.
   *
   * @param smallTime time for the small file in milliseconds
   * @param largeTime time for the large file in milliseconds
   */
  private void compareTime(double smallTime, double largeTime) {
    if (largeTime / (smallTime + 1) > LARGE_SMALL_RATIO) {
      System.out.println("Tokenizer too slow. Time for small / large file "
                         + smallTime + "/" + largeTime + "ms. Exceeding limit ratio of "
                         + LARGE_SMALL_RATIO + ".");
    }
  }

  /**
   * Reads the complete file into a char array, failing on premature EOF.
   * Note: file.length() counts bytes while the array holds chars; this works
   * because CODE_PIECE is plain ASCII written with the platform charset.
   *
   * @param file file to read
   * @return the file contents as a char array
   * @throws Throwable if the file cannot be read completely
   */
  private char[] readFile(File file) throws Throwable {
    char[] cbuf = new char[(int)file.length()];
    int chars = 0;
    FileReader reader = new FileReader(file);

    try {
      while (chars < cbuf.length) {
        int read = reader.read(cbuf, chars, cbuf.length - chars);

        if (read < 0) {
          throw new IOException("Unexpected EOF after " + chars + " characters. Expected " + cbuf.length + ".");
        }
        chars += read;
      }
    } finally {
      // best-effort close; a close failure must not mask the real error
      try { reader.close(); } catch (IOException ex) {}
    }
    return cbuf;
  }

  /**
   * Renders the flag combination of this test instance as a human-readable
   * "A + B + C" string for progress messages.
   *
   * @return printable flag description (empty when no known flag is set)
   */
  private String flags2String() {
    StringBuffer buffer = new StringBuffer();

    if ((_flags & Flags.F_KEEP_DATA) != 0) {
      buffer.append("F_KEEP_DATA");
    }

    // F_RETURN_WHITESPACES subsumes the individual comment/whitespace flags,
    // so the specific flags are only reported when it is not fully set.
    if ((_flags & Flags.F_RETURN_WHITESPACES) == Flags.F_RETURN_WHITESPACES) {
      if (buffer.length() > 0) {
        buffer.append(" + ");
      }
      buffer.append("F_RETURN_WHITESPACES");
    } else {
      if ((_flags & Flags.F_RETURN_BLOCK_COMMENTS) != 0) {
        if (buffer.length() > 0) {
          buffer.append(" + ");
        }
        buffer.append("F_RETURN_BLOCK_COMMENTS");
      }
      if ((_flags & Flags.F_RETURN_LINE_COMMENTS) != 0) {
        if (buffer.length() > 0) {
          buffer.append(" + ");
        }
        buffer.append("F_RETURN_LINE_COMMENTS");
      }
      if ((_flags & Flags.F_RETURN_SIMPLE_WHITESPACES) != 0) {
        if (buffer.length() > 0) {
          buffer.append(" + ");
        }
        buffer.append("F_RETURN_SIMPLE_WHITESPACES");
      }
    }

    if ((_flags & Flags.F_COUNT_LINES) != 0) {
      if (buffer.length() > 0) {
        buffer.append(" + ");
      }
      buffer.append("F_COUNT_LINES");
    }

    if ((_flags & Flags.F_TOKEN_POS_ONLY) != 0) {
      if (buffer.length() > 0) {
        buffer.append(" + ");
      }
      buffer.append("F_TOKEN_POS_ONLY");
    }
    return buffer.toString();
  }

  /**
   * Renders the token-type mask of this test instance as a human-readable
   * string for progress messages.
   *
   * @return printable type-mask description
   */
  private String types2String() {
    StringBuffer buffer = new StringBuffer();

    if
((_typeMask & (1 << Token.PATTERN)) != 0) { 468 buffer.append("PATTERN"); 469 } 470 if ((_typeMask & (1 << Token.KEYWORD)) != 0) { 471 if (buffer.length() > 0) { 472 buffer.append(" + "); 473 } 474 buffer.append("KEYWORD"); 475 } 476 return buffer.toString(); 477 } 478 479 483 private static final String CODE_PIECE = 485 "/**\n" 486 + " * A Java-like code example with lots of comments, strings, special\n" 487 + " * sequences etc.\n" 488 + " *<br>\n" 489 + " * Even some HTML tags like in real javadoc comments are present :-)\n" 490 + " * This piece of code is multiplied into a temporary file to get a really\n" 491 + " * huge source file (nothing that should happen in real life).\n" 492 + " */\n" 493 + "\n" 494 + "// package declaration\n" 495 + "package ours.my.subpackage;\n" 496 + "\n" 497 + "// imports\n" 498 + "import java.util.*;\n" 499 + "import java.io.InputStream;\n" 500 + "import java.io.InputStreamReader;\n" 501 + "import java.io.OutputStream;\n" 502 + "import java.io.OutputStreamWriter;\n" 503 + "import java.net.URL;\n" 504 + "import java.net.URI;\n" 505 + "import javax.swing.*;\n" 506 + "\n" 507 + "// class declaration\n" 508 + "\n" 509 + "/**\n" 510 + " * An example Java class probably not even syntactically ok.\n" 511 + " *\n" 512 + " * @see OtherClass\n" 513 + " * @see java.io.File\n" 514 + " * @author me\n" 515 + " */\n" 516 + "public class MyTestClass implements Serializable {\n" 517 + "\n" 518 + " /**\n" 519 + " * The usual main method.\n" 520 + " *\n" 521 + " * @param args the command line options and arguments\n" 522 + " */\n" 523 + " public static void main(String[] args) {\n" 524 + " // create the argument store\n" 525 + " argStore = new ArrayList(32);\n" 526 + "\n" 527 + " // wich GUI should be used?\n" 528 + " if (args != null && args.length > 0) {\n" 529 + " if (args[0].equals(\"swingui\")) {\n" 530 + " new junit.swingui.TestRunner().main(tests);\n" 531 + " } else if (args[0].equals(\"awtui\")) {\n" 532 + " new 
junit.awtui.TestRunner().main(tests);\n" 533 + " } else {\n" 534 + " new junit.textui.TestRunner().main(tests);\n" 535 + " }\n" 536 + " } else {\n" 537 + " new junit.textui.TestRunner().main(tests);\n" 538 + " }\n" 539 + "\n" 540 + " // get all the other command line arguments\n" 541 + " double doubleValue = 0.0;\n" 542 + " int intValue = 0;\n" 543 + " String stringValue = null;\n" 544 + "\n" 545 + " for (int index = 1; args != null && index < args.length; ++index) {\n" 546 + " if (args[index].charAt(0) == '-') {\n" 547 + " // options\n" 548 + " switch (args[index].charAt(1)) {\n" 549 + " case 'd':\n" 550 + " doubleValue = Double.valueOf(args[index].substring(2)).doubleValue();\n" 551 + " break;\n" 552 + " case 's':\n" 553 + " stringValue = args[index].substring(2);\n" 554 + " break;\n" 555 + " case 'i':\n" 556 + " intValue = Integer.valueOf(args[index].substring(2)).intValue();\n" 557 + " break;\n" 558 + " default:\n" 559 + " stringValue = \"\";\n" 560 + " doubleValue = 0.0;\n" 561 + " intValue = 0;\n" 562 + " }\n" 563 + "\n" 564 + " } else {\n" 565 + " // normal arguments\n" 566 + " if ( ! 
argStore.contains(args[index])) {\n" 567 + " argStore.add(args[index]);\n" 568 + " } else {\n" 569 + " System.out.println(\"Duplicate element \\\"\" + args[index] + \"\\\".\");\n" 570 + " /* perhaps better use Environment.getEnvironment(this).out().println() */\n" 571 + " }\n" 572 + " }\n" 573 + " }\n" 574 + " }\n" 575 + "\n" 576 + " /**\n" 577 + " * The argument store.\n" 578 + " */\n" 579 + " private ArrayList argStore = null;\n" 580 + "}\n" 581 + "\n" 582 + "\n"; 583 584 private static final int EXPECTED_TOKEN[] = { 586 Token.BLOCK_COMMENT, 587 Token.LINE_COMMENT, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.BLOCK_COMMENT, 600 Token.KEYWORD, Token.KEYWORD, Token.NORMAL, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.BLOCK_COMMENT, 602 Token.KEYWORD, Token.KEYWORD, Token.KEYWORD, 
Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, 
Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.STRING, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, 
Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.STRING, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.STRING, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.PATTERN, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, 
Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.BLOCK_COMMENT, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.BLOCK_COMMENT, 651 Token.KEYWORD, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE }; 654 655 private static final int EXPECTED_TOKEN_WITHOUT_PATTERN[] = { 657 Token.BLOCK_COMMENT, 658 Token.LINE_COMMENT, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.BLOCK_COMMENT, 671 
Token.KEYWORD, Token.KEYWORD, Token.NORMAL, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.BLOCK_COMMENT, 673 Token.KEYWORD, Token.KEYWORD, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, 
Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.STRING, 
Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.STRING, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.STRING, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, 
Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.BLOCK_COMMENT, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.BLOCK_COMMENT, 722 Token.KEYWORD, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE }; 725 726 private static final int EXPECTED_TOKEN_WITHOUT_PATTERN_AND_KEYWORDS[] = { 728 Token.BLOCK_COMMENT, 729 Token.LINE_COMMENT, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, 
Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.BLOCK_COMMENT, 742 Token.NORMAL, Token.NORMAL, Token.NORMAL, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.BLOCK_COMMENT, 744 Token.NORMAL, Token.NORMAL, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, 
Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, 
Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.STRING, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.STRING, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.STRING, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SEPARATOR, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.LINE_COMMENT, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, 
Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.BLOCK_COMMENT, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.BLOCK_COMMENT, 793 Token.NORMAL, Token.NORMAL, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE }; 796 797 private static final Object JAVADOC_COMPANION = new Object (); 799 private static final Object BLOCK_COMMENT_COMPANION = new Object (); 800 private static final Object LINE_COMMENT_COMPANION = new Object (); 801 private static final Object STRING_COMPANION = new Object (); 802 private static final Object CHAR_COMPANION = new Object (); 803 private static final Object BRACE_CLOSE_COMPANION = new Object (); 804 private static final Object COLON_COMPANION = new Object (); 805 private static final Object SEMICOLON_COMPANION = new Object (); 806 private static final Object STAR_COMPANION = new Object (); 807 808 static final TokenizerProperty[] _javaProperties = { 810 new TokenizerProperty(Token.BLOCK_COMMENT, new String [] { "/**", "*/" }, JAVADOC_COMPANION), 811 new TokenizerProperty(Token.BLOCK_COMMENT, new String [] { "/*", "*/" }, BLOCK_COMMENT_COMPANION), 812 new TokenizerProperty(Token.LINE_COMMENT, new String [] { 
"//" }, LINE_COMMENT_COMPANION), 813 new TokenizerProperty(Token.STRING, new String [] { "\"", "\"", "\\" }, STRING_COMPANION), 814 new TokenizerProperty(Token.STRING, new String [] { "'", "'", "\\" }, CHAR_COMPANION), 815 new TokenizerProperty(Token.PATTERN, new String [] { "[+\\-]?[0-9]+\\.?[0-9]*" } ), 816 new TokenizerProperty(Token.KEYWORD, new String [] { "package" } ), 817 new TokenizerProperty(Token.KEYWORD, new String [] { "abstract" } ), 818 new TokenizerProperty(Token.KEYWORD, new String [] { "public" } ), 819 new TokenizerProperty(Token.KEYWORD, new String [] { "protected" } ), 820 new TokenizerProperty(Token.KEYWORD, new String [] { "private" } ), 821 new TokenizerProperty(Token.KEYWORD, new String [] { "class" } ), 822 new TokenizerProperty(Token.KEYWORD, new String [] { "final" } ), 823 new TokenizerProperty(Token.KEYWORD, new String [] { "static" } ), 824 new TokenizerProperty(Token.KEYWORD, new String [] { "interface" } ), 825 new TokenizerProperty(Token.KEYWORD, new String [] { "extends" } ), 826 new TokenizerProperty(Token.KEYWORD, new String [] { "implements" } ), 827 new TokenizerProperty(Token.KEYWORD, new String [] { "synchronized" } ), 828 new TokenizerProperty(Token.KEYWORD, new String [] { "null" } ), 829 new TokenizerProperty(Token.KEYWORD, new String [] { "this" } ), 830 new TokenizerProperty(Token.KEYWORD, new String [] { "super" } ), 831 new TokenizerProperty(Token.KEYWORD, new String [] { "new" } ), 832 new TokenizerProperty(Token.KEYWORD, new String [] { "void" } ), 833 new TokenizerProperty(Token.KEYWORD, new String [] { "byte" } ), 834 new TokenizerProperty(Token.KEYWORD, new String [] { "char" } ), 835 new TokenizerProperty(Token.KEYWORD, new String [] { "short" } ), 836 new TokenizerProperty(Token.KEYWORD, new String [] { "int" } ), 837 new TokenizerProperty(Token.KEYWORD, new String [] { "long" } ), 838 new TokenizerProperty(Token.KEYWORD, new String [] { "double" } ), 839 new TokenizerProperty(Token.KEYWORD, new String [] { 
"float" } ), 840 new TokenizerProperty(Token.KEYWORD, new String [] { "String" } ), 841 new TokenizerProperty(Token.KEYWORD, new String [] { "throws" } ), 842 new TokenizerProperty(Token.KEYWORD, new String [] { "static" } ), 843 new TokenizerProperty(Token.KEYWORD, new String [] { "import" } ), 844 new TokenizerProperty(Token.KEYWORD, new String [] { "package" } ), 845 new TokenizerProperty(Token.KEYWORD, new String [] { "if" } ), 846 new TokenizerProperty(Token.KEYWORD, new String [] { "else" } ), 847 new TokenizerProperty(Token.KEYWORD, new String [] { "for" } ), 848 new TokenizerProperty(Token.KEYWORD, new String [] { "while" } ), 849 new TokenizerProperty(Token.KEYWORD, new String [] { "switch" } ), 850 new TokenizerProperty(Token.KEYWORD, new String [] { "case" } ), 851 new TokenizerProperty(Token.KEYWORD, new String [] { "break" } ), 852 new TokenizerProperty(Token.KEYWORD, new String [] { "default" } ), 853 new TokenizerProperty(Token.KEYWORD, new String [] { "continue" } ), 854 new TokenizerProperty(Token.KEYWORD, new String [] { "goto" } ), 855 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "." 
} ), 856 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { ";" } ), 857 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "," } ), 858 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "=" } ), 859 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "==" } ), 860 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "!=" } ), 861 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { ">" } ), 862 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "<" } ), 863 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { ">=" } ), 864 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "<=" } ), 865 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "+=" } ), 866 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "-=" } ), 867 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "*=" } ), 868 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "/=" } ), 869 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { ">>=" } ), 870 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "<<=" } ), 871 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "+" } ), 872 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "-" } ), 873 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "*" } ), 874 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "/" } ), 875 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "++" } ), 876 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "--" } ), 877 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { ">>" } ), 878 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "<<" } ), 879 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { ">>>" } ), 880 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "&" } ), 881 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "|" } ), 882 new 
TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "^" } ), 883 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "&&" } ), 884 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "||" } ), 885 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "!" } ), 886 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "{" } ), 887 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "}" } ), 888 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "(" } ), 889 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { ")" } ), 890 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "[" } ), 891 new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String [] { "]" } ) 892 }; 893 894 private static final int SMALL_LOOPS = 10; 896 private static final double LARGE_SMALL_RATIO = 100.0; 897 898 private static final String TEST_MESSAGE = System.getProperty("line.separator", "\n") + "{0}, flags \"{1}\", types \"{2}\": {3} bytes."; 900 901 private TokenizerProperties _properties = null; 905 private int _flags = 0; 906 private long _typeMask = 0; 907 908 private static File _smallFile = null; 912 private static File _largeFile = null; 913 } 914 915 | Popular Tags |