package de.susebox.jtopas;

import java.util.Iterator;
import java.util.Random;
import java.util.ArrayList;

import java.io.Reader;
import java.io.StringReader;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import junit.framework.Assert;

import de.susebox.java.lang.ExtRuntimeException;

import de.susebox.TestUtilities;


/**
 * Stress test that runs many {@link StandardTokenizer} instances in parallel
 * against a shared, immutable {@link TokenizerProperties} set. Each worker
 * thread repeatedly tokenizes one of the Java-source sample texts in
 * {@link #_testTexts} and checks the token-type sequence against
 * {@link #_expectedResults}. The test passes when every worker stays active
 * (completes enough parse rounds) for the whole test duration.
 */
public class TestMultithreadTokenizer extends TestCase {

  /**
   * Command-line entry point; delegates to the project's test runner.
   *
   * @param args runner options passed through to {@link TestUtilities#run}
   */
  public static void main(String[] args) {
    String[] tests = { TestMultithreadTokenizer.class.getName() };

    TestUtilities.run(tests, args);
  }

  /**
   * Assembles the JUnit 3 suite for this class.
   *
   * @return suite containing the single multithreaded parsing test
   */
  public static Test suite() {
    TestSuite suite = new TestSuite(TestMultithreadTokenizer.class.getName());

    suite.addTest(new TestMultithreadTokenizer("testParallelParsing"));
    return suite;
  }

  /**
   * Standard JUnit 3 constructor.
   *
   * @param test name of the test method to run
   */
  public TestMultithreadTokenizer(String test) {
    super(test);
  }

  /**
   * Creates the shared {@link TokenizerProperties} instance and registers the
   * Java-like keyword/sequence/comment/string properties used by all workers.
   *
   * @throws Exception propagated from property registration
   */
  protected void setUp() throws Exception {
    _properties = new StandardTokenizerProperties();
    for (int index = 0; index < _testProperties.length; ++index) {
      _properties.addProperty(_testProperties[index]);
    }
  }

  /**
   * Releases the shared properties instance.
   *
   * @throws Exception never thrown here
   */
  protected void tearDown() throws Exception {
    _properties = null;
  }

  /**
   * Starts {@link #_numberOfThreads} daemon worker threads that tokenize the
   * sample texts concurrently for {@link #_duration} seconds, polling each
   * worker's activity counter every few seconds. After stopping the workers,
   * asserts that every worker performed a minimum number of parse rounds —
   * a stalled or deadlocked tokenizer shows up as a low activity count.
   *
   * @throws Throwable any failure raised by the workers or the assertions
   */
  public void testParallelParsing() throws Throwable {
    Random random = new Random();
    StringBuffer[] active = new StringBuffer[_numberOfThreads];
    String[] last = new String[_numberOfThreads];
    Runner[] runner = new Runner[_numberOfThreads];
    Thread[] thread = new Thread[_numberOfThreads];
    long start = System.currentTimeMillis();

    // Whitespace tokens are returned explicitly; _expectedResults relies on that.
    _properties = new StandardTokenizerProperties(Flags.F_RETURN_WHITESPACES);

    for (int index = 0; index < _testProperties.length; ++index) {
      _properties.addProperty(_testProperties[index]);
    }

    // Each worker gets its own activity counter (a StringBuffer so the worker
    // can mutate it in place) and a random starting text index.
    for (int index = 0; index < _numberOfThreads; ++index) {
      active[index] = new StringBuffer("0");
      runner[index] = new Runner(this, random.nextInt(_testTexts.length), active[index]);
      thread[index] = new Thread(runner[index]);
      thread[index].setDaemon(true);
    }

    try {
      for (int index = 0; index < _numberOfThreads; ++index) {
        thread[index].start();
      }

      // Poll activity counters until the configured duration has elapsed.
      while (System.currentTimeMillis() - start < _duration * 1000) {
        synchronized(this) {
          try {
            wait(3000);
          } catch (InterruptedException ex) {
            // Restore the interrupt flag; the loop terminates via the timer.
            Thread.currentThread().interrupt();
          }
        }

        for (int index = 0; index < _numberOfThreads; ++index) {
          System.out.println(System.currentTimeMillis() + ": Activity at runner " + index + ": " + active[index]);
          last[index] = active[index].toString();
        }
      }

      // Signal all workers to stop and give them a moment to finish their round.
      for (int index = 0; index < _numberOfThreads; ++index) {
        runner[index].stop();
      }
      Thread.sleep(1000);

      // Each worker must have completed a minimum share of parse rounds.
      for (int index = 0; index < _numberOfThreads; ++index) {
        String activity = active[index].toString();

        assertTrue("No good activity at runner " + index + ": " + activity,
                   Integer.parseInt(activity) > (50 / _numberOfThreads) * _duration);
      }

    } finally {
      for (int index = 0; index < _numberOfThreads; ++index) {
        thread[index] = null;
      }
    }
  }

  // Number of concurrent worker threads.
  private static int _numberOfThreads = 30;
  // Test duration in seconds.
  private static int _duration = 60;

  /**
   * Java-like tokenizer configuration shared by all workers: strings, line and
   * block comments, keywords and operator/punctuation sequences.
   */
  private static final TokenizerProperty[] _testProperties = {
    new TokenizerProperty(Token.STRING, new String[] { "\"", "\"", "\\" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.STRING, new String[] { "'", "'", "\\" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.LINE_COMMENT, new String[] { "//" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.BLOCK_COMMENT, new String[] { "/*", "*/" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.BLOCK_COMMENT, new String[] { "/**", "*/" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "if" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "else" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "return" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "native" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "for" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "while" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "do" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "switch" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "case" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "default" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "break" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "class" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "interface" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "synchronized" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "public" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "protected" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "private" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "final" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "static" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "implements" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "extends" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "byte" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "char" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "int" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "long" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "double" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "String" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "boolean" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "void" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "throw" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "throws" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "new" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "assert" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "try" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "catch" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "finally" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "import" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "package" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "this" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "super" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.KEYWORD, new String[] { "null" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "," }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ";" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "==" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "!=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ">=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ">>=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "<=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "<<=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ">" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "<" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "+=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "-=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "*=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "/=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "&=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "|=" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "<<" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ">>" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ">>>" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "++" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "--" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "~" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "*" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "/" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "%" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "^" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "+" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "-" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "." }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "(" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ")" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "{" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "}" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "[" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "]" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { "?" }, null, 0, Flags.F_NO_CASE),
    new TokenizerProperty(Token.SPECIAL_SEQUENCE, new String[] { ":" }, null, 0, Flags.F_NO_CASE)
  };

  /**
   * Sample Java sources the workers tokenize. The first uses Unix line ends
   * (\n), the second mostly DOS line ends (\r\n) to exercise both cases.
   */
  private static final String[] _testTexts = {
    "import junit.framework.TestCase;\n"
    + "\n"
    + "public class MyTest extends TestCase implements TestListener {\n"
    + "/** default constructor */\n"
    + "public MyTest() {\n"
    + "this(null);\n"
    + "}\n"
    + "\n"
    + "/**\n"
    + " * constructor taking name\n"
    + " *\n"
    + " * @param name the name\n"
    + " */\n"
    + "public MyTest(String name) {\n"
    + "setName(name);\n"
    + "}\n"
    + "\n"
    + "/**\n"
    + " * Getting the name\n"
    + " *\n"
    + " * @return the name\n"
    + " */\n"
    + "public String getName() {\n"
    + "return _myName;\n"
    + "}\n"
    + "\n"
    + "/**\n"
    + " * Setting a new name\n"
    + " *\n"
    + " * @param name the new name\n"
    + " * @return the old name or <code>null</code>\n"
    + " */\n"
    + "public String setName(String name) {\n"
    + "// setting _myName safely to a non-null value\n"
    + "_myName = (name != null) ? name : \"\";\n"
    + "}\n"
    + "\n"
    + "// Members\n"
    + "private String _myName = null;\n"
    + "}\n",

    "// package declaration\r\n"
    + "package my.domain.toppackage.subpackage;\r\n"
    + "\r\n"
    + "// imports\r\n"
    + "import java.applet.Applet;\r\n"
    + "import java.util.ArrayList;\r\n"
    + "import java.io.InputStream;\r\n"
    + "import java.io.InputStreamReader;\r\n"
    + "import java.io.FileInputStream;\r\n"
    + "import java.io.StringReader;\r\n"
    + "import java.io.IOException;\r\n"
    + "\n"
    + "/**\r\n"
    + " * A class for parsing only :-)\r\n"
    + " */\r\n"
    + "public class MyRunner extends Applet implements Runnable {\r\n"
    + "/** default constructor */\r\n"
    + "public MyRunner() {\r\n"
    + "super();\r\n"
    + "}\r\n"
    + "\r\n"
    + "/**\r\n"
    + " * constructor taking name\r\n"
    + " *\r\n"
    + " * @param name the name\r\n"
    + " */\r\n"
    + "public MyRunner(String name) {\r\n"
    + "super(name);\r\n"
    + "}\r\n"
    + "\r\n"
    + "/**\r\n"
    + " * Getting the name\r\n"
    + " *\r\n"
    + " * @return the name\r\n"
    + " */\r\n"
    + "public String getName() {\r\n"
    + "return super.getName();\r\n"
    + "}\r\n"
    + "\r\n"
    + "/**\n"
    + " * Run method a defined in {@link java.lang.Runnable}.\r\n"
    + " */\r\n"
    + "public void run() throws Throwable {\r\n"
    + "Thread thread = Thread.currentThread();\r\n"
    + "long count = 0;\r\n"
    + "\r\n"
    + "while (Thread.currentThread() == this) {\r\n"
    + "count++;\r\n"
    + "_shifter >>= 1;\r\n"
    + "synchronized(this){\r\n"
    + "try {\r\n"
    + "wait((count % 100) + 10);\r\n"
    + "} catch (Exception ex) {\r\n"
    + "break;\r\n"
    + "} finally {\r\n"
    + "_shifter = 0;\r\n"
    + "}\r\n"
    + "}\r\n"
    + "}\r\n"
    + "}\r\n"
    + "\r\n"
    + "// Members\r\n"
    + "private long _shifter = 0;\r\n"
    + "}"
  };

  /**
   * Expected token-type sequence for each entry of {@link #_testTexts},
   * one sub-array per text, terminated by {@link Token#EOF}.
   */
  protected static final int _expectedResults[][] = {
    {
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.LINE_COMMENT,
      Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.STRING, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.LINE_COMMENT,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.EOF
    },
    {
      Token.LINE_COMMENT, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.LINE_COMMENT, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.BLOCK_COMMENT, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.NORMAL, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.KEYWORD, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.LINE_COMMENT,
      Token.KEYWORD, Token.WHITESPACE, Token.KEYWORD, Token.WHITESPACE, Token.NORMAL, Token.WHITESPACE, Token.SPECIAL_SEQUENCE, Token.WHITESPACE, Token.NORMAL, Token.SPECIAL_SEQUENCE, Token.WHITESPACE,
      Token.SPECIAL_SEQUENCE,
      Token.EOF
    }
  };

  // Shared tokenizer configuration; rebuilt per test in testParallelParsing.
  protected TokenizerProperties _properties = null;


  /**
   * Worker that repeatedly tokenizes one of the parent's sample texts and
   * verifies the token-type sequence, bumping a shared activity counter after
   * each successful round. Runs until {@link #stop()} is called.
   */
  class Runner implements Runnable {

    /**
     * @param parent     owning test (provides texts, expected results, properties)
     * @param startIndex initial offset into the text array (rotates each round)
     * @param activity   shared counter the poller in the parent reads
     */
    public Runner(TestMultithreadTokenizer parent, int startIndex, StringBuffer activity) {
      _parent = parent;
      _tokenizer = new StandardTokenizer(_parent._properties);
      _start = startIndex;
      _activity = activity;
    }

    /**
     * Parses the sample texts in rotation, asserting every token type matches
     * the expected sequence, until stopped. Exceptions are printed (the low
     * activity count then fails the parent's assertion).
     */
    public void run() {
      Thread thread = Thread.currentThread();
      String name = thread.getName();
      int counter = _start;
      ArrayList tokens = new ArrayList();

      try {
        while (Thread.currentThread() == thread && !_stop) {
          int index = counter % _parent._testTexts.length;
          int[] expected = _parent._expectedResults[index];

          // Collect the full token stream for this round.
          _tokenizer.setSource(new ReaderSource(new StringReader(_parent._testTexts[index])));
          tokens.clear();
          while (_tokenizer.hasMoreToken()) {
            tokens.add(_tokenizer.nextToken());
          }

          // Compare token types against the expected sequence.
          int typeIndex = 0;

          while (typeIndex < tokens.size() && typeIndex < expected.length) {
            Token token = (Token)tokens.get(typeIndex);
            int type = token.getType();

            _parent.assertTrue("Index " + typeIndex + ": Expected type " + Token.getTypeName(expected[typeIndex]) + ", found " + token, type == expected[typeIndex]);
            typeIndex++;
          }
          _parent.assertTrue("Expected " + expected.length + " token, found " + tokens.size() + ".",
                             expected.length == tokens.size());

          counter++;

          // Bump the shared activity counter (read by the parent's poller).
          long value = Long.parseLong(_activity.toString());
          _activity.setLength(0);
          _activity.append(value + 1);

          // Brief pause so threads interleave.
          synchronized(this) {
            try {
              wait(1);
            } catch (InterruptedException ex) {
              // Restore the interrupt flag; the loop exits via _stop.
              Thread.currentThread().interrupt();
            }
          }
        }
      } catch (Throwable t) {
        t.printStackTrace();
      } finally {
        _tokenizer.close();
      }
      System.out.println(name + ": exiting. Activity: " + _activity);
    }

    /**
     * Requests the worker loop to terminate after its current round.
     */
    public void stop() {
      _stop = true;
    }

    private TestMultithreadTokenizer _parent = null;
    private Tokenizer _tokenizer = null;
    private int _start = 0;
    // volatile: written by the test thread in stop(), read by the worker loop.
    private volatile boolean _stop = false;
    private StringBuffer _activity = null;
  }
}