package org.netbeans.api.lexer;

import java.util.ConcurrentModificationException;
import org.netbeans.lib.lexer.EmbeddingContainer;
import org.netbeans.lib.lexer.SubSequenceTokenList;
import org.netbeans.lib.lexer.LexerUtilsConstants;
import org.netbeans.lib.lexer.TokenList;
import org.netbeans.lib.lexer.inc.FilterSnapshotTokenList;
import org.netbeans.lib.lexer.inc.SnapshotTokenList;
import org.netbeans.lib.lexer.token.AbstractToken;

/**
 * Iterator-like view over a list of tokens.
 * <p>
 * A sequence is positioned "before" a token until {@link #moveNext()} (or a
 * successful {@link #move(int)}/{@link #moveIndex(int)} followed by
 * {@code moveNext()}) fetches one; methods that inspect the current token
 * throw {@link IllegalStateException} until then.
 * <p>
 * The sequence is bound to the modification count of its underlying token
 * hierarchy at construction time; once the hierarchy is modified, every
 * subsequent operation that validates the count throws
 * {@link ConcurrentModificationException}.
 * <p>
 * NOTE(review): not thread-safe — no synchronization is performed on the
 * mutable position state ({@code tokenIndex}, {@code token}, {@code tokenOffset}).
 */
public final class TokenSequence<T extends TokenId> {

    /** Underlying token storage; never reassigned after construction. */
    private final TokenList<T> tokenList;

    /** Token at the current position, or null when not positioned on a token. */
    private AbstractToken<T> token;

    /** Index of the current token within {@link #tokenList}. */
    private int tokenIndex;

    /** Cached offset of the current token; -1 means "not computed yet". */
    private int tokenOffset = -1;

    /** Modification count of the hierarchy captured at construction time. */
    private final int modCount;

    private int[] parentTokenIndexes;

    /**
     * Package-private: instances are created by the lexer infrastructure only.
     *
     * @param tokenList non-null token list to iterate over.
     */
    TokenSequence(TokenList<T> tokenList) {
        this.tokenList = tokenList;
        this.modCount = tokenList.modCount();
    }

    /**
     * Returns the innermost language of this sequence's language path.
     */
    public Language<T> language() {
        return LexerUtilsConstants.mostEmbeddedLanguage(languagePath());
    }

    /**
     * Returns the complete language path of this sequence
     * (root language plus any embedded languages leading to this level).
     */
    public LanguagePath languagePath() {
        return tokenList.languagePath();
    }

    /**
     * Returns the current token, or null if no token was fetched yet.
     * <p>
     * The returned token may be a flyweight (shared, offset-less) instance;
     * use {@link #offsetToken()} when a token carrying its own offset is needed.
     */
    public Token<T> token() {
        return token;
    }

    /**
     * Returns the current token, replacing a flyweight token with a
     * non-flyweight copy that knows its offset.
     *
     * @throws IllegalStateException if no token was fetched yet.
     */
    public Token<T> offsetToken() {
        checkTokenNotNull();
        if (token.isFlyweight()) {
            // Materialize the flyweight in the token list so subsequent
            // accesses at this index see the offset-carrying instance.
            token = tokenList.replaceFlyToken(tokenIndex, token, offset());
        }
        return token;
    }

    /**
     * Returns the offset of the current token in the underlying input,
     * computing and caching it lazily.
     *
     * @throws IllegalStateException if no token was fetched yet.
     */
    public int offset() {
        checkTokenNotNull();
        if (tokenOffset == -1) {
            tokenOffset = tokenList.tokenOffset(tokenIndex);
        }
        return tokenOffset;
    }

    /**
     * Returns the index of the current position (0 before the first token).
     */
    public int index() {
        return tokenIndex;
    }

    /**
     * Returns a sequence over the embedded tokens of the current token,
     * or null if the current token has no embedding.
     *
     * @throws IllegalStateException if no token was fetched yet.
     */
    public TokenSequence<? extends TokenId> embedded() {
        checkTokenNotNull();
        return embeddedImpl(null);
    }

    /**
     * Shared implementation for both embedded() variants.
     * When this sequence sits on a snapshot/filter token list, the embedded
     * list is wrapped so its offsets are shifted consistently with this level.
     */
    private <ET extends TokenId> TokenSequence<ET> embeddedImpl(Language<ET> embeddedLanguage) {
        TokenList<ET> embeddedTokenList
                = EmbeddingContainer.getEmbedding(tokenList, tokenIndex, embeddedLanguage);
        if (embeddedTokenList != null) {
            // Unwrap a sub-sequence view to inspect the real backing list type.
            TokenList<T> tl = tokenList;
            if (tokenList.getClass() == SubSequenceTokenList.class) {
                tl = ((SubSequenceTokenList<T>)tokenList).delegate();
            }
            if (tl.getClass() == FilterSnapshotTokenList.class) {
                // Propagate the parent's offset shift to the embedded level.
                embeddedTokenList = new FilterSnapshotTokenList<ET>(embeddedTokenList,
                        ((FilterSnapshotTokenList<T>)tl).tokenOffsetDiff());
            } else if (tl.getClass() == SnapshotTokenList.class) {
                // Diff between the snapshot offset and the token's live offset.
                embeddedTokenList = new FilterSnapshotTokenList<ET>(embeddedTokenList,
                        offset() - token().offset(null));
            }
            return new TokenSequence<ET>(embeddedTokenList);
        } else {
            return null;
        }
    }

    /**
     * Returns a sequence over the embedded tokens of the current token
     * restricted to the given language, or null if there is no such embedding.
     *
     * @throws IllegalStateException if no token was fetched yet.
     */
    public <ET extends TokenId> TokenSequence<ET> embedded(Language<ET> embeddedLanguage) {
        checkTokenNotNull();
        return embeddedImpl(embeddedLanguage);
    }

    /**
     * Creates a language embedding in the current token without section joining.
     *
     * @see #createEmbedding(Language, int, int, boolean)
     */
    public boolean createEmbedding(Language<? extends TokenId> embeddedLanguage,
            int startSkipLength, int endSkipLength) {
        return createEmbedding(embeddedLanguage, startSkipLength, endSkipLength, false);
    }

    /**
     * Creates a language embedding in the current token.
     *
     * @param embeddedLanguage language of the embedded section.
     * @param startSkipLength number of characters at the token start excluded
     *        from the embedding.
     * @param endSkipLength number of characters at the token end excluded
     *        from the embedding.
     * @param joinSections whether embedded sections of consecutive tokens
     *        should be lexed as one continuous input.
     * @return true if the embedding was created; the exact failure conditions
     *         are defined by {@code EmbeddingContainer.createEmbedding}.
     * @throws IllegalStateException if no token was fetched yet.
     */
    public boolean createEmbedding(Language<? extends TokenId> embeddedLanguage,
            int startSkipLength, int endSkipLength, boolean joinSections) {
        checkTokenNotNull();
        return EmbeddingContainer.createEmbedding(tokenList, tokenIndex,
                embeddedLanguage, startSkipLength, endSkipLength, joinSections);
    }

    /**
     * Moves to the next token, fetching it as the current token.
     *
     * @return true if a next token exists; false otherwise (position unchanged).
     * @throws ConcurrentModificationException if the hierarchy was modified.
     */
    public boolean moveNext() {
        checkModCount();
        if (token != null) { // Already on a token -> advance the index first.
            tokenIndex++;
        }
        Object tokenOrEmbeddingContainer = tokenList.tokenOrEmbeddingContainer(tokenIndex);
        if (tokenOrEmbeddingContainer != null) {
            AbstractToken<T> origToken = token;
            token = LexerUtilsConstants.token(tokenOrEmbeddingContainer);
            if (tokenOffset != -1) {
                if (origToken != null) {
                    // For a continuous list (or a flyweight successor, which
                    // must be adjacent) the new offset is derivable cheaply;
                    // otherwise invalidate the cache and recompute lazily.
                    if (tokenList.isContinuous() || token.isFlyweight()) {
                        tokenOffset += origToken.length();
                    } else {
                        tokenOffset = -1;
                    }
                } else {
                    tokenOffset = -1;
                }
            }
            return true;
        }
        if (token != null) { // Roll back the speculative index increment.
            tokenIndex--;
        }
        return false;
    }

    /**
     * Moves to the previous token, fetching it as the current token.
     *
     * @return true if a previous token exists; false otherwise (position unchanged).
     * @throws ConcurrentModificationException if the hierarchy was modified.
     */
    public boolean movePrevious() {
        checkModCount();
        if (tokenIndex > 0) {
            AbstractToken<T> origToken = token;
            tokenIndex--;
            token = LexerUtilsConstants.token(tokenList.tokenOrEmbeddingContainer(tokenIndex));
            if (tokenOffset != -1) {
                // Mirror of the moveNext() cache update: a flyweight original
                // token is adjacent to its predecessor by construction.
                if (tokenList.isContinuous() || origToken.isFlyweight()) {
                    tokenOffset -= token.length();
                } else {
                    tokenOffset = -1;
                }
            }
            return true;
        }
        return false;
    }

    /**
     * Positions the sequence before the token with the given index,
     * clamping to [0, tokenCount()].
     *
     * @return the difference between the requested index and the index
     *         actually set (0 when the index was valid).
     * @throws ConcurrentModificationException if the hierarchy was modified.
     */
    public int moveIndex(int index) {
        checkModCount();
        if (index >= 0) {
            Object tokenOrEmbeddingContainer = tokenList.tokenOrEmbeddingContainer(index);
            if (tokenOrEmbeddingContainer != null) {
                resetTokenIndex(index);
            } else { // Index past the end -> clamp to the token count.
                resetTokenIndex(tokenCount());
            }
        } else { // Negative index -> clamp to zero.
            resetTokenIndex(0);
        }
        return index - tokenIndex;
    }

    /**
     * Positions the sequence before the first token.
     */
    public void moveStart() {
        moveIndex(0);
    }

    /**
     * Positions the sequence after the last token.
     */
    public void moveEnd() {
        moveIndex(tokenCount());
    }

    /**
     * Positions the sequence before the token that contains (or starts at)
     * the given offset.
     *
     * @param offset target character offset in the underlying input.
     * @return the difference between the given offset and the start offset of
     *         the token before which the sequence gets positioned
     *         (0 when the offset hits a token boundary exactly).
     * @throws ConcurrentModificationException if the hierarchy was modified.
     */
    public int move(int offset) {
        checkModCount();
        // Only tokens lexed so far are consulted directly; tokens beyond that
        // point are forced lazily in the forward loop below.
        int tokenCount = tokenList.tokenCountCurrent();
        if (tokenCount == 0) {
            if (tokenList.tokenOrEmbeddingContainer(0) == null) { // Truly empty.
                resetTokenIndex(0);
                return offset;
            }
            // Token creation above made at least one token available.
            tokenCount = tokenList.tokenCountCurrent();
        }

        int prevTokenOffset = tokenList.tokenOffset(tokenCount - 1);
        if (offset > prevTokenOffset) {
            // Offset lies beyond the last lexed token: lex forward one token
            // at a time until the offset is covered or input is exhausted.
            int tokenLength = LexerUtilsConstants.token(tokenList, tokenCount - 1).length();
            while (offset >= prevTokenOffset + tokenLength) {
                Object tokenOrEmbeddingContainer = tokenList.tokenOrEmbeddingContainer(tokenCount);
                if (tokenOrEmbeddingContainer != null) {
                    AbstractToken<T> t = LexerUtilsConstants.token(tokenOrEmbeddingContainer);
                    if (t.isFlyweight()) {
                        // Flyweights carry no offset; they are adjacent.
                        prevTokenOffset += tokenLength;
                    } else {
                        prevTokenOffset = tokenList.tokenOffset(tokenCount);
                    }
                    tokenLength = t.length();
                    tokenCount++;
                } else { // Ran out of tokens -> position past the last one.
                    resetTokenIndex(tokenCount);
                    tokenOffset = prevTokenOffset + tokenLength;
                    return offset - tokenOffset;
                }
            }
            resetTokenIndex(tokenCount - 1);
            tokenOffset = prevTokenOffset;
            return offset - prevTokenOffset;
        }

        // Binary search among the already-lexed tokens.
        int low = 0;
        int high = tokenCount - 1;
        while (low <= high) {
            int mid = (low + high) >>> 1; // Overflow-safe midpoint.
            int midStartOffset = tokenList.tokenOffset(mid);
            if (midStartOffset < offset) {
                low = mid + 1;
            } else if (midStartOffset > offset) {
                high = mid - 1;
            } else { // Exact token-boundary hit.
                resetTokenIndex(mid);
                tokenOffset = midStartOffset;
                return 0;
            }
        }

        if (high >= 0) {
            AbstractToken<T> t = LexerUtilsConstants.token(tokenList, high);
            prevTokenOffset = tokenList.tokenOffset(high);
            // In a non-continuous list the offset may fall into a gap after
            // token 'high'; position before the following token then.
            if (!tokenList.isContinuous() && offset > prevTokenOffset + t.length()) {
                high++;
                prevTokenOffset += t.length();
            }
        } else { // Offset precedes the first token.
            high = 0;
            prevTokenOffset = tokenList.tokenOffset(0);
        }
        resetTokenIndex(high);
        tokenOffset = prevTokenOffset;
        return offset - prevTokenOffset;
    }

    /**
     * Returns true if this sequence contains no tokens.
     */
    public boolean isEmpty() {
        return (tokenIndex == 0 && tokenList.tokenOrEmbeddingContainer(0) == null);
    }

    /**
     * Returns the total number of tokens; may force lexing of the whole input.
     *
     * @throws ConcurrentModificationException if the hierarchy was modified.
     */
    public int tokenCount() {
        checkModCount();
        return tokenList.tokenCount();
    }

    /**
     * Returns a sub-sequence starting at the given offset and extending
     * to the end of this sequence.
     */
    public TokenSequence<T> subSequence(int startOffset) {
        return subSequence(startOffset, Integer.MAX_VALUE);
    }

    /**
     * Returns a sub-sequence limited to the given offset range.
     * Limits of an existing sub-sequence are intersected, never widened.
     *
     * @throws ConcurrentModificationException if the hierarchy was modified.
     */
    public TokenSequence<T> subSequence(int startOffset, int endOffset) {
        checkModCount();
        TokenList<T> tl;
        if (tokenList.getClass() == SubSequenceTokenList.class) {
            // Avoid stacking wrappers: re-wrap the delegate with the
            // intersection of the old and new limits.
            SubSequenceTokenList<T> stl = (SubSequenceTokenList<T>)tokenList;
            tl = stl.delegate();
            startOffset = Math.max(startOffset, stl.limitStartOffset());
            endOffset = Math.min(endOffset, stl.limitEndOffset());
        } else {
            tl = tokenList;
        }
        return new TokenSequence<T>(new SubSequenceTokenList<T>(tl, startOffset, endOffset));
    }

    @Override
    public String toString() {
        return LexerUtilsConstants.appendTokenList(null, tokenList, tokenIndex).toString();
    }

    int[] parentTokenIndexes() {
        return parentTokenIndexes;
    }

    /** Resets the position to the given index and clears the cached token/offset. */
    private void resetTokenIndex(int index) {
        tokenIndex = index;
        token = null;
        tokenOffset = -1;
    }

    /** Fails fast when a token-dependent method is called before moveNext(). */
    private void checkTokenNotNull() {
        if (token == null) {
            throw new IllegalStateException(
                "No token fetched by moveNext() from token sequence yet: index=" + tokenIndex
            );
        }
    }

    /** Fails fast when the underlying hierarchy changed since construction. */
    private void checkModCount() {
        if (tokenList.modCount() != this.modCount) {
            throw new ConcurrentModificationException(
                "This token sequence is no longer valid. Underlying token hierarchy"
                + " has been modified: " + this.modCount + " != " + tokenList.modCount()
            );
        }
    }

}