1 17 package org.alfresco.repo.search.impl.lucene; 18 19 import java.io.IOException ; 20 import java.io.InputStream ; 21 import java.io.InputStreamReader ; 22 import java.io.Serializable ; 23 import java.io.UnsupportedEncodingException ; 24 import java.util.ArrayList ; 25 import java.util.Collection ; 26 import java.util.Collections ; 27 import java.util.HashMap ; 28 import java.util.Iterator ; 29 import java.util.LinkedHashSet ; 30 import java.util.LinkedList ; 31 import java.util.List ; 32 import java.util.ListIterator ; 33 import java.util.Map ; 34 import java.util.Set ; 35 36 import javax.transaction.Status ; 37 import javax.transaction.xa.XAResource ; 38 39 import org.alfresco.model.ContentModel; 40 import org.alfresco.repo.content.MimetypeMap; 41 import org.alfresco.repo.search.IndexerException; 42 import org.alfresco.repo.search.impl.lucene.fts.FTSIndexerAware; 43 import org.alfresco.repo.search.impl.lucene.fts.FullTextSearchIndexer; 44 import org.alfresco.service.cmr.dictionary.AspectDefinition; 45 import org.alfresco.service.cmr.dictionary.DataTypeDefinition; 46 import org.alfresco.service.cmr.dictionary.DictionaryService; 47 import org.alfresco.service.cmr.dictionary.PropertyDefinition; 48 import org.alfresco.service.cmr.dictionary.TypeDefinition; 49 import org.alfresco.service.cmr.repository.ChildAssociationRef; 50 import org.alfresco.service.cmr.repository.ContentData; 51 import org.alfresco.service.cmr.repository.ContentIOException; 52 import org.alfresco.service.cmr.repository.ContentReader; 53 import org.alfresco.service.cmr.repository.ContentService; 54 import org.alfresco.service.cmr.repository.ContentWriter; 55 import org.alfresco.service.cmr.repository.InvalidNodeRefException; 56 import org.alfresco.service.cmr.repository.NoTransformerException; 57 import org.alfresco.service.cmr.repository.NodeRef; 58 import org.alfresco.service.cmr.repository.NodeService; 59 import org.alfresco.service.cmr.repository.Path; 60 import org.alfresco.service.cmr.repository.StoreRef; 61 import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter; 62 import org.alfresco.service.cmr.search.ResultSetRow; 63 import org.alfresco.service.cmr.search.SearchParameters; 64 import org.alfresco.service.namespace.QName; 65 import org.alfresco.util.EqualsHelper; 66 import org.alfresco.util.ISO9075; 67 import org.apache.log4j.Logger; 68 import org.apache.lucene.document.Document; 69 import org.apache.lucene.document.Field; 70 import org.apache.lucene.index.IndexReader; 71 import org.apache.lucene.index.IndexWriter; 72 import org.apache.lucene.index.Term; 73 import org.apache.lucene.index.TermDocs; 74 import org.apache.lucene.search.BooleanQuery; 75 import org.apache.lucene.search.Hits; 76 import org.apache.lucene.search.IndexSearcher; 77 import org.apache.lucene.search.Searcher; 78 import org.apache.lucene.search.TermQuery; 79 80 86 public class LuceneIndexerImpl extends LuceneBase implements LuceneIndexer 87 { 88 public static final String NOT_INDEXED_NO_TRANSFORMATION = "nint"; 89 90 public static final String NOT_INDEXED_TRANSFORMATION_FAILED = "nitf"; 91 92 public static final String NOT_INDEXED_CONTENT_MISSING = "nicm"; 93 94 private static Logger s_logger = Logger.getLogger(LuceneIndexerImpl.class); 95 96 99 private enum Action 100 { 101 INDEX, REINDEX, DELETE, CASCADEREINDEX 102 }; 103 104 107 private NodeService nodeService; 108 109 112 private ContentService contentService; 113 114 119 120 private Set <NodeRef> deletions = new LinkedHashSet <NodeRef>(); 121 122 125 126 private int status = Status.STATUS_UNKNOWN; 127 128 131 132 private boolean isModified = false; 133 134 138 139 private Boolean isFTSUpdate = null; 140 141 144 private List <Command> commandList = new ArrayList <Command>(10000); 145 146 149 private FTSIndexerAware callBack; 150 151 154 private int remainingCount = 0; 155 156 159 private ArrayList <Helper> toFTSIndex = new ArrayList <Helper>(); 160 161 165 LuceneIndexerImpl() 166 { 167 super(); 168 } 169 170 173 174 public void setDictionaryService(DictionaryService dictionaryService) 175 { 176 super.setDictionaryService(dictionaryService); 177 } 178 179 184 185 public void setNodeService(NodeService nodeService) 186 { 187 this.nodeService = nodeService; 188 } 189 190 195 public void setContentService(ContentService contentService) 196 { 197 this.contentService = contentService; 198 } 199 200 203 204 208 209 private void checkAbleToDoWork(boolean isFTS, boolean isModified) 210 { 211 if (isFTSUpdate == null) 212 { 213 isFTSUpdate = Boolean.valueOf(isFTS); 214 } 215 else 216 { 217 if (isFTS != isFTSUpdate.booleanValue()) 218 { 219 throw new IndexerException("Can not mix FTS and transactional updates"); 220 } 221 } 222 223 switch (status) 224 { 225 case Status.STATUS_UNKNOWN: 226 status = Status.STATUS_ACTIVE; 227 break; 228 case Status.STATUS_ACTIVE: 229 break; 231 default: 232 throw new IndexerException(buildErrorString()); 234 } 235 this.isModified = isModified; 236 } 237 238 243 private String buildErrorString() 244 { 245 StringBuilder buffer = new StringBuilder (128); 246 buffer.append("The indexer is unable to accept more work: "); 247 switch (status) 248 { 249 case Status.STATUS_COMMITTED: 250 buffer.append("The indexer has been committed"); 251 break; 252 case Status.STATUS_COMMITTING: 253 buffer.append("The indexer is committing"); 254 break; 255 case Status.STATUS_MARKED_ROLLBACK: 256 buffer.append("The indexer is marked for rollback"); 257 break; 258 case Status.STATUS_PREPARED: 259 buffer.append("The indexer is prepared to commit"); 260 break; 261 case Status.STATUS_PREPARING: 262 buffer.append("The indexer is preparing to commit"); 263 break; 264 case Status.STATUS_ROLLEDBACK: 265 buffer.append("The indexer has been rolled back"); 266 break; 267 case Status.STATUS_ROLLING_BACK: 268 buffer.append("The indexer is rolling back"); 269 break; 270 case Status.STATUS_UNKNOWN: 271 buffer.append("The indexer is in an unknown state"); 272 break; 273 default: 274 break; 275 } 276 return buffer.toString(); 277 } 278 279 282 283 public void createNode(ChildAssociationRef relationshipRef) throws LuceneIndexException 284 { 285 if (s_logger.isDebugEnabled()) 286 { 287 s_logger.debug("Create node " + relationshipRef.getChildRef()); 288 } 289 checkAbleToDoWork(false, true); 290 try 291 { 292 NodeRef childRef = relationshipRef.getChildRef(); 293 if ((relationshipRef.getParentRef() == null) 295 && childRef.equals(nodeService.getRootNode(childRef.getStoreRef()))) 296 { 297 addRootNodesToDeletionList(); 298 s_logger.warn("Detected root node addition: deleting all nodes from the index"); 299 } 300 index(childRef); 301 } 302 catch (LuceneIndexException e) 303 { 304 setRollbackOnly(); 305 throw new LuceneIndexException("Create node failed", e); 306 } 307 } 308 309 private void addRootNodesToDeletionList() 310 { 311 IndexReader mainReader = null; 312 try 313 { 314 try 315 { 316 mainReader = getReader(); 317 TermDocs td = mainReader.termDocs(new Term("ISROOT", "T")); 318 while (td.next()) 319 { 320 int doc = td.doc(); 321 Document document = mainReader.document(doc); 322 String id = document.get("ID"); 323 NodeRef ref = new NodeRef(id); 324 deleteImpl(ref, false, true, mainReader); 325 } 326 } 327 catch (IOException e) 328 { 329 throw new LuceneIndexException("Failed to delete all primary nodes", e); 330 } 331 } 332 finally 333 { 334 if (mainReader != null) 335 { 336 try 337 { 338 mainReader.close(); 339 } 340 catch (IOException e) 341 { 342 throw new LuceneIndexException("Filed to close main reader", e); 343 } 344 } 345 } 346 } 347 348 public void updateNode(NodeRef nodeRef) throws LuceneIndexException 349 { 350 if (s_logger.isDebugEnabled()) 351 { 352 s_logger.debug("Update node " + nodeRef); 353 } 354 checkAbleToDoWork(false, true); 355 try 356 { 357 reindex(nodeRef, false); 358 } 359 catch (LuceneIndexException e) 360 { 361 setRollbackOnly(); 362 throw new LuceneIndexException("Update node failed", e); 363 } 364 } 365 366 public void deleteNode(ChildAssociationRef relationshipRef) throws LuceneIndexException 367 { 368 if (s_logger.isDebugEnabled()) 369 { 370 s_logger.debug("Delete node " + relationshipRef.getChildRef()); 371 } 372 checkAbleToDoWork(false, true); 373 try 374 { 375 delete(relationshipRef.getChildRef()); 376 } 377 catch (LuceneIndexException e) 378 { 379 setRollbackOnly(); 380 throw new LuceneIndexException("Delete node failed", e); 381 } 382 } 383 384 public void createChildRelationship(ChildAssociationRef relationshipRef) throws LuceneIndexException 385 { 386 if (s_logger.isDebugEnabled()) 387 { 388 s_logger.debug("Create child " + relationshipRef); 389 } 390 checkAbleToDoWork(false, true); 391 try 392 { 393 reindex(relationshipRef.getChildRef(), true); 396 } 397 catch (LuceneIndexException e) 398 { 399 setRollbackOnly(); 400 throw new LuceneIndexException("Failed to create child relationship", e); 401 } 402 } 403 404 public void updateChildRelationship(ChildAssociationRef relationshipBeforeRef, 405 ChildAssociationRef relationshipAfterRef) throws LuceneIndexException 406 { 407 if (s_logger.isDebugEnabled()) 408 { 409 s_logger.debug("Update child " + relationshipBeforeRef + " to " + relationshipAfterRef); 410 } 411 checkAbleToDoWork(false, true); 412 try 413 { 414 if (relationshipBeforeRef.getParentRef() != null) 416 { 417 } 419 reindex(relationshipBeforeRef.getChildRef(), true); 420 } 421 catch (LuceneIndexException e) 422 { 423 setRollbackOnly(); 424 throw new LuceneIndexException("Failed to update child relationship", e); 425 } 426 } 427 428 public void deleteChildRelationship(ChildAssociationRef relationshipRef) throws LuceneIndexException 429 { 430 if (s_logger.isDebugEnabled()) 431 { 432 s_logger.debug("Delete child " + relationshipRef); 433 } 434 checkAbleToDoWork(false, true); 435 try 436 { 437 if (relationshipRef.getParentRef() != null) 439 { 440 } 442 reindex(relationshipRef.getChildRef(), true); 443 } 444 catch (LuceneIndexException e) 445 { 446 setRollbackOnly(); 447 throw new LuceneIndexException("Failed to delete child relationship", e); 448 } 449 } 450 451 458 public static LuceneIndexerImpl getUpdateIndexer(StoreRef storeRef, String deltaId, LuceneConfig config) 459 throws LuceneIndexException 460 { 461 if (s_logger.isDebugEnabled()) 462 { 463 s_logger.debug("Creating indexer"); 464 } 465 LuceneIndexerImpl indexer = new LuceneIndexerImpl(); 466 indexer.setLuceneConfig(config); 467 indexer.initialise(storeRef, deltaId, false, true); 468 return indexer; 469 } 470 471 474 475 478 479 public void commit() throws LuceneIndexException 480 { 481 switch (status) 482 { 483 case Status.STATUS_COMMITTING: 484 throw new LuceneIndexException("Unable to commit: Transaction is committing"); 485 case Status.STATUS_COMMITTED: 486 throw new LuceneIndexException("Unable to commit: Transaction is commited "); 487 case Status.STATUS_ROLLING_BACK: 488 throw new LuceneIndexException("Unable to commit: Transaction is rolling back"); 489 case Status.STATUS_ROLLEDBACK: 490 throw new LuceneIndexException("Unable to commit: Transaction is aleady rolled back"); 491 case Status.STATUS_MARKED_ROLLBACK: 492 throw new LuceneIndexException("Unable to commit: Transaction is marked for roll back"); 493 case Status.STATUS_PREPARING: 494 throw new LuceneIndexException("Unable to commit: Transaction is preparing"); 495 case Status.STATUS_ACTIVE: 496 prepare(); 498 default: 500 if (status != Status.STATUS_PREPARED) 501 { 502 throw new LuceneIndexException("Index must be prepared to commit"); 503 } 504 status = Status.STATUS_COMMITTING; 505 try 506 { 507 if (isModified()) 508 { 509 if (isFTSUpdate.booleanValue()) 510 { 511 doFTSIndexCommit(); 512 } 514 else 515 { 516 Set <Term> terms = new LinkedHashSet <Term>(); 518 for (NodeRef nodeRef : deletions) 519 { 520 terms.add(new Term("ID", nodeRef.toString())); 521 } 522 mergeDeltaIntoMain(terms); 524 luceneFullTextSearchIndexer.requiresIndex(store); 525 } 526 } 527 status = Status.STATUS_COMMITTED; 528 if (callBack != null) 529 { 530 callBack.indexCompleted(store, remainingCount, null); 531 } 532 } 533 catch (LuceneIndexException e) 534 { 535 rollback(); 537 throw new LuceneIndexException("Commit failed", e); 538 } 539 finally 540 { 541 deleteDelta(); 543 } 544 break; 545 } 546 } 547 548 private void doFTSIndexCommit() throws LuceneIndexException 549 { 550 IndexReader mainReader = null; 551 IndexReader deltaReader = null; 552 IndexSearcher mainSearcher = null; 553 IndexSearcher deltaSearcher = null; 554 555 try 556 { 557 try 558 { 559 mainReader = getReader(); 560 deltaReader = getDeltaReader(); 561 mainSearcher = new IndexSearcher(mainReader); 562 deltaSearcher = new IndexSearcher(deltaReader); 563 564 for (Helper helper : toFTSIndex) 565 { 566 BooleanQuery query = new BooleanQuery(); 567 query.add(new TermQuery(new Term("ID", helper.nodeRef.toString())), true, false); 568 query.add(new TermQuery(new Term("TX", helper.tx)), true, false); 569 query.add(new TermQuery(new Term("ISNODE", "T")), false, false); 570 571 try 572 { 573 Hits hits = mainSearcher.search(query); 574 if (hits.length() > 0) 575 { 576 for (int i = 0; i < hits.length(); i++) 578 { 579 mainReader.delete(hits.id(i)); 580 } 581 } 582 else 583 { 584 hits = deltaSearcher.search(query); 585 for (int i = 0; i < hits.length(); i++) 586 { 587 deltaReader.delete(hits.id(i)); 588 } 589 } 590 } 591 catch (IOException e) 592 { 593 throw new LuceneIndexException("Failed to delete an FTS update from the original index", e); 594 } 595 } 596 597 } 598 finally 599 { 600 if (deltaSearcher != null) 601 { 602 try 603 { 604 deltaSearcher.close(); 605 } 606 catch (IOException e) 607 { 608 s_logger.warn("Failed to close delta searcher", e); 609 } 610 } 611 if (mainSearcher != null) 612 { 613 try 614 { 615 mainSearcher.close(); 616 } 617 catch (IOException e) 618 { 619 s_logger.warn("Failed to close main searcher", e); 620 } 621 } 622 try 623 { 624 closeDeltaReader(); 625 } 626 catch (LuceneIndexException e) 627 { 628 s_logger.warn("Failed to close delta reader", e); 629 } 630 if (mainReader != null) 631 { 632 try 633 { 634 mainReader.close(); 635 } 636 catch (IOException e) 637 { 638 s_logger.warn("Failed to close main reader", e); 639 } 640 } 641 } 642 643 mergeDeltaIntoMain(new LinkedHashSet <Term>()); 644 } 645 catch (LuceneIndexException e) 646 { 647 rollback(); 649 throw new LuceneIndexException("Commit failed", e); 650 } 651 finally 652 { 653 deleteDelta(); 655 } 656 657 } 658 659 668 public int prepare() throws LuceneIndexException 669 { 670 671 switch (status) 672 { 673 case Status.STATUS_COMMITTING: 674 throw new IndexerException("Unable to prepare: Transaction is committing"); 675 case Status.STATUS_COMMITTED: 676 throw new IndexerException("Unable to prepare: Transaction is commited "); 677 case Status.STATUS_ROLLING_BACK: 678 throw new IndexerException("Unable to prepare: Transaction is rolling back"); 679 case Status.STATUS_ROLLEDBACK: 680 throw new IndexerException("Unable to prepare: Transaction is aleady rolled back"); 681 case Status.STATUS_MARKED_ROLLBACK: 682 throw new IndexerException("Unable to prepare: Transaction is marked for roll back"); 683 case Status.STATUS_PREPARING: 684 throw new IndexerException("Unable to prepare: Transaction is already preparing"); 685 case Status.STATUS_PREPARED: 686 throw new IndexerException("Unable to prepare: Transaction is already prepared"); 687 default: 688 status = Status.STATUS_PREPARING; 689 try 690 { 691 if (isModified()) 692 { 693 saveDelta(); 694 flushPending(); 695 prepareToMergeIntoMain(); 696 } 697 status = Status.STATUS_PREPARED; 698 return isModified ? XAResource.XA_OK : XAResource.XA_RDONLY; 699 } 700 catch (LuceneIndexException e) 701 { 702 setRollbackOnly(); 703 throw new LuceneIndexException("Index failed to prepare", e); 704 } 705 } 706 } 707 708 713 public boolean isModified() 714 { 715 return isModified; 716 } 717 718 723 public int getStatus() 724 { 725 return status; 726 } 727 728 732 733 public void rollback() throws LuceneIndexException 734 { 735 switch (status) 736 { 737 738 case Status.STATUS_COMMITTED: 739 throw new IndexerException("Unable to roll back: Transaction is committed "); 740 case Status.STATUS_ROLLING_BACK: 741 throw new IndexerException("Unable to roll back: Transaction is rolling back"); 742 case Status.STATUS_ROLLEDBACK: 743 throw new IndexerException("Unable to roll back: Transaction is already rolled back"); 744 case Status.STATUS_COMMITTING: 745 default: 747 status = Status.STATUS_ROLLING_BACK; 748 if (isModified()) 749 { 750 deleteDelta(); 751 } 752 status = Status.STATUS_ROLLEDBACK; 753 if (callBack != null) 754 { 755 callBack.indexCompleted(store, 0, null); 756 } 757 break; 758 } 759 } 760 761 765 766 public void setRollbackOnly() 767 { 768 switch (status) 769 { 770 case Status.STATUS_COMMITTING: 771 throw new IndexerException("Unable to mark for rollback: Transaction is committing"); 772 case Status.STATUS_COMMITTED: 773 throw new IndexerException("Unable to mark for rollback: Transaction is committed"); 774 default: 775 status = Status.STATUS_MARKED_ROLLBACK; 776 break; 777 } 778 } 779 780 783 784 private void index(NodeRef nodeRef) throws LuceneIndexException 785 { 786 addCommand(new Command(nodeRef, Action.INDEX)); 787 } 788 789 private void reindex(NodeRef nodeRef, boolean cascadeReindexDirectories) throws LuceneIndexException 790 { 791 addCommand(new Command(nodeRef, cascadeReindexDirectories ? Action.CASCADEREINDEX : Action.REINDEX)); 792 } 793 794 private void delete(NodeRef nodeRef) throws LuceneIndexException 795 { 796 addCommand(new Command(nodeRef, Action.DELETE)); 797 } 798 799 private void addCommand(Command command) 800 { 801 if (commandList.size() > 0) 802 { 803 Command last = commandList.get(commandList.size() - 1); 804 if ((last.action == command.action) && (last.nodeRef.equals(command.nodeRef))) 805 { 806 return; 807 } 808 } 809 purgeCommandList(command); 810 commandList.add(command); 811 812 if (commandList.size() > getLuceneConfig().getIndexerBatchSize()) 813 { 814 flushPending(); 815 } 816 } 817 818 private void purgeCommandList(Command command) 819 { 820 if (command.action == Action.DELETE) 821 { 822 removeFromCommandList(command, false); 823 } 824 else if (command.action == Action.REINDEX) 825 { 826 removeFromCommandList(command, true); 827 } 828 else if (command.action == Action.INDEX) 829 { 830 removeFromCommandList(command, true); 831 } 832 else if (command.action == Action.CASCADEREINDEX) 833 { 834 removeFromCommandList(command, true); 835 } 836 } 837 838 private void removeFromCommandList(Command command, boolean matchExact) 839 { 840 for (ListIterator <Command> it = commandList.listIterator(commandList.size()); it.hasPrevious(); ) 841 { 842 Command current = it.previous(); 843 if (matchExact) 844 { 845 if ((current.action == command.action) && (current.nodeRef.equals(command.nodeRef))) 846 { 847 it.remove(); 848 return; 849 } 850 } 851 else 852 { 853 if (current.nodeRef.equals(command.nodeRef)) 854 { 855 it.remove(); 856 } 857 } 858 } 859 } 860 861 public void flushPending() throws LuceneIndexException 862 { 863 IndexReader mainReader = null; 864 try 865 { 866 mainReader = getReader(); 867 Set <NodeRef> forIndex = new LinkedHashSet <NodeRef>(); 868 869 for (Command command : commandList) 870 { 871 if (command.action == Action.INDEX) 872 { 873 forIndex.add(command.nodeRef); 875 } 876 else if (command.action == Action.REINDEX) 877 { 878 Set <NodeRef> set = deleteImpl(command.nodeRef, true, false, mainReader); 880 881 forIndex.removeAll(set); 884 forIndex.addAll(set); 886 } 887 else if (command.action == Action.CASCADEREINDEX) 888 { 889 Set <NodeRef> set = deleteImpl(command.nodeRef, true, true, mainReader); 891 892 forIndex.removeAll(set); 895 forIndex.addAll(set); 897 } 898 else if (command.action == Action.DELETE) 899 { 900 Set <NodeRef> set = deleteImpl(command.nodeRef, false, true, mainReader); 902 forIndex.removeAll(set); 904 } 905 } 906 commandList.clear(); 907 indexImpl(forIndex, false); 908 } 909 finally 910 { 911 if (mainReader != null) 912 { 913 try 914 { 915 mainReader.close(); 916 } 917 catch (IOException e) 918 { 919 throw new LuceneIndexException("Filed to close main reader", e); 920 } 921 } 922 closeDeltaWriter(); 923 } 924 } 925 926 private Set <NodeRef> deleteImpl(NodeRef nodeRef, boolean forReindex, boolean cascade, IndexReader mainReader) 927 throws LuceneIndexException 928 { 929 getDeltaReader(); 931 Set <NodeRef> refs = new LinkedHashSet <NodeRef>(); 933 934 refs.addAll(deleteContainerAndBelow(nodeRef, getDeltaReader(), true, cascade)); 935 refs.addAll(deleteContainerAndBelow(nodeRef, mainReader, false, cascade)); 936 937 if (!forReindex) 938 { 939 Set <NodeRef> leafrefs = new LinkedHashSet <NodeRef>(); 940 941 leafrefs.addAll(deletePrimary(refs, getDeltaReader(), true)); 942 leafrefs.addAll(deletePrimary(refs, mainReader, false)); 943 944 leafrefs.addAll(deleteReference(refs, getDeltaReader(), true)); 945 leafrefs.addAll(deleteReference(refs, mainReader, false)); 946 947 refs.addAll(leafrefs); 948 } 949 950 deletions.addAll(refs); 951 952 return refs; 953 954 } 955 956 private Set <NodeRef> deletePrimary(Collection <NodeRef> nodeRefs, IndexReader reader, boolean delete) 957 throws LuceneIndexException 958 { 959 960 Set <NodeRef> refs = new LinkedHashSet <NodeRef>(); 961 962 for (NodeRef nodeRef : nodeRefs) 963 { 964 965 try 966 { 967 TermDocs td = reader.termDocs(new Term("PRIMARYPARENT", nodeRef.toString())); 968 while (td.next()) 969 { 970 int doc = td.doc(); 971 Document document = reader.document(doc); 972 String id = document.get("ID"); 973 NodeRef ref = new NodeRef(id); 974 refs.add(ref); 975 if (delete) 976 { 977 reader.delete(doc); 978 } 979 } 980 } 981 catch (IOException e) 982 { 983 throw new LuceneIndexException("Failed to delete node by primary parent for " + nodeRef.toString(), e); 984 } 985 } 986 987 return refs; 988 989 } 990 991 private Set <NodeRef> deleteReference(Collection <NodeRef> nodeRefs, IndexReader reader, boolean delete) 992 throws LuceneIndexException 993 { 994 995 Set <NodeRef> refs = new LinkedHashSet <NodeRef>(); 996 997 for (NodeRef nodeRef : nodeRefs) 998 { 999 1000 try 1001 { 1002 TermDocs td = reader.termDocs(new Term("PARENT", nodeRef.toString())); 1003 while (td.next()) 1004 { 1005 int doc = td.doc(); 1006 Document document = reader.document(doc); 1007 String id = document.get("ID"); 1008 NodeRef ref = new NodeRef(id); 1009 refs.add(ref); 1010 if (delete) 1011 { 1012 reader.delete(doc); 1013 } 1014 } 1015 } 1016 catch (IOException e) 1017 { 1018 throw new LuceneIndexException("Failed to delete node by parent for " + nodeRef.toString(), e); 1019 } 1020 } 1021 1022 return refs; 1023 1024 } 1025 1026 private Set <NodeRef> deleteContainerAndBelow(NodeRef nodeRef, IndexReader reader, boolean delete, boolean cascade) 1027 throws LuceneIndexException 1028 { 1029 Set <NodeRef> refs = new LinkedHashSet <NodeRef>(); 1030 1031 try 1032 { 1033 if (delete) 1034 { 1035 reader.delete(new Term("ID", nodeRef.toString())); 1036 } 1037 refs.add(nodeRef); 1038 if (cascade) 1039 { 1040 TermDocs td = reader.termDocs(new Term("ANCESTOR", nodeRef.toString())); 1041 while (td.next()) 1042 { 1043 int doc = td.doc(); 1044 Document document = reader.document(doc); 1045 String id = document.get("ID"); 1046 NodeRef ref = new NodeRef(id); 1047 refs.add(ref); 1048 if (delete) 1049 { 1050 reader.delete(doc); 1051 } 1052 } 1053 } 1054 } 1055 catch (IOException e) 1056 { 1057 throw new LuceneIndexException("Failed to delete container and below for " + nodeRef.toString(), e); 1058 } 1059 return refs; 1060 } 1061 1062 private void indexImpl(Set <NodeRef> nodeRefs, boolean isNew) throws LuceneIndexException 1063 { 1064 for (NodeRef ref : nodeRefs) 1065 { 1066 indexImpl(ref, isNew); 1067 } 1068 } 1069 1070 private void indexImpl(NodeRef nodeRef, boolean isNew) throws LuceneIndexException 1071 { 1072 IndexWriter writer = getDeltaWriter(); 1073 1074 1076 try 1077 { 1078 List <Document> docs = createDocuments(nodeRef, isNew, false, true); 1079 for (Document doc : docs) 1080 { 1081 try 1082 { 1083 writer.addDocument(doc ); 1086 } 1087 catch (IOException e) 1088 { 1089 throw new LuceneIndexException("Failed to add document to index", e); 1090 } 1091 } 1092 } 1093 catch (InvalidNodeRefException e) 1094 { 1095 return; 1097 } 1098 1099 } 1100 1101 static class Counter 1102 { 1103 int countInParent = 0; 1104 1105 int count = -1; 1106 1107 int getCountInParent() 1108 { 1109 return countInParent; 1110 } 1111 1112 int getRepeat() 1113 { 1114 return (count / countInParent) + 1; 1115 } 1116 1117 void incrementParentCount() 1118 { 1119 countInParent++; 1120 } 1121 1122 void increment() 1123 { 1124 count++; 1125 } 1126 1127 } 1128 1129 private class Pair<F, S> 1130 { 1131 private F first; 1132 1133 private S second; 1134 1135 public Pair(F first, S second) 1136 { 1137 this.first = first; 1138 this.second = second; 1139 } 1140 1141 public F getFirst() 1142 { 1143 return first; 1144 } 1145 1146 public S getSecond() 1147 { 1148 return second; 1149 } 1150 } 1151 1152 private List <Document> createDocuments(NodeRef nodeRef, boolean isNew, boolean indexAllProperties, 1153 boolean includeDirectoryDocuments) 1154 { 1155 Map <ChildAssociationRef, Counter> nodeCounts = getNodeCounts(nodeRef); 1156 List <Document> docs = new ArrayList <Document>(); 1157 ChildAssociationRef qNameRef = null; 1158 Map <QName, Serializable > properties = nodeService.getProperties(nodeRef); 1159 NodeRef.Status nodeStatus = nodeService.getNodeStatus(nodeRef); 1160 1161 Collection <Path> directPaths = nodeService.getPaths(nodeRef, false); 1162 Collection <Pair<Path, QName>> categoryPaths = getCategoryPaths(nodeRef, properties); 1163 Collection <Pair<Path, QName>> paths = new ArrayList <Pair<Path, QName>>(directPaths.size() 1164 + categoryPaths.size()); 1165 for (Path path : directPaths) 1166 { 1167 paths.add(new Pair<Path, QName>(path, null)); 1168 } 1169 paths.addAll(categoryPaths); 1170 1171 Document xdoc = new Document(); 1172 xdoc.add(new Field("ID", nodeRef.toString(), true, true, false)); 1173 xdoc.add(new Field("TX", nodeStatus.getChangeTxnId(), true, true, false)); 1174 boolean isAtomic = true; 1175 for (QName propertyName : properties.keySet()) 1176 { 1177 Serializable value = properties.get(propertyName); 1178 isAtomic = indexProperty(nodeRef, propertyName, value, xdoc, isAtomic, true); 1179 if (indexAllProperties) 1180 { 1181 indexProperty(nodeRef, propertyName, value, xdoc, false, false); 1182 } 1183 } 1184 1185 boolean isRoot = nodeRef.equals(nodeService.getRootNode(nodeRef.getStoreRef())); 1186 1187 StringBuilder parentBuffer = new StringBuilder (); 1188 StringBuilder qNameBuffer = new StringBuilder (64); 1189 1190 int containerCount = 0; 1191 for (Iterator <Pair<Path, QName>> it = paths.iterator(); it.hasNext(); ) 1192 { 1193 Pair<Path, QName> pair = it.next(); 1194 1196 qNameRef = getLastRefOrNull(pair.getFirst()); 1197 1198 String pathString = pair.getFirst().toString(); 1199 if ((pathString.length() > 0) && (pathString.charAt(0) == '/')) 1200 { 1201 pathString = pathString.substring(1); 1202 } 1203 1204 if (isRoot) 1205 { 1206 } 1208 else if (pair.getFirst().size() == 1) 1209 { 1210 } 1212 else 1213 { 1215 Counter counter = nodeCounts.get(qNameRef); 1216 1219 if ((counter == null) || (counter.getRepeat() < counter.getCountInParent())) 1220 { 1221 if ((qNameRef != null) && (qNameRef.getParentRef() != null) && (qNameRef.getQName() != null)) 1222 { 1223 if (qNameBuffer.length() > 0) 1224 { 1225 qNameBuffer.append(";/"); 1226 } 1227 qNameBuffer.append(ISO9075.getXPathName(qNameRef.getQName())); 1228 xdoc.add(new Field("PARENT", qNameRef.getParentRef().toString(), true, true, false)); 1229 xdoc.add(new Field("ASSOCTYPEQNAME", ISO9075.getXPathName(qNameRef.getTypeQName()), true, 1230 false, false)); 1231 xdoc.add(new Field("LINKASPECT", (pair.getSecond() == null) ? "" : ISO9075.getXPathName(pair 1232 .getSecond()), true, true, false)); 1233 } 1234 } 1235 1236 if (counter != null) 1237 { 1238 counter.increment(); 1239 } 1240 1241 QName nodeTypeRef = nodeService.getType(nodeRef); 1243 TypeDefinition nodeTypeDef = getDictionaryService().getType(nodeTypeRef); 1244 1246 if (includeDirectoryDocuments) 1247 { 1248 if (nodeTypeDef.getChildAssociations().size() > 0) 1249 { 1250 if (directPaths.contains(pair.getFirst())) 1251 { 1252 Document directoryEntry = new Document(); 1253 directoryEntry.add(new Field("ID", nodeRef.toString(), true, true, false)); 1254 directoryEntry.add(new Field("PATH", pathString, true, true, true)); 1255 for (NodeRef parent : getParents(pair.getFirst())) 1256 { 1257 directoryEntry.add(new Field("ANCESTOR", parent.toString(), false, true, false)); 1258 } 1259 directoryEntry.add(new Field("ISCONTAINER", "T", true, true, false)); 1260 1261 if (isCategory(getDictionaryService().getType(nodeService.getType(nodeRef)))) 1262 { 1263 directoryEntry.add(new Field("ISCATEGORY", "T", true, true, false)); 1264 } 1265 1266 docs.add(directoryEntry); 1267 } 1268 } 1269 } 1270 } 1271 } 1272 1273 if (isRoot) 1275 { 1276 xdoc.add(new Field("ISCONTAINER", "T", true, true, false)); 1278 xdoc.add(new Field("PATH", "", true, true, true)); 1279 xdoc.add(new Field("QNAME", "", true, true, true)); 1280 xdoc.add(new Field("ISROOT", "T", false, true, false)); 1281 xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(ContentModel.ASSOC_CHILDREN), true, false, 1282 false)); 1283 xdoc.add(new Field("ISNODE", "T", false, true, false)); 1284 docs.add(xdoc); 1285 1286 } 1287 else 1288 { 1290 xdoc.add(new Field("QNAME", qNameBuffer.toString(), true, true, true)); 1291 1294 ChildAssociationRef primary = nodeService.getPrimaryParent(nodeRef); 1295 xdoc.add(new Field("PRIMARYPARENT", primary.getParentRef().toString(), true, true, false)); 1296 xdoc.add(new Field("PRIMARYASSOCTYPEQNAME", ISO9075.getXPathName(primary.getTypeQName()), true, false, 1297 false)); 1298 QName typeQName = nodeService.getType(nodeRef); 1299 1300 xdoc.add(new Field("TYPE", ISO9075.getXPathName(typeQName), true, true, false)); 1301 for (QName classRef : nodeService.getAspects(nodeRef)) 1302 { 1303 xdoc.add(new Field("ASPECT", ISO9075.getXPathName(classRef), true, true, false)); 1304 } 1305 1306 xdoc.add(new Field("ISROOT", "F", false, true, false)); 1307 xdoc.add(new Field("ISNODE", "T", false, true, false)); 1308 if (isAtomic || indexAllProperties) 1309 { 1310 xdoc.add(new Field("FTSSTATUS", "Clean", false, true, false)); 1311 } 1312 else 1313 { 1314 if (isNew) 1315 { 1316 xdoc.add(new Field("FTSSTATUS", "New", false, true, false)); 1317 } 1318 else 1319 { 1320 xdoc.add(new Field("FTSSTATUS", "Dirty", false, true, false)); 1321 } 1322 } 1323 1324 docs.add(xdoc); 1326 } 1328 1329 return docs; 1330 } 1331 1332 private ArrayList <NodeRef> getParents(Path path) 1333 { 1334 ArrayList <NodeRef> parentsInDepthOrderStartingWithSelf = new ArrayList <NodeRef>(8); 1335 for (Iterator <Path.Element> elit = path.iterator(); elit.hasNext(); ) 1336 { 1337 Path.Element element = elit.next(); 1338 if (!(element instanceof Path.ChildAssocElement)) 1339 { 1340 throw new IndexerException("Confused path: " + path); 1341 } 1342 Path.ChildAssocElement cae = (Path.ChildAssocElement) element; 1343 parentsInDepthOrderStartingWithSelf.add(0, cae.getRef().getChildRef()); 1344 1345 } 1346 return parentsInDepthOrderStartingWithSelf; 1347 } 1348 1349 private ChildAssociationRef getLastRefOrNull(Path path) 1350 { 1351 if (path.last() instanceof Path.ChildAssocElement) 1352 { 1353 Path.ChildAssocElement cae = (Path.ChildAssocElement) path.last(); 1354 return cae.getRef(); 1355 } 1356 else 1357 { 1358 return null; 1359 } 1360 } 1361 1362 private boolean indexProperty(NodeRef nodeRef, QName propertyName, Serializable value, Document doc, 1363 boolean isAtomic, boolean indexAtomicProperties) 1364 { 1365 String attributeName = "@" 1366 + QName.createQName(propertyName.getNamespaceURI(), ISO9075.encode(propertyName.getLocalName())); 1367 1368 boolean store = true; 1369 boolean index = true; 1370 boolean tokenise = true; 1371 boolean atomic = true; 1372 boolean isContent = false; 1373 1374 PropertyDefinition propertyDef = getDictionaryService().getProperty(propertyName); 1375 if (propertyDef != null) 1376 { 1377 index = propertyDef.isIndexed(); 1378 store = propertyDef.isStoredInIndex(); 1379 tokenise = propertyDef.isTokenisedInIndex(); 1380 atomic = propertyDef.isIndexedAtomically(); 1381 isContent = propertyDef.getDataType().getName().equals(DataTypeDefinition.CONTENT); 1382 } 1383 isAtomic &= atomic; 1384 1385 if (value != null) 1386 { 1387 if (indexAtomicProperties == atomic) 1388 { 1389 if (!indexAtomicProperties) 1390 { 1391 doc.removeFields(propertyName.toString()); 1392 } 1393 for (String strValue : DefaultTypeConverter.INSTANCE.getCollection(String .class, value)) 1395 { 1396 if (strValue != null) 1397 { 1398 1401 if (isContent) 1402 { 1403 ContentData contentData = DefaultTypeConverter.INSTANCE.convert(ContentData.class, value); 1404 if (contentData.getMimetype() != null && index) 1405 { 1406 doc.add(new Field(attributeName + ".mimetype", contentData.getMimetype(), false, true, 1408 false)); 1409 1410 ContentReader reader = contentService.getReader(nodeRef, propertyName); 1411 if (reader != null && reader.exists()) 1412 { 1413 boolean readerReady = true; 1414 if (!EqualsHelper.nullSafeEquals(reader.getMimetype(), 1417 MimetypeMap.MIMETYPE_TEXT_PLAIN) 1418 || !EqualsHelper.nullSafeEquals(reader.getEncoding(), "UTF-8")) 1419 { 1420 ContentWriter writer = contentService.getTempWriter(); 1421 writer.setMimetype(MimetypeMap.MIMETYPE_TEXT_PLAIN); 1422 writer.setEncoding("UTF-8"); 1424 try 1425 { 1426 contentService.transform(reader, writer); 1427 reader = writer.getReader(); 1429 } 1430 catch (NoTransformerException e) 1431 { 1432 if (s_logger.isDebugEnabled()) 1434 { 1435 s_logger.debug("Not indexed: No transformation", e); 1436 } 1437 readerReady = false; 1439 doc.add(Field.Text("TEXT", NOT_INDEXED_NO_TRANSFORMATION)); 1441 doc.add(Field.Text(attributeName, NOT_INDEXED_NO_TRANSFORMATION)); 1442 } 1443 catch (ContentIOException e) 1444 { 1445 if (s_logger.isDebugEnabled()) 1447 { 1448 s_logger.debug("Not indexed: Transformation failed", e); 1449 } 1450 readerReady = false; 1452 doc.add(Field.Text("TEXT", NOT_INDEXED_TRANSFORMATION_FAILED)); 1455 doc.add(Field.Text(attributeName, NOT_INDEXED_TRANSFORMATION_FAILED)); 1456 } 1457 } 1458 if (readerReady) 1461 { 1462 InputStreamReader isr = null; 1463 InputStream ris = reader.getContentInputStream(); 1464 try 1465 { 1466 isr = new InputStreamReader (ris, "UTF-8"); 1467 } 1468 catch (UnsupportedEncodingException e) 1469 { 1470 isr = new InputStreamReader (ris); 1471 } 1472 doc.add(Field.Text("TEXT", isr)); 1473 1474 ris = reader.getReader().getContentInputStream(); 1475 try 1476 { 1477 isr = new InputStreamReader (ris, "UTF-8"); 1478 } 1479 catch (UnsupportedEncodingException e) 1480 { 1481 isr = new InputStreamReader (ris); 1482 } 1483 1484 doc.add(Field.Text("@" 1485 + QName.createQName(propertyName.getNamespaceURI(), ISO9075 1486 .encode(propertyName.getLocalName())), isr)); 1487 } 1488 } 1489 1490 else 1491 { 1493 if (s_logger.isDebugEnabled()) 1495 { 1496 s_logger.debug("Not indexed: Content Missing \n" 1497 + " node: " + nodeRef + "\n" + " reader: " + reader + "\n" 1498 + " content exists: " 1499 + (reader == null ? " --- " : Boolean.toString(reader.exists()))); 1500 } 1501 doc.add(Field.Text("TEXT", NOT_INDEXED_CONTENT_MISSING)); 1503 doc.add(Field.Text(attributeName, NOT_INDEXED_CONTENT_MISSING)); 1504 } 1505 } 1506 } 1507 else 1508 { 1509 doc.add(new Field(attributeName, strValue, store, index, tokenise)); 1510 } 1511 } 1512 } 1513 } 1514 } 1515 1516 return isAtomic; 1517 } 1518 1519 private Map <ChildAssociationRef, Counter> getNodeCounts(NodeRef nodeRef) 1520 { 1521 Map <ChildAssociationRef, Counter> nodeCounts = new HashMap <ChildAssociationRef, Counter>(5); 1522 List <ChildAssociationRef> parentAssocs = nodeService.getParentAssocs(nodeRef); 1523 for (ChildAssociationRef assoc : parentAssocs) 1525 { 1526 Counter counter = nodeCounts.get(assoc); 1527 if (counter == null) 1528 { 1529 counter = new Counter(); 1530 nodeCounts.put(assoc, counter); 1531 } 1532 counter.incrementParentCount(); 1533 1534 } 1535 return nodeCounts; 1536 } 1537 1538 private Collection <Pair<Path, QName>> getCategoryPaths(NodeRef nodeRef, Map <QName, Serializable > properties) 1539 { 1540 ArrayList <Pair<Path, QName>> categoryPaths = new ArrayList <Pair<Path, QName>>(); 1541 Set <QName> aspects = nodeService.getAspects(nodeRef); 1542 1543 for (QName classRef : aspects) 1544 { 1545 AspectDefinition aspDef = getDictionaryService().getAspect(classRef); 1546 if (isCategorised(aspDef)) 1547 { 1548 LinkedList <Pair<Path, QName>> aspectPaths = new LinkedList <Pair<Path, QName>>(); 1549 for (PropertyDefinition propDef : aspDef.getProperties().values()) 1550 { 1551 if (propDef.getDataType().getName().equals(DataTypeDefinition.CATEGORY)) 1552 { 1553 for (NodeRef catRef : DefaultTypeConverter.INSTANCE.getCollection(NodeRef.class, properties 1554 .get(propDef.getName()))) 1555 { 1556 if (catRef != null) 1557 { 1558 for (Path path : nodeService.getPaths(catRef, false)) 1559 { 1560 if ((path.size() > 1) && (path.get(1) instanceof Path.ChildAssocElement)) 1561 { 1562 Path.ChildAssocElement cae = (Path.ChildAssocElement) path.get(1); 1563 boolean isFakeRoot = true; 1564 for (ChildAssociationRef car : nodeService.getParentAssocs(cae.getRef() 1565 .getChildRef())) 1566 { 1567 if (cae.getRef().equals(car)) 1568 { 1569 isFakeRoot = false; 1570 break; 1571 } 1572 } 1573 if (isFakeRoot) 1574 { 1575 if (path.toString().indexOf(aspDef.getName().toString()) != -1) 1576 { 1577 aspectPaths.add(new Pair<Path, QName>(path, aspDef.getName())); 1578 } 1579 } 1580 } 1581 } 1582 1583 } 1584 } 1585 } 1586 } 1587 categoryPaths.addAll(aspectPaths); 1588 } 1589 } 1590 for (Pair<Path, QName> pair : categoryPaths) 1592 { 1593 if (pair.getFirst().last() instanceof Path.ChildAssocElement) 1594 { 1595 Path.ChildAssocElement cae = (Path.ChildAssocElement) pair.getFirst().last(); 1596 ChildAssociationRef assocRef = cae.getRef(); 1597 pair.getFirst().append( 1598 new Path.ChildAssocElement(new ChildAssociationRef(assocRef.getTypeQName(), assocRef 1599 .getChildRef(), QName.createQName("member"), nodeRef))); 1600 } 1601 } 1602 1603 return categoryPaths; 1604 } 1605 1606 private boolean isCategorised(AspectDefinition aspDef) 1607 { 1608 AspectDefinition current = aspDef; 1609 while (current != null) 1610 { 1611 if (current.getName().equals(ContentModel.ASPECT_CLASSIFIABLE)) 1612 { 1613 return true; 1614 } 1615 else 1616 { 1617 QName parentName = current.getParentName(); 1618 if (parentName == null) 1619 { 1620 break; 1621 } 1622 current = getDictionaryService().getAspect(parentName); 1623 } 1624 } 1625 return false; 1626 } 1627 1628 private boolean isCategory(TypeDefinition typeDef) 1629 { 1630 if (typeDef == null) 1631 { 1632 return false; 1633 } 1634 TypeDefinition current = typeDef; 1635 while (current != null) 1636 { 1637 if (current.getName().equals(ContentModel.TYPE_CATEGORY)) 1638 { 1639 return true; 1640 } 1641 else 1642 { 1643 QName parentName = current.getParentName(); 1644 if (parentName == null) 1645 { 1646 break; 1647 } 1648 current = getDictionaryService().getType(parentName); 1649 } 1650 } 1651 return false; 1652 } 1653 1654 public void updateFullTextSearch(int size) throws LuceneIndexException 1655 { 1656 checkAbleToDoWork(true, false); 1657 if (!mainIndexExists()) 1658 { 1659 remainingCount = size; 1660 return; 1661 } 1662 try 1663 { 1664 NodeRef lastId = null; 1665 1666 toFTSIndex = new ArrayList <Helper>(size); 1667 BooleanQuery booleanQuery = new BooleanQuery(); 1668 booleanQuery.add(new TermQuery(new Term("FTSSTATUS", "Dirty")), false, false); 1669 booleanQuery.add(new TermQuery(new Term("FTSSTATUS", "New")), false, false); 1670 1671 int count = 0; 1672 Searcher searcher = null; 1673 LuceneResultSet results = null; 1674 try 1675 { 1676 searcher = getSearcher(null); 1677 if(searcher == null) 1679 { 1680 remainingCount = size; 1681 return; 1682 } 1683 Hits hits; 1684 try 1685 { 1686 hits = searcher.search(booleanQuery); 1687 } 1688 catch (IOException e) 1689 { 1690 throw new LuceneIndexException( 1691 "Failed to execute query to find content which needs updating in the index", e); 1692 } 1693 results = new LuceneResultSet(hits, searcher, nodeService, null, new SearchParameters()); 1694 1695 for (ResultSetRow row : results) 1696 { 1697 LuceneResultSetRow lrow = (LuceneResultSetRow) row; 1698 Helper helper = new Helper(lrow.getNodeRef(), lrow.getDocument().getField("TX").stringValue()); 1699 toFTSIndex.add(helper); 1700 if (++count >= size) 1701 { 1702 break; 1703 } 1704 } 1705 count = results.length(); 1706 } 1707 finally 1708 { 1709 if (results != null) 1710 { 1711 results.close(); } 1713 else if (searcher != null) 1714 { 1715 try 1716 { 1717 searcher.close(); 1718 } 1719 catch (IOException e) 1720 { 1721 throw new LuceneIndexException("Failed to close searcher", e); 1722 } 1723 } 1724 } 1725 1726 if (toFTSIndex.size() > 0) 1727 { 1728 checkAbleToDoWork(true, true); 1729 1730 IndexWriter writer = null; 1731 try 1732 { 1733 writer = getDeltaWriter(); 1734 for (Helper helper : toFTSIndex) 1735 { 1736 NodeRef ref = helper.nodeRef; 1738 1739 List <Document> docs = createDocuments(ref, false, true, false); 1740 for (Document doc : docs) 1741 { 1742 try 1743 { 1744 writer.addDocument(doc ); 1747 } 1748 catch (IOException e) 1749 { 1750 throw new LuceneIndexException("Failed to add document while updating fts index", e); 1751 } 1752 } 1753 1754 if (writer.docCount() > size) 1758 { 1759 if (lastId == null) 1760 { 1761 lastId = ref; 1762 } 1763 if (!lastId.equals(ref)) 1764 { 1765 break; 1766 } 1767 } 1768 } 1769 1770 remainingCount = count - writer.docCount(); 1771 } 1772 catch (LuceneIndexException e) 1773 { 1774 if (writer != null) 1775 { 1776 closeDeltaWriter(); 1777 } 1778 } 1779 } 1780 } 1781 catch (LuceneIndexException e) 1782 { 1783 setRollbackOnly(); 1784 throw new LuceneIndexException("Failed FTS update", e); 1785 } 1786 } 1787 1788 public void registerCallBack(FTSIndexerAware callBack) 1789 { 1790 this.callBack = callBack; 1791 } 1792 1793 private static class Helper 1794 { 1795 NodeRef nodeRef; 1796 1797 String tx; 1798 1799 Helper(NodeRef nodeRef, String tx) 1800 { 1801 this.nodeRef = nodeRef; 1802 this.tx = tx; 1803 } 1804 } 1805 1806 private static class Command 1807 { 1808 NodeRef nodeRef; 1809 1810 Action action; 1811 1812 Command(NodeRef nodeRef, Action action) 1813 { 1814 this.nodeRef = nodeRef; 1815 this.action = action; 1816 } 1817 1818 public String toString() 1819 { 1820 StringBuffer buffer = new StringBuffer (); 1821 if (action == Action.INDEX) 1822 { 1823 buffer.append("Index "); 1824 } 1825 else if (action == Action.DELETE) 1826 { 1827 buffer.append("Delete "); 1828 } 1829 else if (action == Action.REINDEX) 1830 { 1831 buffer.append("Reindex "); 1832 } 1833 else 1834 { 1835 buffer.append("Unknown ... "); 1836 } 1837 buffer.append(nodeRef); 1838 return buffer.toString(); 1839 } 1840 1841 } 1842 1843 private FullTextSearchIndexer luceneFullTextSearchIndexer; 1844 1845 public void setLuceneFullTextSearchIndexer(FullTextSearchIndexer luceneFullTextSearchIndexer) 1846 { 1847 this.luceneFullTextSearchIndexer = luceneFullTextSearchIndexer; 1848 } 1849 1850 public Set <NodeRef> getDeletions() 1851 { 1852 return Collections.unmodifiableSet(deletions); 1853 } 1854} 1855 | Popular Tags |