/*****************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 ****************************************************************/

package org.apache.cayenne.access;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.cayenne.CayenneRuntimeException;
import org.apache.cayenne.ObjectId;
import org.apache.cayenne.PersistenceState;
import org.apache.cayenne.Persistent;
import org.apache.cayenne.graph.CompoundDiff;
import org.apache.cayenne.graph.GraphDiff;
import org.apache.cayenne.map.DbEntity;
import org.apache.cayenne.query.BatchQuery;
import org.apache.cayenne.reflect.ClassDescriptor;
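
// A minimal usage sketch (hypothetical caller code; in Cayenne the flush is
// driven by DataContext's commit machinery, and the exact wiring may differ).
// It assumes ObjectStoreGraphDiff can be built directly from the ObjectStore:
//
//     GraphDiff changes = new ObjectStoreGraphDiff(context.getObjectStore());
//     GraphDiff replacedIds = new DataDomainFlushAction(domain).flush(context, changes);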

/**
 * A stateful commit handler used by DataContext to perform a commit operation.
 * DataDomainFlushAction resolves primary key dependencies and referential integrity
 * dependencies (including multi-reflexive entities), generates primary keys, creates
 * batches for bulk data modifications, and assigns operations to data nodes.
 * 
 * @author Andrus Adamchik
 * @since 1.2
 */
class DataDomainFlushAction {

    private final DataDomain domain;
    private DataContext context;
    private Map changesByObjectId;

    private CompoundDiff resultDiff;
    private Collection resultDeletedIds;
    private Map resultModifiedSnapshots;
    private Collection resultIndirectlyModifiedIds;

    private DataDomainInsertBucket insertBucket;
    private DataDomainUpdateBucket updateBucket;
    private DataDomainDeleteBucket deleteBucket;
    private DataDomainFlattenedBucket flattenedBucket;

    private List queries;

    DataDomainFlushAction(DataDomain domain) {
        this.domain = domain;
    }

    DataDomain getDomain() {
        return domain;
    }

    DataContext getContext() {
        return context;
    }

    Collection getResultDeletedIds() {
        return resultDeletedIds;
    }

    CompoundDiff getResultDiff() {
        return resultDiff;
    }

    Collection getResultIndirectlyModifiedIds() {
        return resultIndirectlyModifiedIds;
    }

    Map getResultModifiedSnapshots() {
        return resultModifiedSnapshots;
    }

    ObjectDiff objectDiff(Object objectId) {
        return (ObjectDiff) changesByObjectId.get(objectId);
    }

    void addFlattenedInsert(DbEntity flattenedEntity, FlattenedArcKey flattenedInsertInfo) {
        flattenedBucket.addFlattenedInsert(flattenedEntity, flattenedInsertInfo);
    }

    void addFlattenedDelete(DbEntity flattenedEntity, FlattenedArcKey flattenedDeleteInfo) {
        flattenedBucket.addFlattenedDelete(flattenedEntity, flattenedDeleteInfo);
    }

    GraphDiff flush(DataContext context, GraphDiff changes) {

        if (changes == null) {
            return new CompoundDiff();
        }

        // TODO: Andrus, 3/13/2006 - support categorizing an arbitrary diff
        if (!(changes instanceof ObjectStoreGraphDiff)) {
            throw new IllegalArgumentException("Expected 'ObjectStoreGraphDiff', got: "
                    + changes.getClass().getName());
        }

        this.context = context;

        // ObjectStoreGraphDiff contains changes already categorized by objectId...
        this.changesByObjectId = ((ObjectStoreGraphDiff) changes).getChangesByObjectId();

        this.insertBucket = new DataDomainInsertBucket(this);
        this.deleteBucket = new DataDomainDeleteBucket(this);
        this.updateBucket = new DataDomainUpdateBucket(this);
        this.flattenedBucket = new DataDomainFlattenedBucket(this);

        this.queries = new ArrayList();
        this.resultIndirectlyModifiedIds = new HashSet();
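
        // Categorize the per-object diffs into insert/update/delete/flattened
        // buckets and build the ordered list of batch queries.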
        preprocess(context, changes);

        if (queries.isEmpty()) {
            return new CompoundDiff();
        }

        this.resultDiff = new CompoundDiff();
        this.resultDeletedIds = new ArrayList();
        this.resultModifiedSnapshots = new HashMap();
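
        // Execute the generated batch queries, grouped by target DataNode.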
        runQueries();

        // note that there is no syncing on the object store itself. This is the
        // caller's responsibility.
        synchronized (context.getObjectStore().getDataRowCache()) {
            postprocess(context);
            return resultDiff;
        }
    }

    private void preprocess(DataContext context, GraphDiff changes) {

        // categorize dirty objects by state
        ObjectStore objectStore = context.getObjectStore();

        Iterator it = changesByObjectId.keySet().iterator();
        while (it.hasNext()) {
            ObjectId id = (ObjectId) it.next();
            Persistent object = (Persistent) objectStore.getNode(id);
            ClassDescriptor descriptor = context.getEntityResolver().getClassDescriptor(
                    id.getEntityName());

            switch (object.getPersistenceState()) {
                case PersistenceState.NEW:
                    insertBucket.addDirtyObject(object, descriptor);
                    break;
                case PersistenceState.MODIFIED:
                    updateBucket.addDirtyObject(object, descriptor);
                    break;
                case PersistenceState.DELETED:
                    deleteBucket.addDirtyObject(object, descriptor);
                    break;
            }
        }

        new DataDomainIndirectDiffBuilder(this).processIndirectChanges(changes);
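
        // The append order below matters for referential integrity: inserts run
        // before the flattened (join table) inserts that reference them, updates
        // run next, and flattened deletes run before the deletes of the rows
        // they point to.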
        insertBucket.appendQueries(queries);
        flattenedBucket.appendInserts(queries);
        updateBucket.appendQueries(queries);
        flattenedBucket.appendDeletes(queries);
        deleteBucket.appendQueries(queries);
    }

    private void runQueries() {
        DataDomainFlushObserver observer = new DataDomainFlushObserver();

        // split the query list by spanned nodes and run each single-node range
        // individually. Since connections are reused per node within an open
        // transaction, there should not be much overhead in accessing the same
        // node multiple times (this may happen due to imperfect sorting).

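        // Example (hypothetical): four batch queries over four distinct
        // entities mapped to nodes [n1, n1, n2, n1] run as three contiguous
        // ranges: [0, 2) on n1, [2, 3) on n2, and [3, 4) on n1.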

        try {

            DataNode lastNode = null;
            DbEntity lastEntity = null;
            int rangeStart = 0;
            int len = queries.size();

            for (int i = 0; i < len; i++) {

                BatchQuery query = (BatchQuery) queries.get(i);
                if (query.getDbEntity() != lastEntity) {
                    lastEntity = query.getDbEntity();

                    DataNode node = domain.lookupDataNode(lastEntity.getDataMap());
                    if (node != lastNode) {

                        if (i - rangeStart > 0) {
                            lastNode.performQueries(
                                    queries.subList(rangeStart, i),
                                    observer);
                        }

                        rangeStart = i;
                        lastNode = node;
                    }
                }
            }
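
            // 'queries' is never empty here (flush() returns early when it is),
            // so the loop above has run at least once and 'lastNode' is non-null.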
            // process the last segment of the query list...
            lastNode.performQueries(queries.subList(rangeStart, len), observer);
        }
        catch (Throwable th) {
            Transaction.getThreadTransaction().setRollbackOnly();
            throw new CayenneRuntimeException("Transaction was rolled back.", th);
        }
    }

    /*
     * Sends notification of changes to the DataRowStore and returns a GraphDiff
     * with replaced ObjectIds.
     */
    private void postprocess(DataContext context) {
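
        // The per-operation buckets fold their results into the collections
        // declared above: deleted ids, modified snapshots, and ObjectId
        // replacements on resultDiff.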
        deleteBucket.postprocess();
        updateBucket.postprocess();
        insertBucket.postprocess();

        // notify cache...
        if (!resultDeletedIds.isEmpty()
                || !resultModifiedSnapshots.isEmpty()
                || !resultIndirectlyModifiedIds.isEmpty()) {

            context.getObjectStore().getDataRowCache().processSnapshotChanges(
                    context.getObjectStore(),
                    resultModifiedSnapshots,
                    resultDeletedIds,
                    Collections.EMPTY_LIST,
                    resultIndirectlyModifiedIds);
        }

        context.getObjectStore().postprocessAfterCommit(resultDiff);
    }
}