

/*
 * Copyright 2004 Blandware (http://www.blandware.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.blandware.atleap.persistence.hibernate.core;

import com.blandware.atleap.common.util.PartialCollection;
import com.blandware.atleap.common.util.QueryInfo;
import com.blandware.atleap.model.core.MenuItem;
import com.blandware.atleap.persistence.core.BackupDAO;
import com.blandware.atleap.persistence.core.MenuDAO;
import com.blandware.atleap.persistence.exception.BackupFileAccessException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dbunit.database.*;
import org.dbunit.dataset.CachedDataSet;
import org.dbunit.dataset.FilteredDataSet;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.datatype.IDataTypeFactory;
import org.dbunit.dataset.filter.ITableFilter;
import org.dbunit.dataset.stream.IDataSetProducer;
import org.dbunit.dataset.xml.XmlDataSet;
import org.dbunit.dataset.xml.XmlProducer;
import org.dbunit.ext.mssql.InsertIdentityOperation;
import org.dbunit.operation.DatabaseOperation;
import org.hibernate.FlushMode;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.cfg.Configuration;
import org.hibernate.dialect.Dialect;
import org.hibernate.util.StringHelper;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.HibernateTemplate;
import org.springframework.orm.hibernate3.LocalSessionFactoryBean;
import org.xml.sax.InputSource;

import java.io.*;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Date;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;

/**
 * <p>DAO for performing backup/restore database operations</p>
 * <p/>
 * <p><a href="BackupDAOHibernate.java.htm"><i>View Source</i></a></p>
 *
 * @author Andrey Grebnev <a href="mailto:andrey.grebnev@blandware.com">&lt;andrey.grebnev@blandware.com&gt;</a>
 * @version $Revision: 1.5 $ $Date: 2006/03/25 09:04:40 $
 */

public class BackupDAOHibernate extends BaseDAOHibernate implements BackupDAO {

    private static final String INITIAL_DATA_FILE = "initial-data.xml";
    private static final SimpleDateFormat ARCHIVE_PATTERN_FORMATTER = new SimpleDateFormat("yyyyMMdd-HHmmss'.zip'");
    private static final String ARCHIVE_ENTRY_NAME = "db-export.xml";
    private static final String TEST_TABLE_NAME = "al_core_localizable";
    private static final boolean TEST_TABLE_NAME_QUOTED = true;

    protected transient final Log log = LogFactory.getLog(getClass());

    protected String escapePattern = "\"?\"";

    /**
     * Set escape pattern for DBMS
     *
     * @param escapePattern e.g. <code>"?"</code> or <code>`?`</code> or <code>[?]</code>
     */
    public void setEscapePattern(String escapePattern) {
        this.escapePattern = escapePattern;
    }

    protected String databaseType = "mysql";

    /**
     * Set up database type. If it is <code>sqlserver</code>, the MS SQL Server-specific
     * InsertIdentityOperation.CLEAN_INSERT is used; plain CLEAN_INSERT is used in all other cases.
     *
     * @param databaseType database type, e.g. <code>mysql</code> or <code>sqlserver</code>
     */
    public void setDatabaseType(String databaseType) {
        this.databaseType = databaseType;
    }

    protected String schema = null;

    /**
     * Set schema for database tables
     *
     * @param schema schema name; blank values are treated as <code>null</code>
     */
    public void setSchema(String schema) {
        if (schema != null && schema.trim().length() == 0)
            this.schema = null;
        else
            this.schema = schema;
    }

    protected String backupDirPath = ".";

    /**
     * Set up path to backup directory
     *
     * @param backupDirPath
     */
    public void setBackupDirPath(String backupDirPath) {
        this.backupDirPath = backupDirPath;
    }

    protected String dataTypeFactoryClass = "org.dbunit.ext.mysql.MySqlDataTypeFactory";

    /**
     * Set up datatype factory for DBUnit
     *
     * @param dataTypeFactoryClass
     */
    public void setDataTypeFactoryClass(String dataTypeFactoryClass) {
        this.dataTypeFactoryClass = dataTypeFactoryClass;
    }

    protected LocalSessionFactoryBean localSessionFactoryBean = null;

    /**
     * Set up local session factory bean
     * @param localSessionFactoryBean
     */
    public void setLocalSessionFactoryBean(LocalSessionFactoryBean localSessionFactoryBean) {
        this.localSessionFactoryBean = localSessionFactoryBean;
    }

    protected MenuDAO menuDAO = null;

    public void setMenuDAO(MenuDAO menuDAO) {
        this.menuDAO = menuDAO;
    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#backup()
     */
    public void backup() throws BackupFileAccessException {
        HibernateTemplate hibernateTemplate = new HibernateTemplate(getSessionFactory());
        hibernateTemplate.setFlushMode(HibernateTemplate.FLUSH_NEVER);
        hibernateTemplate.execute(
                new HibernateCallback() {
                    public Object doInHibernate(Session session) throws HibernateException, SQLException {
                        Connection con = session.connection();

                        boolean oldAutoCommit = con.getAutoCommit();
                        if (!oldAutoCommit) {
                            con.setAutoCommit(true);
                        }

                        OutputStream out = null;
                        try {
                            IDatabaseConnection connection = getConnection(con);

                            // Set up the ResultSet table factory
                            IResultSetTableFactory factory = new CachedResultSetTableFactory();
                            DatabaseConfig config = connection.getConfig();
                            config.setProperty(DatabaseConfig.PROPERTY_RESULTSET_TABLE_FACTORY, factory);
                            IDataSet dataset = connection.createDataSet();

                            // Use a topologically sorted dataset so tables are exported in dependency order
                            ITableFilter filter = new DatabaseSequenceFilter(connection);
                            dataset = new FilteredDataSet(filter, dataset);

                            out = openOutput();
                            XmlDataSet.write(dataset, out);

                            if (log.isInfoEnabled()) {
                                log.info("Backing up database into file dated " + new Date() + " finished.");
                            }

                        } catch (Exception ex) {
                            String message = "Cannot backup database into dir " + backupDirPath;
                            if (log.isWarnEnabled()) {
                                log.warn(message, ex);
                            }
                            throw new HibernateException(message, ex);
                        } finally {
                            try {
                                if (out != null) {
                                    out.close();
                                }
                            } catch (Exception e) {
                                // ignore
                            }

                            if (!oldAutoCommit) {
                                con.setAutoCommit(false);
                            }
                        }

                        return null;
                    }
                }
        );

    }

    /**
     * Execute the given schema script on the given JDBC Connection.
     * Will log unsuccessful statements and continue to execute.
     * @param con the JDBC Connection to execute the script on
     * @param sql the SQL statements to execute
     * @throws SQLException if thrown by JDBC methods
     */
    protected void executeSQLScript(Connection con, String[] sql) throws SQLException {
        if (sql != null && sql.length > 0) {
            Statement stmt = con.createStatement();
            try {
                for (int i = 0; i < sql.length; i++) {
                    if (log.isDebugEnabled()) {
                        log.debug("Executing schema statement: " + sql[i]);
                    }
                    try {
                        stmt.executeUpdate(sql[i]);
                    }
                    catch (SQLException ex) {
                        if (log.isWarnEnabled()) {
                            log.warn("Unsuccessful schema statement: " + sql[i], ex);
                        }
                    }
                }
            }
            finally {
                if (stmt != null) {
                    try {
                        stmt.close();
                    }
                    catch (SQLException ex) {
                        if (log.isWarnEnabled()) {
                            log.warn("Could not close JDBC Statement", ex);
                        }
                    }
                    catch (RuntimeException ex) {
                        if (log.isErrorEnabled()) {
                            log.error("Unexpected exception on closing JDBC Statement", ex);
                        }
                    }
                }
            }
        }
    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#restore(java.util.Date, java.lang.Boolean)
     */
    public void restore(final Date date, final Boolean force) throws BackupFileAccessException {

        HibernateTemplate hibernateTemplate = new HibernateTemplate(getSessionFactory());
        hibernateTemplate.setFlushMode(HibernateTemplate.FLUSH_NEVER);
        hibernateTemplate.execute(
                new HibernateCallback() {
                    public Object doInHibernate(Session session) throws HibernateException, SQLException {
                        Connection con = session.connection();

                        boolean oldAutoCommit = con.getAutoCommit();
                        if (!oldAutoCommit) {
                            con.setAutoCommit(true);
                        }
                        InputStream is = null;
                        try {
                            is = openInput(date);

                            Configuration configuration = localSessionFactoryBean.getConfiguration();
                            Dialect dialect = Dialect.getDialect(configuration.getProperties());

                            boolean tablesExist = isTables(con);

                            String[] sql = null;

                            if (!tablesExist) {
                                // create tables
                                sql = configuration.generateSchemaCreationScript(dialect);
                                executeSQLScript(con, sql);

                                if (log.isInfoEnabled()) {
                                    log.info("Database tables created.");
                                }
                            } else {
                                if (Boolean.TRUE.equals(force)) {
                                    // delete menu items explicitly, because they have a self reference
                                    // and some databases (e.g. MySQL, HSQLDB) cannot delete them otherwise
                                    session.setFlushMode(FlushMode.AUTO);

                                    Collection menuItems = menuDAO.listMenuItems(null);
                                    for (Iterator iterator = menuItems.iterator(); iterator.hasNext();) {
                                        MenuItem menuItem = (MenuItem) iterator.next();
                                        menuDAO.deleteMenuItem(menuItem);
                                    }
                                    session.flush();
                                    session.setFlushMode(FlushMode.NEVER);
                                    session.clear();
                                }
                            }

                            if (!tablesExist || Boolean.TRUE.equals(force)) {
                                // load data
                                IDatabaseConnection connection = getConnection(con);

                                if (is == null) {
                                    throw new BackupFileAccessException("Cannot read file");
                                }

                                IDataSetProducer producer = new XmlProducer(new InputSource(is));
                                IDataSet dataset = new CachedDataSet(producer);

                                DatabaseOperation operation = DatabaseOperation.CLEAN_INSERT;
                                if ("sqlserver".equalsIgnoreCase(databaseType)) {
                                    operation = InsertIdentityOperation.CLEAN_INSERT;
                                }

                                operation.execute(connection, dataset);

                                if (log.isInfoEnabled()) {
                                    if (date == null)
                                        log.info("Initial data from file " + INITIAL_DATA_FILE + " loaded.");
                                    else
                                        log.info("Data from archive dated " + date + " loaded.");
                                }
                            }


                        } catch (Exception ex) {
                            String message = "Cannot restore database with date " + date;
                            if (log.isWarnEnabled()) {
                                log.warn(message, ex);
                            }
                            throw new HibernateException(message, ex);
                        } finally {
                            try {
                                if (is != null) {
                                    is.close();
                                }
                            } catch (Exception e) {
                                // ignore
                            }

                            if (!oldAutoCommit) {
                                con.setAutoCommit(false);
                            }
                        }

                        return null;
                    }
                }
        );

    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#canListArchives()
     */
    public boolean canListArchives() {
        File dir = new File(backupDirPath);
        if (!dir.canRead()) {
            return false;
        }
        String files[] = dir.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith(".zip");
            }
        });

        if (files == null) {
            return false;
        }

        return true;
    }

    /**
     * @see com.blandware.atleap.persistence.core.BackupDAO#listArchives(com.blandware.atleap.common.util.QueryInfo)
     */
    public PartialCollection listArchives(QueryInfo queryInfo) throws BackupFileAccessException {
        File dir = new File(backupDirPath);
        createDirIfNotExists();
        String accessErrorMessage = "Cannot read " + backupDirPath + " directory";
        if (!dir.canRead()) {
            throw new BackupFileAccessException(accessErrorMessage);
        }
        String files[] = dir.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith(".zip");
            }
        });

        if (files == null) {
            throw new BackupFileAccessException(accessErrorMessage);
        }

        List list = new ArrayList(files.length + 1);
        for (int i = 0; i < files.length; i++) {
            String fileName = files[i];
            Date date = null;
            try {
                date = ARCHIVE_PATTERN_FORMATTER.parse(fileName);
            } catch (Exception ex) {
                // not an archive created by this DAO, skip it
            }
            if (date != null)
                list.add(date);
        }
        Collections.sort(list);

        int fromIndex = queryInfo.getOffset().intValue();
        int toIndex = Math.min(fromIndex + queryInfo.getLimit().intValue(), list.size());

        return new PartialCollection(list.subList(fromIndex, toIndex), list.size());
    }

    /**
     * Get DBUnit configured connection
     *
     * @param con SQL connection
     * @return DBUnit connection
     * @throws Exception if e.g. datatype factory class not found
     */
    protected IDatabaseConnection getConnection(Connection con) throws Exception {
        IDatabaseConnection connection = new DatabaseConnection(con, schema);
        DatabaseConfig config = connection.getConfig();
        config.setFeature(DatabaseConfig.FEATURE_BATCHED_STATEMENTS, false);
        config.setFeature(DatabaseConfig.FEATURE_QUALIFIED_TABLE_NAMES, false);
        config.setFeature(DatabaseConfig.FEATURE_DATATYPE_WARNING, true);
        config.setProperty(DatabaseConfig.PROPERTY_ESCAPE_PATTERN, escapePattern);
        config.setProperty(DatabaseConfig.PROPERTY_RESULTSET_TABLE_FACTORY,
                new ForwardOnlyResultSetTableFactory());
        IDataTypeFactory dataTypeFactory = (IDataTypeFactory) Class.forName(dataTypeFactoryClass).newInstance();
        config.setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY, dataTypeFactory);

        return connection;
    }

    /**
     * Open input stream for reading data from archive
     * @param archiveDate if date is <code>null</code>, initial data is read as a classpath resource
     * @return InputStream. Do not forget to close it.
     * @throws Exception if something goes wrong
     */
    protected InputStream openInput(Date archiveDate) throws Exception {
        if (archiveDate != null) {
            File file = new File(backupDirPath, ARCHIVE_PATTERN_FORMATTER.format(archiveDate));
            if (!file.canRead()) {
                throw new BackupFileAccessException("Cannot read from " + file.getPath() + " file");
            }
            ZipInputStream zis = new ZipInputStream(new BufferedInputStream(new FileInputStream(file)));
            ZipEntry entry = null;
            while ((entry = zis.getNextEntry()) != null) {
                if (entry.getName().equalsIgnoreCase(ARCHIVE_ENTRY_NAME)) {
                    return zis;
                }
            }
            if (log.isErrorEnabled()) {
                log.error("Cannot find " + ARCHIVE_ENTRY_NAME + " inside archive");
            }
            zis.close();
            return null;

        } else {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            if (classLoader == null) {
                classLoader = getClass().getClassLoader();
            }

            InputStream is = classLoader.getResourceAsStream(INITIAL_DATA_FILE);
            if (is == null) {
                if (log.isErrorEnabled()) {
                    log.error("Cannot load " + INITIAL_DATA_FILE);
                }
            }
            return is;
        }
    }

    /**
     * Open output stream for writing data into archive
     * @return output stream. Do not forget to close it.
     * @throws Exception if something goes wrong
     */
    protected OutputStream openOutput() throws Exception {
        File dir = new File(backupDirPath);
        createDirIfNotExists();
        if (!dir.canWrite()) {
            throw new BackupFileAccessException("Cannot write to " + dir.getPath() + " dir");
        }

        File file = new File(backupDirPath, ARCHIVE_PATTERN_FORMATTER.format(new Date()));
        ZipOutputStream out = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
        ZipEntry entry = new ZipEntry(ARCHIVE_ENTRY_NAME);
        out.putNextEntry(entry);
        return out;
    }

    /**
     * Are there any tables in the database?
     * @param connection JDBC connection
     * @return true if TEST_TABLE_NAME exists
     * @throws SQLException if database metadata cannot be read
     */
    protected boolean isTables(Connection connection) throws SQLException {
        boolean result = false;
        try {
            DatabaseMetaData metadata = connection.getMetaData();

            ResultSet rs = null;
            try {
                if (TEST_TABLE_NAME_QUOTED) {
                    if (metadata.supportsMixedCaseQuotedIdentifiers() || metadata.storesMixedCaseQuotedIdentifiers()) {
                        rs = metadata.getTables(
                                null,
                                schema,
                                TEST_TABLE_NAME,
                                new String[] {"TABLE"}
                            );
                    } else if (metadata.storesLowerCaseQuotedIdentifiers()) {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toLowerCase(schema),
                                StringHelper.toLowerCase(TEST_TABLE_NAME),
                                new String[] {"TABLE"}
                            );
                    } else {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toUpperCase(schema),
                                StringHelper.toUpperCase(TEST_TABLE_NAME),
                                new String[] {"TABLE"}
                            );
                    }

                } else {
                    if (metadata.supportsMixedCaseIdentifiers() || metadata.storesMixedCaseIdentifiers()) {
                        rs = metadata.getTables(
                                null,
                                schema,
                                TEST_TABLE_NAME,
                                new String[] {"TABLE"}
                            );
                    } else if (metadata.storesLowerCaseIdentifiers()) {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toLowerCase(schema),
                                StringHelper.toLowerCase(TEST_TABLE_NAME),
                                new String[] {"TABLE"}
                            );
                    } else {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toUpperCase(schema),
                                StringHelper.toUpperCase(TEST_TABLE_NAME),
                                new String[] {"TABLE"}
                            );
                    }
                }
                result = rs.next();

            } finally {
                if (rs != null) rs.close();
            }

        } catch (SQLException ex) {
            if (log.isErrorEnabled()) {
                log.error("Could not get metadata", ex);
            }
            throw ex;
        }
        return result;
    }

    /**
     * Tries to create a dbbackup directory if it does not exist
     *
     * @return true if dbbackup directory already exists or was created
     * successfully.
     */
    public boolean createDirIfNotExists() {
        File dir = new File(backupDirPath);
        boolean created = false;

        if (!dir.exists()) {
            if (log.isDebugEnabled()) {
                log.debug(backupDirPath + " does not exist, trying to create it");
            }
            created = dir.mkdir();
        } else {
            created = true;
        }

        return created;
    }

}
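
Example usage, a minimal hypothetical wiring sketch that is not part of the AtLeap sources: it assumes a configured Spring LocalSessionFactoryBean (sessionFactoryBean) and a MenuDAO implementation (menuDAO) are available, that BaseDAOHibernate inherits the usual setSessionFactory(..) from Spring's HibernateDaoSupport, and that someArchiveDate is a java.util.Date taken from an existing archive name; paths and values are placeholders, and in the real application the Spring container performs this wiring.

// Hypothetical wiring sketch; normally done via Spring bean definitions.
BackupDAOHibernate backupDAO = new BackupDAOHibernate();
backupDAO.setSessionFactory((SessionFactory) sessionFactoryBean.getObject()); // inherited setter, assumed from HibernateDaoSupport
backupDAO.setLocalSessionFactoryBean(sessionFactoryBean); // restore() needs it to generate the schema creation script
backupDAO.setMenuDAO(menuDAO);                            // restore(force = true) deletes menu items through it
backupDAO.setDatabaseType("mysql");                       // anything but "sqlserver" uses plain CLEAN_INSERT
backupDAO.setDataTypeFactoryClass("org.dbunit.ext.mysql.MySqlDataTypeFactory");
backupDAO.setEscapePattern("`?`");                        // MySQL-style identifier quoting
backupDAO.setBackupDirPath("/var/backups/atleap");        // example path
backupDAO.setSchema(null);                                // no explicit schema

// Write a db-export.xml archive named yyyyMMdd-HHmmss.zip into the backup directory
backupDAO.backup();

// Load the bundled initial-data.xml, but only if no application tables exist yet
backupDAO.restore(null, Boolean.FALSE);

// Overwrite existing data from the archive created at someArchiveDate
backupDAO.restore(someArchiveDate, Boolean.TRUE);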