package com.blandware.atleap.persistence.hibernate.core;

import com.blandware.atleap.common.util.PartialCollection;
import com.blandware.atleap.common.util.QueryInfo;
import com.blandware.atleap.model.core.MenuItem;
import com.blandware.atleap.persistence.core.BackupDAO;
import com.blandware.atleap.persistence.core.MenuDAO;
import com.blandware.atleap.persistence.exception.BackupFileAccessException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dbunit.database.*;
import org.dbunit.dataset.CachedDataSet;
import org.dbunit.dataset.FilteredDataSet;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.datatype.IDataTypeFactory;
import org.dbunit.dataset.filter.ITableFilter;
import org.dbunit.dataset.stream.IDataSetProducer;
import org.dbunit.dataset.xml.XmlDataSet;
import org.dbunit.dataset.xml.XmlProducer;
import org.dbunit.ext.mssql.InsertIdentityOperation;
import org.dbunit.operation.DatabaseOperation;
import org.hibernate.FlushMode;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.cfg.Configuration;
import org.hibernate.dialect.Dialect;
import org.hibernate.util.StringHelper;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.HibernateTemplate;
import org.springframework.orm.hibernate3.LocalSessionFactoryBean;
import org.xml.sax.InputSource;

import java.io.*;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Date;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;

/**
 * Hibernate/DbUnit implementation of {@link BackupDAO}.
 * <p>
 * Dumps the whole database into a zipped DbUnit XML data set
 * (one {@code db-export.xml} entry per archive, archives named by timestamp)
 * and restores either such an archive or the bundled initial data set.
 * All archive files live directly in {@link #backupDirPath}.
 */
public class BackupDAOHibernate extends BaseDAOHibernate implements BackupDAO {

    /** Classpath resource restored when {@code restore(null, …)} is called. */
    private static final String INITIAL_DATA_FILE = "initial-data.xml";

    /** Archive file name pattern, e.g. {@code 20240101-120000.zip}. */
    private static final String ARCHIVE_NAME_PATTERN = "yyyyMMdd-HHmmss'.zip'";

    /** Name of the single data-set entry inside every archive. */
    private static final String ARCHIVE_ENTRY_NAME = "db-export.xml";

    /** Table whose presence is used to decide whether the schema exists. */
    private static final String TEST_TABLE_NAME = "al_core_localizable";
    private static final boolean TEST_TABLE_NAME_QUOTED = true;

    protected transient final Log log = LogFactory.getLog(getClass());

    /**
     * Returns a fresh formatter for archive file names.
     * <p>
     * {@code SimpleDateFormat} is not thread-safe, so a new instance is
     * created per use instead of sharing a static one — backup, restore and
     * archive listing may run concurrently.
     */
    private static SimpleDateFormat archiveFormatter() {
        return new SimpleDateFormat(ARCHIVE_NAME_PATTERN);
    }

    /** DbUnit escape pattern for quoting identifiers; default suits MySQL-style quoting. */
    protected String escapePattern = "\"?\"";

    /**
     * Sets the DbUnit identifier escape pattern.
     *
     * @param escapePattern pattern passed to {@code DatabaseConfig.PROPERTY_ESCAPE_PATTERN}
     */
    public void setEscapePattern(String escapePattern) {
        this.escapePattern = escapePattern;
    }

    /** Database vendor key; {@code "sqlserver"} switches to identity-insert operations. */
    protected String databaseType = "mysql";

    /**
     * Sets the database type.
     *
     * @param databaseType vendor key, e.g. {@code "mysql"} or {@code "sqlserver"}
     */
    public void setDatabaseType(String databaseType) {
        this.databaseType = databaseType;
    }

    /** Database schema to export/import; {@code null} means the connection default. */
    protected String schema = null;

    /**
     * Sets the schema name. A blank (all-whitespace) value is normalized
     * to {@code null} so downstream metadata calls get a proper wildcard.
     *
     * @param schema schema name or {@code null}/blank for the default schema
     */
    public void setSchema(String schema) {
        if (schema != null && schema.trim().length() == 0) {
            this.schema = null;
        } else {
            this.schema = schema;
        }
    }

    /** Directory where backup archives are written and read. */
    protected String backupDirPath = ".";

    /**
     * Sets the backup directory path.
     *
     * @param backupDirPath directory for {@code *.zip} archives
     */
    public void setBackupDirPath(String backupDirPath) {
        this.backupDirPath = backupDirPath;
    }

    /** DbUnit data type factory class, vendor-specific. */
    protected String dataTypeFactoryClass = "org.dbunit.ext.mysql.MySqlDataTypeFactory";

    /**
     * Sets the fully qualified DbUnit {@code IDataTypeFactory} class name.
     *
     * @param dataTypeFactoryClass class instantiated reflectively in {@link #getConnection}
     */
    public void setDataTypeFactoryClass(String dataTypeFactoryClass) {
        this.dataTypeFactoryClass = dataTypeFactoryClass;
    }

    /** Used to obtain the Hibernate {@code Configuration} for schema generation. */
    protected LocalSessionFactoryBean localSessionFactoryBean = null;

    /**
     * Sets the session factory bean.
     *
     * @param localSessionFactoryBean Spring-managed session factory bean
     */
    public void setLocalSessionFactoryBean(LocalSessionFactoryBean localSessionFactoryBean) {
        this.localSessionFactoryBean = localSessionFactoryBean;
    }

    /** Used to delete menu items before a forced restore. */
    protected MenuDAO menuDAO = null;

    public void setMenuDAO(MenuDAO menuDAO) {
        this.menuDAO = menuDAO;
    }

    /**
     * Backs up the whole database into a new timestamped zip archive
     * in {@link #backupDirPath}.
     * <p>
     * Runs with auto-commit forced on (DbUnit manages its own statements)
     * and restores the previous auto-commit mode afterwards.
     *
     * @throws BackupFileAccessException if the backup directory is not writable
     */
    public void backup() throws BackupFileAccessException {
        HibernateTemplate hibernateTemplate = new HibernateTemplate(getSessionFactory());
        hibernateTemplate.setFlushMode(HibernateTemplate.FLUSH_NEVER);
        hibernateTemplate.execute(
            new HibernateCallback() {
                public Object doInHibernate(Session session) throws HibernateException, SQLException {
                    Connection con = session.connection();

                    boolean oldAutoCommit = con.getAutoCommit();
                    if (!oldAutoCommit) {
                        con.setAutoCommit(true);
                    }

                    OutputStream out = null;
                    try {
                        IDatabaseConnection connection = getConnection(con);

                        // Cache result sets in memory so the export survives
                        // statement re-use while writing the XML stream.
                        IResultSetTableFactory factory = new CachedResultSetTableFactory();
                        DatabaseConfig config = connection.getConfig();
                        config.setProperty(DatabaseConfig.PROPERTY_RESULTSET_TABLE_FACTORY, factory);
                        IDataSet dataset = connection.createDataSet();

                        // Order tables so that FK dependencies are satisfied on re-import.
                        ITableFilter filter = new DatabaseSequenceFilter(connection);
                        dataset = new FilteredDataSet(filter, dataset);

                        out = openOutput();
                        XmlDataSet.write(dataset, out);

                        if (log.isInfoEnabled()) {
                            log.info("Backuping database into file dated " + new Date() + " finished.");
                        }

                    } catch (Exception ex) {
                        String message = "Cannot backup database into dir " + backupDirPath;
                        if (log.isWarnEnabled()) {
                            log.warn(message, ex);
                        }
                        throw new HibernateException(message, ex);
                    } finally {
                        // out is null when openOutput() threw; guard against NPE.
                        if (out != null) {
                            try {
                                out.close();
                            } catch (Exception ignored) {
                                // best-effort close; the export already succeeded or failed above
                            }
                        }
                        if (!oldAutoCommit) {
                            con.setAutoCommit(false);
                        }
                    }

                    return null;
                }
            }
        );
    }

    /**
     * Executes the given DDL statements one by one on the connection.
     * Individual statement failures are logged and skipped (some dialects
     * emit drop statements for objects that do not exist yet).
     *
     * @param con JDBC connection to execute against
     * @param sql statements to run; may be {@code null} or empty
     * @throws SQLException if a statement cannot even be created
     */
    protected void executeSQLScript(Connection con, String[] sql) throws SQLException {
        if (sql != null && sql.length > 0) {
            Statement stmt = con.createStatement();
            try {
                for (int i = 0; i < sql.length; i++) {
                    if (log.isDebugEnabled()) {
                        log.debug("Executing schema statement: " + sql[i]);
                    }
                    try {
                        stmt.executeUpdate(sql[i]);
                    } catch (SQLException ex) {
                        // Deliberately non-fatal: continue with the remaining statements.
                        if (log.isWarnEnabled()) {
                            log.warn("Unsuccessful schema statement: " + sql[i], ex);
                        }
                    }
                }
            } finally {
                if (stmt != null) {
                    try {
                        stmt.close();
                    } catch (SQLException ex) {
                        if (log.isWarnEnabled()) {
                            log.warn("Could not close JDBC Statement", ex);
                        }
                    } catch (RuntimeException ex) {
                        if (log.isErrorEnabled()) {
                            log.error("Unexpected exception on closing JDBC Statement", ex);
                        }
                    }
                }
            }
        }
    }

    /**
     * Restores the database from an archive (or the bundled initial data
     * when {@code date} is {@code null}).
     * <p>
     * If no tables exist yet the schema is created first. If tables exist,
     * data is only loaded when {@code force} is {@code TRUE}; in that case
     * all menu items are deleted beforehand so the CLEAN_INSERT does not
     * violate menu-tree constraints.
     *
     * @param date  archive timestamp, or {@code null} for the initial data set
     * @param force overwrite existing data when tables already exist
     * @throws BackupFileAccessException if the archive cannot be read
     */
    public void restore(final Date date, final Boolean force) throws BackupFileAccessException {
        HibernateTemplate hibernateTemplate = new HibernateTemplate(getSessionFactory());
        hibernateTemplate.setFlushMode(HibernateTemplate.FLUSH_NEVER);
        hibernateTemplate.execute(
            new HibernateCallback() {
                public Object doInHibernate(Session session) throws HibernateException, SQLException {
                    Connection con = session.connection();

                    boolean oldAutoCommit = con.getAutoCommit();
                    if (!oldAutoCommit) {
                        con.setAutoCommit(true);
                    }
                    InputStream is = null;
                    try {
                        is = openInput(date);

                        Configuration configuration = localSessionFactoryBean.getConfiguration();
                        Dialect dialect = Dialect.getDialect(configuration.getProperties());

                        boolean tablesExist = isTables(con);

                        if (!tablesExist) {
                            // Fresh database: create the schema before loading data.
                            String[] sql = configuration.generateSchemaCreationScript(dialect);
                            executeSQLScript(con, sql);

                            if (log.isInfoEnabled()) {
                                log.info("Database tables created.");
                            }
                        } else if (Boolean.TRUE.equals(force)) {
                            // Existing data will be replaced: clear menu items first
                            // (flushing eagerly so the deletes hit the database now).
                            session.setFlushMode(FlushMode.AUTO);

                            Collection menuItems = menuDAO.listMenuItems(null);
                            for (Iterator iterator = menuItems.iterator(); iterator.hasNext(); ) {
                                MenuItem menuItem = (MenuItem) iterator.next();
                                menuDAO.deleteMenuItem(menuItem);
                            }
                            session.flush();
                            session.setFlushMode(FlushMode.NEVER);
                            session.clear();
                        }

                        if (!tablesExist || Boolean.TRUE.equals(force)) {
                            IDatabaseConnection connection = getConnection(con);

                            if (is == null) {
                                throw new BackupFileAccessException("Cannot read file");
                            }

                            IDataSetProducer producer = new XmlProducer(new InputSource(is));
                            IDataSet dataset = new CachedDataSet(producer);

                            DatabaseOperation operation = DatabaseOperation.CLEAN_INSERT;
                            if ("sqlserver".equalsIgnoreCase(databaseType)) {
                                // MS SQL Server needs IDENTITY_INSERT handling.
                                operation = InsertIdentityOperation.CLEAN_INSERT;
                            }

                            operation.execute(connection, dataset);

                            if (log.isInfoEnabled()) {
                                if (date == null) {
                                    log.info("Initial data from file " + INITIAL_DATA_FILE + " loaded.");
                                } else {
                                    log.info("Initial data from file dated " + date + " loaded.");
                                }
                            }
                        }

                    } catch (Exception ex) {
                        String message = "Cannot restore database with date " + date;
                        if (log.isWarnEnabled()) {
                            log.warn(message, ex);
                        }
                        throw new HibernateException(message, ex);
                    } finally {
                        if (is != null) {
                            try {
                                is.close();
                            } catch (Exception ignored) {
                                // best-effort close of the archive stream
                            }
                        }
                        if (!oldAutoCommit) {
                            con.setAutoCommit(false);
                        }
                    }

                    return null;
                }
            }
        );
    }

    /**
     * Returns whether the backup directory can be listed for archives.
     *
     * @return {@code true} if {@link #backupDirPath} is readable and listable
     */
    public boolean canListArchives() {
        File dir = new File(backupDirPath);
        if (!dir.canRead()) {
            return false;
        }
        String[] files = dir.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith(".zip");
            }
        });
        return files != null;
    }

    /**
     * Lists archive dates found in the backup directory, sorted ascending,
     * windowed by the query's offset/limit.
     *
     * @param queryInfo paging information (offset and limit must be set)
     * @return partial collection of {@link Date} objects plus the total count
     * @throws BackupFileAccessException if the directory cannot be read
     */
    public PartialCollection listArchives(QueryInfo queryInfo) throws BackupFileAccessException {
        File dir = new File(backupDirPath);
        createDirIfNotExists();
        String accessErrorMessage = "Cannot read " + backupDirPath + " directory";
        if (!dir.canRead()) {
            throw new BackupFileAccessException(accessErrorMessage);
        }
        String[] files = dir.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith(".zip");
            }
        });

        if (files == null) {
            throw new BackupFileAccessException(accessErrorMessage);
        }

        // Fresh formatter per call: SimpleDateFormat is not thread-safe.
        SimpleDateFormat formatter = archiveFormatter();
        List list = new ArrayList(files.length + 1);
        for (int i = 0; i < files.length; i++) {
            Date date = null;
            try {
                date = formatter.parse(files[i]);
            } catch (Exception ignored) {
                // Not an archive produced by us; skip silently.
            }
            if (date != null) {
                list.add(date);
            }
        }
        Collections.sort(list);

        int fromIndex = queryInfo.getOffset().intValue();
        int requestedEnd = fromIndex + queryInfo.getLimit().intValue();
        int toIndex = requestedEnd < list.size() ? requestedEnd : list.size();

        return new PartialCollection(list.subList(fromIndex, toIndex), list.size());
    }

    /**
     * Wraps a JDBC connection into a configured DbUnit connection.
     *
     * @param con raw JDBC connection
     * @return configured DbUnit connection
     * @throws Exception if the data type factory cannot be instantiated
     */
    protected IDatabaseConnection getConnection(Connection con) throws Exception {
        IDatabaseConnection connection = new DatabaseConnection(con, schema);
        DatabaseConfig config = connection.getConfig();
        config.setFeature(DatabaseConfig.FEATURE_BATCHED_STATEMENTS, false);
        config.setFeature(DatabaseConfig.FEATURE_QUALIFIED_TABLE_NAMES, false);
        config.setFeature(DatabaseConfig.FEATURE_DATATYPE_WARNING, true);
        config.setProperty(DatabaseConfig.PROPERTY_ESCAPE_PATTERN, escapePattern);
        config.setProperty(DatabaseConfig.PROPERTY_RESULTSET_TABLE_FACTORY,
                new ForwardOnlyResultSetTableFactory());
        IDataTypeFactory dataTypeFactory =
                (IDataTypeFactory) Class.forName(dataTypeFactoryClass).newInstance();
        config.setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY, dataTypeFactory);

        return connection;
    }

    /**
     * Opens the data-set stream to restore from.
     * <p>
     * For a non-null date this is the {@code db-export.xml} entry inside the
     * matching archive; for {@code null} it is the bundled initial data
     * resource. Returns {@code null} (after logging) when the entry or
     * resource cannot be found.
     *
     * @param archiveDate archive timestamp, or {@code null} for initial data
     * @return stream positioned at the data set, or {@code null} if not found
     * @throws Exception if the archive file is unreadable
     */
    protected InputStream openInput(Date archiveDate) throws Exception {
        if (archiveDate != null) {
            File file = new File(backupDirPath, archiveFormatter().format(archiveDate));
            if (!file.canRead()) {
                throw new BackupFileAccessException("Cannot read from " + file.getPath() + " file");
            }
            ZipInputStream zis =
                    new ZipInputStream(new BufferedInputStream(new FileInputStream(file)));
            boolean found = false;
            try {
                ZipEntry entry;
                while ((entry = zis.getNextEntry()) != null) {
                    if (entry.getName().equalsIgnoreCase(ARCHIVE_ENTRY_NAME)) {
                        found = true;
                        return zis;
                    }
                }
            } finally {
                // Close the archive unless we are handing the stream to the
                // caller; the original leaked it on the not-found/error paths.
                if (!found) {
                    try {
                        zis.close();
                    } catch (Exception ignored) {
                        // best-effort close
                    }
                }
            }
            if (log.isErrorEnabled()) {
                log.error("Cannot find " + ARCHIVE_ENTRY_NAME + " inside archive");
            }
            return null;

        } else {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            if (classLoader == null) {
                classLoader = getClass().getClassLoader();
            }

            InputStream is = classLoader.getResourceAsStream(INITIAL_DATA_FILE);
            if (is == null) {
                if (log.isErrorEnabled()) {
                    log.error("Cannot load " + INITIAL_DATA_FILE);
                }
            }
            return is;
        }
    }

    /**
     * Opens a new timestamped archive in the backup directory and positions
     * it at the single {@code db-export.xml} entry, ready for writing.
     *
     * @return zip output stream with the entry already opened
     * @throws Exception if the backup directory is not writable
     */
    protected OutputStream openOutput() throws Exception {
        File dir = new File(backupDirPath);
        createDirIfNotExists();
        if (!dir.canWrite()) {
            throw new BackupFileAccessException("Cannot write to " + dir.getPath() + " dir");
        }

        File file = new File(backupDirPath, archiveFormatter().format(new Date()));
        ZipOutputStream out =
                new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
        out.putNextEntry(new ZipEntry(ARCHIVE_ENTRY_NAME));
        return out;
    }

    /**
     * Checks whether the application schema already exists by probing the
     * test table via JDBC metadata, honoring the database's identifier-case
     * storage rules (mixed/lower/upper, quoted or not).
     *
     * @param connection JDBC connection to inspect
     * @return {@code true} if the test table is present
     * @throws SQLException if metadata cannot be retrieved
     */
    protected boolean isTables(Connection connection) throws SQLException {
        boolean result = false;
        try {
            DatabaseMetaData metadata = connection.getMetaData();

            ResultSet rs = null;
            try {
                if (TEST_TABLE_NAME_QUOTED) {
                    if (metadata.supportsMixedCaseQuotedIdentifiers()
                            || metadata.storesMixedCaseQuotedIdentifiers()) {
                        rs = metadata.getTables(
                                null, schema, TEST_TABLE_NAME, new String[]{"TABLE"});
                    } else if (metadata.storesLowerCaseQuotedIdentifiers()) {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toLowerCase(schema),
                                StringHelper.toLowerCase(TEST_TABLE_NAME),
                                new String[]{"TABLE"});
                    } else {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toUpperCase(schema),
                                StringHelper.toUpperCase(TEST_TABLE_NAME),
                                new String[]{"TABLE"});
                    }
                } else {
                    if (metadata.supportsMixedCaseIdentifiers()
                            || metadata.storesMixedCaseIdentifiers()) {
                        rs = metadata.getTables(
                                null, schema, TEST_TABLE_NAME, new String[]{"TABLE"});
                    } else if (metadata.storesLowerCaseIdentifiers()) {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toLowerCase(schema),
                                StringHelper.toLowerCase(TEST_TABLE_NAME),
                                new String[]{"TABLE"});
                    } else {
                        rs = metadata.getTables(
                                null,
                                StringHelper.toUpperCase(schema),
                                StringHelper.toUpperCase(TEST_TABLE_NAME),
                                new String[]{"TABLE"});
                    }
                }
                result = rs.next();

            } finally {
                if (rs != null) {
                    rs.close();
                }
            }

        } catch (SQLException ex) {
            if (log.isErrorEnabled()) {
                log.error("Could not get metadata", ex);
            }
            throw ex;
        }
        return result;
    }

    /**
     * Creates the backup directory if it does not exist yet.
     *
     * @return {@code true} if the directory exists after the call
     */
    public boolean createDirIfNotExists() {
        File dir = new File(backupDirPath);
        boolean created = false;

        if (!dir.exists()) {
            if (log.isDebugEnabled()) {
                log.debug(backupDirPath + " does not exist, trying to create it");
            }
            created = dir.mkdir();
        } else {
            created = true;
        }

        return created;
    }

}