package org.archive.crawler.datamodel;

import java.util.Map;
import java.util.Hashtable;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.commons.httpclient.URIException;
import org.archive.crawler.framework.CrawlController;
import org.archive.crawler.settings.SettingsHandler;

/**
 * Cache of {@link CrawlServer} and {@link CrawlHost} instances, keyed by
 * server key and hostname respectively.  Lookups lazily create and register
 * entries that are not yet present.
 *
 * <p>The string-keyed {@code get*For} lookups are {@code synchronized} on
 * this instance; the {@code create*For} helpers re-check the map before
 * inserting so a concurrent creation of the same entry is not duplicated.
 */
public class ServerCache {
    private static Logger logger =
        Logger.getLogger(ServerCache.class.getName());

    protected SettingsHandler settingsHandler = null;

    // Server key (as produced by CrawlServer.getServerKey) -> CrawlServer.
    protected Map<String,CrawlServer> servers = null;

    // Hostname -> CrawlHost.
    protected Map<String,CrawlHost> hosts = null;

    /**
     * For subclasses only.  Leaves {@link #settingsHandler} and both maps
     * unset; subclasses are responsible for initializing them.
     */
    protected ServerCache() {
        super();
    }

    /**
     * Create a cache backed by in-memory {@link Hashtable}s.
     *
     * @param sh settings handler handed to newly created {@link CrawlServer}s
     * @throws Exception declared for subclass/caller compatibility
     */
    public ServerCache(final SettingsHandler sh)
    throws Exception {
        this.settingsHandler = sh;
        this.servers = new Hashtable<String,CrawlServer>();
        this.hosts = new Hashtable<String,CrawlHost>();
    }

    /**
     * Create a cache backed by the controller's "big map" implementation
     * (may be disk-backed; depends on {@code CrawlController.getBigMap}).
     *
     * @param c controller supplying the settings handler and backing maps
     * @throws Exception if the controller fails to supply a backing map
     */
    public ServerCache(final CrawlController c)
    throws Exception {
        this.settingsHandler = c.getSettingsHandler();
        this.servers = c.getBigMap("servers", String.class, CrawlServer.class);
        this.hosts = c.getBigMap("hosts", String.class, CrawlHost.class);
    }

    /**
     * Get the {@link CrawlServer} for the given server key, creating and
     * caching one if none exists yet.
     *
     * @param serverKey server key to look up
     * @return the cached or newly created server
     */
    public synchronized CrawlServer getServerFor(String serverKey) {
        CrawlServer cserver = this.servers.get(serverKey);
        return (cserver != null)? cserver: createServerFor(serverKey);
    }

    /**
     * Create, register and return a {@link CrawlServer} for {@code s},
     * unless one was registered in the meantime (re-checks the map first).
     *
     * @param s server key
     * @return the cached or newly created server
     */
    protected CrawlServer createServerFor(String s) {
        CrawlServer cserver = this.servers.get(s);
        if (cserver != null) {
            return cserver;
        }
        // Deliberate copy of the key string; presumably to detach it from a
        // possibly larger backing buffer before long-term retention in the
        // map (pre-Java-7u6 substring sharing) -- NOTE(review): confirm.
        String skey = new String(s);
        cserver = new CrawlServer(skey);
        cserver.setSettingsHandler(settingsHandler);
        servers.put(skey, cserver);
        if (logger.isLoggable(Level.FINER)) {
            logger.finer("Created server " + s);
        }
        return cserver;
    }

    /**
     * Get the {@link CrawlServer} for a candidate URI, or {@code null} if
     * no server key can be derived (or derivation throws).
     *
     * @param cauri URI to resolve to a server
     * @return the server, or {@code null} on failure
     */
    public CrawlServer getServerFor(CandidateURI cauri) {
        CrawlServer cs = null;
        try {
            String key = CrawlServer.getServerKey(cauri);
            if (key != null) {
                cs = getServerFor(key);
            }
        } catch (URIException e) {
            // Log with the full stack trace instead of printStackTrace().
            logger.log(Level.SEVERE, e.getMessage() + ": " + cauri, e);
        } catch (NullPointerException npe) {
            // Preserved legacy catch: key derivation has NPE'd in practice.
            logger.log(Level.SEVERE, npe.getMessage() + ": " + cauri, npe);
        }
        return cs;
    }

    /**
     * Get the {@link CrawlHost} for a hostname, creating and caching one if
     * none exists yet.
     *
     * @param hostname hostname to look up
     * @return the cached or newly created host, or {@code null} for a null
     *     or empty hostname
     */
    public synchronized CrawlHost getHostFor(String hostname) {
        if (hostname == null || hostname.length() == 0) {
            return null;
        }
        CrawlHost host = this.hosts.get(hostname);
        return (host != null)? host: createHostFor(hostname);
    }

    /**
     * Create, register and return a {@link CrawlHost} for {@code hostname},
     * unless one was registered in the meantime (re-checks the map first).
     *
     * @param hostname hostname; null/empty yields {@code null}
     * @return the cached or newly created host, or {@code null}
     */
    protected CrawlHost createHostFor(String hostname) {
        if (hostname == null || hostname.length() == 0) {
            return null;
        }
        CrawlHost host = this.hosts.get(hostname);
        if (host != null) {
            return host;
        }
        // Same deliberate key copy as in createServerFor -- see note there.
        String hkey = new String(hostname);
        host = new CrawlHost(hkey);
        this.hosts.put(hkey, host);
        if (logger.isLoggable(Level.FINE)) {
            logger.fine("Created host " + hostname);
        }
        return host;
    }

    /**
     * Get the {@link CrawlHost} for a candidate URI's referenced host, or
     * {@code null} if the host cannot be extracted.
     *
     * @param cauri URI to resolve to a host
     * @return the host, or {@code null} on failure
     */
    public CrawlHost getHostFor(CandidateURI cauri) {
        CrawlHost h = null;
        try {
            h = getHostFor(cauri.getUURI().getReferencedHost());
        } catch (URIException e) {
            // Log with the full stack trace instead of printStackTrace().
            logger.log(Level.SEVERE, "Failed to get host for " + cauri, e);
        }
        return h;
    }

    /**
     * @param serverKey server key to probe
     * @return true if a server is cached under {@code serverKey}
     */
    public boolean containsServer(String serverKey) {
        return servers.get(serverKey) != null;
    }

    /**
     * @param hostKey hostname to probe
     * @return true if a host is cached under {@code hostKey}
     */
    public boolean containsHost(String hostKey) {
        return hosts.get(hostKey) != null;
    }

    /**
     * Release both maps.  The cache is unusable afterwards: subsequent
     * lookups will throw {@link NullPointerException}.
     */
    public void cleanup() {
        if (this.hosts != null) {
            this.hosts.clear();
            this.hosts = null;
        }
        if (this.servers != null) {
            this.servers.clear();
            this.servers = null;
        }
    }
}