-from binascii import a2b_hex
+from binascii import b2a_hex
+from urlparse import urlunparse
+import os, re
from twisted.internet import defer
-from twisted.web2 import server, http, http_headers
-from twisted.python import log
+from twisted.web2 import server, http, http_headers, static
+from twisted.python import log, failure
+from twisted.python.filepath import FilePath
from apt_dht_conf import config
from PeerManager import PeerManager
from HTTPServer import TopLevel
from MirrorManager import MirrorManager
+from CacheManager import CacheManager
+from Hash import HashObject
+from db import DB
+from util import findMyIPAddr
+
+download_dir = 'cache'
class AptDHT:
def __init__(self, dht):
log.msg('Initializing the main apt_dht application')
+ self.cache_dir = FilePath(config.get('DEFAULT', 'cache_dir'))
+ if not self.cache_dir.child(download_dir).exists():
+ self.cache_dir.child(download_dir).makedirs()
+ self.db = DB(self.cache_dir.child('apt-dht.db'))
self.dht = dht
- self.http_server = TopLevel(config.get('DEFAULT', 'cache_dir'), self)
- self.http_site = server.Site(self.http_server)
+ self.dht.loadConfig(config, config.get('DEFAULT', 'DHT'))
+ self.dht.join().addCallbacks(self.joinComplete, self.joinError)
+ self.http_server = TopLevel(self.cache_dir.child(download_dir), self.db, self)
+ self.getHTTPFactory = self.http_server.getHTTPFactory
self.peers = PeerManager()
- self.mirrors = MirrorManager(config.get('DEFAULT', 'cache_dir'))
+ self.mirrors = MirrorManager(self.cache_dir)
+ other_dirs = [FilePath(f) for f in config.getstringlist('DEFAULT', 'OTHER_DIRS')]
+ self.cache = CacheManager(self.cache_dir.child(download_dir), self.db, other_dirs, self)
+ self.my_addr = None
- def getSite(self):
- return self.http_site
+ def joinComplete(self, result):
+ self.my_addr = findMyIPAddr(result,
+ config.getint(config.get('DEFAULT', 'DHT'), 'PORT'),
+ config.getboolean('DEFAULT', 'LOCAL_OK'))
+ if not self.my_addr:
+ raise RuntimeError, "IP address for this machine could not be found"
+ self.cache.scanDirectories()
+
+ def joinError(self, failure):
+ log.msg("joining DHT failed miserably")
+ log.err(failure)
+        raise RuntimeError, "joining the DHT failed"
- def check_freshness(self, path, modtime, resp):
+ def check_freshness(self, req, path, modtime, resp):
log.msg('Checking if %s is still fresh' % path)
d = self.peers.get([path], "HEAD", modtime)
- d.addCallback(self.check_freshness_done, path, resp)
+ d.addCallback(self.check_freshness_done, req, path, resp)
return d
- def check_freshness_done(self, resp, path, orig_resp):
+ def check_freshness_done(self, resp, req, path, orig_resp):
if resp.code == 304:
log.msg('Still fresh, returning: %s' % path)
return orig_resp
else:
log.msg('Stale, need to redownload: %s' % path)
- return self.get_resp(path)
+ return self.get_resp(req, path)
- def get_resp(self, path):
+ def get_resp(self, req, path):
d = defer.Deferred()
log.msg('Trying to find hash for %s' % path)
findDefer = self.mirrors.findHash(path)
- findDefer.addCallback(self.findHash_done, path, d)
- findDefer.addErrback(self.findHash_error, path, d)
+ findDefer.addCallbacks(self.findHash_done, self.findHash_error,
+ callbackArgs=(req, path, d), errbackArgs=(req, path, d))
+ findDefer.addErrback(log.err)
return d
- def findHash_error(self, failure, path, d):
+ def findHash_error(self, failure, req, path, d):
log.err(failure)
- self.findHash_done((None, None), path, d)
+ self.findHash_done(HashObject(), req, path, d)
- def findHash_done(self, (hash, size), path, d):
- if hash is None:
+ def findHash_done(self, hash, req, path, d):
+ if hash.expected() is None:
log.msg('Hash for %s was not found' % path)
- self.download_file([path], hash, size, path, d)
+ self.lookupHash_done([], hash, path, d)
+ else:
+ log.msg('Found hash %s for %s' % (hash.hexexpected(), path))
+
+ # Lookup hash in cache
+ locations = self.db.lookupHash(hash.expected())
+ self.getCachedFile(hash, req, path, d, locations)
+
+ def getCachedFile(self, hash, req, path, d, locations):
+ if not locations:
+ log.msg('Failed to return file from cache: %s' % path)
+ self.lookupHash(hash, path, d)
+ return
+
+ # Get the first possible location from the list
+ file = locations.pop(0)['path']
+ log.msg('Returning cached file: %s' % file.path)
+
+        # Get its response
+ resp = static.File(file.path).renderHTTP(req)
+ if isinstance(resp, defer.Deferred):
+ resp.addBoth(self._getCachedFile, hash, req, path, d, locations)
else:
- log.msg('Found hash %s for %s' % (hash, path))
- # Lookup hash from DHT
- lookupDefer = self.dht.getValue(a2b_hex(hash))
- lookupDefer.addCallback(self.lookupHash_done, hash, size, path, d)
+ self._getCachedFile(resp, hash, req, path, d, locations)
+
+ def _getCachedFile(self, resp, hash, req, path, d, locations):
+ if isinstance(resp, failure.Failure):
+ log.msg('Got error trying to get cached file')
+ log.err()
+ # Try the next possible location
+ self.getCachedFile(hash, req, path, d, locations)
+ return
- def lookupHash_done(self, locations, hash, size, path, d):
+ log.msg('Cached response: %r' % resp)
+
+ if resp.code >= 200 and resp.code < 400:
+ d.callback(resp)
+ else:
+ # Try the next possible location
+ self.getCachedFile(hash, req, path, d, locations)
+
+ def lookupHash(self, hash, path, d):
+ log.msg('Looking up hash in DHT for file: %s' % path)
+ key = hash.normexpected(bits = config.getint(config.get('DEFAULT', 'DHT'), 'HASH_LENGTH'))
+ lookupDefer = self.dht.getValue(key)
+ lookupDefer.addCallback(self.lookupHash_done, hash, path, d)
+
+ def lookupHash_done(self, locations, hash, path, d):
if not locations:
log.msg('Peers for %s were not found' % path)
- self.download_file([path], hash, size, path, d)
+ getDefer = self.peers.get([path])
+ getDefer.addCallback(self.cache.save_file, hash, path)
+ getDefer.addErrback(self.cache.save_error, path)
+ getDefer.addCallbacks(d.callback, d.errback)
else:
- log.msg('Found peers for $s: %r' % (path, locations))
+ log.msg('Found peers for %s: %r' % (path, locations))
# Download from the found peers
- self.download_file(locations, hash, size, path, d)
+ getDefer = self.peers.get(locations)
+ getDefer.addCallback(self.check_response, hash, path)
+ getDefer.addCallback(self.cache.save_file, hash, path)
+ getDefer.addErrback(self.cache.save_error, path)
+ getDefer.addCallbacks(d.callback, d.errback)
- def download_file(self, locations, hash, size, path, d):
- getDefer = self.peers.get(locations)
- getDefer.addCallback(self.mirrors.save_file, hash, size, path)
- getDefer.addErrback(self.mirrors.save_error, path)
- getDefer.addCallbacks(d.callback, d.errback)
+ def check_response(self, response, hash, path):
+ if response.code < 200 or response.code >= 300:
+ log.msg('Download from peers failed, going to direct download: %s' % path)
+ getDefer = self.peers.get([path])
+ return getDefer
+ return response
+
+ def new_cached_file(self, file_path, hash, url = None):
+ """Add a newly cached file to the DHT.
+
+ If the file was downloaded, set url to the path it was downloaded for.
+ """
+ if url:
+ self.mirrors.updatedFile(url, file_path)
+
+ if self.my_addr and hash:
+ site = self.my_addr + ':' + str(config.getint('DEFAULT', 'PORT'))
+ key = hash.norm(bits = config.getint(config.get('DEFAULT', 'DHT'), 'HASH_LENGTH'))
+ storeDefer = self.dht.storeValue(key, site)
+ storeDefer.addCallback(self.store_done, hash)
+ return storeDefer
+ return None
+
+ def store_done(self, result, hash):
+ log.msg('Added %s to the DHT: %r' % (hash, result))
+
\ No newline at end of file