X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;ds=sidebyside;f=apt_dht%2Fapt_dht.py;h=047d1c2b815dbf7c3d3643244c5f04fa9b8d0fad;hb=056a62b7b55e95a3a50d64d2e769d29b9eede4a9;hp=0682c8add3928ab324b60e11126b97518fb0f9cf;hpb=0447dd29ac037182294ca7fd80ea47bb028d38c2;p=quix0rs-apt-p2p.git
diff --git a/apt_dht/apt_dht.py b/apt_dht/apt_dht.py
index 0682c8a..047d1c2 100644
--- a/apt_dht/apt_dht.py
+++ b/apt_dht/apt_dht.py
@@ -4,28 +4,38 @@ from urlparse import urlunparse
 import os, re
 
 from twisted.internet import defer
-from twisted.web2 import server, http, http_headers
-from twisted.python import log
+from twisted.web2 import server, http, http_headers, static
+from twisted.python import log, failure
+from twisted.python.filepath import FilePath
 
 from apt_dht_conf import config
 from PeerManager import PeerManager
 from HTTPServer import TopLevel
 from MirrorManager import MirrorManager
+from CacheManager import CacheManager
 from Hash import HashObject
 from db import DB
 from util import findMyIPAddr
 
+download_dir = 'cache'
+
 class AptDHT:
     def __init__(self, dht):
         log.msg('Initializing the main apt_dht application')
-        self.db = DB(config.get('DEFAULT', 'cache_dir') + '/.apt-dht.db')
+        self.cache_dir = FilePath(config.get('DEFAULT', 'cache_dir'))
+        if not self.cache_dir.child(download_dir).exists():
+            self.cache_dir.child(download_dir).makedirs()
+        self.db = DB(self.cache_dir.child('apt-dht.db'))
         self.dht = dht
         self.dht.loadConfig(config, config.get('DEFAULT', 'DHT'))
         self.dht.join().addCallbacks(self.joinComplete, self.joinError)
-        self.http_server = TopLevel(config.get('DEFAULT', 'cache_dir'), self)
+        self.http_server = TopLevel(self.cache_dir.child(download_dir), self)
+        self.setDirectories = self.http_server.setDirectories
         self.http_site = server.Site(self.http_server)
         self.peers = PeerManager()
-        self.mirrors = MirrorManager(config.get('DEFAULT', 'cache_dir'), self)
+        self.mirrors = MirrorManager(self.cache_dir)
+        other_dirs = [FilePath(f) for f in config.getstringlist('DEFAULT', 'OTHER_DIRS')]
+        self.cache = CacheManager(self.cache_dir.child(download_dir), self.db, other_dirs, self)
         self.my_addr = None
 
     def getSite(self):
@@ -33,65 +43,108 @@ class AptDHT:
     def joinComplete(self, result):
         self.my_addr = findMyIPAddr(result, config.getint(config.get('DEFAULT', 'DHT'), 'PORT'))
+        if not self.my_addr:
+            raise RuntimeError, "IP address for this machine could not be found"
+        self.cache.scanDirectories()
 
     def joinError(self, failure):
         log.msg("joining DHT failed miserably")
         log.err(failure)
+        raise RuntimeError, "IP address for this machine could not be found"
 
-    def check_freshness(self, path, modtime, resp):
+    def check_freshness(self, req, path, modtime, resp):
         log.msg('Checking if %s is still fresh' % path)
         d = self.peers.get([path], "HEAD", modtime)
-        d.addCallback(self.check_freshness_done, path, resp)
+        d.addCallback(self.check_freshness_done, req, path, resp)
         return d
 
-    def check_freshness_done(self, resp, path, orig_resp):
+    def check_freshness_done(self, resp, req, path, orig_resp):
         if resp.code == 304:
             log.msg('Still fresh, returning: %s' % path)
             return orig_resp
         else:
             log.msg('Stale, need to redownload: %s' % path)
-            return self.get_resp(path)
+            return self.get_resp(req, path)
 
-    def get_resp(self, path):
+    def get_resp(self, req, path):
         d = defer.Deferred()
 
         log.msg('Trying to find hash for %s' % path)
         findDefer = self.mirrors.findHash(path)
 
         findDefer.addCallbacks(self.findHash_done, self.findHash_error,
-                               callbackArgs=(path, d), errbackArgs=(path, d))
+                               callbackArgs=(req, path, d), errbackArgs=(req, path, d))
         findDefer.addErrback(log.err)
         return d
 
-    def findHash_error(self, failure, path, d):
+    def findHash_error(self, failure, req, path, d):
         log.err(failure)
-        self.findHash_done(HashObject(), path, d)
+        self.findHash_done(HashObject(), req, path, d)
 
-    def findHash_done(self, hash, path, d):
+    def findHash_done(self, hash, req, path, d):
         if hash.expected() is None:
             log.msg('Hash for %s was not found' % path)
             self.lookupHash_done([], hash, path, d)
         else:
             log.msg('Found hash %s for %s' % (hash.hexexpected(), path))
-            # Lookup hash from DHT
-            key = hash.normexpected(bits = config.getint(config.get('DEFAULT', 'DHT'), 'HASH_LENGTH'))
-            lookupDefer = self.dht.getValue(key)
-            lookupDefer.addCallback(self.lookupHash_done, hash, path, d)
+            # Lookup hash in cache
+            locations = self.db.lookupHash(hash.expected())
+            self.getCachedFile(hash, req, path, d, locations)
+
+    def getCachedFile(self, hash, req, path, d, locations):
+        if not locations:
+            log.msg('Failed to return file from cache: %s' % path)
+            self.lookupHash(hash, path, d)
+            return
+
+        # Get the first possible location from the list
+        file = locations.pop(0)['path']
+        log.msg('Returning cached file: %s' % file.path)
+
+        # Get its response
+        resp = static.File(file.path).renderHTTP(req)
+        if isinstance(resp, defer.Deferred):
+            resp.addBoth(self._getCachedFile, hash, req, path, d, locations)
+        else:
+            self._getCachedFile(resp, hash, req, path, d, locations)
+
+    def _getCachedFile(self, resp, hash, req, path, d, locations):
+        if isinstance(resp, failure.Failure):
+            log.msg('Got error trying to get cached file')
+            log.err()
+            # Try the next possible location
+            self.getCachedFile(hash, req, path, d, locations)
+            return
+
+        log.msg('Cached response: %r' % resp)
+
+        if resp.code >= 200 and resp.code < 400:
+            d.callback(resp)
+        else:
+            # Try the next possible location
+            self.getCachedFile(hash, req, path, d, locations)
+
+    def lookupHash(self, hash, path, d):
+        log.msg('Looking up hash in DHT for file: %s' % path)
+        key = hash.normexpected(bits = config.getint(config.get('DEFAULT', 'DHT'), 'HASH_LENGTH'))
+        lookupDefer = self.dht.getValue(key)
+        lookupDefer.addCallback(self.lookupHash_done, hash, path, d)
+
     def lookupHash_done(self, locations, hash, path, d):
         if not locations:
             log.msg('Peers for %s were not found' % path)
             getDefer = self.peers.get([path])
-            getDefer.addCallback(self.mirrors.save_file, hash, path)
-            getDefer.addErrback(self.mirrors.save_error, path)
+            getDefer.addCallback(self.cache.save_file, hash, path)
+            getDefer.addErrback(self.cache.save_error, path)
             getDefer.addCallbacks(d.callback, d.errback)
         else:
             log.msg('Found peers for %s: %r' % (path, locations))
             # Download from the found peers
             getDefer = self.peers.get(locations)
             getDefer.addCallback(self.check_response, hash, path)
-            getDefer.addCallback(self.mirrors.save_file, hash, path)
-            getDefer.addErrback(self.mirrors.save_error, path)
+            getDefer.addCallback(self.cache.save_file, hash, path)
+            getDefer.addErrback(self.cache.save_error, path)
             getDefer.addCallbacks(d.callback, d.errback)
 
     def check_response(self, response, hash, path):
@@ -101,18 +154,22 @@ class AptDHT:
             return getDefer
         return response
 
-    def cached_file(self, hash, url, file_path):
-        assert file_path.startswith(config.get('DEFAULT', 'cache_dir'))
-        urlpath, newdir = self.db.storeFile(file_path, hash.digest(), config.get('DEFAULT', 'cache_dir'))
-        log.msg('now avaliable at %s: %s' % (urlpath, url))
-
-        if self.my_addr:
+    def new_cached_file(self, file_path, hash, urlpath, url = None):
+        """Add a newly cached file to the DHT.
+
+        If the file was downloaded, set url to the path it was downloaded for.
+        """
+        if url:
+            self.mirrors.updatedFile(url, file_path)
+
+        if self.my_addr and hash:
             site = self.my_addr + ':' + str(config.getint('DEFAULT', 'PORT'))
             full_path = urlunparse(('http', site, urlpath, None, None, None))
             key = hash.norm(bits = config.getint(config.get('DEFAULT', 'DHT'), 'HASH_LENGTH'))
             storeDefer = self.dht.storeValue(key, full_path)
             storeDefer.addCallback(self.store_done, full_path)
-            storeDefer.addErrback(log.err)
+            return storeDefer
+        return None
 
     def store_done(self, result, path):
         log.msg('Added %s to the DHT: %r' % (path, result))
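Note (not part of the patch): the new getCachedFile()/_getCachedFile() pair above is a simple
fallback chain. Each known cache location is tried in order, the first response with a 2xx or 3xx
code is returned, and when the list is exhausted the request falls back to a DHT lookup. Below is
a minimal sketch of that pattern using plain Twisted Deferreds; serve_from_cache, render_location
and lookup_in_dht are hypothetical stand-ins for the patch's static.File(...).renderHTTP(req) and
AptDHT.lookupHash(), not code from the repository.

    from twisted.internet import defer
    from twisted.python import failure

    def serve_from_cache(locations, render_location, lookup_in_dht):
        """Fire with the first usable cached response, else with lookup_in_dht()'s result."""
        d = defer.Deferred()
        _try_next(None, list(locations), render_location, lookup_in_dht, d)
        return d

    def _try_next(_result, locations, render_location, lookup_in_dht, d):
        if not locations:
            # Every cached copy failed: fall back to the DHT lookup.
            lookup_in_dht().chainDeferred(d)
            return
        location = locations.pop(0)
        resp_d = defer.maybeDeferred(render_location, location)

        def _check(resp):
            # Accept 2xx/3xx responses; on an error or a bad code, try the next location.
            if isinstance(resp, failure.Failure) or not (200 <= resp.code < 400):
                _try_next(None, locations, render_location, lookup_in_dht, d)
            else:
                d.callback(resp)

        resp_d.addBoth(_check)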