X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;f=apt_dht%2FCacheManager.py;h=9bdf75bb5edc5064b5b8a2fce3366ed72b1f3859;hb=f67d1f47283729722ac1b3f528a78063b4b36a14;hp=4fdcf40c0a1069ec548d48bffa8b5bac7fe89ca2;hpb=d2e2a977ff11ad786479c362a8ed716fbc10e04c;p=quix0rs-apt-p2p.git

diff --git a/apt_dht/CacheManager.py b/apt_dht/CacheManager.py
index 4fdcf40..9bdf75b 100644
--- a/apt_dht/CacheManager.py
+++ b/apt_dht/CacheManager.py
@@ -147,25 +147,26 @@ class ProxyFileStream(stream.SimpleStream):
 
 class CacheManager:
     """Manages all requests for cached objects."""
     
-    def __init__(self, cache_dir, db, manager = None):
+    def __init__(self, cache_dir, db, other_dirs = [], manager = None):
         self.cache_dir = cache_dir
+        self.other_dirs = other_dirs
+        self.all_dirs = self.other_dirs[:]
+        self.all_dirs.insert(0, self.cache_dir)
         self.db = db
         self.manager = manager
         self.scanning = []
         
-        # Init the database, remove old files, init the HTTP dirs
-        self.db.removeUntrackedFiles([self.cache_dir])
-        self.db.reconcileDirectories()
-        self.manager.setDirectories(self.db.getAllDirectories())
+        # Init the database, remove old files
+        self.db.removeUntrackedFiles(self.all_dirs)
         
     def scanDirectories(self):
         """Scan the cache directories, hashing new and rehashing changed files."""
         assert not self.scanning, "a directory scan is already under way"
-        self.scanning.append(self.cache_dir)
+        self.scanning = self.all_dirs[:]
         self._scanDirectories()
 
-    def _scanDirectories(self, walker = None):
+    def _scanDirectories(self, result = None, walker = None):
         # Need to start waling a new directory
         if walker is None:
             # If there are any left, get them
@@ -173,10 +174,7 @@ class CacheManager:
             log.msg('started scanning directory: %s' % self.scanning[0].path)
             walker = self.scanning[0].walk()
         else:
-            # Done, just check if the HTTP directories need updating
             log.msg('cache directory scan complete')
-            if self.db.reconcileDirectories():
-                self.manager.setDirectories(self.db.getAllDirectories())
             return
         
         try:
@@ -195,7 +193,7 @@ class CacheManager:
                 log.msg('entering directory: %s' % file.path)
             else:
                 log.msg('file is unchanged: %s' % file.path)
-            reactor.callLater(0, self._scanDirectories, walker)
+            reactor.callLater(0, self._scanDirectories, None, walker)
             return
         
         # Otherwise hash it
@@ -206,23 +204,22 @@ class CacheManager:
         df.addErrback(log.err)
 
     def _doneHashing(self, result, file, walker):
-        reactor.callLater(0, self._scanDirectories, walker)
         if isinstance(result, HashObject):
             log.msg('hash check of %s completed with hash: %s' % (file.path, result.hexdigest()))
+            url = None
             if self.scanning[0] == self.cache_dir:
-                mirror_dir = self.cache_dir.child(file.path[len(self.cache_dir.path)+1:].split('/', 1)[0])
-                urlpath, newdir = self.db.storeFile(file, result.digest(), mirror_dir)
                 url = 'http:/' + file.path[len(self.cache_dir.path):]
+            new_hash = self.db.storeFile(file, result.digest())
+            df = self.manager.new_cached_file(file, result, new_hash, url, True)
+            if df is None:
+                reactor.callLater(0, self._scanDirectories, None, walker)
             else:
-                urlpath, newdir = self.db.storeFile(file, result.digest(), self.scanning[0])
-                url = None
-            if newdir:
-                self.manager.setDirectories(self.db.getAllDirectories())
-            self.manager.new_cached_file(file, result, urlpath, url)
+                df.addBoth(self._scanDirectories, walker)
         else:
             log.msg('hash check of %s failed' % file.path)
             log.err(result)
+            reactor.callLater(0, self._scanDirectories, None, walker)
     
     def save_file(self, response, hash, url):
         """Save a downloaded file to the cache and stream it."""
@@ -276,18 +273,13 @@ class CacheManager:
             else:
                 log.msg('Hashed file to %s: %s' % (hash.hexdigest(), url))
-                mirror_dir = self.cache_dir.child(destFile.path[len(self.cache_dir.path)+1:].split('/', 1)[0])
-                urlpath, newdir = self.db.storeFile(destFile, hash.digest(), mirror_dir)
-                log.msg('now avaliable at %s: %s' % (urlpath, url))
+                new_hash = self.db.storeFile(destFile, hash.digest())
+                log.msg('now avaliable: %s' % (url))
                 
                 if self.manager:
-                    if newdir:
-                        log.msg('A new web directory was created, so enable it')
-                        self.manager.setDirectories(self.db.getAllDirectories())
-
-                    self.manager.new_cached_file(destFile, hash, urlpath, url)
+                    self.manager.new_cached_file(destFile, hash, new_hash, url)
                     if ext:
-                        self.manager.new_cached_file(decFile, None, urlpath, url[:-len(ext)])
+                        self.manager.new_cached_file(decFile, None, False, url[:-len(ext)])
         else:
             log.msg("Hashes don't match %s != %s: %s" % (hash.hexexpected(), hash.hexdigest(), url))
             destFile.remove()
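A minimal sketch (not part of the commit above) of the callback pattern the reworked _doneHashing() relies on: new_cached_file() may hand back a Twisted Deferred, in which case the next _scanDirectories() step is chained onto it with addBoth(); if it returns None, the step is scheduled immediately with reactor.callLater(). The fake_new_cached_file helper and the bare _scanDirectories stub below are hypothetical stand-ins, not the real apt_dht objects.

from twisted.internet import defer, reactor

def _scanDirectories(result=None, walker=None):
    # In CacheManager this would resume the directory walk; here it just ends the demo.
    print('scan step resumed, result=%r' % (result,))
    reactor.stop()

def fake_new_cached_file(returns_deferred):
    # Stand-in for manager.new_cached_file(): the real call may return a Deferred
    # (work still pending) or None (nothing further to wait for).
    if returns_deferred:
        d = defer.Deferred()
        reactor.callLater(0.1, d.callback, 'hash stored')
        return d
    return None

df = fake_new_cached_file(returns_deferred=True)
if df is None:
    reactor.callLater(0, _scanDirectories, None, None)
else:
    df.addBoth(_scanDirectories, None)
reactor.run()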