X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;f=apt_dht%2FMirrorManager.py;h=8bf197f9d40203749bb4c34ae33938d290e94c51;hb=831e4f2adebf21b0427edde718668356a8df4837;hp=efa46d8d4529302d7c3400ee74221bb407bf5ec6;hpb=48b7787f8a2c434d67eb92d1d8bc6e97e9ecb755;p=quix0rs-apt-p2p.git

diff --git a/apt_dht/MirrorManager.py b/apt_dht/MirrorManager.py
index efa46d8..8bf197f 100644
--- a/apt_dht/MirrorManager.py
+++ b/apt_dht/MirrorManager.py
@@ -24,13 +24,15 @@ class MirrorError(Exception):
 class ProxyFileStream(stream.SimpleStream):
     """Saves a stream to a file while providing a new stream."""
 
-    def __init__(self, stream, outFile, decompress = None, decFile = None):
+    def __init__(self, stream, outFile, hash, decompress = None, decFile = None):
         """Initializes the proxy.
 
         @type stream: C{twisted.web2.stream.IByteStream}
         @param stream: the input stream to read from
         @type outFile: C{twisted.python.filepath.FilePath}
         @param outFile: the file to write to
+        @type hash: L{Hash.HashObject}
+        @param hash: the hash object to use for the file
         @type decompress: C{string}
         @param decompress: also decompress the file as this type
             (currently only '.gz' and '.bz2' are supported)
@@ -39,6 +41,8 @@ class ProxyFileStream(stream.SimpleStream):
         """
         self.stream = stream
         self.outFile = outFile.open('w')
+        self.hash = hash
+        self.hash.new()
         self.gzfile = None
         self.bz2file = None
         if decompress == ".gz":
@@ -56,6 +60,7 @@ class ProxyFileStream(stream.SimpleStream):
         """Close the output file."""
         if not self.outFile.closed:
             self.outFile.close()
+            self.hash.digest()
         if self.gzfile:
             data_dec = self.gzdec.flush()
             self.gzfile.write(data_dec)
@@ -65,7 +70,7 @@ class ProxyFileStream(stream.SimpleStream):
             self.bz2file.close()
             self.bz2file = None
 
-        self.doneDefer.callback(1)
+        self.doneDefer.callback(self.hash)
 
     def read(self):
         """Read some data from the stream."""
@@ -87,6 +92,7 @@ class ProxyFileStream(stream.SimpleStream):
             return data
 
         self.outFile.write(data)
+        self.hash.update(data)
         if self.gzfile:
             if self.gzheader:
                 self.gzheader = False
@@ -143,7 +149,8 @@ class ProxyFileStream(stream.SimpleStream):
 class MirrorManager:
     """Manages all requests for mirror objects."""
 
-    def __init__(self, cache_dir):
+    def __init__(self, cache_dir, manager = None):
+        self.manager = manager
         self.cache_dir = cache_dir
         self.cache = filepath.FilePath(self.cache_dir)
         self.apt_caches = {}
@@ -186,6 +193,11 @@ class MirrorManager:
             site_cache = os.path.join(self.cache_dir, aptpkg_dir, 'mirrors', site + baseDir.replace('/', '_'))
             self.apt_caches[site][baseDir] = AptPackages(site_cache)
 
+    def updatedFile(self, url, file_path):
+        site, baseDir, path = self.extractPath(url)
+        self.init(site, baseDir)
+        self.apt_caches[site][baseDir].file_updated(path, file_path)
+
     def findHash(self, url):
         site, baseDir, path = self.extractPath(url)
         if site in self.apt_caches and baseDir in self.apt_caches[site]:
@@ -193,9 +205,13 @@ class MirrorManager:
             d = defer.Deferred()
             d.errback(MirrorError("Site Not Found"))
         return d
-
-    def save_file(self, response, hash, size, url):
+
+    def save_file(self, response, hash, url):
         """Save a downloaded file to the cache and stream it."""
+        if response.code != 200:
+            log.msg('File was not found (%r): %s' % (response, url))
+            return response
+
         log.msg('Returning file: %s' % url)
 
         parsed = urlparse(url)
@@ -219,27 +235,37 @@ class MirrorManager:
         else:
             ext = None
             decFile = None
-
+
         orig_stream = response.stream
-        response.stream = ProxyFileStream(orig_stream, destFile, ext, decFile)
+        response.stream = ProxyFileStream(orig_stream, destFile, hash, ext, decFile)
         response.stream.doneDefer.addCallback(self.save_complete, url, destFile,
                                               response.headers.getHeader('Last-Modified'),
                                               ext, decFile)
         response.stream.doneDefer.addErrback(self.save_error, url)
         return response
 
-    def save_complete(self, result, url, destFile, modtime = None, ext = None, decFile = None):
+    def save_complete(self, hash, url, destFile, modtime = None, ext = None, decFile = None):
         """Update the modification time and AptPackages."""
         if modtime:
             os.utime(destFile.path, (modtime, modtime))
             if ext:
                 os.utime(decFile.path, (modtime, modtime))
+
+        result = hash.verify()
+        if result or result is None:
+            if result:
+                log.msg('Hashes match: %s' % url)
+            else:
+                log.msg('Hashed file to %s: %s' % (hash.hexdigest(), url))
+
+            self.updatedFile(url, destFile.path)
+            if ext:
+                self.updatedFile(url[:-len(ext)], decFile.path)
 
-        site, baseDir, path = self.extractPath(url)
-        self.init(site, baseDir)
-        self.apt_caches[site][baseDir].file_updated(path, destFile.path)
-        if ext:
-            self.apt_caches[site][baseDir].file_updated(path[:-len(ext)], decFile.path)
+            if self.manager:
+                self.manager.download_complete(hash, url, destFile.path)
+        else:
+            log.msg("Hashes don't match %s != %s: %s" % (hash.hexexpected(), hash.hexdigest(), url))
 
     def save_error(self, failure, url):
         """An error has occurred in downloading or saving the file."""
@@ -274,8 +300,8 @@ class TestMirrorManager(unittest.TestCase):
         self.failUnless(path == "/dists/unstable/Release", "no match: %s" % path)
 
     def verifyHash(self, found_hash, path, true_hash):
-        self.failUnless(found_hash[0] == true_hash,
-                        "%s hashes don't match: %s != %s" % (path, found_hash[0], true_hash))
+        self.failUnless(found_hash.hexexpected() == true_hash,
+                        "%s hashes don't match: %s != %s" % (path, found_hash.hexexpected(), true_hash))
 
     def test_findHash(self):
         self.packagesFile = os.popen('ls -Sr /var/lib/apt/lists/ | grep -E "_main_.*Packages$" | tail -n 1').read().rstrip('\n')
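
Note: the diff threads a Hash.HashObject (from the project's Hash module, which is not shown here) through ProxyFileStream and into save_complete. The sketch below is not that module; it is a minimal, hypothetical stand-in built on hashlib, illustrating only the interface the new code calls: new() and update() while the download streams, digest()/hexdigest() once it finishes, and verify()/hexexpected() against an expected digest that may be unknown. The class name, the expected_hex/algorithm parameters, and the SHA-1 default are assumptions for illustration.

    # Hypothetical stand-in for Hash.HashObject -- illustration only, not the project's code.
    import hashlib

    class HashObjectSketch(object):
        def __init__(self, expected_hex=None, algorithm='sha1'):
            self.expected_hex = expected_hex   # expected hex digest, or None if none was published
            self.algorithm = algorithm
            self._hash = None

        def new(self):
            """Start hashing a fresh stream (the diff calls this in ProxyFileStream.__init__)."""
            self._hash = hashlib.new(self.algorithm)

        def update(self, data):
            """Feed one downloaded chunk into the hash (called alongside outFile.write)."""
            self._hash.update(data)

        def digest(self):
            """Finalize the hash when the output file is closed."""
            return self._hash.digest()

        def hexdigest(self):
            return self._hash.hexdigest()

        def hexexpected(self):
            return self.expected_hex

        def verify(self):
            """True/False when an expected digest is known, None when it is not."""
            if self.expected_hex is None:
                return None
            return self.hexdigest() == self.expected_hex

Under these assumptions, the three branches in the new save_complete line up with verify()'s return values: True logs 'Hashes match', None logs the computed digest via hexdigest(), and False logs the mismatch between hexexpected() and hexdigest().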