X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;f=apt_dht%2FMirrorManager.py;h=8bf197f9d40203749bb4c34ae33938d290e94c51;hb=831e4f2adebf21b0427edde718668356a8df4837;hp=c89a5915bc6622fdddd9fc4394656aa77732420f;hpb=b81ab657e1565ae71fb76a74dad79d79f1bb3fa9;p=quix0rs-apt-p2p.git

diff --git a/apt_dht/MirrorManager.py b/apt_dht/MirrorManager.py
index c89a591..8bf197f 100644
--- a/apt_dht/MirrorManager.py
+++ b/apt_dht/MirrorManager.py
@@ -3,7 +3,6 @@
 from bz2 import BZ2Decompressor
 from zlib import decompressobj, MAX_WBITS
 from gzip import FCOMMENT, FEXTRA, FHCRC, FNAME, FTEXT
 from urlparse import urlparse
-from binascii import a2b_hex
 import os
 
 from twisted.python import log, filepath
@@ -25,13 +24,15 @@ class MirrorError(Exception):
 class ProxyFileStream(stream.SimpleStream):
     """Saves a stream to a file while providing a new stream."""
     
-    def __init__(self, stream, outFile, decompress = None, decFile = None):
+    def __init__(self, stream, outFile, hash, decompress = None, decFile = None):
         """Initializes the proxy.
         
         @type stream: C{twisted.web2.stream.IByteStream}
         @param stream: the input stream to read from
         @type outFile: C{twisted.python.filepath.FilePath}
         @param outFile: the file to write to
+        @type hash: L{Hash.HashObject}
+        @param hash: the hash object to use for the file
         @type decompress: C{string}
         @param decompress: also decompress the file as this type
             (currently only '.gz' and '.bz2' are supported)
@@ -40,6 +41,8 @@ class ProxyFileStream(stream.SimpleStream):
         """
         self.stream = stream
         self.outFile = outFile.open('w')
+        self.hash = hash
+        self.hash.new()
         self.gzfile = None
         self.bz2file = None
         if decompress == ".gz":
@@ -57,6 +60,7 @@ class ProxyFileStream(stream.SimpleStream):
         """Close the output file."""
         if not self.outFile.closed:
             self.outFile.close()
+            self.hash.digest()
             if self.gzfile:
                 data_dec = self.gzdec.flush()
                 self.gzfile.write(data_dec)
@@ -66,7 +70,7 @@ class ProxyFileStream(stream.SimpleStream):
                 self.bz2file.close()
                 self.bz2file = None
                 
-            self.doneDefer.callback(1)
+            self.doneDefer.callback(self.hash)
     
     def read(self):
         """Read some data from the stream."""
@@ -88,6 +92,7 @@ class ProxyFileStream(stream.SimpleStream):
             return data
         
         self.outFile.write(data)
+        self.hash.update(data)
         if self.gzfile:
             if self.gzheader:
                 self.gzheader = False
@@ -144,7 +149,8 @@ class ProxyFileStream(stream.SimpleStream):
 class MirrorManager:
     """Manages all requests for mirror objects."""
     
-    def __init__(self, cache_dir):
+    def __init__(self, cache_dir, manager = None):
+        self.manager = manager
         self.cache_dir = cache_dir
         self.cache = filepath.FilePath(self.cache_dir)
         self.apt_caches = {}
@@ -195,21 +201,17 @@ class MirrorManager:
     def findHash(self, url):
         site, baseDir, path = self.extractPath(url)
         if site in self.apt_caches and baseDir in self.apt_caches[site]:
-            d = self.apt_caches[site][baseDir].findHash(path)
-            d.addCallback(self.translateHash)
-            return d
+            return self.apt_caches[site][baseDir].findHash(path)
         d = defer.Deferred()
         d.errback(MirrorError("Site Not Found"))
         return d
     
-    def translateHash(self, (hash, size)):
-        """Translate a hash from apt's hex encoding to a string."""
-        if hash:
-            hash = a2b_hex(hash)
-        return (hash, size)
-
-    def save_file(self, response, hash, size, url):
+    def save_file(self, response, hash, url):
         """Save a downloaded file to the cache and stream it."""
+        if response.code != 200:
+            log.msg('File was not found (%r): %s' % (response, url))
+            return response
+        
         log.msg('Returning file: %s' % url)
         parsed = urlparse(url)
@@ -233,25 +235,37 @@ class MirrorManager:
         else:
             ext = None
             decFile = None
-            
+            
         orig_stream = response.stream
-        response.stream = ProxyFileStream(orig_stream, destFile, ext, decFile)
+        response.stream = ProxyFileStream(orig_stream, destFile, hash, ext, decFile)
         response.stream.doneDefer.addCallback(self.save_complete, url, destFile,
                                               response.headers.getHeader('Last-Modified'),
                                               ext, decFile)
         response.stream.doneDefer.addErrback(self.save_error, url)
         return response
     
-    def save_complete(self, result, url, destFile, modtime = None, ext = None, decFile = None):
+    def save_complete(self, hash, url, destFile, modtime = None, ext = None, decFile = None):
         """Update the modification time and AptPackages."""
         if modtime:
             os.utime(destFile.path, (modtime, modtime))
             if ext:
                 os.utime(decFile.path, (modtime, modtime))
+        
+        result = hash.verify()
+        if result or result is None:
+            if result:
+                log.msg('Hashes match: %s' % url)
+            else:
+                log.msg('Hashed file to %s: %s' % (hash.hexdigest(), url))
+            
+            self.updatedFile(url, destFile.path)
+            if ext:
+                self.updatedFile(url[:-len(ext)], decFile.path)
 
-        self.updatedFile(url, destFile.path)
-        if ext:
-            self.updatedFile(url[:-len(ext)], decFile.path)
+            if self.manager:
+                self.manager.download_complete(hash, url, destFile.path)
+        else:
+            log.msg("Hashes don't match %s != %s: %s" % (hash.hexexpected(), hash.hexdigest(), url))
 
     def save_error(self, failure, url):
         """An error has occurred in downloading or saving the file."""
@@ -286,8 +300,8 @@ class TestMirrorManager(unittest.TestCase):
         self.failUnless(path == "/dists/unstable/Release", "no match: %s" % path)
 
     def verifyHash(self, found_hash, path, true_hash):
-        self.failUnless(found_hash[0] == true_hash,
-                    "%s hashes don't match: %s != %s" % (path, found_hash[0], true_hash))
+        self.failUnless(found_hash.hexexpected() == true_hash,
+                    "%s hashes don't match: %s != %s" % (path, found_hash.hexexpected(), true_hash))
 
     def test_findHash(self):
         self.packagesFile = os.popen('ls -Sr /var/lib/apt/lists/ | grep -E "_main_.*Packages$" | tail -n 1').read().rstrip('\n')
@@ -315,7 +329,7 @@ class TestMirrorManager(unittest.TestCase):
         idx_path = 'http://' + self.releaseFile.replace('_','/')[:-7] + 'main/binary-i386/Packages.bz2'
 
         d = self.client.findHash(idx_path)
-        d.addCallback(self.verifyHash, idx_path, a2b_hex(idx_hash))
+        d.addCallback(self.verifyHash, idx_path, idx_hash)
 
         pkg_hash = os.popen('grep -A 30 -E "^Package: dpkg$" ' + 
                             '/var/lib/apt/lists/' + self.packagesFile + 
                             ' | grep -E "^SHA1:" | head -n 1' + 
                             ' | cut -d\ -f 2').read().rstrip('\n')
 
         d = self.client.findHash(pkg_path)
-        d.addCallback(self.verifyHash, pkg_path, a2b_hex(pkg_hash))
+        d.addCallback(self.verifyHash, pkg_path, pkg_hash)
 
         src_dir = os.popen('grep -A 30 -E "^Package: dpkg$" ' + 
                             '/var/lib/apt/lists/' + self.sourcesFile + 
                             ' | grep -E "^Directory:" | head -n 1' + 
                             ' | cut -d\ -f 2').read().rstrip('\n')
 
         for i in range(len(src_hashes)):
             src_path = 'http://' + self.releaseFile[:self.releaseFile.find('_dists_')+1].replace('_','/') + src_dir + '/' + src_paths[i]
             d = self.client.findHash(src_path)
-            d.addCallback(self.verifyHash, src_path, a2b_hex(src_hashes[i]))
+            d.addCallback(self.verifyHash, src_path, src_hashes[i])
 
         idx_hash = os.popen('grep -A 3000 -E "^SHA1:" ' + 
                             '/var/lib/apt/lists/' + self.releaseFile + 
@@ -355,7 +369,7 @@ class TestMirrorManager(unittest.TestCase):
         idx_path = 'http://' + self.releaseFile.replace('_','/')[:-7] + 'main/source/Sources.bz2'
 
         d = self.client.findHash(idx_path)
-        d.addCallback(self.verifyHash, idx_path, a2b_hex(idx_hash))
+        d.addCallback(self.verifyHash, idx_path, idx_hash)
 
         d.addBoth(lastDefer.callback)
         return lastDefer
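
For orientation, the flow this patch sets up is: findHash() now returns the expected-hash object itself, save_file() threads that object into ProxyFileStream so every chunk written to the cache also goes through hash.update(), and save_complete() receives the same object from doneDefer, calls hash.verify(), and only announces the file (and notifies the manager) when verification succeeds or no expected hash was known. The sketch below is only an illustration of that per-chunk hashing pattern: hashlib stands in for apt_dht's Hash.HashObject, whose interface (new, update, digest, verify, hexdigest, hexexpected) is assumed here from the calls visible in the diff, and ExpectedHash/stream_and_verify are hypothetical helpers, not part of the project.

    # Illustrative sketch only -- hashlib stands in for Hash.HashObject.
    import hashlib

    class ExpectedHash(object):
        """Minimal stand-in holding an expected hex digest (or None if unknown)."""
        def __init__(self, expected_hex = None):
            self.expected_hex = expected_hex
            self._hash = None
        def new(self):
            self._hash = hashlib.sha1()
        def update(self, data):
            self._hash.update(data)
        def hexdigest(self):
            return self._hash.hexdigest()
        def hexexpected(self):
            return self.expected_hex
        def verify(self):
            # None means "no expected hash was known"; save_complete treats that as acceptable
            if self.expected_hex is None:
                return None
            return self._hash.hexdigest() == self.expected_hex

    def stream_and_verify(chunks, expected_hex = None):
        """Hash each chunk as it passes through, then verify at the end."""
        hash = ExpectedHash(expected_hex)
        hash.new()
        for data in chunks:
            hash.update(data)      # ProxyFileStream does this alongside outFile.write(data)
        return hash.verify()       # save_complete branches on True / False / None

    if __name__ == '__main__':
        chunks = ['hello ', 'world']
        good = hashlib.sha1('hello world').hexdigest()
        print stream_and_verify(chunks, good)    # True  -> "Hashes match"
        print stream_and_verify(chunks, '00')    # False -> "Hashes don't match"
        print stream_and_verify(chunks)          # None  -> hashed, nothing to check against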