e39e99b3f408bdde057d70eeeafc2593798658b8
[quix0rs-apt-p2p.git] / apt_dht / MirrorManager.py
1
2 from urlparse import urlparse
3 import os
4
5 from twisted.python import log, filepath
6 from twisted.internet import defer
7 from twisted.trial import unittest
8 from twisted.web2 import stream
9 from twisted.web2.http import splitHostPort
10
11 from AptPackages import AptPackages
12
# Subdirectory (under the cache directory) where the per-mirror
# AptPackages state is stored; see MirrorManager.init().
aptpkg_dir='.apt-dht'
class MirrorError(Exception):
    """Exception raised when there's a problem with the mirror.

    Errbacked by L{MirrorManager.findHash} when no cache is known for
    the requested site.
    """
17
class ProxyFileStream(stream.SimpleStream):
    """Saves a stream to a file while providing a new stream.

    Every chunk read from the wrapped input stream is written to the
    output file before being passed on to the reader.  The file is
    closed when the input is exhausted, errors out, or the stream is
    closed early.
    """
    
    def __init__(self, stream, outFile):
        """Initializes the proxy.
        
        @type stream: C{twisted.web2.stream.IByteStream}
        @param stream: the input stream to read from
        @type outFile: C{twisted.python.filepath.FilePath}
        @param outFile: the file to write to
        """
        self.stream = stream
        self.outFile = outFile.open('w')
        self.length = self.stream.length
        self.start = 0

    def _done(self):
        """Close the output file (safe to call more than once)."""
        # Guard against double-closing: close() may be called after the
        # stream already finished and _done() already ran.
        if not self.outFile.closed:
            self.outFile.close()

    def _error(self, failure):
        """Close the output file on a read error and propagate the failure."""
        self._done()
        return failure
    
    def read(self):
        """Read some data from the stream."""
        if self.outFile.closed:
            return None
        
        data = self.stream.read()
        if isinstance(data, defer.Deferred):
            # BUG FIX: the errback receives a Failure argument, so it must
            # be _error (the old code passed _done, which takes no
            # arguments and raised TypeError on any stream error).
            data.addCallbacks(self._write, self._error)
            return data
        
        self._write(data)
        return data
    
    def _write(self, data):
        """Write the stream data to the file and return it for others to use.

        A C{None} chunk signals end-of-stream, at which point the output
        file is closed.
        """
        if data is None:
            self._done()
            return data
        
        self.outFile.write(data)
        return data
    
    def close(self):
        """Clean everything up and return None to future reads."""
        self.length = 0
        self._done()
        self.stream.close()
65
class MirrorManager:
    """Manages all requests for mirror objects.

    Keeps one L{AptPackages} cache per (site, baseDir) mirror seen so
    far, and uses them to look up the hashes of requested files.
    """
    
    def __init__(self, cache_dir):
        """Initialize the manager.

        @type cache_dir: C{string}
        @param cache_dir: the directory to save downloaded files in
        """
        self.cache_dir = cache_dir
        self.cache = filepath.FilePath(self.cache_dir)
        # Maps site -> baseDir -> AptPackages instance.
        self.apt_caches = {}
    
    def extractPath(self, url):
        """Split a mirror URL into site, base directory and archive path.

        The base directory is everything before the '/dists/' or '/pool/'
        component of the URL's path.  If neither component is present,
        fall back to the longest base directory already known for the
        site (or '' when none matches).

        @return: a (site, baseDir, path) tuple
        """
        parsed = urlparse(url)
        host, port = splitHostPort(parsed[0], parsed[1])
        site = host + ":" + str(port)
        path = parsed[2]
            
        i = max(path.rfind('/dists/'), path.rfind('/pool/'))
        if i >= 0:
            baseDir = path[:i]
            path = path[i:]
        else:
            # Uh oh, this is not good
            log.msg("Couldn't find a good base directory for path: %s" % (site + path))
            baseDir = ''
            if site in self.apt_caches:
                # Find the longest known base directory that is a prefix
                # of this path, comparing one '/'-component at a time.
                longest_match = 0
                for base in self.apt_caches[site]:
                    base_match = ''
                    for dirs in path.split('/'):
                        if base.startswith(base_match + '/' + dirs):
                            base_match += '/' + dirs
                        else:
                            break
                    if len(base_match) > longest_match:
                        longest_match = len(base_match)
                        baseDir = base_match
            log.msg("Settled on baseDir: %s" % baseDir)
        
        log.msg("Parsing '%s' gave '%s', '%s', '%s'" % (url, site, baseDir, path))
        return site, baseDir, path
        
    def init(self, site, baseDir):
        """Make sure an L{AptPackages} cache exists for (site, baseDir)."""
        if site not in self.apt_caches:
            self.apt_caches[site] = {}
            
        if baseDir not in self.apt_caches[site]:
            site_cache = os.path.join(self.cache_dir, aptpkg_dir, 'mirrors', site + baseDir.replace('/', '_'))
            self.apt_caches[site][baseDir] = AptPackages(site_cache)
    
    def updatedFile(self, url, file_path):
        """Notify the appropriate cache that a mirror file was updated.

        @param url: the URL the file was downloaded from
        @param file_path: the local path the file was saved to
        """
        site, baseDir, path = self.extractPath(url)
        self.init(site, baseDir)
        self.apt_caches[site][baseDir].file_updated(path, file_path)
    
    def findHash(self, url):
        """Find the hash for a URL, as recorded in the apt indexes.

        @return: a deferred that fires with the hash, or errbacks with
            L{MirrorError} if the site is unknown
        """
        log.msg('Trying to find hash for %s' % url)
        site, baseDir, path = self.extractPath(url)
        if site in self.apt_caches and baseDir in self.apt_caches[site]:
            return self.apt_caches[site][baseDir].findHash(path)
        # Idiomatic replacement for manually creating a Deferred and
        # calling errback on it.
        return defer.fail(MirrorError("Site Not Found"))

    def save_file(self, response, hash, size, url):
        """Save a downloaded file to the cache and stream it.

        The response's body stream is wrapped in a L{ProxyFileStream}
        so it is cached while being delivered to the requester.

        @param response: the HTTP response whose stream will be saved
        @return: the response, possibly with its stream replaced
        """
        log.msg('Returning file: %s' % url)
        
        parsed = urlparse(url)
        destFile = self.cache.preauthChild(parsed[1] + parsed[2])
        log.msg('Cache file: %s' % destFile.path)
        
        if destFile.exists():
            # BUG FIX: the old code called back an undefined deferred 'd'
            # here (NameError) and used log.err's second argument as a
            # format parameter, which never formats.  Just pass the
            # response through without re-caching it.
            log.msg('File already exists: %s' % destFile.path)
            return response
        
        # makedirs() raises OSError if the directory already exists, so
        # only create it when it is actually missing.
        if not destFile.parent().exists():
            destFile.parent().makedirs()
        log.msg('Saving returned %i byte file to: %s' % (response.stream.length, destFile.path))
        
        orig_stream = response.stream
        response.stream = ProxyFileStream(orig_stream, destFile)
        return response

    def save_error(self, failure, url):
        """An error has occurred in downloading or saving the file."""
        log.msg('Error occurred downloading %s' % url)
        log.err(failure)
        return failure
152
class TestMirrorManager(unittest.TestCase):
    """Unit tests for the mirror manager.

    NOTE(review): test_findHash only works on a Debian-style host with
    populated apt index files in /var/lib/apt/lists — confirm before
    relying on it in CI.
    """
    
    # Maximum number of seconds trial waits for the returned deferred.
    timeout = 20
    # NOTE(review): mutable class attribute shared across instances;
    # nothing in this class appends to it, but tearDown cancels any
    # delayed calls that were added.
    pending_calls = []
    client = None
    
    def setUp(self):
        # Use /tmp as the download cache for the manager under test.
        self.client = MirrorManager('/tmp')
        
    def test_extractPath(self):
        # Standard mirror URL: default port 80, '/debian' base directory.
        site, baseDir, path = self.client.extractPath('http://ftp.us.debian.org/debian/dists/unstable/Release')
        self.failUnless(site == "ftp.us.debian.org:80", "no match: %s" % site)
        self.failUnless(baseDir == "/debian", "no match: %s" % baseDir)
        self.failUnless(path == "/dists/unstable/Release", "no match: %s" % path)

        # An explicit port must be preserved in the site string.
        site, baseDir, path = self.client.extractPath('http://ftp.us.debian.org:16999/debian/pool/d/dpkg/dpkg_1.2.1-1.tar.gz')
        self.failUnless(site == "ftp.us.debian.org:16999", "no match: %s" % site)
        self.failUnless(baseDir == "/debian", "no match: %s" % baseDir)
        self.failUnless(path == "/pool/d/dpkg/dpkg_1.2.1-1.tar.gz", "no match: %s" % path)

        # '/dists/' directly under the host root gives an empty baseDir.
        site, baseDir, path = self.client.extractPath('http://debian.camrdale.org/dists/unstable/Release')
        self.failUnless(site == "debian.camrdale.org:80", "no match: %s" % site)
        self.failUnless(baseDir == "", "no match: %s" % baseDir)
        self.failUnless(path == "/dists/unstable/Release", "no match: %s" % path)

    def verifyHash(self, found_hash, path, true_hash):
        # Callback for findHash deferreds: found_hash is a (hash, ...)
        # tuple whose first element must equal the expected hash string.
        self.failUnless(found_hash[0] == true_hash, 
                    "%s hashes don't match: %s != %s" % (path, found_hash[0], true_hash))

    def test_findHash(self):
        # Pick the largest main Packages/Sources index files from the
        # local apt state (ls -Sr sorts by size, tail takes the biggest).
        self.packagesFile = os.popen('ls -Sr /var/lib/apt/lists/ | grep -E "_main_.*Packages$" | tail -n 1').read().rstrip('\n')
        self.sourcesFile = os.popen('ls -Sr /var/lib/apt/lists/ | grep -E "_main_.*Sources$" | tail -n 1').read().rstrip('\n')
        # Find the Release file from the same mirror as the Packages file.
        # (.next() is Python 2; apt list filenames encode the URL with
        # '_' in place of '/'.)
        for f in os.walk('/var/lib/apt/lists').next()[2]:
            if f[-7:] == "Release" and self.packagesFile.startswith(f[:-7]):
                self.releaseFile = f
                break
        
        # Feed the three index files to the manager, reconstructing each
        # file's original URL from its '_'-encoded list filename.
        self.client.updatedFile('http://' + self.releaseFile.replace('_','/'), 
                                '/var/lib/apt/lists/' + self.releaseFile)
        self.client.updatedFile('http://' + self.releaseFile[:self.releaseFile.find('_dists_')+1].replace('_','/') +
                                self.packagesFile[self.packagesFile.find('_dists_')+1:].replace('_','/'), 
                                '/var/lib/apt/lists/' + self.packagesFile)
        self.client.updatedFile('http://' + self.releaseFile[:self.releaseFile.find('_dists_')+1].replace('_','/') +
                                self.sourcesFile[self.sourcesFile.find('_dists_')+1:].replace('_','/'), 
                                '/var/lib/apt/lists/' + self.sourcesFile)

        # Fired by the last lookup's addBoth so trial waits for them all.
        lastDefer = defer.Deferred()
        
        # Extract the SHA1 listed for the Packages.bz2 index from the
        # Release file's "SHA1:" section.
        idx_hash = os.popen('grep -A 3000 -E "^SHA1:" ' + 
                            '/var/lib/apt/lists/' + self.releaseFile + 
                            ' | grep -E " main/binary-i386/Packages.bz2$"'
                            ' | head -n 1 | cut -d\  -f 2').read().rstrip('\n')
        idx_path = 'http://' + self.releaseFile.replace('_','/')[:-7] + 'main/binary-i386/Packages.bz2'

        d = self.client.findHash(idx_path)
        d.addCallback(self.verifyHash, idx_path, idx_hash)

        # SHA1 and Filename of the dpkg binary package, from the
        # Packages index.
        pkg_hash = os.popen('grep -A 30 -E "^Package: dpkg$" ' + 
                            '/var/lib/apt/lists/' + self.packagesFile + 
                            ' | grep -E "^SHA1:" | head -n 1' + 
                            ' | cut -d\  -f 2').read().rstrip('\n')
        pkg_path = 'http://' + self.releaseFile[:self.releaseFile.find('_dists_')+1].replace('_','/') + \
                   os.popen('grep -A 30 -E "^Package: dpkg$" ' + 
                            '/var/lib/apt/lists/' + self.packagesFile + 
                            ' | grep -E "^Filename:" | head -n 1' + 
                            ' | cut -d\  -f 2').read().rstrip('\n')

        d = self.client.findHash(pkg_path)
        d.addCallback(self.verifyHash, pkg_path, pkg_hash)

        # Directory, hashes and filenames of the dpkg source package,
        # from the "Files:" stanza of the Sources index.
        src_dir = os.popen('grep -A 30 -E "^Package: dpkg$" ' + 
                            '/var/lib/apt/lists/' + self.sourcesFile + 
                            ' | grep -E "^Directory:" | head -n 1' + 
                            ' | cut -d\  -f 2').read().rstrip('\n')
        src_hashes = os.popen('grep -A 20 -E "^Package: dpkg$" ' + 
                            '/var/lib/apt/lists/' + self.sourcesFile + 
                            ' | grep -A 4 -E "^Files:" | grep -E "^ " ' + 
                            ' | cut -d\  -f 2').read().split('\n')[:-1]
        src_paths = os.popen('grep -A 20 -E "^Package: dpkg$" ' + 
                            '/var/lib/apt/lists/' + self.sourcesFile + 
                            ' | grep -A 4 -E "^Files:" | grep -E "^ " ' + 
                            ' | cut -d\  -f 4').read().split('\n')[:-1]

        # One lookup per source file listed in the Files: stanza.
        for i in range(len(src_hashes)):
            src_path = 'http://' + self.releaseFile[:self.releaseFile.find('_dists_')+1].replace('_','/') + src_dir + '/' + src_paths[i]
            d = self.client.findHash(src_path)
            d.addCallback(self.verifyHash, src_path, src_hashes[i])
            
        # Same as the Packages.bz2 check above, but for Sources.bz2.
        idx_hash = os.popen('grep -A 3000 -E "^SHA1:" ' + 
                            '/var/lib/apt/lists/' + self.releaseFile + 
                            ' | grep -E " main/source/Sources.bz2$"'
                            ' | head -n 1 | cut -d\  -f 2').read().rstrip('\n')
        idx_path = 'http://' + self.releaseFile.replace('_','/')[:-7] + 'main/source/Sources.bz2'

        d = self.client.findHash(idx_path)
        d.addCallback(self.verifyHash, idx_path, idx_hash)

        # Chain the last deferred into lastDefer so trial waits for it.
        d.addBoth(lastDefer.callback)
        return lastDefer

    def tearDown(self):
        # Cancel any outstanding delayed calls and drop the client.
        for p in self.pending_calls:
            if p.active():
                p.cancel()
        self.client = None
259