from urlparse import urlparse
import os
-from twisted.python import log, filepath
+from twisted.python import log
+from twisted.python.filepath import FilePath
from twisted.internet import defer
from twisted.trial import unittest
-from twisted.web2 import stream
from twisted.web2.http import splitHostPort
from AptPackages import AptPackages
-aptpkg_dir='.apt-dht'
+aptpkg_dir='apt-packages'
class MirrorError(Exception):
"""Exception raised when there's a problem with the mirror."""
-class ProxyFileStream(stream.SimpleStream):
- """Saves a stream to a file while providing a new stream."""
-
- def __init__(self, stream, outFile):
- """Initializes the proxy.
-
- @type stream: C{twisted.web2.stream.IByteStream}
- @param stream: the input stream to read from
- @type outFile: C{twisted.python.filepath.FilePath}
- @param outFile: the file to write to
- """
- self.stream = stream
- self.outFile = outFile.open('w')
- self.length = self.stream.length
- self.start = 0
-
- def _done(self):
- """Close the output file."""
- self.outFile.close()
-
- def read(self):
- """Read some data from the stream."""
- if self.outFile.closed:
- return None
-
- data = self.stream.read()
- if isinstance(data, defer.Deferred):
- data.addCallbacks(self._write, self._done)
- return data
-
- self._write(data)
- return data
-
- def _write(self, data):
- """Write the stream data to the file and return it for others to use."""
- if data is None:
- self._done()
- return data
-
- self.outFile.write(data)
- return data
-
- def close(self):
- """Clean everything up and return None to future reads."""
- self.length = 0
- self._done()
- self.stream.close()
-
class MirrorManager:
"""Manages all requests for mirror objects."""
- def __init__(self, cache_dir):
+ def __init__(self, cache_dir, unload_delay):
self.cache_dir = cache_dir
- self.cache = filepath.FilePath(self.cache_dir)
+ self.unload_delay = unload_delay
self.apt_caches = {}
def extractPath(self, url):
baseDir = base_match
log.msg("Settled on baseDir: %s" % baseDir)
- log.msg("Parsing '%s' gave '%s', '%s', '%s'" % (url, site, baseDir, path))
return site, baseDir, path
def init(self, site, baseDir):
self.apt_caches[site] = {}
if baseDir not in self.apt_caches[site]:
- site_cache = os.path.join(self.cache_dir, aptpkg_dir, 'mirrors', site + baseDir.replace('/', '_'))
- self.apt_caches[site][baseDir] = AptPackages(site_cache)
+ site_cache = self.cache_dir.child(aptpkg_dir).child('mirrors').child(site + baseDir.replace('/', '_'))
+        if not site_cache.exists():
+            site_cache.makedirs()
+ self.apt_caches[site][baseDir] = AptPackages(site_cache, self.unload_delay)
def updatedFile(self, url, file_path):
site, baseDir, path = self.extractPath(url)
self.init(site, baseDir)
self.apt_caches[site][baseDir].file_updated(path, file_path)
-
+
def findHash(self, url):
- log.msg('Trying to find hash for %s' % url)
site, baseDir, path = self.extractPath(url)
if site in self.apt_caches and baseDir in self.apt_caches[site]:
return self.apt_caches[site][baseDir].findHash(path)
d = defer.Deferred()
d.errback(MirrorError("Site Not Found"))
return d
-
- def save_file(self, response, hash, size, url):
- """Save a downloaded file to the cache and stream it."""
- log.msg('Returning file: %s' % url)
-
- parsed = urlparse(url)
- destFile = self.cache.preauthChild(parsed[1] + parsed[2])
- log.msg('Cache file: %s' % destFile.path)
-
- if destFile.exists():
- log.err('File already exists: %s', destFile.path)
- d.callback(response)
- return
-
- destFile.parent().makedirs()
- log.msg('Saving returned %i byte file to: %s' % (response.stream.length, destFile.path))
-
- orig_stream = response.stream
- response.stream = ProxyFileStream(orig_stream, destFile)
- return response
-
- def save_error(self, failure, url):
- """An error has occurred in downloadign or saving the file."""
- log.msg('Error occurred downloading %s' % url)
- log.err(failure)
- return failure
-
+
class TestMirrorManager(unittest.TestCase):
"""Unit tests for the mirror manager."""
client = None
def setUp(self):
- self.client = MirrorManager('/tmp')
+ self.client = MirrorManager(FilePath('/tmp/.apt-dht'), 300)
def test_extractPath(self):
site, baseDir, path = self.client.extractPath('http://ftp.us.debian.org/debian/dists/unstable/Release')
self.failUnless(path == "/dists/unstable/Release", "no match: %s" % path)
def verifyHash(self, found_hash, path, true_hash):
- self.failUnless(found_hash[0] == true_hash,
- "%s hashes don't match: %s != %s" % (path, found_hash[0], true_hash))
+ self.failUnless(found_hash.hexexpected() == true_hash,
+ "%s hashes don't match: %s != %s" % (path, found_hash.hexexpected(), true_hash))
def test_findHash(self):
self.packagesFile = os.popen('ls -Sr /var/lib/apt/lists/ | grep -E "_main_.*Packages$" | tail -n 1').read().rstrip('\n')
break
self.client.updatedFile('http://' + self.releaseFile.replace('_','/'),
- '/var/lib/apt/lists/' + self.releaseFile)
+ FilePath('/var/lib/apt/lists/' + self.releaseFile))
self.client.updatedFile('http://' + self.releaseFile[:self.releaseFile.find('_dists_')+1].replace('_','/') +
self.packagesFile[self.packagesFile.find('_dists_')+1:].replace('_','/'),
- '/var/lib/apt/lists/' + self.packagesFile)
+ FilePath('/var/lib/apt/lists/' + self.packagesFile))
self.client.updatedFile('http://' + self.releaseFile[:self.releaseFile.find('_dists_')+1].replace('_','/') +
self.sourcesFile[self.sourcesFile.find('_dists_')+1:].replace('_','/'),
- '/var/lib/apt/lists/' + self.sourcesFile)
+ FilePath('/var/lib/apt/lists/' + self.sourcesFile))
lastDefer = defer.Deferred()