req = self.request_queue.pop(0)
self.response_queue.append(req)
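+ # Submit on the shared connection; the FIFO queues assume responses complete in request order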
req.deferResponse = self.proto.submitRequest(req, False)
- req.deferResponse.addCallback(self.requestComplete)
- req.deferResponse.addErrback(self.requestError)
+ req.deferResponse.addCallbacks(self.requestComplete, self.requestError)
def requestComplete(self, resp):
req = self.response_queue.pop(0)
- log.msg('Download of %s completed with code %d' % (req.uri, resp.code))
+ log.msg('%s of %s completed with code %d' % (req.method, req.uri, resp.code))
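+ # Fire the deferred associated with the original request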
req.deferRequest.callback(resp)
def requestError(self, error):
import os.path, time
from twisted.python import log
+from twisted.internet import defer
from twisted.web2 import server, http, resource, channel
from twisted.web2 import static, http_headers, responsecode
self.manager = manager
super(FileDownloader, self).__init__(path, defaultType, ignoredExts, processors, indexNames)
- def render(self, req):
+ def renderHTTP(self, req):
log.msg('Got request for %s from %s' % (req.uri, req.remoteAddr))
- resp = super(FileDownloader, self).render(req)
+ resp = super(FileDownloader, self).renderHTTP(req)
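+ # The superclass may return either a response or a Deferred; handle both cases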
+ if isinstance(resp, defer.Deferred):
+ resp.addCallback(self._renderHTTP_done, req)
+ else:
+ resp = self._renderHTTP_done(resp, req)
+ return resp
+
+ def _renderHTTP_done(self, resp, req):
log.msg('Initial response to %s: %r' % (req.uri, resp))
if self.manager:
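+ # req.uri is expected to begin with '/', so this yields a full 'http://host/path' URL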
path = 'http:/' + req.uri
- if resp != responsecode.NOT_FOUND:
- log.msg('Checking freshness of %s' % req.uri)
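+ # Only check freshness for successful or redirect (2xx/3xx) responses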
+ if resp.code >= 200 and resp.code < 400:
return self.manager.check_freshness(path, resp.headers.getHeader('Last-Modified'), resp)
log.msg('Not found, trying other methods for %s' % req.uri)
class ProxyFileStream(stream.SimpleStream):
"""Saves a stream to a file while providing a new stream."""
- def __init__(self, stream, outFile):
+ def __init__(self, stream, outFile, modtime = None):
"""Initializes the proxy.
@type stream: C{twisted.web2.stream.IByteStream}
@param stream: the input stream to read from
@type outFile: C{twisted.python.filepath.FilePath}
@param outFile: the file to write to
+ @type modtime: C{int}
+ @param modtime: the modification time to set for the file
"""
self.stream = stream
- self.outFile = outFile.open('w')
+ self.outFile = outFile
+ self.openFile = outFile.open('w')
+ self.modtime = modtime
self.length = self.stream.length
self.start = 0
def _done(self):
"""Close the output file."""
- self.outFile.close()
+ if not self.openFile.closed:
+ self.openFile.close()
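+ # Stamp the cached file with the server-supplied modification time, if any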
+ if self.modtime:
+ os.utime(self.outFile.path, (self.modtime, self.modtime))
def read(self):
"""Read some data from the stream."""
- if self.outFile.closed:
+ if self.openFile.closed:
return None
data = self.stream.read()
if data is None:
self._done()
return data
- self.outFile.write(data)
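+ # Tee each chunk into the cache file while passing it through unchanged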
+ self.openFile.write(data)
return data
def close(self):
baseDir = base_match
log.msg("Settled on baseDir: %s" % baseDir)
- log.msg("Parsing '%s' gave '%s', '%s', '%s'" % (url, site, baseDir, path))
return site, baseDir, path
def init(self, site, baseDir):
self.apt_caches[site][baseDir].file_updated(path, file_path)
def findHash(self, url):
- log.msg('Trying to find hash for %s' % url)
site, baseDir, path = self.extractPath(url)
if site in self.apt_caches and baseDir in self.apt_caches[site]:
return self.apt_caches[site][baseDir].findHash(path)
parsed = urlparse(url)
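+ # Cache layout mirrors the URL: <cache dir>/<host><path>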
destFile = self.cache.preauthChild(parsed[1] + parsed[2])
- log.msg('Cache file: %s' % destFile.path)
+ log.msg('Saving returned %r byte file to cache: %s' % (response.stream.length, destFile.path))
if destFile.exists():
- log.err('File already exists: %s', destFile.path)
- d.callback(response)
- return
-
- destFile.parent().makedirs()
- log.msg('Saving returned %i byte file to: %s' % (response.stream.length, destFile.path))
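+ # Replace any existing cached copy rather than aborting the save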
+ log.msg('File already exists, removing: %s' % destFile.path)
+ destFile.remove()
+ else:
+ destFile.parent().makedirs()
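+ # Wrap the response body so it is written to the cache as the client downloads it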
orig_stream = response.stream
- response.stream = ProxyFileStream(orig_stream, destFile)
+ response.stream = ProxyFileStream(orig_stream, destFile, response.headers.getHeader('Last-Modified'))
return response
def save_error(self, failure, url):
return self.http_site
def check_freshness(self, path, modtime, resp):
- log.msg('Checking if %s is still fresh: %r' % (path, modtime))
+ log.msg('Checking if %s is still fresh' % path)
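+ # Re-request the path with a HEAD, passing the cached modtime so an unchanged file can come back as a 304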
d = self.peers.get([path], "HEAD", modtime)
d.addCallback(self.check_freshness_done, path, resp)
return d
def check_freshness_done(self, resp, path, orig_resp):
- if resp.code == "304":
- log.msg('Still fresh: %s' % path)
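+ # resp.code is an integer in web2, so compare numerically rather than against a string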
+ if resp.code == 304:
+ log.msg('Still fresh, returning: %s' % path)
return orig_resp
else:
log.msg('Stale, need to redownload: %s' % path)