import os.path, time
+from twisted.python import log
from twisted.web2 import server, http, resource, channel
from twisted.web2 import static, http_headers, responsecode
super(FileDownloader, self).__init__(path, defaultType, ignoredExts, processors, indexNames)
def render(self, req):
    """Serve a request for a file, delegating to the peer manager when possible.

    First tries the normal static-file rendering from the superclass.  If a
    manager is attached, a found file is checked for freshness via the manager
    and a missing file is requested from other sources (peers/mirrors).

    @param req: the twisted.web2 request
    @return: a response, a response code, or a Deferred yielding one
    """
    log.msg('Got request for %s from %s' % (req.uri, req.remoteAddr))
    # Let the superclass (static file handling) produce the initial answer.
    resp = super(FileDownloader, self).render(req)
    log.msg('Initial response to %s: %r' % (req.uri, resp))

    if self.manager:
        # The manager keys cache entries by a pseudo-URL built from the
        # request URI (req.uri starts with '/', yielding 'http://...').
        path = 'http:/' + req.uri

        # NOTE: web2's render may return the bare responsecode.NOT_FOUND
        # constant instead of a Response object, hence the direct comparison.
        if resp != responsecode.NOT_FOUND:
            log.msg('Checking freshness of %s' % req.uri)
            # File exists locally: verify it is still up to date upstream.
            return self.manager.check_freshness(path, resp.headers.getHeader('Last-Modified'), resp)

        log.msg('Not found, trying other methods for %s' % req.uri)
        # Not available locally: ask the manager to fetch it another way.
        return self.manager.get_resp(path)

    # No manager attached: just return whatever static rendering produced.
    return resp
def createSimilarFile(self, path):
    """Build a sibling resource of the same class for a child path.

    Propagates the manager and all static-file configuration to the new
    instance; the index-name list is copied so the child cannot mutate
    the parent's list.

    @param path: the filesystem path for the new resource
    @return: a freshly constructed instance of this class
    """
    cls = self.__class__
    return cls(path, self.manager, self.defaultType,
               self.ignoredExts, self.processors, list(self.indexNames))
+
+
class TopLevel(resource.Resource):
addSlash = True
try:
loc = int(name[1:])
except:
+ log.msg('Not found: %s from %s' % (request.uri, request.remoteAddr))
return None, ()
if loc >= 0 and loc < len(self.subdirs) and self.subdirs[loc]:
+ log.msg('Sharing %s with %s' % (request.uri, request.remoteAddr))
return static.File(self.subdirs[loc]), segments[1:]
else:
+ log.msg('Not found: %s from %s' % (request.uri, request.remoteAddr))
return None, ()
+ if request.remoteAddr.host != "127.0.0.1":
+ log.msg('Blocked illegal access to %s from %s' % (request.uri, request.remoteAddr))
+ return None, ()
+
if len(name) > 1:
return FileDownloader(self.directory, self.manager), segments[0:]
else:
from urlparse import urlparse, urlunparse
from twisted.internet import reactor, defer
+from twisted.python import log
from twisted.trial import unittest
from twisted.web2 import stream as stream_mod
+from twisted.web2.http import splitHostPort
from HTTPDownloader import HTTPClientManager
@var locations: a list of the locations where the file can be found
"""
url = choice(locations)
+ log.msg('Downloading %s' % url)
parsed = urlparse(url)
assert(parsed[0] == "http", "Only HTTP is supported, not '%s'" % parsed[0])
- host = parsed[1]
+ host, port = splitHostPort(parsed[0], parsed[1])
path = urlunparse(('', '') + parsed[2:])
-
- # Make sure a port is included for consistency
- if host.find(':') >= 0:
- host, port = host.split(':', 1)
- port = int(port)
- else:
- port = 80
+
return self.getPeer(host, port, path, method, modtime)
def getPeer(self, host, port, path, method="GET", modtime=None):
return self.http_site
def check_freshness(self, path, modtime, resp):
    """Ask the peers whether a cached response is still current.

    Issues a conditional HEAD request for the path and chains the result
    into check_freshness_done, which either reuses the cached response or
    triggers a fresh download.

    @param path: the cache key / URL of the file
    @param modtime: the cached Last-Modified value to validate against
    @param resp: the locally cached response to reuse if still fresh
    @return: a Deferred firing with the response to serve
    """
    log.msg('Checking if %s is still fresh: %r' % (path, modtime))
    head_defer = self.peers.get([path], "HEAD", modtime)
    head_defer.addCallback(self.check_freshness_done, path, resp)
    return head_defer
def check_freshness_done(self, resp, path, orig_resp):
    """Handle the HEAD response from the freshness check.

    Returns the original cached response when the server reports the file
    is unchanged, otherwise re-downloads it.

    Bug fix: twisted.web2 response codes are integers, so the previous
    comparison against the string "304" could never match and every file
    was needlessly re-downloaded.  (NOTE(review): confirm no caller ever
    produces a string code.)

    @param resp: the response to the HEAD request
    @param path: the cache key / URL of the file
    @param orig_resp: the cached response to reuse when still fresh
    @return: the response to serve, or a Deferred yielding one
    """
    if resp.code == 304:  # responsecode.NOT_MODIFIED
        log.msg('Still fresh: %s' % path)
        return orig_resp
    else:
        log.msg('Stale, need to redownload: %s' % path)
        return self.get_resp(path)
def get_resp(self, path):
    """Fetch a response for a path by hash lookup, DHT, or direct download.

    Kicks off an asynchronous hash lookup for the path; success and failure
    are routed to findHash_done / findHash_error, which eventually fire the
    returned Deferred with a response.

    @param path: the cache key / URL of the file
    @return: a Deferred firing with the response to serve
    """
    log.msg('Trying to find hash for %s' % path)
    result = defer.Deferred()
    lookup = self.mirrors.findHash(path)
    lookup.addCallback(self.findHash_done, path, result)
    lookup.addErrback(self.findHash_error, path, result)
    return result
+
def findHash_error(self, failure, path, d):
    """Errback for the hash lookup: treat any failure as 'no hash found'.

    Delegates to findHash_done with an empty (None, None) result so the
    request falls back to a plain download of the path.

    @param failure: the Failure from the hash lookup (ignored)
    @param path: the cache key / URL of the file
    @param d: the Deferred to eventually fire with the response
    """
    no_hash_result = (None, None)
    self.findHash_done(no_hash_result, path, d)
def findHash_done(self, hash_and_size, path, d):
    """Act on the result of the hash lookup for a path.

    With no hash available, downloads the path directly from the peers.
    With a hash, looks up peer locations for it in the DHT and continues
    in lookupHash_done.

    Fix: the Python 2 tuple-parameter syntax ``(hash, size)`` was removed
    by PEP 3113; unpacking inside the body is call-compatible (callers
    still pass a single (hash, size) tuple positionally) and also stops
    shadowing the builtin ``hash``.

    @param hash_and_size: (hash, size) tuple; hash is None when unknown
    @param path: the cache key / URL of the file
    @param d: the Deferred to eventually fire with the response
    """
    file_hash, file_size = hash_and_size
    if file_hash is None:
        log.msg('Hash for %s was not found' % path)
        # No hash: just download the path directly.
        getDefer = self.peers.get([path])
        getDefer.addCallback(d.callback)
    else:
        log.msg('Found hash %s for %s' % (file_hash, path))
        # Lookup hash from DHT
        lookupDefer = self.dht.getValue(file_hash)
        lookupDefer.addCallback(self.lookupHash_done, file_hash, file_size, path, d)
def lookupHash_done(self, locations, hash, size, path, d):
    """Download the file from the peers the DHT found, or fall back.

    With no locations, downloads the path directly; otherwise downloads
    from the discovered peer locations.  Either way the resulting response
    is relayed to the waiting Deferred.

    Bug fix: the log format string was ``'Found peers for $s: %r'`` — with
    one conversion but two arguments the ``%`` operator raises TypeError,
    crashing the success path ('$s' was a typo for '%s').

    @param locations: list of peer locations for the hash (may be empty)
    @param hash: the file's hash (unused here beyond the lookup)
    @param size: the file's size (unused here)
    @param path: the cache key / URL of the file
    @param d: the Deferred to fire with the response
    """
    if not locations:
        log.msg('Peers for %s were not found' % path)
        getDefer = self.peers.get([path])
        getDefer.addCallback(d.callback)
    else:
        log.msg('Found peers for %s: %r' % (path, locations))
        # Download from the found peers
        getDefer = self.peers.get(locations)
        getDefer.addCallback(d.callback)