Make downloaded files accessible via the HTTP server.
[quix0rs-apt-p2p.git] / apt_dht / apt_dht.py
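
"""The main apt_dht module: ties the DHT, the HTTP server, the peer
downloader and the mirror manager together, and republishes files it has
downloaded through its own HTTP server."""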

from binascii import b2a_hex
import os.path

from twisted.internet import defer
from twisted.web2 import server, http, http_headers
from twisted.python import log

from apt_dht_conf import config
from PeerManager import PeerManager
from HTTPServer import TopLevel
from MirrorManager import MirrorManager
from Hash import HashObject

class AptDHT:
    """Coordinates the DHT, the HTTP server, peer downloads and the mirror cache."""

    def __init__(self, dht):
        log.msg('Initializing the main apt_dht application')
        self.dht = dht
        self.http_server = TopLevel(config.get('DEFAULT', 'cache_dir'), self)
        self.http_site = server.Site(self.http_server)
        self.peers = PeerManager()
        self.mirrors = MirrorManager(self, config.get('DEFAULT', 'cache_dir'))

    def getSite(self):
        """Return the twisted.web2 Site to serve HTTP requests from."""
        return self.http_site

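    # A rough usage sketch (not taken from this repository's startup code;
    # the port number and the pre-built `dht` object are assumptions),
    # showing how the site returned by getSite() could be served:
    #
    #   from twisted.internet import reactor
    #   from twisted.web2 import channel
    #   app = AptDHT(dht)
    #   reactor.listenTCP(9977, channel.HTTPFactory(app.getSite()))
    #   reactor.run()
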
    def check_freshness(self, path, modtime, resp):
        """Check whether the cached copy of path is still current.

        Issues a HEAD request for path with the cached modification time;
        the prepared response is only used if the file is unchanged.
        """
        log.msg('Checking if %s is still fresh' % path)
        d = self.peers.get([path], "HEAD", modtime)
        d.addCallback(self.check_freshness_done, path, resp)
        return d

    def check_freshness_done(self, resp, path, orig_resp):
        """Return the original response on 304 Not Modified, otherwise re-download."""
        if resp.code == 304:
            log.msg('Still fresh, returning: %s' % path)
            return orig_resp
        else:
            log.msg('Stale, need to redownload: %s' % path)
            return self.get_resp(path)

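    # The methods below implement the retrieval chain: get_resp() returns a
    # deferred immediately; findHash_done() (or findHash_error() on failure)
    # either downloads straight from the mirror or looks the hash up in the
    # DHT; lookupHash_done() then hands the chosen locations to
    # download_file(), whose result finally fires the deferred returned by
    # get_resp().
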
    def get_resp(self, path):
        """Return a deferred that will eventually fire with the response for path."""
        d = defer.Deferred()

        log.msg('Trying to find hash for %s' % path)
        findDefer = self.mirrors.findHash(path)

        findDefer.addCallbacks(self.findHash_done, self.findHash_error,
                               callbackArgs=(path, d), errbackArgs=(path, d))
        findDefer.addErrback(log.err)
        return d

    def findHash_error(self, failure, path, d):
        """No hash could be found; log the error and fall back to an empty HashObject."""
        log.err(failure)
        self.findHash_done(HashObject(), path, d)

    def findHash_done(self, hash, path, d):
        """With no known hash, download directly from the mirror;
        otherwise look the hash up in the DHT to find peers."""
        if hash.expected() is None:
            log.msg('Hash for %s was not found' % path)
            self.download_file([path], hash, path, d)
        else:
            log.msg('Found hash %s for %s' % (hash.hexexpected(), path))
            # Look the hash up in the DHT
            key = hash.normexpected(bits = config.getint(config.get('DEFAULT', 'DHT'), 'HASH_LENGTH'))
            lookupDefer = self.dht.getValue(key)
            lookupDefer.addCallback(self.lookupHash_done, hash, path, d)

    def lookupHash_done(self, locations, hash, path, d):
        """Download from the peers found in the DHT, or from the mirror if there are none."""
        if not locations:
            log.msg('Peers for %s were not found' % path)
            self.download_file([path], hash, path, d)
        else:
            log.msg('Found peers for %s: %r' % (path, locations))
            # Download from the found peers
            self.download_file(locations, hash, path, d)

    def download_file(self, locations, hash, path, d):
        """Download the file from the given locations and save it via the mirror manager."""
        getDefer = self.peers.get(locations)
        getDefer.addCallback(self.mirrors.save_file, hash, path)
        getDefer.addErrback(self.mirrors.save_error, path)
        getDefer.addCallbacks(d.callback, d.errback)

    def download_complete(self, hash, url, file_path):
        """Publish a newly downloaded file via the HTTP server."""
        assert file_path.startswith(config.get('DEFAULT', 'cache_dir'))
        # Split the file's path into the cache directory, the top-level
        # directory below it, and the remaining path to the file.
        directory = file_path[:len(config.get('DEFAULT', 'cache_dir'))]
        url_path = file_path[len(config.get('DEFAULT', 'cache_dir')):]
        if url_path[0] == '/':
            url_path = url_path[1:]
        top_directory = url_path.split('/', 1)[0]
        url_path = url_path[len(top_directory):]
        # Export the top-level directory through the HTTP server and log
        # the URL path the file can now be fetched from.
        http_dir = os.path.join(directory, top_directory)
        new_top = self.http_server.addDirectory(http_dir)
        url_path = '/' + new_top + url_path
        log.msg('now available at %s: %s' % (url_path, url))
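
# The block below is only an illustrative sketch, not part of the application:
# it repeats the path arithmetic from download_complete() with a hypothetical
# cache_dir and file_path, and uses a made-up 'abc123' in place of whatever
# name self.http_server.addDirectory() would actually return.
if __name__ == '__main__':
    cache_dir = '/var/cache/apt-dht'                                  # assumed cache_dir setting
    file_path = cache_dir + '/www.example.com/dists/stable/Release'  # assumed downloaded file

    url_path = file_path[len(cache_dir):]
    if url_path[0] == '/':
        url_path = url_path[1:]
    top_directory = url_path.split('/', 1)[0]          # 'www.example.com'
    url_path = url_path[len(top_directory):]           # '/dists/stable/Release'
    http_dir = os.path.join(cache_dir, top_directory)  # directory handed to addDirectory()
    new_top = 'abc123'                                 # placeholder for addDirectory()'s result
    print('would now be available at %s (exported directory: %s)'
          % ('/' + new_top + url_path, http_dir))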