Better handling and logging for intermittent HTTP client submission errors.
diff --git a/apt_p2p/PeerManager.py b/apt_p2p/PeerManager.py
index dd450ffa6b64c930ed8b6d63e7ccdc59f34544a4..deeb668a54c4ae670a4e501c5e106fe7b6a300ff 100644
--- a/apt_p2p/PeerManager.py
+++ b/apt_p2p/PeerManager.py
@@ -8,7 +8,7 @@ from binascii import b2a_hex, a2b_hex
 import sha
 
 from twisted.internet import reactor, defer
-from twisted.python import log
+from twisted.python import log, filepath
 from twisted.trial import unittest
 from twisted.web2 import stream
 from twisted.web2.http import Response, splitHostPort
@@ -17,7 +17,12 @@ from HTTPDownloader import Peer
 from util import uncompact
 from Hash import PIECE_SIZE
 from apt_p2p_Khashmir.bencode import bdecode
+from apt_p2p_conf import config
 
+
+class PeerError(Exception):
+    """An error occurred downloading from peers."""
+    
 class GrowingFileStream(stream.FileStream):
     """Modified to stream data from a file as it becomes available.
     
@@ -86,11 +91,13 @@ class GrowingFileStream(stream.FileStream):
                     deferred.callback(b)
                 else:
                     # We're done
+                    self._close()
                     deferred = self.deferred
                     self.deferred = None
                     deferred.callback(None)
             else:
                 # We're done
+                self._close()
                 deferred = self.deferred
                 self.deferred = None
                 deferred.callback(None)
@@ -107,6 +114,7 @@ class GrowingFileStream(stream.FileStream):
         # If we don't have any available, we're done or deferred
         if readSize <= 0:
             if self.finished:
+                self._close()
                 return None
             else:
                 self.deferred = defer.Deferred()
@@ -119,6 +127,7 @@ class GrowingFileStream(stream.FileStream):
         if not bytesRead:
             # End of file was reached, we're done or deferred
             if self.finished:
+                self._close()
                 return None
             else:
                 self.deferred = defer.Deferred()
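
These hunks make GrowingFileStream clean up its temporary file once the reader has drained everything. For orientation, a minimal sketch of the producer side that feeds such a stream; updateAvailable() and allAvailable() are the existing hooks used further down in this diff, while tmpfile_handle and expected_size are illustrative placeholders:

    # Illustrative only: the producer side of a GrowingFileStream.
    # Each finished piece unlocks PIECE_SIZE more bytes for the reader;
    # allAvailable() ends the stream, so the final read returns None and
    # (with this change) _close() removes the temporary file.
    stream = GrowingFileStream(tmpfile_handle, expected_size)
    stream.updateAvailable(PIECE_SIZE)    # piece 0 has been written
    stream.updateAvailable(PIECE_SIZE)    # piece 1 has been written
    stream.allAvailable()                 # no more data is coming
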
@@ -127,6 +136,12 @@ class GrowingFileStream(stream.FileStream):
             self.position += bytesRead
             return b
 
+    def _close(self):
+        """Close the temporary file and remove it."""
+        self.f.close()
+        filepath.FilePath(self.f.name).remove()
+        self.f = None
+        
 class StreamToFile:
     """Save a stream to a partial file and hash it.
     
@@ -134,8 +149,8 @@ class StreamToFile:
     @ivar stream: the input stream being read
     @type outFile: L{twisted.python.filepath.FilePath}
     @ivar outFile: the file being written
-    @type hash: C{sha1}
-    @ivar hash: the hash object for the data
+    @type hasher: hashing object, e.g. C{sha1}
+    @ivar hasher: the hash object for the data
     @type position: C{int}
     @ivar position: the current file position to write the next data to
     @type length: C{int}
@@ -144,9 +159,11 @@ class StreamToFile:
     @ivar doneDefer: the deferred that will fire when done writing
     """
     
-    def __init__(self, inputStream, outFile, start = 0, length = None):
+    def __init__(self, hasher, inputStream, outFile, start = 0, length = None):
         """Initializes the file.
         
+        @type hasher: hashing object, e.g. C{sha1}
+        @param hasher: the hash object for the data
         @type inputStream: L{twisted.web2.stream.IByteStream}
         @param inputStream: the input stream to read from
         @type outFile: L{twisted.python.filepath.FilePath}
@@ -160,7 +177,7 @@ class StreamToFile:
         """
         self.stream = inputStream
         self.outFile = outFile
-        self.hash = sha.new()
+        self.hasher = hasher
         self.position = start
         self.length = None
         if length is not None:
@@ -172,7 +189,6 @@ class StreamToFile:
 
         @rtype: L{twisted.internet.defer.Deferred}
         """
-        log.msg('Started streaming %r bytes to file at position %d' % (self.length, self.position))
         self.doneDefer = stream.readStream(self.stream, self._gotData)
         self.doneDefer.addCallbacks(self._done, self._error)
         return self.doneDefer
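
With the signature change above, callers now hand StreamToFile its hash object instead of the class creating a sha instance itself. A minimal sketch of driving the new interface; save_piece, the sha.new() hasher, and expected_digest are illustrative placeholders (the real callers below pass Hash.newPieceHasher() or Hash.newHasher()):

    # Illustrative only: stream one piece into the shared partial file and
    # verify its digest afterwards.  StreamToFile._done() fires the deferred
    # with hasher.digest(), so verification happens in a callback.
    import sha

    def save_piece(response_stream, out_file, piece, expected_digest):
        df = StreamToFile(sha.new(), response_stream, out_file,
                          start = piece * PIECE_SIZE, length = PIECE_SIZE).run()

        def check(digest):
            if digest != expected_digest:
                raise PeerError, "piece %d does not match its hash" % piece
            return digest

        return df.addCallback(check)
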
@@ -180,10 +196,10 @@ class StreamToFile:
     def _gotData(self, data):
         """Process the received data."""
         if self.outFile.closed:
-            raise Exception, "outFile was unexpectedly closed"
+            raise PeerError, "outFile was unexpectedly closed"
         
         if data is None:
-            raise Exception, "Data is None?"
+            raise PeerError, "Data is None?"
         
         # Make sure we don't go too far
         if self.length is not None and self.position + len(data) > self.length:
@@ -192,13 +208,12 @@ class StreamToFile:
         # Write and hash the streamed data
         self.outFile.seek(self.position)
         self.outFile.write(data)
-        self.hash.update(data)
+        self.hasher.update(data)
         self.position += len(data)
         
     def _done(self, result):
         """Return the result."""
-        log.msg('Streaming is complete')
-        return self.hash.digest()
+        return self.hasher.digest()
     
     def _error(self, err):
         """Log the error."""
@@ -261,6 +276,8 @@ class FileDownload:
         self.compact_peers = compact_peers
         
         self.path = '/~/' + quote_plus(hash.expected())
+        self.defer = None
+        self.mirror_path = None
         self.pieces = None
         self.started = False
         
@@ -312,7 +329,7 @@ class FileDownload:
         if max_found == no_pieces:
             # The file is not split into pieces
             log.msg('No pieces were found for the file')
-            self.pieces = []
+            self.pieces = [self.hash.expected()]
             self.startDownload()
         elif max_found == max(pieces_string.values()):
             # Small number of pieces in a string
@@ -354,23 +371,27 @@ class FileDownload:
 
         # Start the DHT lookup
         lookupDefer = self.manager.dht.getValue(key)
-        lookupDefer.addCallback(self._getDHTPieces, key)
+        lookupDefer.addBoth(self._getDHTPieces, key)
         
     def _getDHTPieces(self, results, key):
         """Check the retrieved values."""
-        for result in results:
-            # Make sure the hash matches the key
-            result_hash = sha.new(result.get('t', '')).digest()
-            if result_hash == key:
-                pieces = result['t']
-                self.pieces = [pieces[x:x+20] for x in xrange(0, len(pieces), 20)]
-                log.msg('Retrieved %d piece hashes from the DHT' % len(self.pieces))
-                self.startDownload()
-                return
+        if isinstance(results, list):
+            for result in results:
+                # Make sure the hash matches the key
+                result_hash = sha.new(result.get('t', '')).digest()
+                if result_hash == key:
+                    pieces = result['t']
+                    self.pieces = [pieces[x:x+20] for x in xrange(0, len(pieces), 20)]
+                    log.msg('Retrieved %d piece hashes from the DHT' % len(self.pieces))
+                    self.startDownload()
+                    return
+                
+            log.msg('Could not retrieve the piece hashes from the DHT')
+        else:
+            log.msg('Looking up piece hashes in the DHT resulted in an error: %r' % (results, ))
             
         # Continue without the piece hashes
-        log.msg('Could not retrieve the piece hashes from the DHT')
-        self.pieces = []
+        self.pieces = [None for x in xrange(0, self.hash.expSize, PIECE_SIZE)]
         self.startDownload()
 
     def getPeerPieces(self, key, failedSite = None):
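
When neither the DHT nor the peers can supply piece hashes, the fallback above now builds a placeholder list sized from the expected file size, one None entry per PIECE_SIZE slice, instead of an empty list. A quick worked example; the 25 MB size is invented and PIECE_SIZE is assumed here to be 512 KiB:

    # Illustrative only: how many unknown-hash placeholders the fallback
    # creates for a 25 MB file, assuming PIECE_SIZE = 512*1024.
    PIECE_SIZE = 512*1024
    expSize = 25*1024*1024
    pieces = [None for x in xrange(0, expSize, PIECE_SIZE)]
    assert len(pieces) == 50    # one placeholder per 512 KiB slice
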
@@ -392,7 +413,6 @@ class FileDownload:
 
         if self.pieces is None:
             # Send a request to one or more peers
-            log.msg('Checking for a peer to request piece hashes from')
             for site in self.peers:
                 if self.peers[site].get('failed', False) != True:
                     log.msg('Sending a piece hash request to %r' % (site, ))
@@ -406,11 +426,10 @@ class FileDownload:
                     if self.outstanding >= 4:
                         break
         
-        log.msg('Done sending piece hash requests for now, %d outstanding' % self.outstanding)
         if self.pieces is None and self.outstanding <= 0:
             # Continue without the piece hashes
             log.msg('Could not retrieve the piece hashes from the peers')
-            self.pieces = []
+            self.pieces = [None for x in xrange(0, self.hash.expSize, PIECE_SIZE)]
             self.startDownload()
         
     def _getPeerPieces(self, response, key, site):
@@ -418,7 +437,6 @@ class FileDownload:
         log.msg('Got a piece hash response %d from %r' % (response.code, site))
         if response.code != 200:
             # Request failed, try a different peer
-            log.msg('Did not like response %d from %r' % (response.code, site))
             self.getPeerPieces(key, site)
         else:
             # Read the response stream to a string
@@ -483,37 +501,36 @@ class FileDownload:
         
         log.msg('Starting to download %s' % self.path)
         self.started = True
-        assert self.pieces is not None, "You must initialize the piece hashes first"
+        assert self.pieces, "You must initialize the piece hashes first"
         self.peerlist = [self.peers[site]['peer'] for site in self.peers]
         
+        # Use the mirror if there are few peers
+        if len(self.peerlist) < config.getint('DEFAULT', 'MIN_DOWNLOAD_PEERS'):
+            parsed = urlparse(self.mirror)
+            if parsed[0] == "http":
+                site = splitHostPort(parsed[0], parsed[1])
+                self.mirror_path = urlunparse(('', '') + parsed[2:])
+                peer = self.manager.getPeer(site, mirror = True)
+                self.peerlist.append(peer)
+        
         # Special case if there's only one good peer left
-        if len(self.peerlist) == 1:
-            log.msg('Downloading from peer %r' % (self.peerlist[0], ))
-            self.defer.callback(self.peerlist[0].get(self.path))
-            return
+#        if len(self.peerlist) == 1:
+#            log.msg('Downloading from peer %r' % (self.peerlist[0], ))
+#            self.defer.callback(self.peerlist[0].get(self.path))
+#            return
         
-        # Start sending the return file
-        self.stream = GrowingFileStream(self.file, self.hash.expSize)
-        resp = Response(200, {}, self.stream)
-        self.defer.callback(resp)
-
         # Begin to download the pieces
         self.outstanding = 0
         self.nextFinish = 0
-        if self.pieces:
-            self.completePieces = [False for piece in self.pieces]
-        else:
-            self.completePieces = [False]
+        self.completePieces = [False for piece in self.pieces]
         self.getPieces()
         
     #{ Downloading the pieces
     def getPieces(self):
         """Download the next pieces from the peers."""
-        log.msg('Checking for more piece requests to send')
         self.sort()
         piece = self.nextFinish
         while self.outstanding < 4 and self.peerlist and piece < len(self.completePieces):
-            log.msg('Checking piece %d' % piece)
             if self.completePieces[piece] == False:
                 # Send a request to the highest ranked peer
                 peer = self.peerlist.pop()
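
The new fallback treats the mirror as just another peer: when fewer than MIN_DOWNLOAD_PEERS peers are known, the mirror URL is split into a (host, port) site for getPeer() and a path to request later. A small sketch of that split; the mirror URL is invented:

    # Illustrative only: turning a mirror URL into a site and request path,
    # as startDownload() now does when too few peers are available.
    from urlparse import urlparse, urlunparse
    from twisted.web2.http import splitHostPort

    mirror = 'http://ftp.us.debian.org/debian/dists/sid/Release'   # invented
    parsed = urlparse(mirror)
    if parsed[0] == "http":
        site = splitHostPort(parsed[0], parsed[1])        # ('ftp.us.debian.org', 80)
        mirror_path = urlunparse(('', '') + parsed[2:])   # '/debian/dists/sid/Release'
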
@@ -521,25 +538,26 @@ class FileDownload:
                 log.msg('Sending a request for piece %d to peer %r' % (piece, peer))
                 
                 self.outstanding += 1
-                if self.pieces:
-                    df = peer.getRange(self.path, piece*PIECE_SIZE, (piece+1)*PIECE_SIZE - 1)
+                path = self.path
+                if peer.mirror:
+                    path = self.mirror_path
+                if len(self.completePieces) > 1:
+                    df = peer.getRange(path, piece*PIECE_SIZE, (piece+1)*PIECE_SIZE - 1)
                 else:
-                    df = peer.get(self.path)
+                    df = peer.get(path)
                 reactor.callLater(0, df.addCallbacks,
                                   *(self._getPiece, self._getError),
                                   **{'callbackArgs': (piece, peer),
                                      'errbackArgs': (piece, peer)})
             piece += 1
                 
-        log.msg('Finished checking pieces, %d outstanding, next piece %d of %d' % (self.outstanding, self.nextFinish, len(self.completePieces)))
         # Check if we're done
         if self.outstanding <= 0 and self.nextFinish >= len(self.completePieces):
-            log.msg('We seem to be done with all pieces')
+            log.msg('Download is complete for %s' % self.path)
             self.stream.allAvailable()
     
     def _getPiece(self, response, piece, peer):
         """Process the retrieved headers from the peer."""
-        log.msg('Got response for piece %d from peer %r' % (piece, peer))
         if ((len(self.completePieces) > 1 and response.code != 206) or
             (response.code not in (200, 206))):
             # Request failed, try a different peer
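
Multi-piece downloads still fetch each piece with a ranged GET covering exactly one PIECE_SIZE slice, now against either the peer path or the mirror path. A worked example of the byte range; piece 3 is arbitrary and PIECE_SIZE is assumed to be 512 KiB:

    # Illustrative only: the byte range requested for piece 3.
    PIECE_SIZE = 512*1024
    piece = 3
    start = piece * PIECE_SIZE              # 1572864
    end = (piece + 1) * PIECE_SIZE - 1      # 2097151
    # getRange(path, start, end) would presumably send
    # "Range: bytes=1572864-2097151" and expect a 206 response.
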
@@ -549,14 +567,31 @@ class FileDownload:
             if response.stream and response.stream.length:
                 stream.readAndDiscard(response.stream)
         else:
+            if self.defer:
+                # Start sending the return file
+                df = self.defer
+                self.defer = None
+                self.stream = GrowingFileStream(self.file, self.hash.expSize)
+
+                # Get the headers from the peer's response
+                headers = {}
+                if response.headers.hasHeader('last-modified'):
+                    headers['last-modified'] = response.headers.getHeader('last-modified')
+                resp = Response(200, headers, self.stream)
+                df.callback(resp)
+
             # Read the response stream to the file
             log.msg('Streaming piece %d from peer %r' % (piece, peer))
             if response.code == 206:
-                df = StreamToFile(response.stream, self.file, piece*PIECE_SIZE, PIECE_SIZE).run()
+                df = StreamToFile(self.hash.newPieceHasher(), response.stream,
+                                  self.file, piece*PIECE_SIZE, PIECE_SIZE).run()
             else:
-                df = StreamToFile(response.stream, self.file).run()
-            df.addCallbacks(self._gotPiece, self._gotError,
-                            callbackArgs=(piece, peer), errbackArgs=(piece, peer))
+                df = StreamToFile(self.hash.newHasher(), response.stream,
+                                  self.file).run()
+            reactor.callLater(0, df.addCallbacks,
+                              *(self._gotPiece, self._gotError),
+                              **{'callbackArgs': (piece, peer),
+                                 'errbackArgs': (piece, peer)})
 
         self.outstanding -= 1
         self.peerlist.append(peer)
@@ -573,14 +608,12 @@ class FileDownload:
 
     def _gotPiece(self, response, piece, peer):
         """Process the retrieved piece from the peer."""
-        log.msg('Finished streaming piece %d from peer %r: %r' % (piece, peer, response))
-        if ((self.pieces and response != self.pieces[piece]) or
-            (len(self.pieces) == 0 and response != self.hash.expected())):
+        if self.pieces[piece] and response != self.pieces[piece]:
             # Hash doesn't match
             log.msg('Hash error for piece %d from peer %r' % (piece, peer))
             peer.hashError('Piece received from peer does not match expected')
             self.completePieces[piece] = False
-        elif self.pieces:
+        else:
             # Successfully completed one of several pieces
             log.msg('Finished with piece %d from peer %r' % (piece, peer))
             self.completePieces[piece] = True
@@ -588,12 +621,6 @@ class FileDownload:
                    self.completePieces[self.nextFinish] == True):
                 self.nextFinish += 1
                 self.stream.updateAvailable(PIECE_SIZE)
-        else:
-            # Whole download (only one piece) is complete
-            log.msg('Piece %d from peer %r is the last piece' % (piece, peer))
-            self.completePieces[piece] = True
-            self.nextFinish = 1
-            self.stream.updateAvailable(2**30)
 
         self.getPieces()
 
@@ -611,17 +638,20 @@ class PeerManager:
     @ivar cache_dir: the directory to use for storing all files
     @type dht: L{interfaces.IDHT}
     @ivar dht: the DHT instance
+    @type stats: L{stats.StatsLogger}
+    @ivar stats: the statistics logger to record sent data to
     @type clients: C{dictionary}
     @ivar clients: the available peers that have been previously contacted
     """
 
-    def __init__(self, cache_dir, dht):
+    def __init__(self, cache_dir, dht, stats):
         """Initialize the instance."""
         self.cache_dir = cache_dir
         self.cache_dir.restat(False)
         if not self.cache_dir.exists():
             self.cache_dir.makedirs()
         self.dht = dht
+        self.stats = stats
         self.clients = {}
         
     def get(self, hash, mirror, peers = [], method="GET", modtime=None):
@@ -647,26 +677,30 @@ class PeerManager:
             assert parsed[0] == "http", "Only HTTP is supported, not '%s'" % parsed[0]
             site = splitHostPort(parsed[0], parsed[1])
             path = urlunparse(('', '') + parsed[2:])
-            peer = self.getPeer(site)
+            peer = self.getPeer(site, mirror = True)
             return peer.get(path, method, modtime)
-        elif len(peers) == 1:
-            site = uncompact(peers[0]['c'])
-            log.msg('Downloading from peer %r' % (site, ))
-            path = '/~/' + quote_plus(hash.expected())
-            peer = self.getPeer(site)
-            return peer.get(path)
+#        elif len(peers) == 1:
+#            site = uncompact(peers[0]['c'])
+#            log.msg('Downloading from peer %r' % (site, ))
+#            path = '/~/' + quote_plus(hash.expected())
+#            peer = self.getPeer(site)
+#            return peer.get(path)
         else:
             tmpfile = self.cache_dir.child(hash.hexexpected())
             return FileDownload(self, hash, mirror, peers, tmpfile).run()
         
-    def getPeer(self, site):
+    def getPeer(self, site, mirror = False):
         """Create a new peer if necessary and return it.
         
         @type site: (C{string}, C{int})
         @param site: the IP address and port of the peer
+        @param mirror: whether the peer is actually a mirror
+            (optional, defaults to False)
         """
         if site not in self.clients:
-            self.clients[site] = Peer(site[0], site[1])
+            self.clients[site] = Peer(site[0], site[1], self.stats)
+            if mirror:
+                self.clients[site].mirror = True
         return self.clients[site]
     
     def close(self):
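
Taken together, the PeerManager changes thread the statistics logger through to every Peer and let a mirror be registered like an ordinary peer. A short usage sketch; cache_dir, dht, stats, and the addresses are illustrative placeholders:

    # Illustrative only: peers are cached per (host, port) site, and a site
    # first created as a mirror keeps its mirror flag on later lookups.
    manager = PeerManager(cache_dir, dht, stats)
    peer = manager.getPeer(('192.0.2.10', 9977))
    mirror = manager.getPeer(('ftp.us.debian.org', 80), mirror = True)
    assert manager.getPeer(('192.0.2.10', 9977)) is peer    # same Peer reused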