2 """Hash and store hash information for a file.
4 @var PIECE_SIZE: the piece size to use for hashing pieces of files
8 from binascii import b2a_hex, a2b_hex
11 from twisted.internet import threads, defer
12 from twisted.trial import unittest
class HashError(ValueError):
    """Raised when an error occurs while hashing a file."""
20 """Manages hashes and hashing for a file.
22 @ivar ORDER: the priority ordering of hashes, and how to extract them
26 ORDER = [ {'name': 'sha1',
28 'AptPkgRecord': 'SHA1Hash',
29 'AptSrcRecord': False,
30 'AptIndexRecord': 'SHA1',
32 'hashlib_func': 'sha1',
36 'AptPkgRecord': 'SHA256Hash',
37 'AptSrcRecord': False,
38 'AptIndexRecord': 'SHA256',
39 'hashlib_func': 'sha256',
43 'AptPkgRecord': 'MD5Hash',
45 'AptIndexRecord': 'MD5SUM',
47 'hashlib_func': 'md5',
def __init__(self, digest = None, size = None, pieces = ''):
    """Initialize the hash object.

    @param digest: an already-known file hash in binary form (optional)
    @param size: an already-known file size (optional)
    @param pieces: the concatenation of all the known piece hashes, in
        binary form (optional)
    """
    self.hashTypeNum = 0    # Use the first if nothing else matters
    if sys.version_info < (2, 5):
        # sha256 is not available in python before 2.5, remove it.
        # Iterate over a copy: deleting from the list being iterated
        # would silently skip the element after the deleted one.
        for hashType in list(self.ORDER):
            if hashType['name'] == 'sha256':
                del self.ORDER[self.ORDER.index(hashType)]
    # Expected-hash state (set later by set()/setFrom*Record())
    self.expHash = None
    self.expHex = None
    self.expSize = None
    self.expNormHash = None
    # Hashing state
    self.fileHasher = None
    self.pieceHasher = None
    self.fileHash = digest
    # Split the concatenated piece-hash string into fixed-length digests
    self.pieceHash = [pieces[x:x+self.ORDER[self.hashTypeNum]['length']]
                      for x in range(0, len(pieces), self.ORDER[self.hashTypeNum]['length'])]
    self.size = size
    self.fileHex = None
    self.fileNormHash = None
    # done/result track whether hashing has finished and been verified;
    # new() resets them before any hashing starts
    self.done = True
    self.result = None
def new(self, force = False):
    """Generate a new hashing object suitable for hashing a file.

    Resets all of the hashing state so that update() starts from an
    empty file, unless the previous hash has already been verified and
    force is not set.

    @param force: set to True to force creating a new object even if
        the hash has been verified already
    """
    if self.result is None or force:
        self.result = None
        self.done = False
        self.fileHasher = self._new()
        self.pieceHasher = None
        # update() accumulates into these, so they must start empty/zero
        self.pieceHash = []
        self.size = 0
        self.fileHex = None
        self.fileNormHash = None
95 """Create a new hashing object according to the hash type."""
96 if sys.version_info < (2, 5):
97 mod = __import__(self.ORDER[self.hashTypeNum]['old_module'], globals(), locals(), [])
101 func = getattr(hashlib, self.ORDER[self.hashTypeNum]['hashlib_func'])
def update(self, data):
    """Add more data to the file hasher.

    The data is fed to the whole-file hasher and, once more than
    PIECE_SIZE bytes have been seen, also split into PIECE_SIZE-sized
    pieces whose individual digests are collected in pieceHash.

    @param data: the next chunk of file data to hash
    @raise HashError: if digest()/verify() was already called, or if
        new() has not been called to initialize the hasher
    """
    # If the hash has already been verified (result is set), silently
    # ignore further data
    if self.result is None:
        if self.done:
            raise HashError("Already done, you can't add more data after calling digest() or verify()")
        if self.fileHasher is None:
            raise HashError("file hasher not initialized")

        if not self.pieceHasher and self.size + len(data) > PIECE_SIZE:
            # Hash up to the piece size
            self.fileHasher.update(data[:(PIECE_SIZE - self.size)])
            data = data[(PIECE_SIZE - self.size):]
            self.size = PIECE_SIZE

            # Save the first piece digest and initialize a new piece hasher
            self.pieceHash.append(self.fileHasher.digest())
            self.pieceHasher = self._new()

        if self.pieceHasher:
            # Loop in case the data contains multiple pieces
            piece_size = self.size % PIECE_SIZE
            while piece_size + len(data) > PIECE_SIZE:
                # Save the piece hash and start a new one
                self.pieceHasher.update(data[:(PIECE_SIZE - piece_size)])
                self.pieceHash.append(self.pieceHasher.digest())
                self.pieceHasher = self._new()

                # Don't forget to hash the data normally
                self.fileHasher.update(data[:(PIECE_SIZE - piece_size)])
                data = data[(PIECE_SIZE - piece_size):]
                self.size += PIECE_SIZE - piece_size
                piece_size = self.size % PIECE_SIZE

            # Hash any remaining data
            self.pieceHasher.update(data)

        self.fileHasher.update(data)
        self.size += len(data)
def hashInThread(self, file):
    """Hashes a file in a separate thread, returning a deferred that will callback with the result.

    @param file: the file to hash (a twisted FilePath-like object)
    @rtype: L{twisted.internet.defer.Deferred}
    @return: a deferred that fires with this hash object once hashing
        is complete, or errbacks with L{HashError}
    """
    # Refresh cached stat info before checking for existence
    # NOTE(review): assumes a twisted FilePath with restat() - confirm
    file.restat(False)
    if not file.exists():
        # Fail immediately, there is nothing to hash
        df = defer.Deferred()
        df.errback(HashError("file not found"))
        return df

    df = threads.deferToThread(self._hashInThread, file)
    return df
def _hashInThread(self, file):
    """Hashes a file, returning itself as the result.

    Runs in a worker thread via L{hashInThread}.

    @param file: the file to hash (a twisted FilePath-like object)
    @return: this hash object, with the digest computed
    """
    f = file.open()
    try:
        self.new(force = True)
        # Read and hash the file in moderate chunks
        data = f.read(4096)
        while data:
            self.update(data)
            data = f.read(4096)
    finally:
        # Always release the file handle, even if hashing fails
        f.close()
    self.digest()
    return self
165 #{ Checking hashes of data
def pieceDigests(self):
    """Get the piece hashes of the added file data.

    Finalizes the hash first so the digest of the last (possibly
    partial) piece is included in the returned list.

    @return: list of binary piece digests
    """
    self.digest()
    return self.pieceHash
172 """Get the hash of the added file data."""
173 if self.fileHash is None:
174 if self.fileHasher is None:
175 raise HashError, "you must hash some data first"
176 self.fileHash = self.fileHasher.digest()
179 # Save the last piece hash
181 self.pieceHash.append(self.pieceHasher.digest())
185 """Get the hash of the added file data in hex format."""
186 if self.fileHex is None:
187 self.fileHex = b2a_hex(self.digest())
191 """Verify that the added file data hash matches the expected hash."""
192 if self.result is None and self.fileHash is not None and self.expHash is not None:
193 self.result = (self.fileHash == self.expHash and self.size == self.expSize)
198 """Get the expected hash."""
def hexexpected(self):
    """Get the expected hash in hex format, or None if not set."""
    if self.expHex is None and self.expHash is not None:
        self.expHex = b2a_hex(self.expHash)
    return self.expHex
207 #{ Setting the expected hash
def set(self, hashType, hashHex, size):
    """Record the expected hash, size and hash type for the file.

    @param hashType: must be one of the dictionaries from L{ORDER}
    @param hashHex: the expected hash of the file, in hex format
    @param size: the expected size of the file
    """
    self.hashTypeNum = self.ORDER.index(hashType)    # error if not found
    self.expHex = hashHex
    self.expHash = a2b_hex(hashHex)
    self.expSize = int(size)
def setFromIndexRecord(self, record):
    """Set the hash from the cache of index file records.

    Tries the hash types in priority order and uses the first one
    present in the record.

    @type record: C{dictionary}
    @param record: keys are hash types, values are tuples of (hash, size)
    @rtype: C{boolean}
    @return: whether a supported hash was found in the record
    """
    for hashType in self.ORDER:
        result = record.get(hashType['AptIndexRecord'], None)
        # Guard against a missing entry before indexing the tuple
        if result:
            self.set(hashType, result[0], result[1])
            return True
    return False
def setFromPkgRecord(self, record, size):
    """Set the hash from Apt's binary packages cache.

    Tries the hash types in priority order and uses the first one the
    record provides.

    @param record: whatever is returned by apt_pkg.GetPkgRecords()
    @param size: the size of the file
    @rtype: C{boolean}
    @return: whether a supported hash was found in the record
    """
    for hashType in self.ORDER:
        hashHex = getattr(record, hashType['AptPkgRecord'], None)
        # An absent attribute comes back as None; an empty string also
        # means the record has no such hash
        if hashHex:
            self.set(hashType, hashHex, size)
            return True
    return False
def setFromSrcRecord(self, record):
    """Set the hash from Apt's source package records cache.

    Currently very simple since Apt only tracks MD5 hashes of source files.

    @type record: (C{string}, C{int}, C{string})
    @param record: the hash, size and path of the source file
    @rtype: C{boolean}
    @return: whether a hash type usable for source records was found
    """
    for hashType in self.ORDER:
        if hashType['AptSrcRecord']:
            self.set(hashType, record[0], record[1])
            return True
    return False
class TestHashObject(unittest.TestCase):
    """Unit tests for the hash objects."""

    # NOTE(review): lines lost here in the source; a trial timeout and the
    # old-python skip message were reconstructed - confirm against history
    timeout = 5
    if sys.version_info < (2, 4):
        skip = "skippable only in Python 2.4 and newer"

    def test_failure(self):
        """Tests that the hash object fails when treated badly."""
        h = HashObject()
        h.set(h.ORDER[0], b2a_hex('12345678901234567890'), '0')
        self.failUnlessRaises(HashError, h.digest)
        self.failUnlessRaises(HashError, h.hexdigest)
        self.failUnlessRaises(HashError, h.update, 'gfgf')

    def test_pieces(self):
        """Tests the hashing of large files into pieces."""
        h = HashObject()
        h.new()
        h.update('1234567890'*120*1024)
        self.failUnless(h.digest() == '1(j\xd2q\x0b\n\x91\xd2\x13\x90\x15\xa3E\xcc\xb0\x8d.\xc3\xc5')
        pieces = h.pieceDigests()
        self.failUnless(len(pieces) == 3)
        self.failUnless(pieces[0] == ',G \xd8\xbbPl\xf1\xa3\xa0\x0cW\n\xe6\xe6a\xc9\x95/\xe5')
        self.failUnless(pieces[1] == '\xf6V\xeb/\xa8\xad[\x07Z\xf9\x87\xa4\xf5w\xdf\xe1|\x00\x8e\x93')
        self.failUnless(pieces[2] == 'M[\xbf\xee\xaa+\x19\xbaV\xf699\r\x17o\xcb\x8e\xcfP\x19')
        # Same data added in small chunks must give identical results
        h.new()
        for i in xrange(120*1024):
            h.update('1234567890')
        pieces = h.pieceDigests()
        self.failUnless(h.digest() == '1(j\xd2q\x0b\n\x91\xd2\x13\x90\x15\xa3E\xcc\xb0\x8d.\xc3\xc5')
        self.failUnless(len(pieces) == 3)
        self.failUnless(pieces[0] == ',G \xd8\xbbPl\xf1\xa3\xa0\x0cW\n\xe6\xe6a\xc9\x95/\xe5')
        self.failUnless(pieces[1] == '\xf6V\xeb/\xa8\xad[\x07Z\xf9\x87\xa4\xf5w\xdf\xe1|\x00\x8e\x93')
        self.failUnless(pieces[2] == 'M[\xbf\xee\xaa+\x19\xbaV\xf699\r\x17o\xcb\x8e\xcfP\x19')

    def test_sha1(self):
        """Test hashing using the SHA1 hash."""
        h = HashObject()
        found = False
        for hashType in h.ORDER:
            if hashType['name'] == 'sha1':
                found = True
                break
        self.failUnless(found == True)
        h.set(hashType, '3bba0a5d97b7946ad2632002bf9caefe2cb18e00', '19')
        h.new()
        h.update('apt-p2p is the best')
        self.failUnless(h.hexdigest() == '3bba0a5d97b7946ad2632002bf9caefe2cb18e00')
        self.failUnlessRaises(HashError, h.update, 'gfgf')
        self.failUnless(h.verify() == True)

    def test_md5(self):
        """Test hashing using the MD5 hash."""
        h = HashObject()
        found = False
        for hashType in h.ORDER:
            if hashType['name'] == 'md5':
                found = True
                break
        self.failUnless(found == True)
        h.set(hashType, '6b5abdd30d7ed80edd229f9071d8c23c', '19')
        h.new()
        h.update('apt-p2p is the best')
        self.failUnless(h.hexdigest() == '6b5abdd30d7ed80edd229f9071d8c23c')
        self.failUnlessRaises(HashError, h.update, 'gfgf')
        self.failUnless(h.verify() == True)

    def test_sha256(self):
        """Test hashing using the SHA256 hash."""
        h = HashObject()
        found = False
        for hashType in h.ORDER:
            if hashType['name'] == 'sha256':
                found = True
                break
        self.failUnless(found == True)
        h.set(hashType, '47f2238a30a0340faa2bf01a9bdc42ba77b07b411cda1e24cd8d7b5c4b7d82a7', '19')
        h.new()
        h.update('apt-p2p is the best')
        self.failUnless(h.hexdigest() == '47f2238a30a0340faa2bf01a9bdc42ba77b07b411cda1e24cd8d7b5c4b7d82a7')
        self.failUnlessRaises(HashError, h.update, 'gfgf')
        self.failUnless(h.verify() == True)

    if sys.version_info < (2, 5):
        test_sha256.skip = "SHA256 hashes are not supported by Python until version 2.5"