from twisted.internet import process
from twisted.python import log
import copy, UserDict
+from twisted.trial import unittest
aptpkg_dir='.apt-dht'
-apt_pkg.InitSystem()
+# Module-level apt_pkg initialization (python-apt call renamed from
+# InitSystem() to init() in this patch)
+apt_pkg.init()
class AptDpkgInfo(UserDict.UserDict):
"""
key, value = line.split(': ', 1)
self.data[key] = value
-class PackageFileList:
+class PackageFileList(UserDict.DictMixin):
    """
    Manages a list of package files belonging to a backend
+
+    The list is kept in a shelve database mapping the cache path of each
+    index file to its location on disk.  DictMixin derives the remainder
+    of the mapping interface from the four methods defined at the bottom.
    """
    def open(self):
        if self.packages is None:
            self.packages = shelve.open(self.packagedb_dir+'/packages.db')
+
    def close(self):
        if self.packages is not None:
            self.packages.close()
-    def update_file(self, entry):
+    def update_file(self, filename, cache_path, file_path):
        """
        Called from apt_proxy.py when files get updated so we can update our
        fake lists/ directory and sources.list.
-
-        @param entry CacheEntry for cached file
+
+        @param filename the base name of the updated file
+        @param cache_path the path of the file within the cache
+        @param file_path the location of the file on disk
+        @return True if the file is an index file we track, False otherwise
        """
-        if entry.filename=="Packages" or entry.filename=="Release":
-            log.msg("Registering package file: "+entry.cache_path)
-            stat_result = os.stat(entry.file_path)
-            self.packages[entry.cache_path] = stat_result
+        if filename=="Packages" or filename=="Release" or filename=="Sources":
+            log.msg("Registering package file: "+cache_path)
+            # Store the on-disk location so check_files() can verify it later
+            self.packages[cache_path] = file_path
+            return True
+        return False
-    def get_files(self):
+    def check_files(self):
        """
-        Get list of files in database. Each file will be checked that it exists
+        Check all files in the database to make sure they exist.
+
+        Entries whose backing file has been deleted are dropped.
        """
        files = self.packages.keys()
        #print self.packages.keys()
        for f in files:
-            if not os.path.exists(self.cache_dir + os.sep + f):
+            if not os.path.exists(self.packages[f]):
                log.msg("File in packages database has been deleted: "+f)
-                del files[files.index(f)]
                del self.packages[f]
-        return files
+
+    # Minimal mapping interface; DictMixin fills in the rest of the dict API
+    def __getitem__(self, key): return self.packages[key]
+    def __setitem__(self, key, item): self.packages[key] = item
+    def __delitem__(self, key): del self.packages[key]
+    def keys(self): return self.packages.keys()
class AptPackages:
    """
    """
    DEFAULT_APT_CONFIG = {
        #'APT' : '',
-        'APT::Architecture' : 'i386', # TODO: Fix this, see bug #436011 and #285360
+        #'APT::Architecture' : 'amd64', # TODO: Fix this, see bug #436011 and #285360
        #'APT::Default-Release' : 'unstable',
        'Dir':'.', # /
        #os.system('find '+self.status_dir+' -ls ')
        #print "status:"+self.apt_config['Dir::State::status']
        self.packages = PackageFileList(backendName, cache_dir)
+        # Release-file checksum records, populated by addRelease():
+        # cache_path -> {index file name -> {hash type: (hash, size)}}
+        self.indexrecords = {}
        self.loaded = 0
        #print "Loaded aptPackages [%s] %s " % (self.backendName, self.cache_dir)
        #print "start aptPackages [%s] %s " % (self.backendName, self.cache_dir)
        self.packages.close()
        #print "Deleted aptPackages [%s] %s " % (self.backendName, self.cache_dir)
-    def file_updated(self, entry):
+
+    def addRelease(self, cache_path, file_path):
+        """
+        Dirty hack until python-apt supports apt-pkg/indexrecords.h
+        (see Bug #456141)
+
+        Parses a Release file and records the hash and size of each index
+        file listed in its MD5Sum/SHA1/SHA256 sections, storing them in
+        self.indexrecords[cache_path] as
+        {index file name: {hash type: (hash, size)}}.
+
+        @param cache_path the path of the Release file within the cache
+        @param file_path the location of the Release file on disk
+        """
+        self.indexrecords[cache_path] = {}
+
+        read_packages = False
+        f = open(file_path, 'r')
+
+        for line in f:
+            line = line.rstrip()
+
+            if line[:1] != " ":
+                # An unindented line starts a new header; hash-section
+                # entries are indented by a single space
+                read_packages = False
+                try:
+                    # Read the various headers from the file
+                    h, v = line.split(":", 1)
+                    if h == "MD5Sum" or h == "SHA1" or h == "SHA256":
+                        read_packages = True
+                        hash_type = h
+                except:
+                    # Bad header line, just ignore it
+                    # NOTE(review): bare except also swallows unrelated
+                    # errors — consider narrowing to ValueError
+                    log.msg("WARNING: Ignoring badly formatted Release line: %s" % line)
+
+                # Skip to the next line
+                continue
+
+            # Read file names from the multiple hash sections of the file
+            if read_packages:
+                # p == [hash, size, index file name]
+                p = line.split()
+                self.indexrecords[cache_path].setdefault(p[2], {})[hash_type] = (p[0], p[1])
+
+        f.close()
+
+    def file_updated(self, filename, cache_path, file_path):
        """
        A file in the backend has changed. If this affects us, unload our apt database
+
+        @param filename the base name of the updated file
+        @param cache_path the path of the file within the cache
+        @param file_path the location of the file on disk
        """
-        if self.packages.update_file(entry):
+        if filename == "Release":
+            # Record the index-file hashes listed in the new Release file
+            self.addRelease(cache_path, file_path)
+        if self.packages.update_file(filename, cache_path, file_path):
            self.unload()
    def __save_stdout(self):
        sources_filename = self.status_dir+'/'+'apt/etc/sources.list'
        sources = open(sources_filename, 'w')
        sources_count = 0
-        for file in self.packages.get_files():
+        # Drop stale database entries before writing the fake sources.list
+        self.packages.check_files()
+        for f in self.packages:
            # we should probably clear old entries from self.packages and
            # take into account the recorded mtime as optimization
-            filepath = self.cache_dir + file
-            fake_uri='http://apt-dht/'+file
-            source_line='deb '+dirname(fake_uri)+'/ /'
+            filepath = self.packages[f]
+            fake_uri='http://apt-dht/'+f
+            if f.endswith('Sources'):
+                # Source indexes need a deb-src line for apt to load them
+                source_line='deb-src '+dirname(fake_uri)+'/ /'
+            else:
+                source_line='deb '+dirname(fake_uri)+'/ /'
            listpath=(self.status_dir+'/apt/lists/'
                        +apt_pkg.URItoFileName(fake_uri))
            sources.write(source_line+'\n')
                os.unlink(listpath)
            except:
                pass
-            os.symlink('../../../../../'+file, listpath)
+            # Link the fake list entry straight at the stored file location
+            os.symlink(self.packages[f], listpath)
        sources.close()
        if sources_count == 0:
            try:
            self.cache = apt_pkg.GetCache()
        finally:
-            pass
            self.__restore_stdout()
        self.records = apt_pkg.GetPkgRecords(self.cache)
+        # Also load source package records so source files can be looked up
+        self.srcrecords = apt_pkg.GetPkgSrcRecords()
        #for p in self.cache.Packages:
        #    print p
        #log.debug("%s packages found" % (len(self.cache)),'apt_pkg')
        if self.loaded:
            del self.cache
            del self.records
+            del self.srcrecords
            self.loaded = 0
    def cleanup(self):
        self.unload()
+        self.packages.close()
    def get_mirror_path(self, name, version):
        "Find the path for version 'version' of package 'name'"
        log.msg(file + ' skipped - no suitable backend found')
        return 0
-def test(factory, file):
- "Just for testing purposes, this should probably go to hell soon."
- for backend in factory.backends:
- backend.get_packages_db().load()
-
- info = AptDpkgInfo(file)
- path = get_mirror_path(factory, file)
- print "Exact Match:"
- print "\t%s:%s"%(info['Version'], path)
-
- vers = get_mirror_versions(factory, info['Package'])
- print "Other Versions:"
- for ver in vers:
- print "\t%s:%s"%(ver)
- print "Guess:"
- print "\t%s:%s"%(info['Version'], closest_match(info, vers))
-
-if __name__ == '__main__':
- from apt_proxy_conf import factoryConfig
- class DummyFactory:
- def debug(self, msg):
- pass
- factory = DummyFactory()
- factoryConfig(factory)
- test(factory,
- '/home/ranty/work/apt-proxy/related/tools/galeon_1.2.5-1_i386.deb')
- test(factory,
- '/storage/apt-proxy/debian/dists/potato/main/binary-i386/base/'
- +'libstdc++2.10_2.95.2-13.deb')
-
- cleanup(factory)
+class TestAptPackages(unittest.TestCase):
+    """Unit tests for the AptPackages cache."""
+
+    # NOTE(review): these are class attributes; the mutable pending_calls
+    # list is shared across instances until tearDown rebinds it
+    pending_calls = []
+    client = None
+    packagesFile = ''
+    sourcesFile = ''
+    releaseFile = ''
+
+    def setUp(self):
+        """Load the largest locally cached apt indexes into an AptPackages."""
+        self.client = AptPackages('whatever', '/tmp')
+
+        # 'ls -Sr ... | tail -n 1' picks the largest cached index file
+        self.packagesFile = os.popen('ls -Sr /var/lib/apt/lists/ | grep -E "Packages$" | tail -n 1').read().rstrip('\n')
+        self.sourcesFile = os.popen('ls -Sr /var/lib/apt/lists/ | grep -E "Sources$" | tail -n 1').read().rstrip('\n')
+        for f in os.walk('/var/lib/apt/lists').next()[2]:
+            # The matching Release file shares the Packages file's name prefix
+            if f[-7:] == "Release" and self.packagesFile.startswith(f[:-7]):
+                self.releaseFile = f
+                break
+
+        # The slicing/replace turns the flattened lists/ file name back into
+        # a mirror path ('_' separators become '/')
+        self.client.file_updated('Release',
+                                 self.releaseFile[self.releaseFile.find('_debian_')+1:].replace('_','/'),
+                                 '/var/lib/apt/lists/' + self.releaseFile)
+        self.client.file_updated('Packages',
+                                 self.packagesFile[self.packagesFile.find('_debian_')+1:].replace('_','/'),
+                                 '/var/lib/apt/lists/' + self.packagesFile)
+        self.client.file_updated('Sources',
+                                 self.sourcesFile[self.sourcesFile.find('_debian_')+1:].replace('_','/'),
+                                 '/var/lib/apt/lists/' + self.sourcesFile)
+
+        self.client.load()
+    def test_pkg_hash(self):
+        """The SHA1 apt reports for dpkg matches the entry in the Packages file."""
+        self.client.records.Lookup(self.client.cache['dpkg'].VersionList[0].FileList[0])
+
+        # Extract dpkg's SHA1 directly from the Packages file for comparison
+        pkg_hash = os.popen('grep -A 30 -E "^Package: dpkg$" ' + 
+                            '/var/lib/apt/lists/' + self.packagesFile + 
+                            ' | grep -E "^SHA1:" | head -n 1' + 
+                            ' | cut -d\ -f 2').read().rstrip('\n')
+
+        self.failUnless(self.client.records.SHA1Hash == pkg_hash, 
+                        "Hashes don't match: %s != %s" % (self.client.records.SHA1Hash, pkg_hash))
+
+    def test_src_hash(self):
+        """Each source-file hash apt reports for dpkg appears in the Sources file."""
+        self.client.srcrecords.Lookup('dpkg')
+
+        # The hashes listed under 'Files:' in dpkg's Sources entry
+        src_hashes = os.popen('grep -A 20 -E "^Package: dpkg$" ' + 
+                              '/var/lib/apt/lists/' + self.sourcesFile + 
+                              ' | grep -A 4 -E "^Files:" | grep -E "^ " ' + 
+                              ' | cut -d\ -f 2').read().split('\n')[:-1]
+
+        for f in self.client.srcrecords.Files:
+            self.failUnless(f[0] in src_hashes, "Couldn't find %s in: %r" % (f[0], src_hashes))
+
+    def test_index_hash(self):
+        """The SHA1 parsed by addRelease() matches the Release file's own text."""
+        indexhash = self.client.indexrecords[self.releaseFile[self.releaseFile.find('_debian_')+1:].replace('_','/')]['main/binary-i386/Packages.bz2']['SHA1'][0]
+
+        # Read the same SHA1 entry straight out of the Release file
+        idx_hash = os.popen('grep -A 3000 -E "^SHA1:" ' + 
+                            '/var/lib/apt/lists/' + self.releaseFile + 
+                            ' | grep -E " main/binary-i386/Packages.bz2$"'
+                            ' | head -n 1 | cut -d\ -f 2').read().rstrip('\n')
+
+        self.failUnless(indexhash == idx_hash, "Hashes don't match: %s != %s" % (indexhash, idx_hash))
+
+    def tearDown(self):
+        """Cancel any outstanding delayed calls and release the apt cache."""
+        for p in self.pending_calls:
+            if p.active():
+                p.cancel()
+        self.pending_calls = []
+        self.client.cleanup()
+        self.client = None