From ef9b0df174a6c0775c7ab29f6f46e0c6e89d8088 Mon Sep 17 00:00:00 2001
From: Niels Thykier <niels@thykier.net>
Date: Mon, 12 Dec 2011 18:40:47 +0100
Subject: [PATCH] Encapsulate package data in class

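Replace the list-based package representation and its index constants
(VERSION, SECTION, BINARIES, SOURCE, SOURCEVER, etc.) with SourcePackage
and BinaryPackage classes, so that britney.py accesses package data
through named attributes instead of magic list indices.  The dpkg
buildSystem code still receives the old list layout via
BinaryPackage.as_dpkg_list().
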
Signed-off-by: Niels Thykier <niels@thykier.net>
---
 britney.py |  387 ++++++++++++++++++++++++++++--------------------------------
 package.py |  171 ++++++++++++++++++++++++++
 2 files changed, 354 insertions(+), 204 deletions(-)
 create mode 100644 package.py

diff --git a/britney.py b/britney.py
index 7a577d3..9bc4e86 100755
--- a/britney.py
+++ b/britney.py
@@ -190,6 +190,7 @@ import urllib
 
 import apt_pkg
 
+from package import BinaryPackage, SourcePackage
 from excuse import Excuse
 from migrationitem import MigrationItem, HintItem
 from hints import HintCollection
@@ -198,24 +199,6 @@ from britney import buildSystem
 __author__ = 'Fabio Tranchitella and the Debian Release Team'
 __version__ = '2.0'
 
-# source package
-VERSION = 0
-SECTION = 1
-BINARIES = 2
-MAINTAINER = 3
-FAKESRC = 4
-
-# binary package
-SOURCE = 2
-SOURCEVER = 3
-ARCHITECTURE = 4
-PREDEPENDS = 5
-DEPENDS = 6
-CONFLICTS = 7
-PROVIDES = 8
-RDEPENDS = 9
-RCONFLICTS = 10
-
 
 class Britney:
     """Britney, the Debian testing updater script
@@ -396,10 +379,7 @@ class Britney:
             packages = {}
             binaries = self.binaries['testing'][arch][0].copy()
             for k in binaries:
-                packages[k] = binaries[k][:]
-                if packages[k][PROVIDES]:
-                    packages[k][PROVIDES] = ", ".join(packages[k][PROVIDES])
-                else: packages[k][PROVIDES] = None
+                packages[k] = binaries[k].as_dpkg_list()
             self.systems[a] = buildSystem(a, packages)
 
     def read_sources(self, basedir):
@@ -431,14 +411,12 @@ class Britney:
             # (in unstable) if some architectures have out-of-date
             # binaries.  We only ever consider the source with the
             # largest version for migration.
-            if pkg in sources and apt_pkg.VersionCompare(sources[pkg][0], ver) > 0:
+            if pkg in sources and apt_pkg.VersionCompare(sources[pkg].version, ver) > 0:
                 continue
-            sources[pkg] = [ver,
-                            get_field('Section'),
-                            [],
-                            get_field('Maintainer'),
-                            False,
-                           ]
+            sources[pkg] = SourcePackage(pkg,
+                                         ver,
+                                         get_field('Section'),
+                                         get_field('Maintainer'))
         return sources
 
     def read_binaries(self, basedir, distribution, arch):
@@ -497,42 +475,46 @@ class Britney:
             breaks = get_field('Breaks')
             if breaks:
                 final_conflicts_list.append(breaks)
-            dpkg = [version,
-                    get_field('Section'),
-                    pkg, 
-                    version,
-                    get_field('Architecture'),
-                    get_field('Pre-Depends'),
-                    get_field('Depends'),
-                    ', '.join(final_conflicts_list) or None,
-                    get_field('Provides'),
-                    [],
-                    [],
-                   ]
 
             # retrieve the name and the version of the source package
             source = get_field('Source')
-            if source:
-                dpkg[SOURCE] = source.split(" ")[0]
-                if "(" in source:
-                    dpkg[SOURCEVER] = source[source.find("(")+1:source.find(")")]
+            source_version = version
+            if not source:
+                source = pkg
+            if "(" in source:
+                source_version = source[source.find("(")+1:source.find(")")]
+                source = source.split(" ")[0]
+
+            dpkg = BinaryPackage(source,
+                                 source_version,
+                                 get_field('Section'),
+                                 pkg,
+                                 version,
+                                 get_field('Architecture'),
+                                 get_field('Pre-Depends'),
+                                 get_field('Depends'),
+                                 final_conflicts_list,
+                                 get_field('Provides'))
 
             # if the source package is available in the distribution, then register this binary package
-            if dpkg[SOURCE] in sources[distribution]:
-                sources[distribution][dpkg[SOURCE]][BINARIES].append(pkg + "/" + arch)
+            if dpkg.source in sources[distribution]:
+                sources[distribution][dpkg.source].binaries.append(pkg + "/" + arch)
             # if the source package doesn't exist, create a fake one
             else:
-                sources[distribution][dpkg[SOURCE]] = [dpkg[SOURCEVER], 'faux', [pkg + "/" + arch], None, True]
+                fake_src = SourcePackage(dpkg.source,
+                                         dpkg.source_version,
+                                         'faux',
+                                         None,
+                                         fake=True)
+                fake_src.binaries.append(pkg + "/" + arch)
+                sources[distribution][dpkg.source] = fake_src
 
             # register virtual packages and real packages that provide them
-            if dpkg[PROVIDES]:
-                parts = map(string.strip, dpkg[PROVIDES].split(","))
-                for p in parts:
+            if dpkg.provides:
+                for p in dpkg.provides:
                     if p not in provides:
                         provides[p] = []
                     provides[p].append(pkg)
-                dpkg[PROVIDES] = parts
-            else: dpkg[PROVIDES] = []
 
             # add the resulting dictionary to the package list
             packages[pkg] = dpkg
@@ -557,35 +539,35 @@ class Britney:
         """
         # register the list of the dependencies for the depending packages
         dependencies = []
-        if packages[pkg][DEPENDS]:
-            dependencies.extend(parse_depends(packages[pkg][DEPENDS]))
-        if packages[pkg][PREDEPENDS]:
-            dependencies.extend(parse_depends(packages[pkg][PREDEPENDS]))
+        if packages[pkg].depends:
+            dependencies.extend(packages[pkg].depends)
+        if packages[pkg].pre_depends:
+            dependencies.extend(packages[pkg].pre_depends)
         # go through the list
         for p in dependencies:
             for a in p:
                 # register real packages
-                if a[0] in packages and (not check_doubles or pkg not in packages[a[0]][RDEPENDS]):
-                    packages[a[0]][RDEPENDS].append(pkg)
+                if a[0] in packages and (not check_doubles or pkg not in packages[a[0]].reverse_depends):
+                    packages[a[0]].reverse_depends.append(pkg)
                 # register packages which provide a virtual package
                 elif a[0] in provides:
                     for i in provides.get(a[0]):
                         if i not in packages: continue
-                        if not check_doubles or pkg not in packages[i][RDEPENDS]:
-                            packages[i][RDEPENDS].append(pkg)
+                        if not check_doubles or pkg not in packages[i].reverse_depends:
+                            packages[i].reverse_depends.append(pkg)
         # register the list of the conflicts for the conflicting packages
-        if packages[pkg][CONFLICTS]:
-            for p in parse_depends(packages[pkg][CONFLICTS]):
+        if packages[pkg].conflicts:
+            for p in parse_depends(packages[pkg].conflicts):
                 for a in p:
                     # register real packages
-                    if a[0] in packages and (not check_doubles or pkg not in packages[a[0]][RCONFLICTS]):
-                        packages[a[0]][RCONFLICTS].append(pkg)
+                    if a[0] in packages and (not check_doubles or pkg not in packages[a[0]].reverse_conflicts):
+                        packages[a[0]].reverse_conflicts.append(pkg)
                     # register packages which provide a virtual package
                     elif a[0] in provides:
                         for i in provides[a[0]]:
                             if i not in packages: continue
-                            if not check_doubles or pkg not in packages[i][RCONFLICTS]:
-                                packages[i][RCONFLICTS].append(pkg)
+                            if not check_doubles or pkg not in packages[i].reverse_conflicts:
+                                packages[i].reverse_conflicts.append(pkg)
      
     def read_bugs(self, basedir):
         """Read the release critial bug summary from the specified directory
@@ -637,10 +619,10 @@ class Britney:
         """
         maxver = None
         if pkg in self.sources[dist]:
-            maxver = self.sources[dist][pkg][VERSION]
+            maxver = self.sources[dist][pkg].version
         for arch in self.options.architectures:
             if pkg not in self.binaries[dist][arch][0]: continue
-            pkgv = self.binaries[dist][arch][0][pkg][VERSION]
+            pkgv = self.binaries[dist][arch][0][pkg].version
             if maxver == None or apt_pkg.VersionCompare(pkgv, maxver) > 0:
                 maxver = pkgv
         return maxver
@@ -743,12 +725,12 @@ class Britney:
 
             # if the package exists in testing and it is more recent, do nothing
             tsrcv = self.sources['testing'].get(l[0], None)
-            if tsrcv and apt_pkg.VersionCompare(tsrcv[VERSION], l[1]) >= 0:
+            if tsrcv and apt_pkg.VersionCompare(tsrcv.version, l[1]) >= 0:
                 continue
 
             # if the package doesn't exist in unstable or it is older, do nothing
             usrcv = self.sources['unstable'].get(l[0], None)
-            if not usrcv or apt_pkg.VersionCompare(usrcv[VERSION], l[1]) < 0:
+            if not usrcv or apt_pkg.VersionCompare(usrcv.version, l[1]) < 0:
                 continue
 
             # update the urgency for the package
@@ -858,16 +840,16 @@ class Britney:
             binaries = self.binaries['testing'][arch][0]
             for pkg_name in sorted(binaries):
                 pkg = binaries[pkg_name]
-                pkgv = pkg[VERSION]
-                pkgarch = pkg[ARCHITECTURE] or 'all'
-                pkgsec = pkg[SECTION] or 'faux'
+                pkgv = pkg.version
+                pkgarch = pkg.arch or 'all'
+                pkgsec = pkg.section or 'faux'
                 f.write('%s %s %s %s\n' % (pkg_name, pkgv, pkgarch, pkgsec))
 
         # write sources
         for src_name in sorted(sources):
             src = sources[src_name]
-            srcv = src[VERSION]
-            srcsec = src[SECTION] or 'unknown'
+            srcv = src.version
+            srcsec = src.section or 'unknown'
             f.write('%s %s source %s\n' % (src_name, srcv, srcsec))
 
         f.close()
@@ -885,29 +867,28 @@ class Britney:
             filename = os.path.join(basedir, 'Packages_%s' % arch)
             f = open(filename, 'w')
             binaries = self.binaries[suite][arch][0]
-            for pkg in binaries:
-                output = "Package: %s\n" % pkg
-                for key, k in ((SECTION, 'Section'), (ARCHITECTURE, 'Architecture'), (SOURCE, 'Source'), (VERSION, 'Version'), 
-                          (PREDEPENDS, 'Pre-Depends'), (DEPENDS, 'Depends'), (PROVIDES, 'Provides'), (CONFLICTS, 'Conflicts')):
-                    if not binaries[pkg][key]: continue
-                    if key == SOURCE:
-                        if binaries[pkg][SOURCE] == pkg:
-                            if binaries[pkg][SOURCEVER] != binaries[pkg][VERSION]:
-                                source = binaries[pkg][SOURCE] + " (" + binaries[pkg][SOURCEVER] + ")"
-                            else: continue
-                        else:
-                            if binaries[pkg][SOURCEVER] != binaries[pkg][VERSION]:
-                                source = binaries[pkg][SOURCE] + " (" + binaries[pkg][SOURCEVER] + ")"
-                            else:
-                                source = binaries[pkg][SOURCE]
-                        output += (k + ": " + source + "\n")
-                        if sources[binaries[pkg][SOURCE]][MAINTAINER]:
-                            output += ("Maintainer: " + sources[binaries[pkg][SOURCE]][MAINTAINER] + "\n")
-                    elif key == PROVIDES:
-                        if len(binaries[pkg][key]) > 0:
-                            output += (k + ": " + ", ".join(binaries[pkg][key]) + "\n")
-                    else:
-                        output += (k + ": " + binaries[pkg][key] + "\n")
+            for pkg_name in binaries:
+                pkg = binaries[pkg_name]
+                source = None
+                if pkg.source_version != pkg.version:
+                    source = "%s (%s)" % (pkg.source, pkg.source_version)
+                elif pkg.source != pkg_name:
+                    source = pkg.source
+
+                output  = "Package: %s\n" % pkg_name
+                if source:
+                    output += "Source: %s\n" % source
+                if pkg.source in sources and sources[pkg.source].maintainer:
+                    output += "Maintainer: %s\n" % sources[pkg.source].maintainer
+
+                for val, k in ((pkg.section, 'Section'), (pkg.version, 'Version'), (pkg.arch, 'Architecture'),
+                               (pkg.pre_depends, 'Pre-Depends'), (pkg.depends, 'Depends'), (pkg.conflicts, 'Conflicts')):
+                    if not val: continue
+                    output += "%s: %s\n" % (k, val)
+
+                if pkg.provides:
+                    output += "Provides: %s\n" % (", ".join(pkg.provides))
+
                 f.write(output + "\n")
             f.close()
 
@@ -915,9 +896,10 @@ class Britney:
         f = open(filename, 'w')
         for src in sources:
             output = "Package: %s\n" % src
-            for key, k in ((VERSION, 'Version'), (SECTION, 'Section'), (MAINTAINER, 'Maintainer')):
-                if not sources[src][key]: continue
-                output += (k + ": " + sources[src][key] + "\n")
+            for val, k in ((sources[src].version, 'Version'), (sources[src].section, 'Section'),
+                           (sources[src].maintainer, 'Maintainer')):
+                if not val: continue
+                output += (k + ": " + val + "\n")
             f.write(output + "\n")
         f.close()
 
@@ -987,7 +969,7 @@ class Britney:
             if name in binaries[0]:
                 package = binaries[0][name]
                 # check the versioned dependency (if present)
-                if op == '' and version == '' or apt_pkg.CheckDep(package[VERSION], op, version):
+                if op == '' and version == '' or apt_pkg.CheckDep(package.version, op, version):
                     packages.append(name)
 
             # look for the package in the virtual packages list and loop on them
@@ -998,7 +980,7 @@ class Britney:
                 # TODO: this is forbidden by the debian policy, which says that versioned
                 #       dependencies on virtual packages are never satisfied. The old britney
                 #       does it and we have to go with it, but at least a warning should be raised.
-                if op == '' and version == '' or not strict and apt_pkg.CheckDep(package[VERSION], op, version):
+                if op == '' and version == '' or not strict and apt_pkg.CheckDep(package.version, op, version):
                     packages.append(prov)
 
         return (len(packages) > 0, packages)
@@ -1023,23 +1005,23 @@ class Britney:
         strict = True # not self.options.compatible
 
         # analyze the dependency fields (if present)
-        for type_key, type in ((PREDEPENDS, 'Pre-Depends'), (DEPENDS, 'Depends')):
-            if not binary_u[type_key]:
+        for value, type in ((binary_u.pre_depends, 'Pre-Depends'), (binary_u.depends, 'Depends')):
+            if not value:
                 continue
 
             # for every block of dependency (which is formed as conjunction of disconjunction)
-            for block, block_txt in zip(parse_depends(binary_u[type_key]), binary_u[type_key].split(',')):
+            for block, block_txt in zip(parse_depends(value), value.split(',')):
                 # if the block is satisfied in testing, then skip the block
                 solved, packages = get_dependency_solvers(block, arch, 'testing', strict=strict)
                 if solved:
                     for p in packages:
                         if p not in self.binaries[suite][arch][0]: continue
-                        excuse.add_sane_dep(self.binaries[suite][arch][0][p][SOURCE])
+                        excuse.add_sane_dep(self.binaries[suite][arch][0][p].source)
                     continue
 
                 # check if the block can be satisfied in unstable, and list the solving packages
                 solved, packages = get_dependency_solvers(block, arch, suite, strict=strict)
-                packages = [self.binaries[suite][arch][0][p][SOURCE] for p in packages]
+                packages = [self.binaries[suite][arch][0][p].source for p in packages]
 
                 # if the dependency can be satisfied by the same source package, skip the block:
                 # obviously both binary packages will enter testing together
@@ -1081,9 +1063,9 @@ class Britney:
         # otherwise, add a new excuse for its removal and return True
         src = self.sources['testing'][pkg]
         excuse = Excuse("-" + pkg)
-        excuse.set_vers(src[VERSION], None)
-        src[MAINTAINER] and excuse.set_maint(src[MAINTAINER].strip())
-        src[SECTION] and excuse.set_section(src[SECTION].strip())
+        excuse.set_vers(src.version, None)
+        src.maintainer and excuse.set_maint(src.maintainer)
+        src.section and excuse.set_section(src.section)
 
         # if the package is blocked, skip it
         for hint in self.hints.search('block', package=pkg, removal=True):
@@ -1117,13 +1099,13 @@ class Britney:
         # build the common part of the excuse, which will be filled by the code below
         ref = "%s/%s%s" % (src, arch, suite != 'unstable' and "_" + suite or "")
         excuse = Excuse(ref)
-        excuse.set_vers(source_t[VERSION], source_t[VERSION])
-        source_u[MAINTAINER] and excuse.set_maint(source_u[MAINTAINER].strip())
-        source_u[SECTION] and excuse.set_section(source_u[SECTION].strip())
+        excuse.set_vers(source_t.version, source_t.version)
+        source_u.maintainer and excuse.set_maint(source_u.maintainer)
+        source_u.section and excuse.set_section(source_u.section)
         
         # if there is a `remove' hint and the requested version is the same as the
         # version in testing, then stop here and return False
-        for hint in [ x for x in self.hints.search('remove', package=src) if self.same_source(source_t[VERSION], x.version) ]:
+        for hint in [ x for x in self.hints.search('remove', package=src) if self.same_source(source_t.version, x.version) ]:
             excuse.addhtml("Removal request by %s" % (hint.user))
             excuse.addhtml("Trying to remove package, not update it")
             excuse.addhtml("Not considered")
@@ -1135,25 +1117,25 @@ class Britney:
         anyworthdoing = False
 
         # for every binary package produced by this source in unstable for this architecture
-        for pkg in sorted(filter(lambda x: x.endswith("/" + arch), source_u[BINARIES]), key=lambda x: x.split("/")[0]):
+        for pkg in sorted(filter(lambda x: x.endswith("/" + arch), source_u.binaries), key=lambda x: x.split("/")[0]):
             pkg_name = pkg.split("/")[0]
 
             # retrieve the testing (if present) and unstable corresponding binary packages
-            binary_t = pkg in source_t[BINARIES] and self.binaries['testing'][arch][0][pkg_name] or None
+            binary_t = pkg in source_t.binaries and self.binaries['testing'][arch][0][pkg_name] or None
             binary_u = self.binaries[suite][arch][0][pkg_name]
 
             # this is the source version for the new binary package
-            pkgsv = self.binaries[suite][arch][0][pkg_name][SOURCEVER]
+            pkgsv = self.binaries[suite][arch][0][pkg_name].source_version
 
             # if the new binary package is architecture-independent, then skip it
-            if binary_u[ARCHITECTURE] == 'all':
-                excuse.addhtml("Ignoring %s %s (from %s) as it is arch: all" % (pkg_name, binary_u[VERSION], pkgsv))
+            if binary_u.arch == 'all':
+                excuse.addhtml("Ignoring %s %s (from %s) as it is arch: all" % (pkg_name, binary_u.version, pkgsv))
                 continue
 
             # if the new binary package is not from the same source as the testing one, then skip it
-            if not self.same_source(source_t[VERSION], pkgsv):
+            if not self.same_source(source_t.version, pkgsv):
                 anywrongver = True
-                excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u[VERSION], pkgsv, source_t[VERSION]))
+                excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u.version, pkgsv, source_t.version))
                 break
 
             # find unsatisfied dependencies for the new binary package
@@ -1162,38 +1144,38 @@ class Britney:
             # if the binary is not present in testing, then it is a new binary;
             # in this case, there is something worth doing
             if not binary_t:
-                excuse.addhtml("New binary: %s (%s)" % (pkg_name, binary_u[VERSION]))
+                excuse.addhtml("New binary: %s (%s)" % (pkg_name, binary_u.version))
                 anyworthdoing = True
                 continue
 
             # at this point, the binary package is present in testing, so we can compare
             # the versions of the packages ...
-            vcompare = apt_pkg.VersionCompare(binary_t[VERSION], binary_u[VERSION])
+            vcompare = apt_pkg.VersionCompare(binary_t.version, binary_u.version)
 
             # ... if updating would mean downgrading, then stop here: there is something wrong
             if vcompare > 0:
                 anywrongver = True
-                excuse.addhtml("Not downgrading: %s (%s to %s)" % (pkg_name, binary_t[VERSION], binary_u[VERSION]))
+                excuse.addhtml("Not downgrading: %s (%s to %s)" % (pkg_name, binary_t.version, binary_u.version))
                 break
             # ... if updating would mean upgrading, then there is something worth doing
             elif vcompare < 0:
-                excuse.addhtml("Updated binary: %s (%s to %s)" % (pkg_name, binary_t[VERSION], binary_u[VERSION]))
+                excuse.addhtml("Updated binary: %s (%s to %s)" % (pkg_name, binary_t.version, binary_u.version))
                 anyworthdoing = True
 
         # if there is nothing wrong and there is something worth doing or the source
         # package is not fake, then check what packages should be removed
-        if not anywrongver and (anyworthdoing or not self.sources[suite][src][FAKESRC]):
-            srcv = self.sources[suite][src][VERSION]
-            ssrc = self.same_source(source_t[VERSION], srcv)
+        if not anywrongver and (anyworthdoing or not self.sources[suite][src].is_fake):
+            srcv = self.sources[suite][src].version
+            ssrc = self.same_source(source_t.version, srcv)
             # for every binary package produced by this source in testing for this architecture
-            for pkg in sorted([x.split("/")[0] for x in self.sources['testing'][src][BINARIES] if x.endswith("/"+arch)]):
+            for pkg in sorted([x.split("/")[0] for x in self.sources['testing'][src].binaries if x.endswith("/"+arch)]):
                 # if the package is architecture-independent, then ignore it
-                if self.binaries['testing'][arch][0][pkg][ARCHITECTURE] == 'all':
+                if self.binaries['testing'][arch][0][pkg].arch == 'all':
                     excuse.addhtml("Ignoring removal of %s as it is arch: all" % (pkg))
                     continue
                 # if the package is not produced by the new source package, then remove it from testing
                 if pkg not in self.binaries[suite][arch][0]:
-                    tpkgv = self.binaries['testing'][arch][0][pkg][VERSION]
+                    tpkgv = self.binaries['testing'][arch][0][pkg].version
                     excuse.addhtml("Removed binary: %s %s" % (pkg, tpkgv))
                     if ssrc: anyworthdoing = True
 
@@ -1227,7 +1209,7 @@ class Britney:
         if src in self.sources['testing']:
             source_t = self.sources['testing'][src]
             # if testing and unstable have the same version, then this is a candidate for binary-NMUs only
-            if apt_pkg.VersionCompare(source_t[VERSION], source_u[VERSION]) == 0:
+            if apt_pkg.VersionCompare(source_t.version, source_u.version) == 0:
                 return False
         else:
             source_t = None
@@ -1235,21 +1217,21 @@ class Britney:
         # build the common part of the excuse, which will be filled by the code below
         ref = "%s%s" % (src, suite != 'unstable' and "_" + suite or "")
         excuse = Excuse(ref)
-        excuse.set_vers(source_t and source_t[VERSION] or None, source_u[VERSION])
-        source_u[MAINTAINER] and excuse.set_maint(source_u[MAINTAINER].strip())
-        source_u[SECTION] and excuse.set_section(source_u[SECTION].strip())
+        excuse.set_vers(source_t and source_t.version or None, source_u.version)
+        source_u.maintainer and excuse.set_maint(source_u.maintainer)
+        source_u.section and excuse.set_section(source_u.section)
 
         # the starting point is that we will update the candidate
         update_candidate = True
         
         # if the version in unstable is older, then stop here with a warning in the excuse and return False
-        if source_t and apt_pkg.VersionCompare(source_u[VERSION], source_t[VERSION]) < 0:
-            excuse.addhtml("ALERT: %s is newer in testing (%s %s)" % (src, source_t[VERSION], source_u[VERSION]))
+        if source_t and apt_pkg.VersionCompare(source_u.version, source_t.version) < 0:
+            excuse.addhtml("ALERT: %s is newer in testing (%s %s)" % (src, source_t.version, source_u.version))
             self.excuses.append(excuse)
             return False
 
         # check if the source package really exists or if it is a fake one
-        if source_u[FAKESRC]:
+        if source_u.is_fake:
             excuse.addhtml("%s source package doesn't exist" % (src))
             update_candidate = False
 
@@ -1262,8 +1244,8 @@ class Britney:
         # if there is a `remove' hint and the requested version is the same as the
         # version in testing, then stop here and return False
         for item in self.hints.search('remove', package=src):
-            if source_t and self.same_source(source_t[VERSION], item.version) or \
-               self.same_source(source_u[VERSION], item.version):
+            if source_t and self.same_source(source_t.version, item.version) or \
+               self.same_source(source_u.version, item.version):
                 excuse.addhtml("Removal request by %s" % (item.user))
                 excuse.addhtml("Trying to remove package, not update it")
                 update_candidate = False
@@ -1283,7 +1265,7 @@ class Britney:
             unblock_cmd = "un" + block_cmd
             unblocks = self.hints.search(unblock_cmd, package=src)
 
-            if unblocks and self.same_source(unblocks[0].version, source_u[VERSION]):
+            if unblocks and self.same_source(unblocks[0].version, source_u.version):
                 excuse.addhtml("Ignoring %s request by %s, due to %s request by %s" %
                                (block_cmd, blocked[block_cmd].user, unblock_cmd, unblocks[0].user))
             else:
@@ -1300,15 +1282,15 @@ class Britney:
         # the age-days hint, if specified for the package
         if suite == 'unstable':
             if src not in self.dates:
-                self.dates[src] = (source_u[VERSION], self.date_now)
-            elif not self.same_source(self.dates[src][0], source_u[VERSION]):
-                self.dates[src] = (source_u[VERSION], self.date_now)
+                self.dates[src] = (source_u.version, self.date_now)
+            elif not self.same_source(self.dates[src][0], source_u.version):
+                self.dates[src] = (source_u.version, self.date_now)
 
             days_old = self.date_now - self.dates[src][1]
             min_days = self.MINDAYS[urgency]
 
             for age_days_hint in [ x for x in self.hints.search('age-days', package=src) if \
-               self.same_source(source_u[VERSION], x.version) ]:
+               self.same_source(source_u.version, x.version) ]:
                 excuse.addhtml("Overriding age needed from %d days to %d by %s" % (min_days,
                     int(age_days_hint.days), age_days_hint.user))
                 min_days = int(age_days_hint.days)
@@ -1316,7 +1298,7 @@ class Britney:
             excuse.setdaysold(days_old, min_days)
             if days_old < min_days:
                 urgent_hints = [ x for x in self.hints.search('urgent', package=src) if \
-                   self.same_source(source_u[VERSION], x.version) ]
+                   self.same_source(source_u.version, x.version) ]
                 if urgent_hints:
                     excuse.addhtml("Too young, but urgency pushed by %s" % (urgent_hints[0].user))
                 else:
@@ -1332,15 +1314,15 @@ class Britney:
                 # this architecture then it is ok
 
                 if not src in self.sources["testing"] or \
-                   (len([x for x in self.sources["testing"][src][BINARIES] if x.endswith("/"+arch) and self.binaries["testing"][arch][0][x.split("/")[0]][ARCHITECTURE] != 'all' ]) == 0) or \
-                   (len([x for x in self.sources[suite][src][BINARIES] if x.endswith("/"+arch) and self.binaries[suite][arch][0][x.split("/")[0]][ARCHITECTURE] != 'all' ]) > 0):
+                   (len([x for x in self.sources["testing"][src].binaries if x.endswith("/"+arch) and self.binaries["testing"][arch][0][x.split("/")[0]].arch != 'all' ]) == 0) or \
+                   (len([x for x in self.sources[suite][src].binaries if x.endswith("/"+arch) and self.binaries[suite][arch][0][x.split("/")[0]].arch != 'all' ]) > 0):
                     continue
 
                 if suite == 'tpu':
                     base = 'testing'
                 else:
                     base = 'stable'
-                text = "Not yet built on <a href=\"http://buildd.debian.org/status/logs.php?arch=%s&pkg=%s&ver=%s&suite=%s\" target=\"_blank\">%s</a> (relative to testing)" % (urllib.quote(arch), urllib.quote(src), urllib.quote(source_u[VERSION]), arch, base)
+                text = "Not yet built on <a href=\"http://buildd.debian.org/status/logs.php?arch=%s&pkg=%s&ver=%s&suite=%s\" target=\"_blank\">%s</a> (relative to testing)" % (urllib.quote(arch), urllib.quote(src), urllib.quote(source_u.version), arch, base)
 
                 if arch in self.options.fucked_arches.split():
                     text = text + " (but %s isn't keeping up, so never mind)" % (arch)
@@ -1355,16 +1337,16 @@ class Britney:
         for arch in self.options.architectures:
             oodbins = {}
             # for every binary package produced by this source in the suite for this architecture
-            for pkg in sorted([x.split("/")[0] for x in self.sources[suite][src][BINARIES] if x.endswith("/"+arch)]):
+            for pkg in sorted([x.split("/")[0] for x in self.sources[suite][src].binaries if x.endswith("/"+arch)]):
                 if pkg not in pkgs: pkgs[pkg] = []
                 pkgs[pkg].append(arch)
 
                 # retrieve the binary package and its source version
                 binary_u = self.binaries[suite][arch][0][pkg]
-                pkgsv = binary_u[SOURCEVER]
+                pkgsv = binary_u.source_version
 
                 # if it wasn't built by the same source, it is out-of-date
-                if not self.same_source(source_u[VERSION], pkgsv):
+                if not self.same_source(source_u.version, pkgsv):
                     if pkgsv not in oodbins:
                         oodbins[pkgsv] = []
                     oodbins[pkgsv].append(pkg)
@@ -1372,7 +1354,7 @@ class Britney:
 
                 # if the package is architecture-dependent or the current arch is `nobreakall'
                 # find unsatisfied dependencies for the binary package
-                if binary_u[ARCHITECTURE] != 'all' or arch in self.options.nobreakall_arches.split():
+                if binary_u.arch != 'all' or arch in self.options.nobreakall_arches.split():
                     self.excuse_unsat_deps(pkg, src, arch, suite, excuse)
 
             # if there are out-of-date packages, warn about them in the excuse and set update_candidate
@@ -1387,7 +1369,7 @@ class Britney:
                         (", ".join(sorted(oodbins[v])), urllib.quote(arch), urllib.quote(src), urllib.quote(v), v)
                 text = "out of date on <a href=\"http://buildd.debian.org/status/logs.php?" \
                     "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>: %s" % \
-                    (urllib.quote(arch), urllib.quote(src), urllib.quote(source_u[VERSION]), arch, oodtxt)
+                    (urllib.quote(arch), urllib.quote(src), urllib.quote(source_u.version), arch, oodtxt)
 
                 if arch in self.options.fucked_arches.split():
                     text = text + " (but %s isn't keeping up, so nevermind)" % (arch)
@@ -1398,7 +1380,7 @@ class Britney:
                     excuse.addhtml(text)
 
         # if the source package has no binaries, set update_candidate to False to block the update
-        if len(self.sources[suite][src][BINARIES]) == 0:
+        if len(self.sources[suite][src].binaries) == 0:
             excuse.addhtml("%s has no binaries on any arch" % src)
             update_candidate = False
 
@@ -1439,7 +1421,7 @@ class Britney:
                         "though it fixes more than it introduces, whine at debian-release)" % pkg)
 
         # check if there is a `force' hint for this package, which allows it to go in even if it is not updateable
-        forces = [ x for x in self.hints.search('force', package=src) if self.same_source(source_u[VERSION], x.version) ]
+        forces = [ x for x in self.hints.search('force', package=src) if self.same_source(source_u.version, x.version) ]
         if forces:
             excuse.dontinvalidate = 1
         if not update_candidate and forces:
@@ -1448,7 +1430,7 @@ class Britney:
 
         # if the suite is *-proposed-updates, the package needs an explicit approval in order to go in
         if suite in ['tpu', 'pu']:
-            approves = [ x for x in self.hints.search('approve', package=src) if self.same_source(source_u[VERSION], x.version) ]
+            approves = [ x for x in self.hints.search('approve', package=src) if self.same_source(source_u.version, x.version) ]
             if approves:
                 excuse.addhtml("Approved by %s" % approves[0].user)
             else:
@@ -1548,10 +1530,10 @@ class Britney:
 
         # for every source package in unstable check if it should be upgraded
         for pkg in sources['unstable']:
-            if sources['unstable'][pkg][FAKESRC]: continue
+            if sources['unstable'][pkg].is_fake: continue
             # if the source package is already present in testing,
             # check if it should be upgraded for every binary package
-            if pkg in sources['testing'] and not sources['testing'][pkg][FAKESRC]:
+            if pkg in sources['testing'] and not sources['testing'][pkg].is_fake:
                 for arch in architectures:
                     if should_upgrade_srcarch(pkg, arch, 'unstable'):
                         upgrade_me.append("%s/%s" % (pkg, arch))
@@ -1582,7 +1564,7 @@ class Britney:
             if src not in sources['testing']: continue
 
             # check if the version specified in the hint is the same as the considered package
-            tsrcv = sources['testing'][src][VERSION]
+            tsrcv = sources['testing'][src].version
             if not self.same_source(tsrcv, item.version): continue
 
             # add the removal of the package to upgrade_me and build a new excuse
@@ -1713,7 +1695,7 @@ class Britney:
             if skip_archall:
                 for pkg in nuninst[arch + "+all"]:
                     bpkg = binaries[arch][0][pkg]
-                    if bpkg[ARCHITECTURE] == 'all':
+                    if bpkg.arch == 'all':
                         nuninst[arch].remove(pkg)
 
         # return the dictionary with the results
@@ -1804,11 +1786,11 @@ class Britney:
                 # remove all the binaries
 
                 # first, build a list of eligible binaries
-                for p in source[BINARIES]:
+                for p in source.binaries:
                     binary, parch = p.split("/")
                     if item.architecture != 'source' and parch != item.architecture: continue
                     # do not remove binaries which have been hijacked by other sources
-                    if binaries[parch][0][binary][SOURCE] != item.package: continue
+                    if binaries[parch][0][binary].source != item.package: continue
                     bins.append(p)
 
                 for p in bins:
@@ -1817,14 +1799,14 @@ class Britney:
                     if not self.options.compatible and item.suite == 'unstable' and \
                        binary not in self.binaries[item.suite][parch][0] and \
                        ('ALL' in self.options.smooth_updates or \
-                        binaries[parch][0][binary][SECTION] in self.options.smooth_updates):
+                        binaries[parch][0][binary].section in self.options.smooth_updates):
 
                         # if the package has reverse-dependencies which are
                         # built from other sources, it's a valid candidate for
                         # a smooth update.  if not, it may still be a valid
                         # candidate if one if its r-deps is itself a candidate,
                         # so note it for checking later
-                        rdeps = binaries[parch][0][binary][RDEPENDS]
+                        rdeps = binaries[parch][0][binary].reverse_depends
 
                         if len([x for x in rdeps if x not in [y.split("/")[0] for y in bins]]) > 0:
                             smoothbins.append(p)
@@ -1836,7 +1818,7 @@ class Britney:
                 # outside of the current source
                 for p in check:
                     binary, parch = p.split("/")
-                    rdeps = [ bin for bin in binaries[parch][0][binary][RDEPENDS] \
+                    rdeps = [ bin for bin in binaries[parch][0][binary].reverse_depends \
                               if bin in [y.split("/")[0] for y in smoothbins] ]
                     if len(rdeps) > 0:
                         smoothbins.append(p)
@@ -1851,7 +1833,7 @@ class Britney:
                                        self.get_reverse_tree(binary, parch, 'testing') ] )
                     affected = list(set(affected))
                     # remove the provided virtual packages
-                    for j in binaries[parch][0][binary][PROVIDES]:
+                    for j in binaries[parch][0][binary].provides:
                         key = j + "/" + parch
                         if key not in undo['virtual']:
                             undo['virtual'][key] = binaries[parch][1][j][:]
@@ -1882,7 +1864,7 @@ class Britney:
         # add the new binary packages (if we are not removing)
         if not item.is_removal:
             source = sources[item.suite][item.package]
-            for p in source[BINARIES]:
+            for p in source.binaries:
                 binary, parch = p.split("/")
                 if item.architecture not in ['source', parch]: continue
                 key = (binary, parch)
@@ -1897,7 +1879,7 @@ class Britney:
                                         self.get_reverse_tree(binary, parch, 'testing') ] )
                     affected = list(set(affected))
                     # all the reverse conflicts and their dependency tree are affected by the change
-                    for j in binaries[parch][0][binary][RCONFLICTS]:
+                    for j in binaries[parch][0][binary].reverse_conflicts:
                         key = (j, parch)
                         if key not in affected: affected.append(key)
                         for p in self.get_full_tree(j, parch, 'testing'):
@@ -1914,18 +1896,17 @@ class Britney:
                     # XXX: and the reverse conflict tree?
                     for (tundo, tpkg) in hint_undo:
                         if p in tundo['binaries']:
-                            for rdep in tundo['binaries'][p][RDEPENDS]:
-                                if rdep in binaries[parch][0] and rdep not in source[BINARIES]:
+                            for rdep in tundo['binaries'][p].reverse_depends:
+                                if rdep in binaries[parch][0] and rdep not in source.binaries:
                                     affected.append( (rdep, parch) )
                                     affected.extend( [ (x, parch) for x in \
                                                         self.get_reverse_tree(rdep, parch, 'testing') ] )
                     affected = list(set(affected))
                 # add/update the binary package
                 binaries[parch][0][binary] = self.binaries[item.suite][parch][0][binary]
-                self.systems[parch].add_binary(binary, binaries[parch][0][binary][:PROVIDES] + \
-                    [", ".join(binaries[parch][0][binary][PROVIDES]) or None])
+                self.systems[parch].add_binary(binary, binaries[parch][0][binary].as_dpkg_list())
                 # register new provided packages
-                for j in binaries[parch][0][binary][PROVIDES]:
+                for j in binaries[parch][0][binary].provides:
                     key = j + "/" + parch
                     if j not in binaries[parch][1]:
                         undo['nvirtual'].append(key)
@@ -1939,7 +1920,7 @@ class Britney:
                 affected = list(set(affected))
 
             # register reverse dependencies and conflicts for the new binary packages
-            for p in source[BINARIES]:
+            for p in source.binaries:
                 binary, parch = p.split("/")
                 if item.architecture not in ['source', parch]: continue
                 self.register_reverses(binary, binaries[parch][0] , binaries[parch][1])
@@ -1954,14 +1935,14 @@ class Britney:
     def get_reverse_tree(self, pkg, arch, suite):
         binaries = self.binaries[suite][arch][0]
 
-        rev_deps = set(binaries[pkg][RDEPENDS])
+        rev_deps = set(binaries[pkg].reverse_depends)
         seen = set()
         while len(rev_deps) > 0:
             # mark all of the current iteration of packages as affected
             seen |= rev_deps
             # generate the next iteration, which is the reverse-dependencies of
             # the current iteration
-            new_rev_deps = [ binaries[x][RDEPENDS] for x in rev_deps \
+            new_rev_deps = [ binaries[x].reverse_depends for x in rev_deps \
                              if x in binaries ]
             # flatten the list-of-lists, filtering out already handled packages
             # in the process
@@ -1982,7 +1963,7 @@ class Britney:
             while len(packages) > l:
                 l = len(packages)
                 for p in packages[n:]:
-                    packages.extend([x for x in binaries[p][RDEPENDS] if x not in packages and x in binaries])
+                    packages.extend([x for x in binaries[p].reverse_depends if x not in packages and x in binaries])
                 n = l
             return packages
         else:
@@ -2084,14 +2065,14 @@ class Britney:
                         if p not in broken:
                             to_check.append(p)
                             broken.add(p)
-                        if not (skip_archall and binaries[arch][0][p][ARCHITECTURE] == 'all'):
+                        if not (skip_archall and binaries[arch][0][p].arch == 'all'):
                             if p not in nuninst[arch]:
                                 nuninst[arch].add(p)
                     else:
                         if p in broken:
                             to_check.append(p)
                             broken.remove(p)
-                        if not (skip_archall and binaries[arch][0][p][ARCHITECTURE] == 'all'):
+                        if not (skip_archall and binaries[arch][0][p].arch == 'all'):
                             # if the package was previously arch:all and uninstallable
                             # and has moved to being architecture-dependent, becoming
                             # installable in the process then it will not be in the
@@ -2105,21 +2086,21 @@ class Britney:
                 while to_check:
                     j = to_check.pop(0)
                     if j not in binaries[arch][0]: continue
-                    for p in binaries[arch][0][j][RDEPENDS]:
+                    for p in binaries[arch][0][j].reverse_depends:
                         if p in broken or p not in binaries[arch][0]: continue
                         r = systems[arch].is_installable(p)
                         if not r:
                             if p not in broken:
                                 broken.add(p)
                                 to_check.append(p)
-                            if not (skip_archall and binaries[arch][0][p][ARCHITECTURE] == 'all'):
+                            if not (skip_archall and binaries[arch][0][p].arch == 'all'):
                                 if p not in nuninst[arch]:
                                     nuninst[arch].add(p)
                         else:
                             if p in broken:
                                 broken.remove(p)
                                 to_check.append(p)
-                            if not (skip_archall and binaries[arch][0][p][ARCHITECTURE] == 'all'):
+                            if not (skip_archall and binaries[arch][0][p].arch == 'all'):
                                 # if the package was previously arch:all and uninstallable
                                 # and has moved to being architecture-dependent, becoming
                                 # installable in the process then it will not be in the
@@ -2176,7 +2157,7 @@ class Britney:
 
                 # undo the changes (new binaries)
                 if not item.is_removal and item.package in sources[item.suite]:
-                    for p in sources[item.suite][item.package][BINARIES]:
+                    for p in sources[item.suite][item.package].binaries:
                         binary, arch = p.split("/")
                         if item.architecture in ['source', arch]:
                             del binaries[arch][0][binary]
@@ -2191,8 +2172,7 @@ class Britney:
                     else:
                         binaries[arch][0][binary] = undo['binaries'][p]
                         self.systems[arch].remove_binary(binary)
-                        self.systems[arch].add_binary(binary, binaries[arch][0][binary][:PROVIDES] + \
-                            [", ".join(binaries[arch][0][binary][PROVIDES]) or None])
+                        self.systems[arch].add_binary(binary, binaries[arch][0][binary].as_dpkg_list())
 
                 # undo the changes (virtual packages)
                 for p in undo['nvirtual']:
@@ -2313,7 +2293,7 @@ class Britney:
             for (undo, item) in lundo:
                 # undo the changes (new binaries)
                 if not item.is_removal and item.package in self.sources[item.suite]:
-                    for p in self.sources[item.suite][item.package][BINARIES]:
+                    for p in self.sources[item.suite][item.package].binaries:
                         binary, arch = p.split("/")
                         if item.architecture in ['source', arch]:
                             del self.binaries['testing'][arch][0][binary]
@@ -2330,8 +2310,7 @@ class Britney:
                         binaries = self.binaries['testing'][arch][0]
                         binaries[binary] = undo['binaries'][p]
                         self.systems[arch].remove_binary(binary)
-                        self.systems[arch].add_binary(binary, binaries[binary][:PROVIDES] + \
-                            [", ".join(binaries[binary][PROVIDES]) or None])
+                        self.systems[arch].add_binary(binary, binaries[binary].as_dpkg_list())
 
             for (undo, item) in lundo:
                 # undo the changes (virtual packages)
@@ -2469,17 +2448,17 @@ class Britney:
 
         # for every source package in unstable check if it should be upgraded
         for pkg in sources['unstable']:
-            if sources['unstable'][pkg][FAKESRC]: continue
+            if sources['unstable'][pkg].is_fake: continue
             # if the source package is already present in testing,
             # check if it should be upgraded for every binary package
-            if pkg in sources['testing'] and not sources['testing'][pkg][FAKESRC]:
+            if pkg in sources['testing'] and not sources['testing'][pkg].is_fake:
                 for arch in architectures:
                     if should_upgrade_srcarch(pkg, arch, 'unstable'):
-                        upgrade_me.append("%s/%s/%s" % (pkg, arch, sources['unstable'][pkg][VERSION]))
+                        upgrade_me.append("%s/%s/%s" % (pkg, arch, sources['unstable'][pkg].version))
 
             # check if the source package should be upgraded
             if should_upgrade_src(pkg, 'unstable'):
-                upgrade_me.append("%s/%s" % (pkg, sources['unstable'][pkg][VERSION]))
+                upgrade_me.append("%s/%s" % (pkg, sources['unstable'][pkg].version))
 
         # for every source package in *-proposed-updates, check if it should be upgraded
         for suite in ['pu', 'tpu']:
@@ -2573,10 +2552,10 @@ class Britney:
                 continue
 
             inunstable = pkg.package in self.sources['unstable']
-            rightversion = inunstable and (apt_pkg.VersionCompare(self.sources['unstable'][pkg.package][VERSION], pkg.version) == 0)
+            rightversion = inunstable and (apt_pkg.VersionCompare(self.sources['unstable'][pkg.package].version, pkg.version) == 0)
             if pkg.suite == 'unstable' and not rightversion:
                 for suite in ['pu', 'tpu']:
-                    if pkg.package in self.sources[suite] and apt_pkg.VersionCompare(self.sources[suite][pkg.package][VERSION], pkg.version) == 0:
+                    if pkg.package in self.sources[suite] and apt_pkg.VersionCompare(self.sources[suite][pkg.package].version, pkg.version) == 0:
                         pkg.suite = suite
                         _pkgvers[idx] = pkg
                         break
@@ -2584,15 +2563,15 @@ class Britney:
             # handle *-proposed-updates
             if pkg.suite in ['pu', 'tpu']:
                 if pkg.package not in self.sources[pkg.suite]: continue
-                if apt_pkg.VersionCompare(self.sources[pkg.suite][pkg.package][VERSION], pkg.version) != 0:
-                    self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources[pkg.suite][pkg.package][VERSION]))
+                if apt_pkg.VersionCompare(self.sources[pkg.suite][pkg.package].version, pkg.version) != 0:
+                    self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources[pkg.suite][pkg.package].version))
                     ok = False
             # does the package exist in unstable?
             elif not inunstable:
                 self.output_write(" Source %s has no version in unstable\n" % pkg.package)
                 ok = False
             elif not rightversion:
-                self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources['unstable'][pkg.package][VERSION]))
+                self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources['unstable'][pkg.package].version))
                 ok = False
         if not ok:
             self.output_write("Not using hint\n")
@@ -2658,7 +2637,7 @@ class Britney:
             if e not in excuses:
                 return False
             excuse = excuses[e]
-            if e in self.sources['testing'] and self.sources['testing'][e][VERSION] == excuse.ver[1]:
+            if e in self.sources['testing'] and self.sources['testing'][e].version == excuse.ver[1]:
                 return True
             if not circular_first:
                 hint[e] = excuse.ver[1]
@@ -2675,7 +2654,7 @@ class Britney:
         mincands = []
         for e in excuses:
             excuse = excuses[e]
-            if e in self.sources['testing'] and self.sources['testing'][e][VERSION] == excuse.ver[1]:
+            if e in self.sources['testing'] and self.sources['testing'][e].version == excuse.ver[1]:
                 continue
             if len(excuse.deps) > 0:
                 hint = find_related(e, {}, True)
@@ -2728,7 +2707,7 @@ class Britney:
             for pkg_name in testing[arch][0]:
                 pkg = testing[arch][0][pkg_name]
                 if pkg_name not in unstable[arch][0] and \
-                   not self.same_source(sources[pkg[SOURCE]][VERSION], pkg[SOURCEVER]):
+                   not self.same_source(sources[pkg.source].version, pkg.source_version):
                     removals.append("-" + pkg_name + "/" + arch)
         return removals
 
@@ -2749,7 +2728,7 @@ class Britney:
         all = {}
         for p in nuninst[arch]:
             pkg = self.binaries['testing'][arch][0][p]
-            all.setdefault((pkg[SOURCE], pkg[SOURCEVER]), set()).add(p)
+            all.setdefault((pkg.source, pkg.source_version), set()).add(p)
 
         print '* %s' % (arch,)
 
diff --git a/package.py b/package.py
new file mode 100644
index 0000000..f175760
--- /dev/null
+++ b/package.py
@@ -0,0 +1,171 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2011 Niels Thykier <niels@thykier.net>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
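+"""Classes representing source and binary package data for britney.
+
+SourcePackage and BinaryPackage replace the list-plus-index-constant
+representation previously used throughout britney.py.
+"""
+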
+import apt_pkg
+import string
+
+import copy
+
+class Package(object):
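+    """Base class for source and binary packages.
+
+    Holds the fields common to both: the source package name, the
+    source version and the section.
+    """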
+
+    def __init__(self, source, source_version, section):
+        self._source = source
+        self._source_version = source_version
+        self._section = None
+
+        if section:
+            self._section = section.strip()
+
+    @property
+    def source(self):
+        return self._source
+
+    @property
+    def source_version(self):
+        return self._source_version
+
+    @property
+    def section(self):
+        return self._section
+
+    def _parse_deps(self, value, parser=None):
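+        """Parse a dependency field into apt_pkg's list-of-or-groups form."""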
+        if parser is None:
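+            # Newer python-apt exposes parse_depends; fall back to the
+            # older ParseDepends name if it is not available.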
+            try:
+                parser = apt_pkg.parse_depends
+            except AttributeError:
+                parser = apt_pkg.ParseDepends
+
+        return parser(value)
+
+class SourcePackage(Package):
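+    """A source package, as read from a Sources file by read_sources().
+
+    Tracks the binaries built from the source, the maintainer and
+    whether the entry is a fake ('faux') source created for a binary
+    whose real source package is not present.
+    """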
+
+    def __init__(self, source, source_version, section, maintainer, fake=False):
+        Package.__init__(self, source, source_version, section)
+        self._binaries = []
+        self._maintainer = None
+        self._is_fake = fake
+
+        if maintainer:
+            self._maintainer = maintainer.strip()
+
+
+    @property
+    def version(self):
+        # Alias for source_version
+        return self.source_version
+
+    @property
+    def maintainer(self):
+        return self._maintainer
+
+    @property
+    def binaries(self):
+        return self._binaries
+
+    @property
+    def is_fake(self):
+        return self._is_fake
+
+
+class BinaryPackage(Package):
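+    """A binary package, as read from a Packages file by read_binaries().
+
+    Pre-Depends, Depends and Conflicts are stored as strings (Breaks is
+    merged into Conflicts); Provides is stored as a list of package
+    names.  The reverse_depends and reverse_conflicts lists are filled
+    in later by Britney.register_reverses().
+    """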
+
+    def __init__(self, source, source_version, section, package, version, arch, \
+                       predeps, deps, conflicts, provides):
+        Package.__init__(self, source, source_version, section)
+        self._package = package
+        self._version = version
+        self._arch = arch
+        self._predeps = predeps
+        self._deps = deps
+        self._conflicts = None
+        self._provides = []
+        self._rdeps = []
+        self._rconflicts = []
+
+        if conflicts:
+            self._conflicts = ', '.join(conflicts)
+
+        if provides:
+            self._provides = map(string.strip, provides.split(","))
+
+    @property
+    def package(self):
+        return self._package
+
+    @property
+    def version(self):
+        return self._version
+
+    @property
+    def arch(self):
+        return self._arch
+
+    @property
+    def depends(self):
+        return self._deps
+
+    @property
+    def pre_depends(self):
+        return self._predeps
+
+    @property
+    def conflicts(self):
+        return self._conflicts
+
+    @property
+    def provides(self):
+        return self._provides
+
+    @property
+    def reverse_depends(self):
+        return self._rdeps
+
+    @property
+    def reverse_conflicts(self):
+        return self._rconflicts
+
+    @reverse_depends.setter
+    def reverse_depends(self, value):
+        self._rdeps = value
+
+    @reverse_conflicts.setter
+    def reverse_conflicts(self, value):
+        self._rconflicts = value
+
+    def as_dpkg_list(self):
+        """Return a list representation of this element suitable for
+        the dpkg "buildSystem" code (in lib/britney-py.c).
+
+        The data returned is a copy.
+        """
+
+        # britney-py.c expects either "None" or a string in many cases
+        def string_or_none(x):
+            if x:
+                return ", ".join(x)
+            return None
+
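+        # The slot order matches the old VERSION, SECTION, SOURCE, SOURCEVER,
+        # ARCHITECTURE, PREDEPENDS, DEPENDS, CONFLICTS and PROVIDES indices.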
+        result = [
+            self.version,
+            None, # Section slot; not used by britney-py.c
+            self.source,
+            self.source_version,
+            self.arch,
+            self.pre_depends,
+            self.depends,
+            self.conflicts,
+            string_or_none(self.provides)
+        ]
+        # britney-py.c does not use rdeps or rconflicts either
+        return result
-- 
1.7.7.3

