Rewrite installability tester

The new Installability Tester (IT) module replaces the remaining
C parts.  Unlike the C implementation, it does not give up and emit
an "AIEEE" half-way through.

In order to determine installability, it uses two sets, "musts" and
"never".  As the names suggest, these sets represent the packages that
must be (co-)installable with the package being tested and those that
can never be co-installable with it.  For a package to be installable,
"musts" and "never" must remain disjoint.

These sets are also used to reduce the number of alternatives that are
available to satisfy a given dependency.  When these sets are unable
to remove the choice completely, the new IT defers the choice until
later.  This occasionally reduces backtracking, as a later package may
conflict with, or unconditionally depend on, one of the remaining
alternatives.
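
A rough sketch of that pruning (again with made-up names, not the
actual implementation):

    # Illustrative pruning of a single dependency's alternatives.
    def prune_alternatives(alternatives, musts, never):
        remain = alternatives - never       # drop alternatives we may never install
        if not remain:
            return "uninstallable"          # every alternative is forbidden
        if not remain.isdisjoint(musts):
            return "satisfied"              # already satisfied by a required package
        if len(remain) == 1:
            musts.update(remain)            # only one candidate left; no guessing
            return "satisfied"
        return "deferred"                   # keep the (smaller) choice for later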

Signed-off-by: Niels Thykier <niels@thykier.net>
commit 7051d5b0e9 (parent cf84bcfa58)
Author: Niels Thykier

@@ -190,7 +190,7 @@ import urllib
 import apt_pkg
 from functools import reduce, partial
-from itertools import chain, ifilter
+from itertools import chain, ifilter, product
 from operator import attrgetter
 if __name__ == '__main__':
@@ -208,6 +208,7 @@ if __name__ == '__main__':
     # it useless).
     sys.path.insert(0, idir)
+from installability.builder import InstallabilityTesterBuilder
 from excuse import Excuse
 from migrationitem import MigrationItem
 from hints import HintCollection
@@ -218,7 +219,7 @@ from britney_util import (old_libraries_format, same_source, undo_changes,
                           eval_uninst, newly_uninst, make_migrationitem)
 from consts import (VERSION, SECTION, BINARIES, MAINTAINER, FAKESRC,
                     SOURCE, SOURCEVER, ARCHITECTURE, DEPENDS, CONFLICTS,
-                    PROVIDES, RDEPENDS, RCONFLICTS, MULTIARCH)
+                    PROVIDES, RDEPENDS, RCONFLICTS, MULTIARCH, ESSENTIAL)
 __author__ = 'Fabio Tranchitella and the Debian Release Team'
 __version__ = '2.0'
@@ -254,7 +255,6 @@ class Britney(object):
         # initialize the apt_pkg back-end
         apt_pkg.init()
-        self.systems = {}
         self.sources = {}
         self.binaries = {}
@@ -291,12 +291,17 @@ class Britney(object):
             self.binaries['tpu'][arch] = self.read_binaries(self.options.tpu, "tpu", arch)
             if hasattr(self.options, 'pu'):
                 self.binaries['pu'][arch] = self.read_binaries(self.options.pu, "pu", arch)
-            # build the testing system
-            self.build_systems(arch)
+            else:
+                # _build_installability_tester relies on it being
+                # properly initialised, so insert two empty dicts
+                # here.
+                self.binaries['pu'][arch] = ({}, {})
+        self._build_installability_tester(self.options.architectures)

         if not self.options.nuninst_cache:
             self.__log("Building the list of non-installable packages for the full archive", type="I")
             nuninst = {}
+            self._inst_tester.compute_testing_installability()
             for arch in self.options.architectures:
                 self.__log("> Checking for non-installable packages for architecture %s" % arch, type="I")
                 result = self.get_nuninst(arch, build=True)
@@ -384,7 +389,7 @@ class Britney(object):
         arches += [x for x in allarches if x not in arches and x not in self.options.break_arches.split()]
         arches += [x for x in allarches if x not in arches and x not in self.options.new_arches.split()]
         arches += [x for x in allarches if x not in arches]
-        self.options.architectures = arches
+        self.options.architectures = map(intern, arches)
         self.options.smooth_updates = self.options.smooth_updates.split()

     def __log(self, msg, type="I"):
@@ -399,22 +404,64 @@
         if self.options.verbose or type in ("E", "W"):
             print "%s: [%s] - %s" % (type, time.asctime(), msg)

+    def _build_installability_tester(self, archs):
+        """Create the installability tester"""
+
+        solvers = self.get_dependency_solvers
+        binaries = self.binaries
+        builder = InstallabilityTesterBuilder()
+
+        for (dist, arch) in product(binaries, archs):
+            testing = (dist == 'testing')
+            for pkgname in binaries[dist][arch][0]:
+                pkgdata = binaries[dist][arch][0][pkgname]
+                version = pkgdata[VERSION]
+                t = (pkgname, version, arch)
+                essential = pkgdata[ESSENTIAL]
+                if not builder.add_binary(t, essential=essential,
+                                          in_testing=testing):
+                    continue
+
+                depends = []
+                conflicts = []
+                # We do not differentiate between Depends and Pre-Depends
+                if pkgdata[DEPENDS]:
+                    depends.extend(apt_pkg.parse_depends(pkgdata[DEPENDS], False))
+                if pkgdata[CONFLICTS]:
+                    conflicts = apt_pkg.parse_depends(pkgdata[CONFLICTS], False)
+
+                with builder.relation_builder(t) as relations:
+                    for (al, dep) in [(depends, True), \
+                                      (conflicts, False)]:
+                        for block in al:
+                            sat = set()
+                            for dep_dist in binaries:
+                                (_, pkgs) = solvers(block, arch, dep_dist)
+                                for p in pkgs:
+                                    # version and arch are already interned, but solvers
+                                    # use the package name extracted from the field,
+                                    # which is therefore not interned.
+                                    pdata = binaries[dep_dist][arch][0][p]
+                                    pt = (intern(p), pdata[VERSION], arch)
+                                    if dep:
+                                        sat.add(pt)
+                                    elif t != pt:
+                                        # if t satisfies its own
+                                        # conflicts relation, then it
+                                        # is using §7.6.2
+                                        relations.add_breaks(pt)
+                            if dep:
+                                relations.add_dependency_clause(sat)
+
+        self._inst_tester = builder.build()

     # Data reading/writing methods
     # ----------------------------

-    def build_systems(self, arch=None):
-        for a in self.options.architectures:
-            if arch and a != arch: continue
-            packages = {}
-            binaries = self.binaries['testing'][arch][0].copy()
-            for k in binaries:
-                packages[k] = binaries[k][:]
-                if packages[k][PROVIDES]:
-                    packages[k][PROVIDES] = ", ".join(packages[k][PROVIDES])
-                else: packages[k][PROVIDES] = None
-            self.systems[a] = buildSystem(a, packages)
-
-    def read_sources(self, basedir):
+    def read_sources(self, basedir, intern=intern):
         """Read the list of source packages from the specified directory

         The source packages are read from the `Sources' file within the
@@ -445,15 +492,15 @@
             # largest version for migration.
             if pkg in sources and apt_pkg.version_compare(sources[pkg][0], ver) > 0:
                 continue
-            sources[pkg] = [ver,
-                            get_field('Section'),
+            sources[intern(pkg)] = [intern(ver),
+                            intern(get_field('Section')),
                             [],
                             get_field('Maintainer'),
                             False,
                            ]
         return sources

-    def read_binaries(self, basedir, distribution, arch):
+    def read_binaries(self, basedir, distribution, arch, intern=intern):
         """Read the list of binary packages from the specified directory

         The binary packages are read from the `Packages_${arch}' files
@@ -498,6 +545,8 @@
             # largest version for migration.
             if pkg in packages and apt_pkg.version_compare(packages[pkg][0], version) > 0:
                 continue
+            pkg = intern(pkg)
+            version = intern(version)

             # Merge Pre-Depends with Depends and Conflicts with
             # Breaks. Britney is not interested in the "finer
@@ -509,6 +558,10 @@
             elif pdeps:
                 deps = pdeps

+            ess = False
+            if get_field('Essential', 'no') == 'yes':
+                ess = True
+
             final_conflicts_list = []
             conflicts = get_field('Conflicts')
             if conflicts:
@@ -517,24 +570,25 @@
             if breaks:
                 final_conflicts_list.append(breaks)
             dpkg = [version,
-                    get_field('Section'),
+                    intern(get_field('Section')),
                     pkg,
                     version,
-                    get_field('Architecture'),
+                    intern(get_field('Architecture')),
                     get_field('Multi-Arch'),
                     deps,
                     ', '.join(final_conflicts_list) or None,
                     get_field('Provides'),
                     [],
                     [],
+                    ess,
                    ]

             # retrieve the name and the version of the source package
             source = get_field('Source')
             if source:
-                dpkg[SOURCE] = source.split(" ")[0]
+                dpkg[SOURCE] = intern(source.split(" ")[0])
                 if "(" in source:
-                    dpkg[SOURCEVER] = source[source.find("(")+1:source.find(")")]
+                    dpkg[SOURCEVER] = intern(source[source.find("(")+1:source.find(")")])

             pkgarch = "%s/%s" % (pkg,arch)
             # if the source package is available in the distribution, then register this binary package
@@ -822,7 +876,7 @@
         for pkg in binaries:
             output = "Package: %s\n" % pkg
             for key, k in ((SECTION, 'Section'), (ARCHITECTURE, 'Architecture'), (MULTIARCH, 'Multi-Arch'), (SOURCE, 'Source'), (VERSION, 'Version'),
-                           (DEPENDS, 'Depends'), (PROVIDES, 'Provides'), (CONFLICTS, 'Conflicts')):
+                           (DEPENDS, 'Depends'), (PROVIDES, 'Provides'), (CONFLICTS, 'Conflicts'), (ESSENTIAL, 'Essential')):
                 if not binaries[pkg][key]: continue
                 if key == SOURCE:
                     if binaries[pkg][SOURCE] == pkg:
@@ -840,6 +894,9 @@
                 elif key == PROVIDES:
                     if len(binaries[pkg][key]) > 0:
                         output += (k + ": " + ", ".join(binaries[pkg][key]) + "\n")
+                elif key == ESSENTIAL:
+                    if binaries[pkg][key]:
+                        output += (k + ": " + " yes\n")
                 else:
                     output += (k + ": " + binaries[pkg][key] + "\n")
             f.write(output + "\n")
@@ -1624,7 +1681,7 @@
         # local copies for better performance
         binaries = self.binaries['testing']
-        systems = self.systems
+        inst_tester = self._inst_tester

         # for all the architectures
         for arch in self.options.architectures:
@@ -1638,7 +1695,8 @@
             # uninstallable package is found
             nuninst[arch] = set()
             for pkg_name in binaries[arch][0]:
-                r = systems[arch].is_installable(pkg_name)
+                pkgdata = binaries[arch][0][pkg_name]
+                r = inst_tester.is_installable(pkg_name, pkgdata[VERSION], arch)
                 if not r:
                     nuninst[arch].add(pkg_name)
@@ -1844,8 +1902,9 @@
                     if len(binaries[parch][1][j]) == 0:
                         del binaries[parch][1][j]
                 # finally, remove the binary package
+                version = binaries[parch][0][binary][VERSION]
                 del binaries[parch][0][binary]
-                self.systems[parch].remove_binary(binary)
+                self._inst_tester.remove_testing_binary(binary, version, parch)
             # remove the source package
             if item.architecture == 'source':
                 undo['sources'][item.package] = source
@@ -1859,8 +1918,10 @@
         elif item.package in binaries[item.architecture][0]:
             undo['binaries'][item.package + "/" + item.architecture] = binaries[item.architecture][0][item.package]
             affected.update(get_reverse_tree(item.package, item.architecture))
+            version = binaries[item.architecture][0][item.package][VERSION]
             del binaries[item.architecture][0][item.package]
-            self.systems[item.architecture].remove_binary(item.package)
+            self._inst_tester.remove_testing_binary(item.package, version, item.architecture)

         # add the new binary packages (if we are not removing)
         if not item.is_removal:
@@ -1883,7 +1944,8 @@
                 # all the reverse conflicts and their dependency tree are affected by the change
                 for j in binaries[parch][0][binary][RCONFLICTS]:
                     affected.update(get_reverse_tree(j, parch))
-                self.systems[parch].remove_binary(binary)
+                version = binaries[parch][0][binary][VERSION]
+                self._inst_tester.remove_testing_binary(binary, version, parch)
             else:
                 # the binary isn't in testing, but it may have been at
                 # the start of the current hint and have been removed
@@ -1903,8 +1965,8 @@
                     affected.update(get_reverse_tree(rdep, parch))
             # add/update the binary package
             binaries[parch][0][binary] = self.binaries[item.suite][parch][0][binary]
-            self.systems[parch].add_binary(binary, binaries[parch][0][binary][:PROVIDES] + \
-                [", ".join(binaries[parch][0][binary][PROVIDES]) or None])
+            version = binaries[parch][0][binary][VERSION]
+            self._inst_tester.add_testing_binary(binary, version, parch)
             # register new provided packages
             for j in binaries[parch][0][binary][PROVIDES]:
                 key = j + "/" + parch
@@ -1933,7 +1995,7 @@
         return (item, affected, undo)

-    def _check_packages(self, binaries, systems, arch, affected, skip_archall, nuninst, pkg):
+    def _check_packages(self, binaries, arch, affected, skip_archall, nuninst):
         broken = nuninst[arch + "+all"]
         to_check = []
@@ -1941,10 +2003,13 @@
         for p in (x[0] for x in affected if x[1] == arch):
             if p not in binaries[arch][0]:
                 continue
+            pkgdata = binaries[arch][0][p]
+            version = pkgdata[VERSION]
+            parch = pkgdata[ARCHITECTURE]
             nuninst_arch = None
-            if not (skip_archall and binaries[arch][0][p][ARCHITECTURE] == 'all'):
+            if not (skip_archall and parch == 'all'):
                 nuninst_arch = nuninst[arch]
-            self._installability_test(systems[arch], p, broken, to_check, nuninst_arch, pkg)
+            self._installability_test(p, version, arch, broken, to_check, nuninst_arch)

         # broken packages (second round, reverse dependencies of the first round)
         while to_check:
@@ -1953,10 +2018,13 @@
             for p in binaries[arch][0][j][RDEPENDS]:
                 if p in broken or p not in binaries[arch][0]:
                     continue
+                pkgdata = binaries[arch][0][p]
+                version = pkgdata[VERSION]
+                parch = pkgdata[ARCHITECTURE]
                 nuninst_arch = None
-                if not (skip_archall and binaries[arch][0][p][ARCHITECTURE] == 'all'):
+                if not (skip_archall and parch == 'all'):
                     nuninst_arch = nuninst[arch]
-                self._installability_test(systems[arch], p, broken, to_check, nuninst_arch, pkg)
+                self._installability_test(p, version, arch, broken, to_check, nuninst_arch)

     def iter_packages(self, packages, selected, hint=False, nuninst=None, lundo=None):
@@ -1981,13 +2049,12 @@
         # local copies for better performance
         binaries = self.binaries['testing']
         sources = self.sources
-        systems = self.systems
         architectures = self.options.architectures
         nobreakall_arches = self.options.nobreakall_arches.split()
         new_arches = self.options.new_arches.split()
         break_arches = self.options.break_arches.split()
         dependencies = self.dependencies
-        check_packages = partial(self._check_packages, binaries, systems)
+        check_packages = partial(self._check_packages, binaries)

         # pre-process a hint batch
         pre_process = {}
@@ -2046,7 +2113,7 @@
                 nuninst[arch] = set(x for x in nuninst_comp[arch] if x in binaries[arch][0])
                 nuninst[arch + "+all"] = set(x for x in nuninst_comp[arch + "+all"] if x in binaries[arch][0])

-                check_packages(arch, affected, skip_archall, nuninst, pkg.uvname)
+                check_packages(arch, affected, skip_archall, nuninst)

             # if we are processing hints, go ahead
             if hint:
@@ -2089,7 +2156,7 @@
                 skipped.append(item)
                 single_undo = [(undo, item)]
                 # (local-scope) binaries is actually self.binaries["testing"] so we cannot use it here.
-                undo_changes(single_undo, systems, sources, self.binaries)
+                undo_changes(single_undo, self._inst_tester, sources, self.binaries)

             # if we are processing hints, return now
             if hint:
@@ -2193,7 +2260,7 @@
         if not lundo: return
         lundo.reverse()

-        undo_changes(lundo, self.systems, self.sources, self.binaries)
+        undo_changes(lundo, self._inst_tester, self.sources, self.binaries)

     def upgrade_testing(self):
@@ -2606,10 +2673,10 @@
         self.__output.close()

-    def _installability_test(self, system, p, broken, to_check, nuninst_arch, current_pkg):
+    def _installability_test(self, pkg_name, pkg_version, pkg_arch, broken, to_check, nuninst_arch):
         """Test for installability of a package on an architecture

-        p is the package to check and system does the actual check.
+        (pkg_name, pkg_version, pkg_arch) is the package to check.

         broken is the set of broken packages.  If p changes
         installability (e.g. goes from uninstallable to installable),
@@ -2622,23 +2689,20 @@
-        current_pkg is the package currently being tried, mainly used
-        to print where an AIEEE is coming from.
         """
-        r = system.is_installable(p)
-        if r <= 0:
-            # AIEEE: print who's responsible for it
-            if r == -1:
-                sys.stderr.write("AIEEE triggered by: %s\n" % current_pkg)
+        r = self._inst_tester.is_installable(pkg_name, pkg_version, pkg_arch)
+        if not r:
             # not installable
-            if p not in broken:
-                broken.add(p)
-                to_check.append(p)
-            if nuninst_arch is not None and p not in nuninst_arch:
-                nuninst_arch.add(p)
+            if pkg_name not in broken:
+                broken.add(pkg_name)
+                to_check.append(pkg_name)
+            if nuninst_arch is not None and pkg_name not in nuninst_arch:
+                nuninst_arch.add(pkg_name)
         else:
-            if p in broken:
-                to_check.append(p)
-                broken.remove(p)
-            if nuninst_arch is not None and p in nuninst_arch:
-                nuninst_arch.remove(p)
+            if pkg_name in broken:
+                to_check.append(pkg_name)
+                broken.remove(pkg_name)
+            if nuninst_arch is not None and pkg_name in nuninst_arch:
+                nuninst_arch.remove(pkg_name)

 if __name__ == '__main__':

@@ -82,12 +82,38 @@ def ifilter_only(container, iterable=None):
     return partial(ifilter, container.__contains__)

-def undo_changes(lundo, systems, sources, binaries,
+
+# iter_except is from the "itertools" recipe
+def iter_except(func, exception, first=None):
+    """ Call a function repeatedly until an exception is raised.
+
+    Converts a call-until-exception interface to an iterator interface.
+    Like __builtin__.iter(func, sentinel) but uses an exception instead
+    of a sentinel to end the loop.
+
+    Examples:
+        bsddbiter = iter_except(db.next, bsddb.error, db.first)
+        heapiter = iter_except(functools.partial(heappop, h), IndexError)
+        dictiter = iter_except(d.popitem, KeyError)
+        dequeiter = iter_except(d.popleft, IndexError)
+        queueiter = iter_except(q.get_nowait, Queue.Empty)
+        setiter = iter_except(s.pop, KeyError)
+    """
+    try:
+        if first is not None:
+            yield first()
+        while 1:
+            yield func()
+    except exception:
+        pass
+
+
+def undo_changes(lundo, inst_tester, sources, binaries,
                  BINARIES=BINARIES, PROVIDES=PROVIDES):
     """Undoes one or more changes to testing

     * lundo is a list of (undo, item)-tuples
-    * systems is the britney-py.c system
+    * inst_tester is an InstallabilityTester
     * sources is the table of all source packages for all suites
     * binaries is the table of all binary packages for all suites
       and architectures
@@ -120,8 +146,9 @@ def undo_changes(lundo, systems, sources, binaries,
             for p in sources[item.suite][item.package][BINARIES]:
                 binary, arch = p.split("/")
                 if item.architecture in ['source', arch]:
+                    version = binaries["testing"][arch][0][binary][VERSION]
                     del binaries["testing"][arch][0][binary]
-                    systems[arch].remove_binary(binary)
+                    inst_tester.remove_testing_binary(binary, version, arch)

     # STEP 3
@@ -130,14 +157,17 @@ def undo_changes(lundo, systems, sources, binaries,
         for p in undo['binaries']:
             binary, arch = p.split("/")
             if binary[0] == "-":
+                version = binaries["testing"][arch][0][binary][VERSION]
                 del binaries['testing'][arch][0][binary[1:]]
-                systems[arch].remove_binary(binary[1:])
+                inst_tester.remove_testing_binary(binary, version, arch)
             else:
                 binaries_t_a = binaries['testing'][arch][0]
-                binaries_t_a[binary] = undo['binaries'][p]
-                systems[arch].remove_binary(binary)
-                systems[arch].add_binary(binary, binaries_t_a[binary][:PROVIDES] + \
-                    [", ".join(binaries_t_a[binary][PROVIDES]) or None])
+                if p in binaries_t_a:
+                    rmpkgdata = binaries_t_a[p]
+                    inst_tester.remove_testing_binary(binary, rmpkgdata[VERSION], arch)
+                pkgdata = undo['binaries'][p]
+                binaries_t_a[binary] = pkgdata
+                inst_tester.add_testing_binary(binary, pkgdata[VERSION], arch)

     # STEP 4
     # undo all changes to virtual packages

@@ -35,3 +35,4 @@ CONFLICTS = 7
 PROVIDES = 8
 RDEPENDS = 9
 RCONFLICTS = 10
+ESSENTIAL = 11

@@ -0,0 +1,308 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Niels Thykier <niels@thykier.net>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
from contextlib import contextmanager
from britney_util import ifilter_except, iter_except
from installability.tester import InstallabilityTester
class _RelationBuilder(object):
"""Private helper class to "build" relations"""
def __init__(self, itbuilder, binary):
self._itbuilder = itbuilder
self._binary = binary
binary_data = itbuilder._package_table[binary]
self._new_deps = set(binary_data[0])
self._new_breaks = set(binary_data[1])
def add_dependency_clause(self, or_clause):
"""Add a dependency clause
The clause must be a sequence of (name, version, architecture)
tuples. The clause is an OR clause, i.e. any tuple in the
sequence can satisfy the relation. It is irrelevant if the
dependency is from the "Depends" or the "Pre-Depends" field.
Note that if the sequence is empty, the dependency is assumed
to be unsatisfiable.
The binaries in the clause are not required to have been added
to the InstallabilityTesterBuilder when this method is called.
However, they must be added before the "build()" method is
called.
"""
clause = self._itbuilder._intern_set(or_clause)
binary = self._binary
itbuilder = self._itbuilder
package_table = itbuilder._package_table
reverse_package_table = itbuilder._reverse_package_table
okay = False
for dep_tuple in clause:
okay = True
reverse_relations = itbuilder._reverse_relations(dep_tuple)
reverse_relations[0].add(binary)
self._new_deps.add(clause)
if not okay:
self._itbuilder._broken.add(binary)
def add_breaks(self, broken_binary):
"""Add a Breaks-clause
Marks the given binary as being broken by the current
package. That is, the given package satisfies a relation
in either the "Breaks" or the "Conflicts" field. The binary
given must be a (name, version, architecture)-tuple.
The binary is not required to have been added to the
InstallabilityTesterBuilder when this method is called. However,
it must be added before the "build()" method is called.
"""
itbuilder = self._itbuilder
self._new_breaks.add(broken_binary)
reverse_relations = itbuilder._reverse_relations(broken_binary)
reverse_relations[1].add(self._binary)
def _commit(self):
itbuilder = self._itbuilder
data = (itbuilder._intern_set(self._new_deps),
itbuilder._intern_set(self._new_breaks))
itbuilder._package_table[self._binary] = data
class InstallabilityTesterBuilder(object):
"""Builder to create instances of InstallabilityTester"""
def __init__(self):
self._package_table = {}
self._reverse_package_table = {}
self._essentials = set()
self._testing = set()
self._internmap = {}
self._broken = set()
def add_binary(self, binary, essential=False, in_testing=False,
frozenset=frozenset):
"""Add a new binary package
Adds a new binary package. The binary must be given as a
(name, version, architecture)-tuple. Returns True if this
binary is new (i.e. has never been added before) or False
otherwise.
Keyword arguments:
* essential - Whether this package is "Essential: yes".
* in_testing - Whether this package is in testing.
The frozenset argument is a private optimisation.
Caveat: arch:all packages should be "re-mapped" to the given
architecture. That is, (pkg, version, "all") should be
added as:
for arch in architectures:
binary = (pkg, version, arch)
it.add_binary(binary)
The resulting InstallabilityTester relies on this for
correctness!
"""
# Note, even with a dup, we need to do these
if in_testing:
self._testing.add(binary)
if essential:
self._essentials.add(binary)
if binary not in self._package_table:
# Allow binaries to be added multiple times (happens
# when sid and testing have the same version)
self._package_table[binary] = (frozenset(), frozenset())
return True
return False
@contextmanager
def relation_builder(self, binary):
"""Returns a _RelationBuilder for a given binary [context]
This method returns a context-managed _RelationBuilder for a
given binary.  So it should be used in a "with"-statement,
like:
with it.relation_builder(binary) as rel:
rel.add_dependency_clause(dependency_clause)
rel.add_breaks(pkgtuple)
...
The binary given must be a (name, version, architecture)-tuple.
Note, this method is optimised to be called at most once per
binary.
"""
if binary not in self._package_table:
raise ValueError("Binary %s/%s/%s does not exist" % binary)
rel = _RelationBuilder(self, binary)
yield rel
rel._commit()
def _intern_set(self, s, frozenset=frozenset):
"""Freeze and intern a given sequence (set variant of intern())
Given a sequence, create a frozenset copy (if it is not
already a frozenset) and intern that frozen set. Returns the
interned set.
At first glance, interning sets may seem absurd. However,
it does enable memory savings of up to 600MB when applied
to the "inner" sets of the dependency clauses and all the
conflicts relations as well.
"""
if type(s) == frozenset:
fset = s
else:
fset = frozenset(s)
if fset in self._internmap:
return self._internmap[fset]
self._internmap[fset] = fset
return fset
def _reverse_relations(self, binary, set=set):
"""Return the reverse relations for a binary
Fetch the reverse relations for a given binary, which are
created lazily.
"""
if binary in self._reverse_package_table:
return self._reverse_package_table[binary]
rel = [set(), set()]
self._reverse_package_table[binary] = rel
return rel
def build(self):
# Merge reverse conflicts with conflicts - this saves some
# operations in _check_loop since we only have to check one
# set (instead of two) and we remove a few duplicates here
# and there.
package_table = self._package_table
reverse_package_table = self._reverse_package_table
intern_set = self._intern_set
safe_set = set()
broken = self._broken
not_broken = ifilter_except(broken)
check = set(broken)
def safe_set_satisfies(t):
"""Check if t's dependencies can be satisfied by the safe set"""
if not package_table[t][0]:
# If it has no dependencies at all, then it is safe. :)
return True
for depgroup in package_table[t][0]:
if not any(dep for dep in depgroup if dep in safe_set):
return False
return True
for pkg in reverse_package_table:
if pkg not in package_table:
raise RuntimeError("%s/%s/%s referenced but not added!" % pkg)
if not reverse_package_table[pkg][1]:
# no rconflicts - ignore
continue
deps, con = package_table[pkg]
if not con:
con = intern_set(reverse_package_table[pkg][1])
else:
con = intern_set(con | reverse_package_table[pkg][1])
package_table[pkg] = (deps, con)
# Check if we can expand broken.
for t in not_broken(iter_except(check.pop, KeyError)):
# This package is not known to be broken... but it might be now
isb = False
for depgroup in package_table[t][0]:
if not any(not_broken(depgroup)):
# A single clause is unsatisfiable, the
# package can never be installed - add it to
# broken.
isb = True
break
if not isb:
continue
broken.add(t)
if t not in reverse_package_table:
continue
check.update(reverse_package_table[t][0] - broken)
if broken:
# Since a broken package will never be installable, nothing that depends on it
# will ever be installable. Thus, there is no point in keeping relations on
# the broken package.
seen = set()
empty_set = frozenset()
null_data = (frozenset([empty_set]), empty_set)
for b in (x for x in broken if x in reverse_package_table):
for rdep in (r for r in not_broken(reverse_package_table[b][0])
if r not in seen):
ndep = intern_set((x - broken) for x in package_table[rdep][0])
package_table[rdep] = (ndep, package_table[rdep][1] - broken)
seen.add(rdep)
# Since they won't affect the installability of any other package, we might as
# well null their data. This saves memory for these packages, but likely there
# will only be a handful of these "at best" (fsvo of "best").
for b in broken:
package_table[b] = null_data
if b in reverse_package_table:
del reverse_package_table[b]
# Now find an initial safe set (if any)
check = set()
for pkg in package_table:
if package_table[pkg][1]:
# has (reverse) conflicts - not safe
continue
if not safe_set_satisfies(pkg):
continue
safe_set.add(pkg)
if pkg in reverse_package_table:
# add all rdeps (except those already in the safe_set)
check.update(reverse_package_table[pkg][0] - safe_set)
# Check if we can expand the initial safe set
for pkg in iter_except(check.pop, KeyError):
if package_table[pkg][1]:
# has (reverse) conflicts - not safe
continue
if safe_set_satisfies(pkg):
safe_set.add(pkg)
if pkg in reverse_package_table:
# add all rdeps (except those already in the safe_set)
check.update(reverse_package_table[pkg][0] - safe_set)
return InstallabilityTester(package_table,
frozenset(reverse_package_table),
self._testing, self._broken,
self._essentials, safe_set)

@@ -0,0 +1,464 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Niels Thykier <niels@thykier.net>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
from functools import partial
from itertools import ifilter, ifilterfalse
from britney_util import iter_except
class InstallabilityTester(object):
def __init__(self, universe, revuniverse, testing, broken, essentials,
safe_set):
"""Create a new installability tester
universe is a dict mapping package tuples to their
dependencies and conflicts.
revuniverse is a set of all packages with reverse relations
testing is a (mutable) set of package tuples that determines
which of the packages in universe are currently in testing.
broken is a (mutable) set of package tuples that are known to
be uninstallable.
essentials is a set of packages with "Essential: yes".
safe_set is a set of all packages which have no conflicts and
either have no dependencies or only depends on other "safe"
packages.
Package tuple: (pkg_name, pkg_version, pkg_arch)
- NB: arch:all packages are "re-mapped" to given architecture.
(simplifies caches and dependency checking)
"""
self._universe = universe
self._testing = testing
self._broken = broken
self._essentials = essentials
self._revuniverse = revuniverse
self._safe_set = safe_set
# Cache of packages known to be broken - we deliberately do not
# include "broken" in it. See _optimize for more info.
self._cache_broken = set()
# Cache of packages known to be installable
self._cache_inst = set()
# Per "arch" cache of the "minimal" (possibly incomplete)
# pseudo-essential set. This includes all the packages that
# are essential and packages that will always follow.
#
# It may not be a complete essential set, since alternatives
# are not always resolved.  Notably, cases like "awk" may be
# left out (since it could be either gawk, mawk or
# original-awk) unless something in this set depends strictly
# on one of them.
self._cache_ess = {}
def compute_testing_installability(self):
"""Computes the installability of packages in testing
This method computes the installability of all packages in
testing and caches the result. This has the advantage of
making "is_installable" queries very fast for all packages
in testing.
"""
check_inst = self._check_inst
cbroken = self._cache_broken
cache_inst = self._cache_inst
tcopy = [x for x in self._testing]
for t in ifilterfalse(cache_inst.__contains__, tcopy):
if t in cbroken:
continue
check_inst(t)
def add_testing_binary(self, pkg_name, pkg_version, pkg_arch):
"""Add a binary package to "testing"
If the package is not known, this method will raise a
KeyError.
"""
t = (pkg_name, pkg_version, pkg_arch)
if t not in self._universe:
raise KeyError(str(t))
if t in self._broken:
self._testing.add(t)
elif t not in self._testing:
self._testing.add(t)
self._cache_inst = set()
if self._cache_broken:
# Re-add broken packages as some of them may now be installable
self._testing |= self._cache_broken
self._cache_broken = set()
if t in self._essentials and t[2] in self._cache_ess:
# Adds new essential => "pseudo-essential" set needs to be
# recomputed
del self._cache_ess[t[2]]
return True
def remove_testing_binary(self, pkg_name, pkg_version, pkg_arch):
"""Remove a binary from "testing"
If the package is not known, this method will raise a
KeyError.
"""
t = (pkg_name, pkg_version, pkg_arch)
if t not in self._universe:
raise KeyError(str(t))
self._cache_broken.discard(t)
if t in self._testing:
self._testing.remove(t)
if t[2] in self._cache_ess and t in self._cache_ess[t[2]][0]:
# Removes a package from the "pseudo-essential set"
del self._cache_ess[t[2]]
if t not in self._revuniverse:
# no reverse relations - safe
return True
if t not in self._broken and t in self._cache_inst:
# It is in our cache (and not guaranteed to be broken) - throw out the cache
self._cache_inst = set()
return True
def is_installable(self, pkg_name, pkg_version, pkg_arch):
"""Test if a package is installable in this package set
The package is assumed to be in "testing" and only packages in
"testing" can be used to satisfy relations.
Returns True iff the package is installable.
Returns False otherwise.
"""
t = (pkg_name, pkg_version, pkg_arch)
if t not in self._universe:
raise KeyError(str(t))
if t not in self._testing or t in self._broken:
return False
if t in self._cache_inst:
return True
return self._check_inst(t)
def _check_inst(self, t, musts=None, never=None, choices=None):
# See the explanation of musts, never and choices below.
cache_inst = self._cache_inst
if t in cache_inst and not never:
# use the inst cache only for direct queries/simple queries.
cache = True
if choices:
# This is a recursive call, where there is no "never" so far.
# We know t satisfies at least one of the remaining choices.
# If it satisfies all remaining choices, we can use the cache
# in this case (since never is empty).
#
# Otherwise, a later choice may be incompatible with t.
for choice in choices:
if t in choice:
continue
cache = False
break
if cache:
return True
universe = self._universe
testing = self._testing
cbroken = self._cache_broken
safe_set = self._safe_set
# Our installability verdict - start with "yes" and change it if
# we prove otherwise.
verdict = True
# set of packages that must be installed with this package
if musts is None:
musts = set()
musts.add(t)
# set of packages we can *never* choose (e.g. due to conflicts)
if never is None:
never = set()
# set of relations where we have a choice, but where we have not
# committed ourselves yet. Hopefully some choices may be taken
# for us (if one of the alternatives appears in "musts")
if choices is None:
choices = set()
# The subset of musts we haven't checked yet.
check = set([t])
if len(musts) == 1:
# Include the essential packages in testing as a starting point.
if t[2] not in self._cache_ess:
# The minimal essential set cache is not present -
# compute it now.
(start, ess_never) = self._get_min_pseudo_ess_set(t[2])
else:
(start, ess_never) = self._cache_ess[t[2]]
if t in ess_never:
# t conflicts with something in the essential set or the essential
# set conflicts with t - either way, t is f***ed
cbroken.add(t)
testing.remove(t)
return False
musts.update(start)
never.update(ess_never)
# curry check_loop
check_loop = partial(self._check_loop, universe, testing, musts,
never, choices, cbroken)
# Useful things to remember:
#
# * musts and never are disjoint at all times
# - if not, t cannot be installable.  Either t, or one of
# its dependencies, conflicts with t or one of its (other)
# dependencies.
#
# * choices should generally be avoided as much as possible.
# - picking a bad choice requires backtracking
# - sometimes musts/never will eventually "solve" the choice.
#
# * check never includes choices (these are always in choices)
#
# * A package is installable if never and musts are disjoint
# and both check and choices are empty.
# - exception: _pick_choice may determine the installability
# of t via recursion (calls _check_inst). In this case
# check and choices are not (always) empty.
def _pick_choice(rebuild):
"""Picks a choice from choices and updates rebuild.
Prunes the choices and updates "rebuild" to reflect the
pruned choices.
Returns True if t is installable (determined via recursion).
Returns False if a choice was picked and added to check.
Returns None if t is uninstallable (no choice can be picked).
NB: If this returns False, choices should be replaced by
rebuild.
"""
# We have already satisfied/chosen at least one of the literals
# in the choice, so the choice is gone
for choice in ifilter(musts.isdisjoint, choices):
# cbroken is needed here because (in theory) it could
# have changed since the choice was discovered and it
# is smaller than testing (so presumably faster)
remain = choice - never - cbroken
if not remain:
# all alternatives would violate the conflicts => package is not installable
return None
if len(remain) > 1 and not remain.isdisjoint(safe_set):
first = None
for r in ifilter(safe_set.__contains__, remain):
# don't bother giving extra arguments to _check_inst. "safe" packages are
# usually trivial to satisfy on their own and will not involve conflicts
# (so never will not help)
if r in cache_inst or self._check_inst(r):
first = r
break
if first:
musts.add(first)
check.add(first)
continue
# None of the safe set choices are installable, so drop them
remain -= safe_set
if len(remain) == 1:
# the choice was reduced to one package we haven't checked - check that
check.update(remain)
musts.update(remain)
continue
# The choice is still deferred
rebuild.add(frozenset(remain))
if check or not rebuild:
return False
choice = iter(rebuild.pop())
last = next(choice) # pick one to go last
for p in choice:
musts_copy = musts.copy()
never_copy = never.copy()
choices_copy = choices.copy()
if self._check_inst(p, musts_copy, never_copy, choices_copy):
return True
# If we get here, we failed to find something that would satisfy choice (without breaking
# the installability of t). This means p cannot be used to satisfy the dependencies, so
# pretend to conflict with it - hopefully it will reduce future choices.
never.add(p)
# Optimization for the last case; avoid the recursive call and just
# assume the last will lead to a solution. If it doesn't there is
# no solution and if it does, we don't have to back-track anyway.
check.add(last)
musts.add(last)
return False
# END _pick_choice
while check:
if not check_loop(check):
verdict = False
break
if choices:
rebuild = set()
# We have to "guess" now, which is always fun, but not cheap
r = _pick_choice(rebuild)
if r is None:
verdict = False
break
if r:
# The recursive call has already updated the
# cache, so there is no point in doing it again.
return True
choices = rebuild
if verdict:
# if t is installable, then so are all packages in musts
self._cache_inst.update(musts)
return verdict
def _check_loop(self, universe, testing, musts, never,
choices, cbroken, check):
"""Finds all guaranteed dependencies via "check".
If it returns False, t is not installable. If it returns True
then "check" is exhausted. If "choices" are empty and this
returns True, then t is installable.
"""
# Local variables for faster access...
l = len
fset = frozenset
not_satisfied = partial(ifilter, musts.isdisjoint)
# While we have guaranteed dependencies (in check), examine all
# of them.
for cur in iter_except(check.pop, KeyError):
(deps, cons) = universe[cur]
if cons:
# Conflicts?
if cur in never:
# cur adds a (reverse) conflict, so check if cur
# is in never.
#
# - there is a window where two conflicting
# packages can be in check. Example "A" depends
# on "B" and "C". If "B" conflicts with "C",
# then both "B" and "C" could end in "check".
return False
# We must install cur for the package to be installable,
# so "obviously" we can never choose any of its conflicts
never.update(cons & testing)
# depgroup can be satisfied by picking something that is
# already in musts - let's pick that (again). :)
for depgroup in not_satisfied(deps):
# Of all the packages listed in the relation remove those that
# are either:
# - not in testing
# - known to be broken (by cache)
# - in never
candidates = fset((depgroup & testing) - never)
if l(candidates) == 0:
# We got no candidates to satisfy it - this
# package cannot be installed with the current
# testing
if cur not in cbroken and depgroup.isdisjoint(never):
# cur's dependency cannot be satisfied even if never was empty.
# This means that cur itself is broken (as well).
cbroken.add(cur)
testing.remove(cur)
return False
if l(candidates) == 1:
# only one possible solution to this choice and we
# haven't seen it before
check.update(candidates)
musts.update(candidates)
else:
# defer this choice till later
choices.add(candidates)
return True
def _get_min_pseudo_ess_set(self, arch):
if arch not in self._cache_ess:
# The minimal essential set cache is not present -
# compute it now.
testing = self._testing
cbroken = self._cache_broken
universe = self._universe
safe_set = self._safe_set
ess_base = set(x for x in self._essentials if x[2] == arch and x in testing)
start = set(ess_base)
ess_never = set()
ess_choices = set()
not_satisfied = partial(ifilter, start.isdisjoint)
while ess_base:
self._check_loop(universe, testing, start, ess_never,\
ess_choices, cbroken, ess_base)
if ess_choices:
# Try to break choices where possible
nchoice = set()
for choice in not_satisfied(ess_choices):
b = False
for c in choice:
if universe[c][1] <= ess_never and \
not any(not_satisfied(universe[c][0])):
ess_base.add(c)
b = True
break
if not b:
nchoice.add(choice)
ess_choices = nchoice
else:
break
for x in start:
ess_never.update(universe[x][1])
self._cache_ess[arch] = (frozenset(start), frozenset(ess_never))
return self._cache_ess[arch]
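
For reference, a toy end-to-end use of the builder and the tester (the
package tuples are made up; britney itself wires this up in
_build_installability_tester):

    from installability.builder import InstallabilityTesterBuilder

    foo = ("foo", "1.0", "amd64")
    bar = ("bar", "2.0", "amd64")

    builder = InstallabilityTesterBuilder()
    builder.add_binary(foo, in_testing=True)
    builder.add_binary(bar, in_testing=True)
    with builder.relation_builder(foo) as rel:
        rel.add_dependency_clause([bar])           # foo Depends: bar

    tester = builder.build()
    tester.is_installable("foo", "1.0", "amd64")   # True: bar is in testing
    tester.remove_testing_binary("bar", "2.0", "amd64")
    tester.is_installable("foo", "1.0", "amd64")   # False: the dependency is gone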