Merge trunk up to 2014-08-05

Colin Watson 2014-12-10 11:39:27 +00:00
commit 7c8fd39803
5 changed files with 447 additions and 160 deletions

britney.py

@@ -192,6 +192,7 @@ import urllib
 import apt_pkg
+from collections import defaultdict
 from functools import reduce, partial
 from itertools import chain, ifilter, product
 from operator import attrgetter
@@ -220,7 +221,7 @@ from britney_util import (old_libraries_format, same_source, undo_changes,
                           read_nuninst, write_nuninst, write_heidi,
                           eval_uninst, newly_uninst, make_migrationitem,
                           write_excuses, write_heidi_delta, write_controlfiles,
-                          old_libraries, ensuredir)
+                          old_libraries, is_nuninst_asgood_generous, ensuredir)
 from consts import (VERSION, SECTION, BINARIES, MAINTAINER, FAKESRC,
                     SOURCE, SOURCEVER, ARCHITECTURE, DEPENDS, CONFLICTS,
                     PROVIDES, RDEPENDS, RCONFLICTS, MULTIARCH, ESSENTIAL)
@@ -454,10 +455,12 @@ class Britney(object):
                 depends = []
                 conflicts = []
+                possible_dep_ranges = {}

                 # We do not differ between depends and pre-depends
                 if pkgdata[DEPENDS]:
                     depends.extend(apt_pkg.parse_depends(pkgdata[DEPENDS], False))

                 if pkgdata[CONFLICTS]:
                     conflicts = apt_pkg.parse_depends(pkgdata[CONFLICTS], False)
@@ -465,8 +468,10 @@ class Britney(object):
                 for (al, dep) in [(depends, True), \
                                   (conflicts, False)]:

                     for block in al:
                         sat = set()

                         for dep_dist in binaries:
                             (_, pkgs) = solvers(block, arch, dep_dist)
                             for p in pkgs:
@@ -483,7 +488,37 @@ class Britney(object):
                                 # is using §7.6.2
                                 relations.add_breaks(pt)
                         if dep:
-                            relations.add_dependency_clause(sat)
+                            if len(block) != 1:
+                                relations.add_dependency_clause(sat)
+                            else:
+                                # This dependency might be a part
+                                # of a version-range a la:
+                                #
+                                #   Depends: pkg-a (>= 1),
+                                #            pkg-a (<< 2~)
+                                #
+                                # In such a case we want to reduce
+                                # that to a single clause for
+                                # efficiency.
+                                #
+                                # In theory, it could also happen
+                                # with "non-minimal" dependencies
+                                # a la:
+                                #
+                                #   Depends: pkg-a, pkg-a (>= 1)
+                                #
+                                # But dpkg is known to fix that up
+                                # at build time, so we will
+                                # probably only see "ranges" here.
+                                key = block[0][0]
+                                if key in possible_dep_ranges:
+                                    possible_dep_ranges[key] &= sat
+                                else:
+                                    possible_dep_ranges[key] = sat
+
+                    if dep:
+                        for clause in possible_dep_ranges.itervalues():
+                            relations.add_dependency_clause(clause)

         self._inst_tester = builder.build()
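
For illustration only, here is a minimal sketch of the range-reduction idea introduced in the hunk above: when one package name occurs in a version range, the satisfier sets of its blocks are intersected so the installability tester receives a single clause. The package names and satisfier sets below are made up; only the intersection step mirrors the merged code.

    # Sketch (illustrative data, not britney's own structures).
    possible_dep_ranges = {}

    # One (name, satisfier-set) pair per parsed dependency block.
    blocks = [
        ("pkg-a", {("pkg-a", "1.0", "amd64"), ("pkg-a", "1.5", "amd64"), ("pkg-a", "2.0", "amd64")}),  # pkg-a (>= 1)
        ("pkg-a", {("pkg-a", "0.9", "amd64"), ("pkg-a", "1.0", "amd64"), ("pkg-a", "1.5", "amd64")}),  # pkg-a (<< 2~)
    ]

    for key, sat in blocks:
        if key in possible_dep_ranges:
            possible_dep_ranges[key] &= sat   # both halves of the range must hold
        else:
            possible_dep_ranges[key] = sat

    # A single clause is emitted for the ranged package instead of two.
    assert possible_dep_ranges["pkg-a"] == {("pkg-a", "1.0", "amd64"), ("pkg-a", "1.5", "amd64")}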
@@ -731,7 +766,7 @@ class Britney(object):
         The method returns a dictionary where the key is the binary package
         name and the value is the list of open RC bugs for it.
         """
-        bugs = {}
+        bugs = defaultdict(list)
         filename = os.path.join(basedir, "BugsV")
         self.__log("Loading RC bugs data from %s" % filename)
         try:
@@ -742,7 +777,6 @@ class Britney(object):
                                type='W')
                     continue
                 pkg = l[0]
-                bugs.setdefault(pkg, [])
                 bugs[pkg] += l[1].split(",")
         except IOError:
             self.__log("%s missing; skipping bug-based processing" % filename)
@@ -1196,7 +1230,7 @@ class Britney(object):
         binary_u = self.binaries[suite][arch][0][pkg_name]

         # this is the source version for the new binary package
-        pkgsv = self.binaries[suite][arch][0][pkg_name][SOURCEVER]
+        pkgsv = binary_u[SOURCEVER]

         # if the new binary package is architecture-independent, then skip it
         if binary_u[ARCHITECTURE] == 'all':
@@ -1972,14 +2006,6 @@ class Britney(object):
         return "%d+%d: %s" % (total, totalbreak, ":".join(res))

-    def is_nuninst_asgood_generous(self, old, new):
-        diff = 0
-        for arch in self.options.architectures:
-            if arch in self.options.break_arches.split(): continue
-            diff = diff + (len(new[arch]) - len(old[arch]))
-        return diff <= 0
-
     def _compute_groups(self, source_name, suite, migration_architecture,
                         is_removal, include_hijacked=False,
                         allow_smooth_updates=True,
@@ -2159,9 +2185,9 @@ class Britney(object):
         This method applies the changes required by the action `item` tracking
         them so it will be possible to revert them.

-        The method returns a list of the package name, the suite where the
-        package comes from, the set of packages affected by the change and
-        the dictionary undo which can be used to rollback the changes.
+        The method returns a tuple containing a set of packages
+        affected by the change (as (name, arch)-tuples) and the
+        dictionary undo which can be used to rollback the changes.
         """

         undo = {'binaries': {}, 'sources': {}, 'virtual': {}, 'nvirtual': []}
@@ -2169,39 +2195,63 @@ class Britney(object):
         # local copies for better performances
         sources = self.sources
-        binaries = self.binaries['testing']
-        get_reverse_tree = partial(compute_reverse_tree, self.binaries["testing"])
+        packages_t = self.binaries['testing']
+        get_reverse_tree = partial(compute_reverse_tree, packages_t)
+        inst_tester = self._inst_tester
+        eqv_set = set()

         # remove all binary packages (if the source already exists)
         if item.architecture == 'source' or not item.is_removal:
             if item.package in sources['testing']:
                 source = sources['testing'][item.package]

-                _, bins, _ = self._compute_groups(item.package,
+                updates, rms, _ = self._compute_groups(item.package,
                                                   item.suite,
                                                   item.architecture,
                                                   item.is_removal,
                                                   removals=removals)

+                eqv_table = {}
+
+                for binary, version, parch in rms:
+                    key = (binary, parch)
+                    eqv_table[key] = version
+
+                for p1 in updates:
+                    binary, _, parch = p1
+                    key = (binary, parch)
+                    old_version = eqv_table.get(key)
+                    if old_version is not None:
+                        p2 = (binary, old_version, parch)
+                        if inst_tester.are_equivalent(p1, p2):
+                            eqv_set.add(key)
+
                 # remove all the binaries which aren't being smooth updated
-                for bin_data in bins:
-                    binary, _, parch = bin_data
+                for rm_tuple in rms:
+                    binary, version, parch = rm_tuple
                     p = binary + "/" + parch
+                    binaries_t_a, provides_t_a = packages_t[parch]
+                    pkey = (binary, parch)
+                    pkg_data = binaries_t_a[binary]
                     # save the old binary for undo
-                    undo['binaries'][p] = binaries[parch][0][binary]
-                    # all the reverse dependencies are affected by the change
-                    affected.update(get_reverse_tree(binary, parch))
+                    undo['binaries'][p] = pkg_data
+                    if pkey not in eqv_set:
+                        # all the reverse dependencies are affected by
+                        # the change
+                        affected.update(get_reverse_tree(binary, parch))
                     # remove the provided virtual packages
-                    for j in binaries[parch][0][binary][PROVIDES]:
+                    for j in pkg_data[PROVIDES]:
                         key = j + "/" + parch
                         if key not in undo['virtual']:
-                            undo['virtual'][key] = binaries[parch][1][j][:]
-                        binaries[parch][1][j].remove(binary)
-                        if len(binaries[parch][1][j]) == 0:
-                            del binaries[parch][1][j]
+                            undo['virtual'][key] = provides_t_a[j][:]
+                        provides_t_a[j].remove(binary)
+                        if not provides_t_a[j]:
+                            del provides_t_a[j]
                     # finally, remove the binary package
-                    version = binaries[parch][0][binary][VERSION]
-                    del binaries[parch][0][binary]
-                    self._inst_tester.remove_testing_binary(binary, version, parch)
+                    del binaries_t_a[binary]
+                    inst_tester.remove_testing_binary(binary, version, parch)
             # remove the source package
             if item.architecture == 'source':
                 undo['sources'][item.package] = source
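
The pairing logic above only marks reverse dependencies as affected when the incoming binary is not equivalent to the one being removed. A simplified, self-contained sketch of how the eqv_set is derived from the removal and update lists; the are_equivalent callback is a stand-in for the installability tester's method:

    # Sketch: record (name, arch) pairs whose replacement is equivalent.
    def build_eqv_set(rms, updates, are_equivalent):
        """rms/updates are iterables of (name, version, arch) tuples."""
        eqv_table = {}
        for binary, version, parch in rms:
            eqv_table[(binary, parch)] = version

        eqv_set = set()
        for p1 in updates:
            binary, _, parch = p1
            old_version = eqv_table.get((binary, parch))
            if old_version is not None:
                p2 = (binary, old_version, parch)
                if are_equivalent(p1, p2):
                    # same dependency "signature": its reverse
                    # dependencies need not be re-checked
                    eqv_set.add((binary, parch))
        return eqv_set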
@@ -2212,37 +2262,47 @@ class Britney(object):
         # single binary removal; used for clearing up after smooth
         # updates but not supported as a manual hint
-        elif item.package in binaries[item.architecture][0]:
-            undo['binaries'][item.package + "/" + item.architecture] = binaries[item.architecture][0][item.package]
+        elif item.package in packages_t[item.architecture][0]:
+            binaries_t_a = packages_t[item.architecture][0]
+            undo['binaries'][item.package + "/" + item.architecture] = binaries_t_a[item.package]
             affected.update(get_reverse_tree(item.package, item.architecture))
-            version = binaries[item.architecture][0][item.package][VERSION]
-            del binaries[item.architecture][0][item.package]
-            self._inst_tester.remove_testing_binary(item.package, version, item.architecture)
+            version = binaries_t_a[item.package][VERSION]
+            del binaries_t_a[item.package]
+            inst_tester.remove_testing_binary(item.package, version, item.architecture)

         # add the new binary packages (if we are not removing)
         if not item.is_removal:
             source = sources[item.suite][item.package]
+            packages_s = self.binaries[item.suite]
             for p in source[BINARIES]:
                 binary, parch = p.split("/")
                 if item.architecture not in ['source', parch]: continue
                 key = (binary, parch)
+                binaries_t_a, provides_t_a = packages_t[parch]
+                equivalent_replacement = key in eqv_set

                 # obviously, added/modified packages are affected
-                if key not in affected: affected.add(key)
+                if not equivalent_replacement and key not in affected:
+                    affected.add(key)

                 # if the binary already exists in testing, it is currently
                 # built by another source package. we therefore remove the
                 # version built by the other source package, after marking
                 # all of its reverse dependencies as affected
-                if binary in binaries[parch][0]:
+                if binary in binaries_t_a:
+                    old_pkg_data = binaries_t_a[binary]
                     # save the old binary package
-                    undo['binaries'][p] = binaries[parch][0][binary]
-                    # all the reverse dependencies are affected by the change
-                    affected.update(get_reverse_tree(binary, parch))
-                    # all the reverse conflicts and their dependency tree are affected by the change
-                    for j in binaries[parch][0][binary][RCONFLICTS]:
-                        affected.update(get_reverse_tree(j, parch))
-                    version = binaries[parch][0][binary][VERSION]
-                    self._inst_tester.remove_testing_binary(binary, version, parch)
+                    undo['binaries'][p] = old_pkg_data
+                    if not equivalent_replacement:
+                        # all the reverse dependencies are affected by
+                        # the change
+                        affected.update(get_reverse_tree(binary, parch))
+                        # all the reverse conflicts and their
+                        # dependency tree are affected by the change
+                        for j in old_pkg_data[RCONFLICTS]:
+                            affected.update(get_reverse_tree(j, parch))
+                    old_version = old_pkg_data[VERSION]
+                    inst_tester.remove_testing_binary(binary, old_version, parch)
                 else:
                     # the binary isn't in testing, but it may have been at
                     # the start of the current hint and have been removed
@@ -2258,23 +2318,26 @@ class Britney(object):
                     for (tundo, tpkg) in hint_undo:
                         if p in tundo['binaries']:
                             for rdep in tundo['binaries'][p][RDEPENDS]:
-                                if rdep in binaries[parch][0] and rdep not in source[BINARIES]:
+                                if rdep in binaries_t_a and rdep not in source[BINARIES]:
                                     affected.update(get_reverse_tree(rdep, parch))
-                # add/update the binary package
-                binaries[parch][0][binary] = self.binaries[item.suite][parch][0][binary]
-                version = binaries[parch][0][binary][VERSION]
-                self._inst_tester.add_testing_binary(binary, version, parch)
+
+                # add/update the binary package from the source suite
+                new_pkg_data = packages_s[parch][0][binary]
+                new_version = new_pkg_data[VERSION]
+                binaries_t_a[binary] = new_pkg_data
+                inst_tester.add_testing_binary(binary, new_version, parch)
                 # register new provided packages
-                for j in binaries[parch][0][binary][PROVIDES]:
+                for j in new_pkg_data[PROVIDES]:
                     key = j + "/" + parch
-                    if j not in binaries[parch][1]:
+                    if j not in provides_t_a:
                         undo['nvirtual'].append(key)
-                        binaries[parch][1][j] = []
+                        provides_t_a[j] = []
                     elif key not in undo['virtual']:
-                        undo['virtual'][key] = binaries[parch][1][j][:]
-                    binaries[parch][1][j].append(binary)
-                # all the reverse dependencies are affected by the change
-                affected.update(get_reverse_tree(binary, parch))
+                        undo['virtual'][key] = provides_t_a[j][:]
+                    provides_t_a[j].append(binary)
+                if not equivalent_replacement:
+                    # all the reverse dependencies are affected by the change
+                    affected.update(get_reverse_tree(binary, parch))

             # register reverse dependencies and conflicts for the new binary packages
             if item.architecture == 'source':
@@ -2282,14 +2345,14 @@ class Britney(object):
             else:
                 ext = "/" + item.architecture
                 pkg_iter = (p.split("/")[0] for p in source[BINARIES] if p.endswith(ext))
-            register_reverses(binaries[parch][0], binaries[parch][1], iterator=pkg_iter)
+            register_reverses(binaries_t_a, provides_t_a, iterator=pkg_iter)

         # add/update the source package
         if item.architecture == 'source':
             sources['testing'][item.package] = sources[item.suite][item.package]

         # return the package name, the suite, the list of affected packages and the undo dictionary
-        return (item, affected, undo)
+        return (affected, undo)

     def _check_packages(self, binaries, arch, affected, skip_archall, nuninst):
@@ -2324,7 +2387,51 @@ class Britney(object):
             self._installability_test(p, version, arch, broken, to_check, nuninst_arch)

-    def iter_packages(self, packages, selected, hint=False, nuninst=None, lundo=None):
+    def iter_packages_hint(self, hinted_packages, lundo=None):
+        """Iter on hinted list of actions and apply them in one go
+
+        This method applies the changes from "hinted_packages" to
+        testing and computes the uninstallability counters after the
+        actions are performed.
+
+        The method returns the new uninstallability counters.
+        """
+        removals = set()
+        all_affected = set()
+        nobreakall_arches = self.options.nobreakall_arches.split()
+        binaries_t = self.binaries['testing']
+        check_packages = partial(self._check_packages, binaries_t)
+        # Deep copy nuninst (in case the hint is undone)
+        nuninst = {k:v.copy() for k,v in self.nuninst_orig.iteritems()}
+
+        for item in hinted_packages:
+            _, rms, _ = self._compute_groups(item.package, item.suite,
+                                             item.architecture,
+                                             item.is_removal,
+                                             allow_smooth_updates=False)
+            removals.update(rms)
+
+        for item in hinted_packages:
+            affected, undo = self.doop_source(item,
+                                              removals=removals)
+            all_affected.update(affected)
+            if lundo is not None:
+                lundo.append((undo,item))
+
+        for arch in self.options.architectures:
+            if arch not in nobreakall_arches:
+                skip_archall = True
+            else:
+                skip_archall = False
+
+            check_packages(arch, all_affected, skip_archall, nuninst)
+
+        return nuninst
+
+    def iter_packages(self, packages, selected, nuninst=None, lundo=None):
         """Iter on the list of actions and apply them one-by-one

         This method applies the changes from `packages` to testing, checking the uninstallability
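
The new iter_packages_hint above follows an "apply everything, then recount once" pattern: removals are gathered for the whole batch, every action is applied, and the counters are recomputed a single time on a deep copy so an unsuccessful hint can be discarded. A hedged, self-contained sketch of that pattern (the parameters and helpers are stand-ins, not britney's API):

    # Illustrative sketch, not britney's actual method.
    def apply_hint(nuninst_orig, hinted_items, apply_item, recount_arch,
                   architectures, nobreakall_arches):
        # deep copy so the caller can simply drop it if the hint is rejected
        nuninst = {k: v.copy() for k, v in nuninst_orig.items()}
        all_affected = set()
        for item in hinted_items:
            all_affected.update(apply_item(item))   # returns affected (name, arch) tuples
        for arch in architectures:
            skip_archall = arch not in nobreakall_arches
            recount_arch(arch, all_affected, skip_archall, nuninst)
        return nuninst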
@@ -2353,29 +2460,14 @@ class Britney(object):
         dependencies = self.dependencies
         check_packages = partial(self._check_packages, binaries)

-        # pre-process a hint batch
-        pre_process = {}
-        if selected and hint:
-            removals = set()
-            for item in selected:
-                _, rms, _ = self._compute_groups(item.package, item.suite,
-                                                 item.architecture,
-                                                 item.is_removal,
-                                                 allow_smooth_updates=False)
-                removals.update(rms)
-            for package in selected:
-                pkg, affected, undo = self.doop_source(package,
-                                                       removals=removals)
-                pre_process[package] = (pkg, affected, undo)
-
         if lundo is None:
             lundo = []
-        if not hint:
-            self.output_write("recur: [%s] %s %d/%d\n" % ("", ",".join(x.uvname for x in selected), len(packages), len(extra)))
+        self.output_write("recur: [%s] %s %d/%d\n" % ("", ",".join(x.uvname for x in selected), len(packages), len(extra)))

         # loop on the packages (or better, actions)
         while packages:
-            pkg = packages.pop(0)
+            item = packages.pop(0)

             # this is the marker for the first loop
             if not mark_passed and position < 0:
@@ -2387,61 +2479,48 @@ class Britney(object):
             # defer packages if their dependency has been already skipped
             if not mark_passed:
                 defer = False
-                for p in dependencies.get(pkg, []):
+                for p in dependencies.get(item, []):
                     if p in skipped:
-                        deferred.append(make_migrationitem(pkg, self.sources))
-                        skipped.append(make_migrationitem(pkg, self.sources))
+                        deferred.append(item)
+                        skipped.append(item)
                         defer = True
                         break
                 if defer: continue

-            if not hint:
-                self.output_write("trying: %s\n" % (pkg.uvname))
+            self.output_write("trying: %s\n" % (item.uvname))

             better = True
             nuninst = {}

             # apply the changes
-            if pkg in pre_process:
-                item, affected, undo = pre_process[pkg]
-            else:
-                item, affected, undo = self.doop_source(pkg, lundo)
-            if hint:
-                lundo.append((undo, item))
+            affected, undo = self.doop_source(item, lundo)

             # check the affected packages on all the architectures
             for arch in (item.architecture == 'source' and architectures or (item.architecture,)):
                 if arch not in nobreakall_arches:
                     skip_archall = True
-                else: skip_archall = False
+                else:
+                    skip_archall = False

                 nuninst[arch] = set(x for x in nuninst_comp[arch] if x in binaries[arch][0])
                 nuninst[arch + "+all"] = set(x for x in nuninst_comp[arch + "+all"] if x in binaries[arch][0])

                 check_packages(arch, affected, skip_archall, nuninst)

-                # if we are processing hints, go ahead
-                if hint:
-                    nuninst_comp[arch] = nuninst[arch]
-                    nuninst_comp[arch + "+all"] = nuninst[arch + "+all"]
-                    continue
-
                 # if the uninstallability counter is worse than before, break the loop
                 if ((item.architecture != 'source' and arch not in new_arches) or \
                     (arch not in break_arches)) and len(nuninst[arch]) > len(nuninst_comp[arch]):
                     better = False
                     break

-            # if we are processing hints or the package is already accepted, go ahead
-            if hint or item in selected: continue
-
             # check if the action improved the uninstallability counters
             if better:
                 lundo.append((undo, item))
-                selected.append(pkg)
+                selected.append(item)
                 packages.extend(extra)
                 extra = []
-                self.output_write("accepted: %s\n" % (pkg.uvname))
+                self.output_write("accepted: %s\n" % (item.uvname))
                 self.output_write("   ori: %s\n" % (self.eval_nuninst(self.nuninst_orig)))
                 self.output_write("   pre: %s\n" % (self.eval_nuninst(nuninst_comp)))
                 self.output_write("   now: %s\n" % (self.eval_nuninst(nuninst, nuninst_comp)))
@@ -2452,8 +2531,8 @@ class Britney(object):
                 for k in nuninst:
                     nuninst_comp[k] = nuninst[k]
             else:
-                self.output_write("skipped: %s (%d <- %d)\n" % (pkg.uvname, len(extra), len(packages)))
-                self.output_write("    got: %s\n" % (self.eval_nuninst(nuninst, pkg.architecture != 'source' and nuninst_comp or None)))
+                self.output_write("skipped: %s (%d <- %d)\n" % (item.uvname, len(extra), len(packages)))
+                self.output_write("    got: %s\n" % (self.eval_nuninst(nuninst, item.architecture != 'source' and nuninst_comp or None)))
                 self.output_write("    * %s: %s\n" % (arch, ", ".join(sorted(b for b in nuninst[arch] if b not in nuninst_comp[arch]))))

                 extra.append(item)
@@ -2463,9 +2542,6 @@ class Britney(object):
             # (local-scope) binaries is actually self.binaries["testing"] so we cannot use it here.
             undo_changes(single_undo, self._inst_tester, sources, self.binaries)

-        # if we are processing hints, return now
-        if hint:
-            return (nuninst_comp, [])
-
         self.output_write(" finish: [%s]\n" % ",".join( x.uvname for x in selected ))
         self.output_write("endloop: %s\n" % (self.eval_nuninst(self.nuninst_orig)))
@@ -2476,6 +2552,7 @@ class Britney(object):
         return (nuninst_comp, extra)

     def do_all(self, hinttype=None, init=None, actions=None):
         """Testing update runner
@@ -2495,6 +2572,8 @@ class Britney(object):
         recurse = True
         lundo = None
         nuninst_end = None
+        better = True
+        extra = () # empty tuple

         if hinttype == "easy" or hinttype == "force-hint":
             force = hinttype == "force-hint"
@@ -2519,7 +2598,11 @@ class Britney(object):
         if init:
             # init => a hint (e.g. "easy") - so do the hint run
-            (nuninst_end, extra) = self.iter_packages(init, selected, hint=True, lundo=lundo)
+            nuninst_end = self.iter_packages_hint(selected, lundo=lundo)
+            if recurse:
+                # Ensure upgrade_me and selected do not overlap, if we
+                # follow-up with a recurse ("hint"-hint).
+                upgrade_me = [x for x in upgrade_me if x not in set(selected)]

         if recurse:
             # Either the main run or the recursive run of a "hint"-hint.
@@ -2537,7 +2620,14 @@ class Britney(object):
             self.output_write(eval_uninst(self.options.architectures,
                                           newly_uninst(nuninst_start, nuninst_end)))

-        if force or self.is_nuninst_asgood_generous(self.nuninst_orig, nuninst_end):
+        if not force:
+            break_arches = self.options.break_arches.split()
+            better = is_nuninst_asgood_generous(self.options.architectures,
+                                                self.nuninst_orig,
+                                                nuninst_end,
+                                                break_arches)
+
+        if better:
             # Result accepted either by force or by being better than the original result.
             if recurse:
                 self.output_write("Apparently successful\n")
@@ -2559,7 +2649,7 @@ class Britney(object):
             if recurse:
                 self.upgrade_me = sorted(extra)
             else:
-                self.upgrade_me = [x for x in self.upgrade_me if x not in selected]
+                self.upgrade_me = [x for x in self.upgrade_me if x not in set(selected)]
                 self.sort_actions()
         else:
             self.output_write("FAILED\n")
@@ -2952,10 +3042,11 @@ class Britney(object):
     def nuninst_arch_report(self, nuninst, arch):
         """Print a report of uninstallable packages for one architecture."""
-        all = {}
+        all = defaultdict(set)
         for p in nuninst[arch]:
             pkg = self.binaries['testing'][arch][0][p]
-            all.setdefault((pkg[SOURCE], pkg[SOURCEVER]), set()).add(p)
+            all[(pkg[SOURCE], pkg[SOURCEVER])].add(p)

         print '* %s' % (arch,)

britney_util.py

@@ -583,3 +583,24 @@ def old_libraries(sources, packages, same_source=same_source):
                 migration = "-" + "/".join((pkg_name, arch, pkg[SOURCEVER]))
                 removals.append(MigrationItem(migration))
     return removals
+
+
+def is_nuninst_asgood_generous(architectures, old, new, break_arches=frozenset()):
+    """Compares the nuninst counters to see if they improved
+
+    Given a list of architectures, the previous and the current nuninst
+    counters, this function determines if the current nuninst counter
+    is better than the previous one.  Optionally it also accepts a set
+    of "break_arches"; the nuninst counters for any architecture listed
+    in this set are completely ignored.
+
+    Returns True if the new nuninst counter is better than the
+    previous one.  Returns False otherwise.
+    """
+    diff = 0
+    for arch in architectures:
+        if arch in break_arches:
+            continue
+        diff = diff + (len(new[arch]) - len(old[arch]))
+    return diff <= 0
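
A small worked example of the helper added above, using made-up counters (and assuming the function is in scope, e.g. via "from britney_util import is_nuninst_asgood_generous"): one regression on amd64 is offset by two fixes on i386, and hurd-i386 is ignored because it is a break architecture.

    # Toy nuninst counters: architecture -> set of uninstallable binaries.
    old = {'amd64': {'foo'},        'i386': {'bar', 'baz'}, 'hurd-i386': set()}
    new = {'amd64': {'foo', 'qux'}, 'i386': set(),          'hurd-i386': {'quux'}}

    # +1 on amd64, -2 on i386, hurd-i386 ignored => diff = -1 <= 0 => accepted
    assert is_nuninst_asgood_generous(['amd64', 'i386', 'hurd-i386'],
                                      old, new, break_arches={'hurd-i386'})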

installability/builder.py

@@ -12,6 +12,7 @@
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.

+from collections import defaultdict
 from contextlib import contextmanager

 from britney_util import ifilter_except, iter_except
@@ -28,7 +29,7 @@ class _RelationBuilder(object):
         self._new_breaks = set(binary_data[1])

-    def add_dependency_clause(self, or_clause):
+    def add_dependency_clause(self, or_clause, frozenset=frozenset):
         """Add a dependency clause

         The clause must be a sequence of (name, version, architecture)
@@ -48,12 +49,12 @@ class _RelationBuilder(object):
         binary = self._binary
         itbuilder = self._itbuilder
         package_table = itbuilder._package_table
-        reverse_package_table = itbuilder._reverse_package_table
         okay = False
         for dep_tuple in clause:
             okay = True
-            reverse_relations = itbuilder._reverse_relations(dep_tuple)
-            reverse_relations[0].add(binary)
+            rdeps, _, rdep_relations = itbuilder._reverse_relations(dep_tuple)
+            rdeps.add(binary)
+            rdep_relations.add(clause)

         self._new_deps.add(clause)
         if not okay:
@@ -193,15 +194,17 @@ class InstallabilityTesterBuilder(object):
         if binary in self._reverse_package_table:
             return self._reverse_package_table[binary]
-        rel = [set(), set()]
+        rel = [set(), set(), set()]
         self._reverse_package_table[binary] = rel
         return rel

     def build(self):
-        # Merge reverse conflicts with conflicts - this saves some
-        # operations in _check_loop since we only have to check one
-        # set (instead of two) and we remove a few duplicates here
-        # and there.
+        """Compile the installability tester
+
+        This method will compile an installability tester from the
+        information given and (where possible) try to optimise a
+        few things.
+        """
         package_table = self._package_table
         reverse_package_table = self._reverse_package_table
         intern_set = self._intern_set
@@ -220,18 +223,26 @@ class InstallabilityTesterBuilder(object):
                     return False
             return True

+        # Merge reverse conflicts with conflicts - this saves some
+        # operations in _check_loop since we only have to check one
+        # set (instead of two) and we remove a few duplicates here
+        # and there.
+        #
+        # At the same time, intern the rdep sets
         for pkg in reverse_package_table:
             if pkg not in package_table:
                 raise RuntimeError("%s/%s/%s referenced but not added!" % pkg)
-            if not reverse_package_table[pkg][1]:
-                # no rconflicts - ignore
-                continue
             deps, con = package_table[pkg]
-            if not con:
-                con = intern_set(reverse_package_table[pkg][1])
-            else:
-                con = intern_set(con | reverse_package_table[pkg][1])
-            package_table[pkg] = (deps, con)
+            rdeps, rcon, rdep_relations = reverse_package_table[pkg]
+            if rcon:
+                if not con:
+                    con = intern_set(rcon)
+                else:
+                    con = intern_set(con | rcon)
+                package_table[pkg] = (deps, con)
+            reverse_package_table[pkg] = (intern_set(rdeps), con,
+                                          intern_set(rdep_relations))

         # Check if we can expand broken.
         for t in not_broken(iter_except(check.pop, KeyError)):
@@ -301,8 +312,88 @@ class InstallabilityTesterBuilder(object):
                     # add all rdeps (except those already in the safe_set)
                     check.update(reverse_package_table[pkg][0] - safe_set)

+        eqv_table = self._build_eqv_packages_table(package_table,
+                                                   reverse_package_table)
+
         return InstallabilitySolver(package_table,
                                     reverse_package_table,
                                     self._testing, self._broken,
-                                    self._essentials, safe_set)
+                                    self._essentials, safe_set,
+                                    eqv_table)
+
+    def _build_eqv_packages_table(self, package_table,
+                                  reverse_package_table,
+                                  frozenset=frozenset):
+        """Attempt to build a table of equivalent packages
+
+        This method attempts to create a table of packages that are
+        equivalent (in terms of installability).  If two packages (A
+        and B) are equivalent then testing the installability of A is
+        the same as testing the installability of B.  This equivalency
+        also applies to co-installability.
+
+        The example cases:
+
+         * aspell-*
+         * ispell-*
+
+        Cases that do *not* apply:
+
+         * MTA's
+
+        The theory:
+
+        The packages A and B are equivalent iff:
+
+          reverse_depends(A) == reverse_depends(B) AND
+                conflicts(A) == conflicts(B)       AND
+                  depends(A) == depends(B)
+
+        Where "reverse_depends(X)" is the set of reverse dependencies
+        of X, "conflicts(X)" is the set of negative dependencies of X
+        (Breaks and Conflicts plus the reverse ones of those combined)
+        and "depends(X)" is the set of strong dependencies of X
+        (Depends and Pre-Depends combined).
+
+        To be honest, we are actually equally interested in another
+        property as well, namely substitutability.  The package A can
+        always be used instead of B, iff:
+
+          reverse_depends(A) >= reverse_depends(B) AND
+                conflicts(A) <= conflicts(B)       AND
+                  depends(A) == depends(B)
+
+        (With the same definitions as above).  Note that equivalency
+        is just a special case of substitutability, where A and B can
+        substitute each other (i.e. a two-way substitution).
+
+        Finally, note that the "depends(A) == depends(B)" for
+        substitutability is actually not a strict requirement.  There
+        are cases where those sets are different without affecting the
+        property.
+        """
+        # Despite talking about substitutability, the method currently
+        # only finds the equivalence cases.  Let's leave
+        # substitutability for a future version.
+        find_eqv_table = defaultdict(list)
+        eqv_table = {}
+
+        for pkg in reverse_package_table:
+            rdeps = reverse_package_table[pkg][2]
+            if not rdeps:
+                # we don't care for things without rdeps (because
+                # it is not worth it)
+                continue
+            deps, con = package_table[pkg]
+            ekey = (deps, con, rdeps)
+            find_eqv_table[ekey].append(pkg)
+
+        for pkg_list in find_eqv_table.itervalues():
+            if len(pkg_list) < 2:
+                continue
+            eqv_set = frozenset(pkg_list)
+            for pkg in pkg_list:
+                eqv_table[pkg] = eqv_set
+
+        return eqv_table
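
To make the equivalence test above concrete, here is a self-contained sketch of the grouping step: packages whose (depends, conflicts, reverse-dependency-clause) signatures match end up in the same frozenset. The package names and signatures below are invented for the example.

    from collections import defaultdict

    # (name, version, arch) -> (depends, conflicts, rdep-clauses) signature
    signatures = {
        ('aspell-de', '1.0', 'amd64'): (frozenset({'aspell'}), frozenset(), frozenset({'dictionaries-common'})),
        ('aspell-fr', '1.0', 'amd64'): (frozenset({'aspell'}), frozenset(), frozenset({'dictionaries-common'})),
        ('postfix',   '2.0', 'amd64'): (frozenset(), frozenset({'mail-transport-agent'}), frozenset({'mutt'})),
    }

    find_eqv_table = defaultdict(list)
    for pkg, sig in signatures.items():
        find_eqv_table[sig].append(pkg)

    eqv_table = {}
    for pkg_list in find_eqv_table.values():
        if len(pkg_list) < 2:
            continue                     # singleton groups gain nothing
        eqv_set = frozenset(pkg_list)
        for pkg in pkg_list:
            eqv_table[pkg] = eqv_set

    # The two aspell dictionaries are interchangeable for installability testing.
    assert eqv_table[('aspell-de', '1.0', 'amd64')] == eqv_table[('aspell-fr', '1.0', 'amd64')]
    assert ('postfix', '2.0', 'amd64') not in eqv_table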

installability/solver.py

@@ -24,7 +24,7 @@ from britney_util import (ifilter_only, iter_except)
 class InstallabilitySolver(InstallabilityTester):

     def __init__(self, universe, revuniverse, testing, broken, essentials,
-                 safe_set):
+                 safe_set, eqv_table):
         """Create a new installability solver

         universe is a dict mapping package tuples to their
@@ -44,7 +44,7 @@ class InstallabilitySolver(InstallabilityTester):
           (simplifies caches and dependency checking)
         """
         InstallabilityTester.__init__(self, universe, revuniverse, testing,
-                                      broken, essentials, safe_set)
+                                      broken, essentials, safe_set, eqv_table)

     def solve_groups(self, groups):

installability/tester.py

@@ -20,7 +20,7 @@ from britney_util import iter_except
 class InstallabilityTester(object):

     def __init__(self, universe, revuniverse, testing, broken, essentials,
-                 safe_set):
+                 safe_set, eqv_table):
         """Create a new installability tester

         universe is a dict mapping package tuples to their
@@ -51,6 +51,7 @@ class InstallabilityTester(object):
         self._essentials = essentials
         self._revuniverse = revuniverse
         self._safe_set = safe_set
+        self._eqv_table = eqv_table

         # Cache of packages known to be broken - we deliberately do not
         # include "broken" in it.  See _optimize for more info.
@@ -80,11 +81,33 @@ class InstallabilityTester(object):
         check_inst = self._check_inst
         cbroken = self._cache_broken
         cache_inst = self._cache_inst
-        tcopy = [x for x in self._testing]
+        eqv_table = self._eqv_table
+        testing = self._testing
+        tcopy = [x for x in testing]
         for t in ifilterfalse(cache_inst.__contains__, tcopy):
             if t in cbroken:
                 continue
-            check_inst(t)
+            res = check_inst(t)
+            if t in eqv_table:
+                eqv = (x for x in eqv_table[t] if x in testing)
+                if res:
+                    cache_inst.update(eqv)
+                else:
+                    eqv_set = frozenset(eqv)
+                    testing -= eqv_set
+                    cbroken |= eqv_set
+
+    def are_equivalent(self, p1, p2):
+        """Test if p1 and p2 are equivalent
+
+        Returns True if p1 and p2 have the same "signature" in
+        the package dependency graph (i.e. relations cannot tell
+        them apart semantically except for their name)
+        """
+        eqv_table = self._eqv_table
+        return p1 in eqv_table and p2 in eqv_table[p1]

     def add_testing_binary(self, pkg_name, pkg_version, pkg_arch):
         """Add a binary package to "testing"
@@ -195,6 +218,7 @@ class InstallabilityTester(object):
         testing = self._testing
         cbroken = self._cache_broken
         safe_set = self._safe_set
+        eqv_table = self._eqv_table

         # Our installability verdict - start with "yes" and change if
         # prove otherwise.
@@ -235,8 +259,9 @@ class InstallabilityTester(object):
             never.update(ess_never)

         # curry check_loop
-        check_loop = partial(self._check_loop, universe, testing, musts,
-                             never, choices, cbroken)
+        check_loop = partial(self._check_loop, universe, testing,
+                             eqv_table, musts, never, choices,
+                             cbroken)

         # Useful things to remember:
@@ -258,7 +283,7 @@ class InstallabilityTester(object):
         #   of t via recursion (calls _check_inst).  In this case
         #   check and choices are not (always) empty.

-        def _pick_choice(rebuild):
+        def _pick_choice(rebuild, set=set, len=len):
             """Picks a choice from choices and updates rebuild.

             Prunes the choices and updates "rebuild" to reflect the
@@ -317,18 +342,55 @@ class InstallabilityTester(object):
             last = next(choice) # pick one to go last
             for p in choice:
                 musts_copy = musts.copy()
-                never_copy = never.copy()
-                choices_copy = choices.copy()
-                if self._check_inst(p, musts_copy, never_copy, choices_copy):
+                never_tmp = set()
+                choices_tmp = set()
+                check_tmp = set([p])
+                if not self._check_loop(universe, testing, eqv_table,
+                                        musts_copy, never_tmp,
+                                        choices_tmp, cbroken,
+                                        check_tmp):
+                    # p cannot be chosen/is broken (unlikely, but ...)
+                    continue
+
+                # Test if we can pick p without any consequences.
+                # - when we can, we avoid a backtrack point.
+                if never_tmp <= never and choices_tmp <= rebuild:
+                    # we can pick p without picking up new conflicts
+                    # or unresolved choices.  Therefore we commit to
+                    # using p.
+                    #
+                    # NB: Optimally, we would go to the start of this
+                    # routine, but to conserve stack-space, we return
+                    # and expect to be called again later.
+                    musts.update(musts_copy)
+                    return False
+
+                if not musts.isdisjoint(never_tmp):
+                    # If we pick p, we will definitely end up making
+                    # t uninstallable, so p is a no-go.
+                    continue
+
+                # We are not sure that p is safe, setup a backtrack
+                # point and recurse.
+                never_tmp |= never
+                choices_tmp |= rebuild
+                if self._check_inst(p, musts_copy, never_tmp,
+                                    choices_tmp):
+                    # Success, p was a valid choice and made it all
+                    # installable
                     return True
-                # If we get here, we failed to find something that would satisfy choice (without breaking
-                # the installability of t). This means p cannot be used to satisfy the dependencies, so
-                # pretend to conflict with it - hopefully it will reduce future choices.
+
+                # If we get here, we failed to find something that
+                # would satisfy choice (without breaking the
+                # installability of t).  This means p cannot be used
+                # to satisfy the dependencies, so pretend to conflict
+                # with it - hopefully it will reduce future choices.
                 never.add(p)

-            # Optimization for the last case; avoid the recursive call and just
-            # assume the last will lead to a solution. If it doesn't there is
-            # no solution and if it does, we don't have to back-track anyway.
+            # Optimization for the last case; avoid the recursive call
+            # and just assume the last will lead to a solution.  If it
+            # doesn't there is no solution and if it does, we don't
+            # have to back-track anyway.
             check.add(last)
             musts.add(last)
             return False
@@ -359,8 +421,9 @@ class InstallabilityTester(object):

         return verdict

-    def _check_loop(self, universe, testing, musts, never,
-                    choices, cbroken, check):
+    def _check_loop(self, universe, testing, eqv_table, musts, never,
+                    choices, cbroken, check, len=len,
+                    frozenset=frozenset):
         """Finds all guaranteed dependencies via "check".

         If it returns False, t is not installable.  If it returns True
@@ -368,8 +431,6 @@ class InstallabilityTester(object):
         returns True, then t is installable.
         """
         # Local variables for faster access...
-        l = len
-        fset = frozenset
         not_satisfied = partial(ifilter, musts.isdisjoint)

         # While we have guaranteed dependencies (in check), examine all
@@ -401,9 +462,9 @@ class InstallabilityTester(object):
                 #  - not in testing
                 #  - known to be broken (by cache)
                 #  - in never
-                candidates = fset((depgroup & testing) - never)
+                candidates = frozenset((depgroup & testing) - never)

-                if l(candidates) == 0:
+                if len(candidates) == 0:
                     # We got no candidates to satisfy it - this
                     # package cannot be installed with the current
                     # testing
@@ -413,21 +474,43 @@ class InstallabilityTester(object):
                         cbroken.add(cur)
                         testing.remove(cur)
                     return False
-                if l(candidates) == 1:
+                if len(candidates) == 1:
                     # only one possible solution to this choice and we
                     # haven't seen it before
                     check.update(candidates)
                     musts.update(candidates)
                 else:
+                    possible_eqv = set(x for x in candidates if x in eqv_table)
+                    if len(possible_eqv) > 1:
+                        # Exploit equivalency to reduce the number of
+                        # candidates if possible.  Basically, this
+                        # code maps "similar" candidates into a single
+                        # candidate that will give an identical result
+                        # to any other candidate it eliminates.
+                        #
+                        # See InstallabilityTesterBuilder's
+                        # _build_eqv_packages_table method for more
+                        # information on how this works.
+                        new_cand = set(x for x in candidates if x not in possible_eqv)
+                        for chosen in iter_except(possible_eqv.pop, KeyError):
+                            new_cand.add(chosen)
+                            possible_eqv -= eqv_table[chosen]
+                        if len(new_cand) == 1:
+                            check.update(new_cand)
+                            musts.update(new_cand)
+                            continue
+                        candidates = frozenset(new_cand)
                     # defer this choice till later
                     choices.add(candidates)
         return True

     def _get_min_pseudo_ess_set(self, arch):
         if arch not in self._cache_ess:
             # The minimal essential set cache is not present -
             # compute it now.
             testing = self._testing
+            eqv_table = self._eqv_table
             cbroken = self._cache_broken
             universe = self._universe
             safe_set = self._safe_set
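
A standalone sketch of the candidate-collapsing trick added in the hunk above: from every group of mutually equivalent candidates only one representative is kept, which can turn a deferred choice into a forced one. The data is invented; in britney the eqv_table comes from the builder.

    def reduce_candidates(candidates, eqv_table):
        """Collapse equivalent alternatives to one representative each (sketch)."""
        possible_eqv = set(x for x in candidates if x in eqv_table)
        if len(possible_eqv) < 2:
            return frozenset(candidates)
        new_cand = set(x for x in candidates if x not in possible_eqv)
        while possible_eqv:
            chosen = possible_eqv.pop()
            new_cand.add(chosen)
            possible_eqv -= eqv_table[chosen]   # drop everything equivalent to the pick
        return frozenset(new_cand)

    a = ('aspell-de', '1.0', 'amd64')
    b = ('aspell-fr', '1.0', 'amd64')
    c = ('ispell-nl', '1.0', 'amd64')
    eqv_table = {a: frozenset({a, b}), b: frozenset({a, b})}

    # The a/b pair collapses to a single candidate; c stays.
    assert len(reduce_candidates({a, b, c}, eqv_table)) == 2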
@@ -439,8 +522,9 @@ class InstallabilityTester(object):
             not_satisified = partial(ifilter, start.isdisjoint)

             while ess_base:
-                self._check_loop(universe, testing, start, ess_never,\
-                                 ess_choices, cbroken, ess_base)
+                self._check_loop(universe, testing, eqv_table,
+                                 start, ess_never, ess_choices,
+                                 cbroken, ess_base)
                 if ess_choices:
                     # Try to break choices where possible
                     nchoice = set()