From 580f4a7f3d6ad8aa88da996978ce64e80e146175 Mon Sep 17 00:00:00 2001
From: Niels Thykier
Date: Sun, 8 Jun 2014 17:05:23 +0200
Subject: [PATCH 01/10] Rewrite, rename and extend find_upgraded_binaries

Rename find_upgraded_binaries to _compute_groups.  The new method
will also compute which binaries will be updated in or added to
testing after migration.

Signed-off-by: Niels Thykier
---
 britney.py | 232 +++++++++++++++++++++++++++++++++++------------------
 1 file changed, 135 insertions(+), 97 deletions(-)

diff --git a/britney.py b/britney.py
index 336f623..8f21eeb 100755
--- a/britney.py
+++ b/britney.py
@@ -1116,10 +1116,10 @@ class Britney(object):
             if not (ssrc and suite != 'unstable'):
                 # for every binary package produced by this source in testing for this architecture
                 source_data = self.sources['testing'][src]
-                _, smoothbins = self.find_upgraded_binaries(src,
-                                                            source_data,
-                                                            arch,
-                                                            suite)
+                _, _, smoothbins = self._compute_groups(src,
+                                                        "unstable",
+                                                        arch,
+                                                        False)
 
                 for pkg in sorted(x.split("/")[0] for x in source_data[BINARIES] if x.endswith("/"+arch)):
                     # if the package is architecture-independent, then ignore it
@@ -1727,110 +1727,147 @@ class Britney(object):
 
         return diff <= 0
 
-    def find_upgraded_binaries(self, source_name, source_data,
-                               architecture, suite):
-        # XXX: not the best name - really.
-        """Find smooth and non-smooth updatable binaries for upgrades
+    def _compute_groups(self, source_name, suite, migration_architecture,
+                        is_removal, include_hijacked=False):
+        """Compute the groups of binaries being migrated by item
 
-        This method will compute the binaries that will be replaced in
-        testing and which of them are smooth updatable.
+        This method will compute the binaries that will be added to
+        or replaced in testing, and which of them are smooth updatable.
 
         Parameters:
         * "source_name" is the name of the source package, whose
           binaries are migrating.
-        * "source_data" is the fields of that source package from
-          testing.
-        * "architecture" is the architecture determines architecture of
-          the migrating binaries (can be "source" for a
-          "source"-migration, meaning all binaries regardless of
-          architecture).
         * "suite" is the suite from which the binaries are migrating.
+          [Same as item.suite, where available]
+        * "migration_architecture" is the architecture of the
+          migrating binaries (can be "source" for a
+          "source"-migration, meaning all binaries regardless of
+          architecture). [Same as item.architecture, where available]
+        * "is_removal" is a boolean determining whether this is a
+          removal. [Same as item.is_removal, where available]
+        * "include_hijacked" determines whether hijacked binaries
+          should be included in the results. (default: False)
+
+        Returns a tuple (adds, rms, smoothbins). "adds" is a set of
+        binaries that will be updated in or appear after the migration.
+        "rms" is a set of binaries that are not smooth-updatable (or
+        binaries that could be, but there is no reason to let them be
+        smooth updated). "smoothbins" is a set of binaries that are to
+        be smooth-updated.
+
+        Each "binary" in "adds", "rms" and "smoothbins" is a tuple
+        of ("package-name", "version", "architecture") and is
+        thus suitable for passing on to the
+        InstallabilityTester.
 
-        Returns a tuple (bins, smoothbins). "bins" is a set of binaries
-        that are not smooth-updatable (or binaries that could be, but
-        there is no reason to let them be smooth updated).
-        "smoothbins" is set of binaries that are to be smooth-updated
 
         Pre-Conditions: The source package must be in testing and
         this should only be used when considering to do an upgrade
         migration from the input suite. (e.g. do not use this for
         removals).
-        """
-        bins = set()
-        smoothbins = set()
-        check = []
 
+        Unlike doop_source, this will not modify any data structure.
+        """
+        # local copies for better performance
+        sources = self.sources
         binaries_t = self.binaries['testing']
 
-        # first, build a list of eligible binaries
-        for p in source_data[BINARIES]:
-            binary, parch = p.split("/")
-            if architecture != 'source':
-                # for a binary migration, binaries should not be removed:
-                # - unless they are for the correct architecture
-                if parch != architecture:
-                    continue
-                # - if they are arch:all and the migration is via *pu,
-                #   as the packages will not have been rebuilt and the
-                #   source suite will not contain them
-                if binaries_t[parch][0][binary][ARCHITECTURE] == 'all' and \
-                   suite != 'unstable':
+
+        adds = set()
+        rms = set()
+        smoothbins = {}
+
+        # remove all binary packages (if the source already exists)
+        if migration_architecture == 'source' or not is_removal:
+            if source_name in sources['testing']:
+                source_data = sources['testing'][source_name]
+
+                bins = []
+                check = {}
+                # remove all the binaries
+
+                # first, build a list of eligible binaries
+                for p in source_data[BINARIES]:
+                    binary, parch = p.split("/")
+                    if (migration_architecture != 'source'
+                        and parch != migration_architecture):
+                        continue
+
+                    if (not include_hijacked
+                        and binaries_t[parch][0][binary][SOURCE] != source_name):
+                        continue
+
+                    bins.append(p)
+
+                for p in bins:
+                    binary, parch = p.split("/")
+                    # if a smooth update is possible for the package, skip it
+                    if suite == 'unstable' and \
+                       binary not in self.binaries[suite][parch][0] and \
+                       ('ALL' in self.options.smooth_updates or \
+                        binaries_t[parch][0][binary][SECTION] in self.options.smooth_updates):
+
+                        # if the package has reverse-dependencies which are
+                        # built from other sources, it's a valid candidate for
+                        # a smooth update.  if not, it may still be a valid
+                        # candidate if one of its r-deps is itself a candidate,
+                        # so note it for checking later
+                        bin_data = binaries_t[parch][0][binary]
+                        rdeps = bin_data[RDEPENDS]
+
+                        # the list of reverse-dependencies may be outdated
+                        # if, for example, we're processing a hint and
+                        # a new version of one of the apparent reverse-dependencies
+                        # migrated earlier in the hint.  walk the list to make
+                        # sure that at least one of the entries is still
+                        # valid
+                        rrdeps = [x for x in rdeps if x not in [y.split("/")[0] for y in bins]]
+                        if rrdeps:
+                            for dep in rrdeps:
+                                if dep in binaries_t[parch][0]:
+                                    bin = binaries_t[parch][0][dep]
+                                    deps = []
+                                    if bin[DEPENDS] is not None:
+                                        deps.extend(apt_pkg.parse_depends(bin[DEPENDS], False))
+                                    if any(binary == entry[0] for deplist in deps for entry in deplist):
+                                        smoothbins[p] = (binary, bin_data[VERSION], parch)
+                                        break
+                        else:
+                            check[p] = (binary, bin_data[VERSION], parch)
+
+                # check whether we should perform a smooth update for
+                # packages which are candidates but do not have r-deps
+                # outside of the current source
+                for p in check:
+                    binary, _, parch = check[p]
+                    rdeps = [ bin for bin in binaries_t[parch][0][binary][RDEPENDS] \
+                              if bin in [y[0] for y in smoothbins.itervalues()] ]
+                    if rdeps:
+                        smoothbins.add(check[p])
+
+                # remove all the binaries which aren't being smooth updated
+                for p in ( bin for bin in bins if bin not in smoothbins ):
+                    binary, parch = p.split("/")
+                    version = binaries_t[parch][0][binary][VERSION]
+                    rms.add((binary, version, parch))
+
+        # single binary removal; used for clearing up after smooth
+        # updates but not supported as a manual hint
+        elif source_name in binaries_t[item.architecture][0]:
+            version = binaries_t[item.architecture][0][source_name][VERSION]
+            rms.add((source_name, version, migration_architecture))
+
+        # add the new binary packages (if we are not removing)
+        if not is_removal:
+            source_data = sources[suite][source_name]
+            for p in source_data[BINARIES]:
+                binary, parch = p.split("/")
+                if migration_architecture not in ['source', parch]:
                     continue
-                # do not remove binaries which have been hijacked by other sources
-                if binaries_t[parch][0][binary][SOURCE] != source_name:
-                    continue
-            bins.add(p)
-
-        if suite != 'unstable':
-            # We only allow smooth updates from unstable, so if it we
-            # are not migrating from unstable just exit now.
-            return (bins, smoothbins)
-
-        for p in bins:
-            binary, parch = p.split("/")
-            # if a smooth update is possible for the package, skip it
-            if binary not in self.binaries[suite][parch][0] and \
-               ('ALL' in self.options.smooth_updates or \
-                binaries_t[parch][0][binary][SECTION] in self.options.smooth_updates):
-
-                # if the package has reverse-dependencies which are
-                # built from other sources, it's a valid candidate for
-                # a smooth update. if not, it may still be a valid
-                # candidate if one if its r-deps is itself a candidate,
-                # so note it for checking later
-                rdeps = binaries_t[parch][0][binary][RDEPENDS]
-
-                # the list of reverse-dependencies may be outdated
-                # if, for example, we're processing a hint and
-                # a new version of one of the apparent reverse-dependencies
-                # migrated earlier in the hint.  walk the list to make
-                # sure that at least one of the entries is still
-                # valid
-                rrdeps = [x for x in rdeps if x not in [y.split("/")[0] for y in bins]]
-                if rrdeps:
-                    for dep in rrdeps:
-                        if dep in binaries_t[parch][0]:
-                            bin = binaries_t[parch][0][dep]
-                            deps = []
-                            if bin[DEPENDS] is not None:
-                                deps.extend(apt_pkg.parse_depends(bin[DEPENDS], False))
-                            if any(binary == entry[0] for deplist in deps for entry in deplist):
-                                smoothbins.add(p)
-                                break
-                else:
-                    check.append(p)
+                version = self.binaries[suite][parch][0][binary][VERSION]
+                adds.add((binary, version, parch))
 
-        # check whether we should perform a smooth update for
-        # packages which are candidates but do not have r-deps
-        # outside of the current source
-        for p in check:
-            binary, parch = p.split("/")
-            if any(bin for bin in binaries_t[parch][0][binary][RDEPENDS] \
-                   if bin in [y.split("/")[0] for y in smoothbins]):
-                smoothbins.add(p)
+        return (adds, rms, set(smoothbins.itervalues()))
 
-        bins -= smoothbins
-        return (bins, smoothbins)
 
     def doop_source(self, item, hint_undo=[]):
         """Apply a change to the testing distribution as requested by `pkg`
@@ -1858,14 +1895,15 @@ class Britney(object):
 
         if item.package in sources['testing']:
             source = sources['testing'][item.package]
 
-            bins, _ = self.find_upgraded_binaries(item.package,
-                                                  source,
-                                                  item.architecture,
-                                                  item.suite)
+            _, bins, _ = self._compute_groups(item.package,
+                                              item.suite,
+                                              item.architecture,
+                                              item.is_removal)
 
             # remove all the binaries which aren't being smooth updated
-            for p in bins:
-                binary, parch = p.split("/")
+            for bin_data in bins:
+                binary, _, parch = bin_data
+                p = binary + "/" + parch
                 # save the old binary for undo
                 undo['binaries'][p] = binaries[parch][0][binary]
                 # all the reverse dependencies are affected by the change

From ebc9af70fafffda61cfd681905dfec3b61151497 Mon Sep 17 00:00:00 2001
From: Niels Thykier
Date: Sun, 8 Jun 2014 17:44:44 +0200
Subject: [PATCH 02/10] Handle smooth updates better in "easy"-hints

Avoid smooth-updating libraries in hints when all of their reverse
dependencies will certainly disappear in the same hint.

Note that with a "hint"-hint, reverse dependencies removed in the
following "full run" will not cause the smooth-updated library to be
removed.  Instead, these will still be removed at the end as usual,
but in some cases that is too late.

Signed-off-by: Niels Thykier
---
 britney.py | 46 +++++++++++++++++++++++++++++++++++++++++-----
 1 file changed, 41 insertions(+), 5 deletions(-)

diff --git a/britney.py b/britney.py
index 8f21eeb..e89f022 100755
--- a/britney.py
+++ b/britney.py
@@ -1728,7 +1728,9 @@ class Britney(object):
 
     def _compute_groups(self, source_name, suite, migration_architecture,
-                        is_removal, include_hijacked=False):
+                        is_removal, include_hijacked=False,
+                        allow_smooth_updates=True,
+                        removals=frozenset()):
         """Compute the groups of binaries being migrated by item
 
         This method will compute the binaries that will be added to
         or replaced in testing, and which of them are smooth updatable.
@@ -1747,6 +1749,17 @@ class Britney(object):
           removal. [Same as item.is_removal, where available]
         * "include_hijacked" determines whether hijacked binaries
           should be included in the results. (default: False)
+        * "allow_smooth_updates" is a boolean determining whether
+          smooth updates are permitted in this migration.  When set
+          to False, the "smoothbins" return value will always be the
+          empty set.  Any value that would have been there will now
+          be in "rms" instead. (default: True)
+        * "removals" is a set of binaries that are assumed to be
+          removed at the same time as this migration (e.g. in
+          the same "easy"-hint).  This may affect whether some
+          binaries are smooth updated. (default: empty set)
+          - Binaries must be given as ("package-name", "version",
+            "architecture") tuples.
 
         Returns a tuple (adds, rms, smoothbins). "adds" is a set of
         binaries that will be updated in or appear after the migration.
@@ -1801,7 +1814,7 @@ class Britney(object):
                 for p in bins:
                     binary, parch = p.split("/")
                     # if a smooth update is possible for the package, skip it
-                    if suite == 'unstable' and \
+                    if allow_smooth_updates and suite == 'unstable' and \
                        binary not in self.binaries[suite][parch][0] and \
                        ('ALL' in self.options.smooth_updates or \
                         binaries_t[parch][0][binary][SECTION] in self.options.smooth_updates):
@@ -1826,6 +1839,14 @@ class Britney(object):
                                 if dep in binaries_t[parch][0]:
                                     bin = binaries_t[parch][0][dep]
                                     deps = []
+                                    # If the package is being removed
+                                    # together with dep, then that is
+                                    # not a reason to smooth update
+                                    # the binary
+                                    t = (dep, bin[VERSION], parch)
+                                    if t in removals:
+                                        continue
+
                                     if bin[DEPENDS] is not None:
                                         deps.extend(apt_pkg.parse_depends(bin[DEPENDS], False))
                                     if any(binary == entry[0] for deplist in deps for entry in deplist):
@@ -1869,12 +1890,18 @@ class Britney(object):
 
         return (adds, rms, set(smoothbins.itervalues()))
 
-    def doop_source(self, item, hint_undo=[]):
+    def doop_source(self, item, hint_undo=[], removals=frozenset()):
         """Apply a change to the testing distribution as requested by `pkg`
 
         An optional list of undo actions related to packages processed
         earlier in a hint may be passed in `hint_undo`.
 
+        An optional set of binaries may be passed in "removals". Binaries listed
+        in this set will be assumed to be removed at the same time as the "item"
+        migrates.  This may change what binaries will be smooth-updated.
+        - Binaries in this set must be ("package-name", "version", "architecture")
+          tuples.
+
         This method applies the changes required by the action `item`
         tracking them so it will be possible to revert them.
 
@@ -1898,7 +1925,8 @@ class Britney(object):
             _, bins, _ = self._compute_groups(item.package,
                                               item.suite,
                                               item.architecture,
-                                              item.is_removal)
+                                              item.is_removal,
+                                              removals=removals)
 
             # remove all the binaries which aren't being smooth updated
             for bin_data in bins:
@@ -2074,8 +2102,16 @@ class Britney(object):
         # pre-process a hint batch
         pre_process = {}
         if selected and hint:
+            removals = set()
+            for item in selected:
+                _, rms, _ = self._compute_groups(item.package, item.suite,
+                                                 item.architecture,
+                                                 item.is_removal,
+                                                 allow_smooth_updates=False)
+                removals.update(rms)
             for package in selected:
-                pkg, affected, undo = self.doop_source(package)
+                pkg, affected, undo = self.doop_source(package,
+                                                       removals=removals)
                 pre_process[package] = (pkg, affected, undo)
 
         if lundo is None:

From c0409c1b04a772d65f35e562b17ba7bb68e9cf8d Mon Sep 17 00:00:00 2001
From: Niels Thykier
Date: Sun, 8 Jun 2014 17:51:50 +0200
Subject: [PATCH 03/10] Add auto hinter on top of the new installability
 tester

The "new" auto hinter relies on a partial ordering to determine when
things can migrate (and what needs to migrate at the same time).  At
the same time, it leverages "_compute_groups" to allow it to include
"removals" in its hints.
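
As an illustration (the package names and versions here are
hypothetical), a library transition plus the removal of its old
binary package can now end up in a single hint of the form:

    easy libbar/1.2-3 -libbar0/1.1-4

instead of leaving the removal for a later stage.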
Signed-off-by: Niels Thykier
---
 britney.py                |  26 +++-
 installability/builder.py |   6 +-
 installability/solver.py  | 299 ++++++++++++++++++++++++++++++++++++++
 3 files changed, 327 insertions(+), 4 deletions(-)
 create mode 100644 installability/solver.py

diff --git a/britney.py b/britney.py
index e89f022..f88f77b 100755
--- a/britney.py
+++ b/britney.py
@@ -2604,10 +2604,34 @@ class Britney(object):
         excuses relationships. If they build a circular dependency, which we already
         know as not-working with the standard do_all algorithm, try to `easy` them.
         """
-        self.__log("> Processing hints from the auto hinter", type="I")
+        self.__log("> Processing hints from the auto hinter [Partial-ordering]",
+                   type="I")
         # consider only excuses which are valid candidates
         excuses = dict((x.name, x) for x in self.excuses if x.name in [y.uvname for y in self.upgrade_me])
 
+        sources_t = self.sources['testing']
+
+        groups = set()
+        for y in sorted((y for y in self.upgrade_me if y.uvname in excuses), key=attrgetter('uvname')):
+            if y.is_removal and y.uvname not in sources_t:
+                # Already removed
+                continue
+            if not y.is_removal:
+                excuse = excuses[y.uvname]
+                if y.architecture == 'source' and y.uvname in sources_t and sources_t[y.uvname][VERSION] == excuse.ver[1]:
+                    # Already migrated
+                    continue
+            adds, rms, _ = self._compute_groups(y.package, y.suite,
+                                                y.architecture, y.is_removal,
+                                                include_hijacked=True)
+            groups.add((y, frozenset(adds), frozenset(rms)))
+
+        for comp in self._inst_tester.solve_groups(groups):
+            if len(comp) > 1:
+                self.do_hint("easy", "autohinter", [ MigrationItem("%s/%s" % (x.uvname, excuses[x.uvname].ver[1])) for x in comp])
+
+        self.__log("> Processing hints from the auto hinter [Original]",
+                   type="I")
+
         def find_related(e, hint, circular_first=False):
             if e not in excuses:

diff --git a/installability/builder.py b/installability/builder.py
index 7ee0845..10b78eb 100644
--- a/installability/builder.py
+++ b/installability/builder.py
@@ -15,7 +15,7 @@
 from contextlib import contextmanager
 
 from britney_util import ifilter_except, iter_except
-from installability.tester import InstallabilityTester
+from installability.solver import InstallabilitySolver
 
 class _RelationBuilder(object):
     """Private helper class to "build" relations"""
@@ -302,7 +302,7 @@ class InstallabilityTesterBuilder(object):
 
                         check.update(reverse_package_table[pkg][0] - safe_set)
 
-        return InstallabilityTester(package_table,
-                                    frozenset(reverse_package_table),
+        return InstallabilitySolver(package_table,
+                                    reverse_package_table,
                                     self._testing, self._broken,
                                     self._essentials, safe_set)
diff --git a/installability/solver.py b/installability/solver.py
new file mode 100644
index 0000000..be2fd93
--- /dev/null
+++ b/installability/solver.py
@@ -0,0 +1,299 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2012 Niels Thykier
+# - Includes code by Paul Harrison
+#   (http://www.logarithmic.net/pfh-files/blog/01208083168/sort.py)
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+from functools import partial
+import os
+
+from installability.tester import InstallabilityTester
+from britney_util import (ifilter_only, iter_except)
+
+
+class InstallabilitySolver(InstallabilityTester):
+
+    def __init__(self, universe, revuniverse, testing, broken, essentials,
+                 safe_set):
+        """Create a new installability solver
+
+        universe is a dict mapping package tuples to their
+        dependencies and conflicts.
+
+        revuniverse is a dict mapping package tuples to their reverse
+        dependencies and reverse conflicts.
+
+        testing is a (mutable) set of package tuples that determines
+        which of the packages in universe are currently in testing.
+
+        broken is a (mutable) set of package tuples that are known to
+        be uninstallable.
+
+        Package tuple: (pkg_name, pkg_version, pkg_arch)
+        - NB: arch:all packages are "re-mapped" to the given architecture.
+          (simplifies caches and dependency checking)
+        """
+        InstallabilityTester.__init__(self, universe, revuniverse, testing,
+                                      broken, essentials, safe_set)
+
+
+    def solve_groups(self, groups):
+        sat_in_testing = self._testing.isdisjoint
+        universe = self._universe
+        revuniverse = self._revuniverse
+        result = []
+        emitted = set()
+        check = set()
+        order = {}
+        ptable = {}
+        key2item = {}
+        going_out = set()
+        going_in = set()
+        debug_solver = 0
+
+        try:
+            debug_solver = int(os.environ.get('BRITNEY_DEBUG', '0'))
+        except:
+            pass
+
+        # Build the tables
+        for (item, adds, rms) in groups:
+            key = str(item)
+            key2item[key] = item
+            order[key] = {'before': set(), 'after': set()}
+            going_in.update(adds)
+            going_out.update(rms)
+            for a in adds:
+                ptable[a] = key
+            for r in rms:
+                ptable[r] = key
+
+        # This large loop will add ordering constraints on each "item"
+        # that migrates, based on various rules.
+        for (item, adds, rms) in groups:
+            key = str(item)
+            oldcons = set()
+            newcons = set()
+            for r in rms:
+                oldcons.update(universe[r][1])
+            for a in adds:
+                newcons.update(universe[a][1])
+            current = newcons & oldcons
+            oldcons -= current
+            newcons -= current
+            if oldcons:
+                # Some of the old binaries have "conflicts" that will
+                # be removed.
+                for o in ifilter_only(ptable, oldcons):
+                    # "key" removes a conflict with one of
+                    # "other"'s binaries, so it is probably a good
+                    # idea to migrate "key" before "other"
+                    other = ptable[o]
+                    if other == key:
+                        # "Self-conflicts" => ignore
+                        continue
+                    if debug_solver and other not in order[key]['before']:
+                        print "N: Conflict induced order: %s before %s" % (key, other)
+                    order[key]['before'].add(other)
+                    order[other]['after'].add(key)
+
+            for r in ifilter_only(revuniverse, rms):
+                # The binaries have reverse dependencies in testing;
+                # check if we can/should migrate them first.
+                for rdep in revuniverse[r][0]:
+                    for depgroup in universe[rdep][0]:
+                        rigid = depgroup - going_out
+                        if not sat_in_testing(rigid):
+                            # (partly) satisfied by testing, assume it is okay
+                            continue
+                        if rdep in ptable:
+                            other = ptable[rdep]
+                            if other == key:
+                                # "Self-dependency" => ignore
+                                continue
+                            if debug_solver and other not in order[key]['after']:
+                                print "N: Removal induced order: %s before %s" % (key, other)
+                            order[key]['after'].add(other)
+                            order[other]['before'].add(key)
+
+            for a in adds:
+                # Check if this item should migrate before others
+                # (e.g. because they depend on a new [version of a]
+                # binary provided by this item).
+                for depgroup in universe[a][0]:
+                    rigid = depgroup - going_out
+                    if not sat_in_testing(rigid):
+                        # (partly) satisfied by testing, assume it is okay
+                        continue
+                    # okay - we got three cases now.
+                    # - "swap" (replace existing binary with a newer version)
+                    # - "addition" (add new binary without removing any)
+                    # - "removal" (remove binary without providing a new)
+                    #
+                    # The problem is that only the two latter require
+                    # an ordering.  A "swap" (in itself) should not
+                    # affect us.
+                    other_adds = set()
+                    other_rms = set()
+                    for d in ifilter_only(ptable, depgroup):
+                        if d in going_in:
+                            # "other" provides something "key" needs,
+                            # schedule accordingly.
+                            other = ptable[d]
+                            other_adds.add(other)
+                        else:
+                            # "other" removes something "key" needs,
+                            # schedule accordingly.
+                            other = ptable[d]
+                            other_rms.add(other)
+
+                    for other in (other_adds - other_rms):
+                        if debug_solver and other != key and other not in order[key]['after']:
+                            print "N: Dependency induced order (add): %s before %s" % (key, other)
+                        order[key]['after'].add(other)
+                        order[other]['before'].add(key)
+
+                    for other in (other_rms - other_adds):
+                        if debug_solver and other != key and other not in order[key]['before']:
+                            print "N: Dependency induced order (remove): %s before %s" % (key, other)
+                        order[key]['before'].add(other)
+                        order[other]['after'].add(key)
+
+        ### MILESTONE: Partial-order constraints computed ###
+
+        # At this point, we have computed all the partial-order
+        # constraints needed.  Some of these may have created strongly
+        # connected components (SCCs) [of size 2 or greater], which
+        # represent a group of items that (we believe) must migrate
+        # together.
+        #
+        # Each one of those components will become an "easy" hint.
+
+        comps = self._compute_scc(order, ptable)
+        merged = {}
+        scc = {}
+        # Now that we have the SCCs (in comps), we select one item from
+        # each SCC to represent the group and become an ID for that
+        # SCC.
+        # * scc[scc_id] => All the items in that SCC
+        # * merged[item] => The ID of the SCC to which the item belongs.
+        #
+        # We also "repair" the ordering, so we know in which order the
+        # hints should be emitted.
+        for com in comps:
+            scc_id = com[0]
+            scc[scc_id] = com
+            merged[scc_id] = scc_id
+            if len(com) > 1:
+                so_before = order[scc_id]['before']
+                so_after = order[scc_id]['after']
+                for n in com:
+                    if n == scc_id:
+                        continue
+                    so_before.update(order[n]['before'])
+                    so_after.update(order[n]['after'])
+                    merged[n] = scc_id
+                    del order[n]
+                if debug_solver:
+                    print "N: SCC: %s -- %s" % (scc_id, str(sorted(com)))
+
+        for com in comps:
+            node = com[0]
+            nbefore = set(merged[b] for b in order[node]['before'])
+            nafter = set(merged[b] for b in order[node]['after'])
+
+            # Drop self-relations (usually caused by the merging)
+            nbefore.discard(node)
+            nafter.discard(node)
+            order[node]['before'] = nbefore
+            order[node]['after'] = nafter
+
+
+        if debug_solver:
+            print "N: -- PARTIAL ORDER --"
+
+        for com in sorted(order):
+            if debug_solver and order[com]['before']:
+                print "N: %s <= %s" % (com, str(sorted(order[com]['before'])))
+            if not order[com]['after']:
+                # This component can be scheduled immediately, add it
+                # to "check"
+                check.add(com)
+            elif debug_solver:
+                print "N: %s >= %s" % (com, str(sorted(order[com]['after'])))
+
+        if debug_solver:
+            print "N: -- END PARTIAL ORDER --"
+            print "N: -- LINEARIZED ORDER --"
+
+        for cur in iter_except(check.pop, KeyError):
+            if order[cur]['after'] <= emitted:
+                # This item is ready to be emitted right now
+                if debug_solver:
+                    print "N: %s -- %s" % (cur, sorted(scc[cur]))
+                emitted.add(cur)
+                result.append([key2item[x] for x in scc[cur]])
+                if order[cur]['before']:
+                    # There are components that come after this one.
+                    # Add it to "check":
+                    # - if it is ready, it will be emitted.
+                    # - else, it will be dropped and re-added later.
+                    check.update(order[cur]['before'] - emitted)
+
+        if debug_solver:
+            print "N: -- END LINEARIZED ORDER --"
+
+        return result
+
+
+    def _compute_scc(self, order, ptable):
+        """
+        Tarjan's algorithm and topological sorting implementation in Python
+
+        Find the strongly connected components in a graph using
+        Tarjan's algorithm.
+
+        by Paul Harrison
+
+        Public domain, do with it as you will
+        """
+
+        result = [ ]
+        stack = [ ]
+        low = { }
+
+        def visit(node):
+            if node in low:
+                return
+
+            num = len(low)
+            low[node] = num
+            stack_pos = len(stack)
+            stack.append(node)
+
+            for successor in order[node]['before']:
+                visit(successor)
+                low[node] = min(low[node], low[successor])
+
+            if num == low[node]:
+                component = tuple(stack[stack_pos:])
+                del stack[stack_pos:]
+                result.append(component)
+                for item in component:
+                    low[item] = len(ptable)
+
+        for node in order:
+            visit(node)
+
+        return result

From 271fb11b33e273cec0f3bed5d3ab014205799606 Mon Sep 17 00:00:00 2001
From: Niels Thykier
Date: Sun, 15 Jun 2014 16:56:17 +0200
Subject: [PATCH 04/10] britney.py: Fix bug with hinting removals

Fix a bug in the auto hinter that could make it think that sources
had already been removed.

Signed-off-by: Niels Thykier
---
 britney.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/britney.py b/britney.py
index f88f77b..aa807a5 100755
--- a/britney.py
+++ b/britney.py
@@ -2613,7 +2613,7 @@ class Britney(object):
 
         groups = set()
         for y in sorted((y for y in self.upgrade_me if y.uvname in excuses), key=attrgetter('uvname')):
-            if y.is_removal and y.uvname not in sources_t:
+            if y.is_removal and y.package not in sources_t:
                 # Already removed
                 continue
             if not y.is_removal:

From 8d4753530ce1c3b06343e5437f2633bad4de960e Mon Sep 17 00:00:00 2001
From: Niels Thykier
Date: Sun, 15 Jun 2014 18:20:22 +0200
Subject: [PATCH 05/10] Britney: Fix malformed removal hints from auto hinter

Signed-off-by: Niels Thykier
---
 britney.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/britney.py b/britney.py
index aa807a5..ffd560b 100755
--- a/britney.py
+++ b/britney.py
@@ -2628,7 +2628,7 @@ class Britney(object):
 
         for comp in self._inst_tester.solve_groups(groups):
             if len(comp) > 1:
-                self.do_hint("easy", "autohinter", [ MigrationItem("%s/%s" % (x.uvname, excuses[x.uvname].ver[1])) for x in comp])
+                self.do_hint("easy", "autohinter", comp)
 
         self.__log("> Processing hints from the auto hinter [Original]",
                    type="I")

From 34d1a6c653bd3d38c7c83fde34088e069cd5b8da Mon Sep 17 00:00:00 2001
From: Niels Thykier
Date: Sun, 15 Jun 2014 18:20:23 +0200
Subject: [PATCH 06/10] Solver: Add more (optional) debugging output

Signed-off-by: Niels Thykier
---
 installability/solver.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/installability/solver.py b/installability/solver.py
index be2fd93..cc5acee 100644
--- a/installability/solver.py
+++ b/installability/solver.py
@@ -78,6 +78,9 @@ class InstallabilitySolver(InstallabilityTester):
             for r in rms:
                 ptable[r] = key
 
+        if debug_solver > 1:
+            self._dump_groups(groups)
+
         # This large loop will add ordering constraints on each "item"
         # that migrates, based on various rules.
         for (item, adds, rms) in groups:
             key = str(item)
             oldcons = set()
@@ -297,3 +300,9 @@ class InstallabilitySolver(InstallabilityTester):
             visit(node)
 
         return result
+
+    def _dump_groups(self, groups):
+        print "N: === Groups ==="
+        for (item, adds, rms) in groups:
+            print "N: %s => A: %s, R: %s" % (str(item), str(adds), str(rms))
+        print "N: === END Groups ==="

From 28389f730edee2c13c267f1c21add5b01201190f Mon Sep 17 00:00:00 2001
From: "Adam D. Barratt"
Date: Sun, 15 Jun 2014 17:18:25 +0000
Subject: [PATCH 07/10] Remove an obsolete comment

The C library is no longer used, so stop suggesting changing it when
there are issues.

Signed-off-by: Adam D. Barratt
---
 britney.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/britney.py b/britney.py
index ffd560b..b229542 100755
--- a/britney.py
+++ b/britney.py
@@ -273,8 +273,6 @@ class Britney(object):
             return
 
         # read the source and binary packages for the involved distributions
-        # if this takes a very long time, try increasing SIZEOFHASHMAP in
-        # lib/dpkg.c and rebuilding
         if 'testing' not in self.sources:
             self.sources['testing'] = self.read_sources(self.options.testing)
         self.sources['unstable'] = self.read_sources(self.options.unstable)

From 9b24a1d4e7c68c2f858ab1d47cee88ddf818127d Mon Sep 17 00:00:00 2001
From: Niels Thykier
Date: Sun, 15 Jun 2014 21:57:24 +0200
Subject: [PATCH 08/10] britney.py: Fix use of undefined variable

A currently dead branch in _compute_groups uses an undefined local
variable.

Signed-off-by: Niels Thykier
---
 britney.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/britney.py b/britney.py
index b229542..07850a0 100755
--- a/britney.py
+++ b/britney.py
@@ -1871,8 +1871,8 @@ class Britney(object):
 
         # single binary removal; used for clearing up after smooth
        # updates but not supported as a manual hint
-        elif source_name in binaries_t[item.architecture][0]:
-            version = binaries_t[item.architecture][0][source_name][VERSION]
+        elif source_name in binaries_t[migration_architecture][0]:
+            version = binaries_t[migration_architecture][0][source_name][VERSION]
             rms.add((source_name, version, migration_architecture))

From ad84f50d1bcf8f826b1698c2bb0cd5574c9f856f Mon Sep 17 00:00:00 2001
From: Niels Thykier
Date: Sun, 13 Jul 2014 10:01:14 +0200
Subject: [PATCH 09/10] britney.py: dict does not have a .add (in
 _compute_groups)

Signed-off-by: Niels Thykier
---
 britney.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/britney.py b/britney.py
index 07850a0..42ace95 100755
--- a/britney.py
+++ b/britney.py
@@ -1857,11 +1857,12 @@ class Britney(object):
                 # check whether we should perform a smooth update for
                 # packages which are candidates but do not have r-deps
                 # outside of the current source
                 for p in check:
-                    binary, _, parch = check[p]
+                    ptuple = check[p]
+                    binary, _, parch = ptuple
                     rdeps = [ bin for bin in binaries_t[parch][0][binary][RDEPENDS] \
                               if bin in [y[0] for y in smoothbins.itervalues()] ]
                     if rdeps:
-                        smoothbins.add(check[p])
+                        smoothbins[p] = ptuple
 
                 # remove all the binaries which aren't being smooth updated

From 5e7417f4164a55a4c472350d7d076cd9ea57b0f2 Mon Sep 17 00:00:00 2001
From: Niels Thykier
Date: Sun, 13 Jul 2014 10:01:14 +0200
Subject: [PATCH 10/10] britney.py: _compute_groups returns sets of tuples,
 not str

Fix a call site to use a tuple instead of a str when testing whether
a package has been smooth-updated.
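
Concretely, "smoothbins" now contains (name, version, architecture)
tuples, so the membership test has to build the same shape from the
package's own fields:

    name = (pkg, tpkg_data[VERSION], tpkg_data[ARCHITECTURE])
    if name not in smoothbins:
        anyworthdoing = True

rather than the old "pkg/arch" string form.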
Signed-off-by: Niels Thykier --- britney.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/britney.py b/britney.py index 42ace95..d0fcdd0 100755 --- a/britney.py +++ b/britney.py @@ -1136,7 +1136,7 @@ class Britney(object): # it "interesting" on its own. This case happens quite often with smooth updatable # packages, where the old binary "survives" a full run because it still has # reverse dependencies. - name = pkg + "/" + tpkg_data[ARCHITECTURE] + name = (pkg, tpkg_data[VERSION], tpkg_data[ARCHITECTURE]) if name not in smoothbins: anyworthdoing = True
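
The strongly-connected-component detection used by the new solver can
be exercised in isolation.  The following standalone sketch (the item
names and the toy graph are invented for illustration) mirrors the
public-domain Tarjan implementation in
InstallabilitySolver._compute_scc:

    # Toy partial order in the shape solve_groups builds:
    # 'a' and 'b' form a cycle (they must migrate together),
    # while 'c' depends on the a/b group migrating first.
    order = {
        'a': {'before': set(['b']), 'after': set(['c'])},
        'b': {'before': set(['a']), 'after': set()},
        'c': {'before': set(['a']), 'after': set()},
    }

    def compute_scc(order):
        # Tarjan's algorithm, as in InstallabilitySolver._compute_scc;
        # finished nodes get a sentinel "low" value so they are never
        # merged into a later component.
        result = []
        stack = []
        low = {}

        def visit(node):
            if node in low:
                return
            num = len(low)
            low[node] = num
            stack_pos = len(stack)
            stack.append(node)
            for successor in order[node]['before']:
                visit(successor)
                low[node] = min(low[node], low[successor])
            if num == low[node]:
                component = tuple(stack[stack_pos:])
                del stack[stack_pos:]
                result.append(component)
                for item in component:
                    low[item] = len(order)

        for node in order:
            visit(node)
        return result

    print compute_scc(order)
    # => [('a', 'b'), ('c',)] -- 'a' and 'b' collapse into a single
    # component (one "easy" hint); 'c' is emitted on its own.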