mirror of https://git.launchpad.net/~ubuntu-release/britney/+git/britney2-ubuntu
synced 2025-06-06 15:21:38 +00:00

commit 3f0fc5a940: merge trunk up to 2013-07-10

britney.py (648 lines changed)
@@ -181,7 +181,6 @@ does for the generation of the update excuses.
 """
 
 import os
-import re
 import sys
 import string
 import time
@@ -191,7 +190,6 @@ import urllib
 import apt_pkg
 
 from functools import reduce, partial
-from itertools import chain, repeat
 from operator import attrgetter
 
 if __name__ == '__main__':
@@ -213,34 +211,18 @@ from excuse import Excuse
 from migrationitem import MigrationItem, HintItem
 from hints import HintCollection
 from britney import buildSystem
+from britney_util import (old_libraries_format, same_source, undo_changes,
+                          register_reverses, compute_reverse_tree,
+                          read_nuninst, write_nuninst, write_heidi,
+                          eval_uninst, newly_uninst)
+from consts import (VERSION, SECTION, BINARIES, MAINTAINER, FAKESRC,
+                    SOURCE, SOURCEVER, ARCHITECTURE, DEPENDS, CONFLICTS,
+                    PROVIDES, RDEPENDS, RCONFLICTS)
 from autopkgtest import AutoPackageTest
 
 __author__ = 'Fabio Tranchitella and the Debian Release Team'
 __version__ = '2.0'
 
-# source package
-VERSION = 0
-SECTION = 1
-BINARIES = 2
-MAINTAINER = 3
-FAKESRC = 4
-
-# binary package
-SOURCE = 2
-SOURCEVER = 3
-ARCHITECTURE = 4
-MULTIARCH = 5
-# PREDEPENDS = 6 - No longer used by the python code
-#  - The C-code needs it for alignment reasons and still check it
-#    but ignore it if it is None (so keep it None).
-DEPENDS = 7
-CONFLICTS = 8
-PROVIDES = 9
-RDEPENDS = 10
-RCONFLICTS = 11
-
 
 class Britney(object):
     """Britney, the Debian testing updater script
 
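Note: the constant block removed above now lives in consts.py, shared by
britney.py and britney_util.py, so both modules index package tuples with the
same offsets. A minimal sketch of the convention, using made-up package data:

    from consts import VERSION, SECTION, BINARIES

    # a source package is stored as a plain tuple, indexed by the offsets
    src = ("1.0-1", "utils", ["hello/amd64", "hello/i386"],
           "Jane Doe <jane@example.org>", False)
    assert src[VERSION] == "1.0-1"
    assert src[SECTION] == "utils"
    for entry in src[BINARIES]:
        name, arch = entry.split("/")  # binaries are tracked as "name/arch"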
@@ -294,7 +276,7 @@ class Britney(object):
                 if self.options.print_uninst:
                     self.nuninst_arch_report(nuninst, arch)
             if not self.options.print_uninst:
-                self.write_nuninst(nuninst)
+                write_nuninst(self.options.noninst_status, nuninst)
         else:
             self.__log("Not building the list of non-installable packages, as requested", type="I")
 
@@ -595,53 +577,11 @@ class Britney(object):
                 packages[pkg] = dpkg
 
         # loop again on the list of packages to register reverse dependencies and conflicts
-        register_reverses = self.register_reverses
-        for pkg in packages:
-            register_reverses(pkg, packages, provides, check_doubles=False)
+        register_reverses(packages, provides, check_doubles=False)
 
         # return a tuple with the list of real and virtual packages
         return (packages, provides)
 
-    def register_reverses(self, pkg, packages, provides, check_doubles=True, parse_depends=apt_pkg.parse_depends):
-        """Register reverse dependencies and conflicts for the specified package
-
-        This method registers the reverse dependencies and conflicts for
-        a given package using `packages` as the list of packages and `provides`
-        as the list of virtual packages.
-
-        The method has an optional parameter parse_depends which is there
-        just for performance reasons and is not meant to be overwritten.
-        """
-        # register the list of the dependencies for the depending packages
-        dependencies = []
-        if packages[pkg][DEPENDS]:
-            dependencies.extend(parse_depends(packages[pkg][DEPENDS], False))
-        # go through the list
-        for p in dependencies:
-            for a in p:
-                # register real packages
-                if a[0] in packages and (not check_doubles or pkg not in packages[a[0]][RDEPENDS]):
-                    packages[a[0]][RDEPENDS].append(pkg)
-                # also register packages which provide the package (if any)
-                if a[0] in provides:
-                    for i in provides.get(a[0]):
-                        if i not in packages: continue
-                        if not check_doubles or pkg not in packages[i][RDEPENDS]:
-                            packages[i][RDEPENDS].append(pkg)
-        # register the list of the conflicts for the conflicting packages
-        if packages[pkg][CONFLICTS]:
-            for p in parse_depends(packages[pkg][CONFLICTS], False):
-                for a in p:
-                    # register real packages
-                    if a[0] in packages and (not check_doubles or pkg not in packages[a[0]][RCONFLICTS]):
-                        packages[a[0]][RCONFLICTS].append(pkg)
-                    # also register packages which provide the package (if any)
-                    if a[0] in provides:
-                        for i in provides[a[0]]:
-                            if i not in packages: continue
-                            if not check_doubles or pkg not in packages[i][RCONFLICTS]:
-                                packages[i][RCONFLICTS].append(pkg)
-
     def merge_sources(self, source, target):
         """Merge sources from `source' into partial suite `target'."""
         source_sources = self.sources[source]
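Note: register_reverses is now a module-level helper in britney_util, whose
body does not appear in this diff. A sketch of its likely shape, reconstructed
from the removed method above and from the call sites in this commit (the
plain call here and the iterator= keyword used later in doop_source); it is
not the verbatim britney_util code:

    import apt_pkg
    from consts import DEPENDS, CONFLICTS, RDEPENDS, RCONFLICTS

    def register_reverses(packages, provides, check_doubles=True,
                          iterator=None, parse_depends=apt_pkg.parse_depends):
        if iterator is None:
            iterator = iter(packages)
        for pkg in iterator:
            # reverse dependencies, as in the removed per-package method
            if packages[pkg][DEPENDS]:
                for block in parse_depends(packages[pkg][DEPENDS], False):
                    for alt in block:
                        if alt[0] in packages and (not check_doubles or
                                pkg not in packages[alt[0]][RDEPENDS]):
                            packages[alt[0]][RDEPENDS].append(pkg)
                        for prov in provides.get(alt[0], []):
                            if prov not in packages:
                                continue
                            if not check_doubles or pkg not in packages[prov][RDEPENDS]:
                                packages[prov][RDEPENDS].append(pkg)
            # reverse conflicts, same pattern
            if packages[pkg][CONFLICTS]:
                for block in parse_depends(packages[pkg][CONFLICTS], False):
                    for alt in block:
                        if alt[0] in packages and (not check_doubles or
                                pkg not in packages[alt[0]][RCONFLICTS]):
                            packages[alt[0]][RCONFLICTS].append(pkg)
                        for prov in provides.get(alt[0], []):
                            if prov not in packages:
                                continue
                            if not check_doubles or pkg not in packages[prov][RCONFLICTS]:
                                packages[prov][RCONFLICTS].append(pkg)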
@@ -704,10 +644,8 @@ class Britney(object):
         for pkg, value in target_binaries.items():
             value[RDEPENDS] = []
             value[RCONFLICTS] = []
-        register_reverses = self.register_reverses
-        for pkg in target_binaries:
-            register_reverses(
-                pkg, target_binaries, target_provides, check_doubles=False)
+        register_reverses(
+            target_binaries, target_provides, check_doubles=False)
 
     def read_bugs(self, basedir):
         """Read the release critial bug summary from the specified directory
@@ -738,21 +676,6 @@ class Britney(object):
             self.__log("%s missing; skipping bug-based processing" % filename)
         return bugs
 
-    def write_bugs(self, basedir, bugs):
-        """Write the release critical bug summary to the specified directory
-
-        For a more detailed explanation of the format, please check the method
-        read_bugs.
-        """
-        filename = os.path.join(basedir, "BugsV")
-        self.__log("Writing RC bugs data to %s" % filename)
-        f = open(filename, 'w')
-        for pkg in sorted(bugs.keys()):
-            if not bugs[pkg]:
-                continue
-            f.write("%s %s\n" % (pkg, ','.join(bugs[pkg])))
-        f.close()
-
     def __maxver(self, pkg, dist):
         """Return the maximum version for a given package name
 
@@ -970,39 +893,6 @@ class Britney(object):
 
         return hints
 
-    def write_heidi(self, filename):
-        """Write the output HeidiResult
-
-        This method write the output for Heidi, which contains all the
-        binary packages and the source packages in the form:
-
-        <pkg-name> <pkg-version> <pkg-architecture> <pkg-section>
-        <src-name> <src-version> source <src-section>
-        """
-        self.__log("Writing Heidi results to %s" % filename)
-        f = open(filename, 'w')
-
-        # local copies
-        sources = self.sources['testing']
-
-        # write binary packages
-        for arch in sorted(self.options.architectures):
-            binaries = self.binaries['testing'][arch][0]
-            for pkg_name in sorted(binaries):
-                pkg = binaries[pkg_name]
-                pkgv = pkg[VERSION]
-                pkgarch = pkg[ARCHITECTURE] or 'all'
-                pkgsec = pkg[SECTION] or 'faux'
-                f.write('%s %s %s %s\n' % (pkg_name, pkgv, pkgarch, pkgsec))
-
-        # write sources
-        for src_name in sorted(sources):
-            src = sources[src_name]
-            srcv = src[VERSION]
-            srcsec = src[SECTION] or 'unknown'
-            f.write('%s %s source %s\n' % (src_name, srcv, srcsec))
-
-        f.close()
 
     def write_delta(self, filename):
         """Write the output delta
@@ -1082,52 +972,10 @@ class Britney(object):
             f.write(output + "\n")
         f.close()
 
-    def write_nuninst(self, nuninst):
-        """Write the non-installable report"""
-        f = open(self.options.noninst_status, 'w')
-        f.write("Built on: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n")
-        f.write("Last update: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n\n")
-        f.write("".join([k + ": " + " ".join(nuninst[k]) + "\n" for k in nuninst]))
-        f.close()
-
-    def read_nuninst(self):
-        """Read the non-installable report"""
-        f = open(self.options.noninst_status)
-        nuninst = {}
-        for r in f:
-            if ":" not in r: continue
-            arch, packages = r.strip().split(":", 1)
-            if arch.split("+", 1)[0] in self.options.architectures:
-                nuninst[arch] = set(packages.split())
-        return nuninst
-
-
     # Utility methods for package analysis
     # ------------------------------------
 
-    def same_source(self, sv1, sv2):
-        """Check if two version numbers are built from the same source
-
-        This method returns a boolean value which is true if the two
-        version numbers specified as parameters are built from the same
-        source. The main use of this code is to detect binary-NMU.
-        """
-        if sv1 == sv2:
-            return 1
-
-        if sv1 is None or sv2 is None:
-            return 0
-
-        m = re.match(r'^(.*)\+b\d+$', sv1)
-        if m: sv1 = m.group(1)
-        m = re.match(r'^(.*)\+b\d+$', sv2)
-        if m: sv2 = m.group(1)
-
-        if sv1 == sv2:
-            return 1
-
-        return 0
-
     def get_dependency_solvers(self, block, arch, distribution):
         """Find the packages which satisfy a dependency block
 
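Note: write_nuninst/read_nuninst (removed above, now in britney_util) exchange
a plain-text status file: two timestamp header lines, then one
"arch: pkg pkg ..." line per architecture, including the "+all"
pseudo-architectures that the reader filters with arch.split("+", 1)[0].
Illustrative contents with hypothetical package names:

    Built on: 2013.07.10 12:00:00 +0000
    Last update: 2013.07.10 12:00:00 +0000

    amd64: broken-pkg1 broken-pkg2
    amd64+all: broken-pkg3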
@@ -1239,7 +1087,7 @@ class Britney(object):
         present in the unstable distribution anymore.
 
         It returns True if the package can be removed, False otherwise.
-        In the former case, a new excuse is appended to the the object
+        In the former case, a new excuse is appended to the object
         attribute excuses.
         """
         # if the source package is available in unstable, then do nothing
@@ -1264,18 +1112,18 @@ class Britney(object):
             self.excuses.append(excuse)
             return True
 
-    def should_upgrade_srcarch(self, src, arch, suite):
-        """Check if a binary package should be upgraded
+    def should_upgrade_srcarch(self, src, arch, suite, same_source=same_source):
+        """Check if a set of binary packages should be upgraded
 
-        This method checks if a binary package should be upgraded; this can
-        happen also if the binary package is a binary-NMU for the given arch.
-        The analysis is performed for the source package specified by the
-        `src' parameter, checking the architecture `arch' for the distribution
-        `suite'.
+        This method checks if the binary packages produced by the source
+        package on the given architecture should be upgraded; this can
+        happen also if the migration is a binary-NMU for the given arch.
 
-        It returns False if the given package doesn't need to be upgraded,
+        It returns False if the given packages don't need to be upgraded,
         True otherwise. In the former case, a new excuse is appended to
-        the the object attribute excuses.
+        the object attribute excuses.
+
+        same_source is an optimization to avoid "load global".
         """
         # retrieve the source packages for testing and suite
         source_t = self.sources['testing'][src]
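Note: the same_source=same_source default argument (used here and in several
methods below) binds the module-level helper into the function at definition
time, so the body resolves it as a fast local instead of repeating a global
lookup on every call; this is the "load global" avoidance the docstring
mentions. A self-contained illustration of the idiom, not britney code:

    import timeit

    def is_even(n):
        return n % 2 == 0

    def count_slow(xs):
        return sum(1 for x in xs if is_even(x))      # LOAD_GLOBAL on each use

    def count_fast(xs, is_even=is_even):
        return sum(1 for x in xs if is_even(x))      # LOAD_FAST on each use

    xs = list(range(100000))
    print(timeit.timeit(lambda: count_slow(xs), number=100))
    print(timeit.timeit(lambda: count_fast(xs), number=100))  # slightly faster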
@@ -1290,7 +1138,9 @@ class Britney(object):
 
         # if there is a `remove' hint and the requested version is the same as the
         # version in testing, then stop here and return False
-        for hint in [ x for x in self.hints.search('remove', package=src) if self.same_source(source_t[VERSION], x.version) ]:
+        # (as a side effect, a removal may generate such excuses for both the source
+        # package and its binary packages on each architecture)
+        for hint in [ x for x in self.hints.search('remove', package=src) if same_source(source_t[VERSION], x.version) ]:
             excuse.addhtml("Removal request by %s" % (hint.user))
             excuse.addhtml("Trying to remove package, not update it")
             excuse.addhtml("Not considered")
@@ -1318,14 +1168,16 @@ class Britney(object):
                 continue
 
             # if the new binary package is not from the same source as the testing one, then skip it
-            if not self.same_source(source_t[VERSION], pkgsv):
+            # this implies that this binary migration is part of a source migration
+            if not same_source(source_t[VERSION], pkgsv):
                 if binary_t is None or binary_t[VERSION] != binary_u[VERSION]:
                     anywrongver = True
                     excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u[VERSION], pkgsv, source_t[VERSION]))
                     break
 
             # if the source package has been updated in unstable and this is a binary migration, skip it
-            if self.same_source(source_t[VERSION], pkgsv) and source_t[VERSION] != source_u[VERSION]:
+            # (the binaries are now out-of-date)
+            if same_source(source_t[VERSION], pkgsv) and source_t[VERSION] != source_u[VERSION]:
                 anywrongver = True
                 excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u[VERSION], pkgsv, source_u[VERSION]))
                 break
@@ -1358,21 +1210,37 @@ class Britney(object):
         # package is not fake, then check what packages should be removed
         if not anywrongver and (anyworthdoing or not self.sources[suite][src][FAKESRC]):
             srcv = self.sources[suite][src][VERSION]
-            ssrc = self.same_source(source_t[VERSION], srcv)
+            ssrc = same_source(source_t[VERSION], srcv)
             # if this is a binary-only migration via *pu, we never want to try
             # removing binary packages
             if not (ssrc and suite != 'unstable'):
                 # for every binary package produced by this source in testing for this architecture
-                for pkg in sorted([x.split("/")[0] for x in self.sources['testing'][src][BINARIES] if x.endswith("/"+arch)]):
+                source_data = self.sources['testing'][src]
+                _, smoothbins = self.find_upgraded_binaries(src,
+                                                            source_data,
+                                                            arch,
+                                                            suite)
+
+                for pkg in sorted([x.split("/")[0] for x in source_data[BINARIES] if x.endswith("/"+arch)]):
                     # if the package is architecture-independent, then ignore it
-                    if self.binaries['testing'][arch][0][pkg][ARCHITECTURE] == 'all':
+                    tpkg_data = self.binaries['testing'][arch][0][pkg]
+                    if tpkg_data[ARCHITECTURE] == 'all':
                         excuse.addhtml("Ignoring removal of %s as it is arch: all" % (pkg))
                         continue
                     # if the package is not produced by the new source package, then remove it from testing
                     if pkg not in self.binaries[suite][arch][0]:
-                        tpkgv = self.binaries['testing'][arch][0][pkg][VERSION]
-                        excuse.addhtml("Removed binary: %s %s" % (pkg, tpkgv))
-                        if ssrc: anyworthdoing = True
+                        excuse.addhtml("Removed binary: %s %s" % (pkg, tpkg_data[VERSION]))
+                        # the removed binary is only interesting if this is a binary-only migration,
+                        # as otherwise the updated source will already cause the binary packages
+                        # to be updated
+                        if ssrc:
+                            # Special-case, if the binary is a candidate for smooth update, we do not consider
+                            # it "interesting" on its own.  This case happens quite often with smooth updatable
+                            # packages, where the old binary "survives" a full run because it still has
+                            # reverse dependencies.
+                            name = pkg + "/" + tpkg_data[ARCHITECTURE]
+                            if name not in smoothbins:
+                                anyworthdoing = True
 
         # if there is nothing wrong and there is something worth doing, this is a valid candidate
         if not anywrongver and anyworthdoing:
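Note: smoothbins holds "name/architecture" strings, which is why the loop
above rebuilds the key as pkg + "/" + tpkg_data[ARCHITECTURE] before the
membership test. A tiny sketch of the convention, with hypothetical names:

    smoothbins = {"libfoo1/amd64", "libfoo1/i386"}

    def is_smooth_updatable(pkg, arch):
        return pkg + "/" + arch in smoothbins

    assert is_smooth_updatable("libfoo1", "amd64")
    assert not is_smooth_updatable("libbar2", "amd64")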
@@ -1387,7 +1255,7 @@ class Britney(object):
         # otherwise, return False
         return False
 
-    def should_upgrade_src(self, src, suite):
+    def should_upgrade_src(self, src, suite, same_source=same_source):
         """Check if source package should be upgraded
 
         This method checks if a source package should be upgraded. The analysis
@@ -1397,6 +1265,8 @@ class Britney(object):
         It returns False if the given package doesn't need to be upgraded,
         True otherwise. In the former case, a new excuse is appended to
         the object attribute excuses.
+
+        same_source is an opt to avoid "load global".
         """
 
         # retrieve the source packages for testing (if available) and suite
@@ -1441,8 +1311,8 @@ class Britney(object):
         # if there is a `remove' hint and the requested version is the same as the
         # version in testing, then stop here and return False
         for item in self.hints.search('remove', package=src):
-            if source_t and self.same_source(source_t[VERSION], item.version) or \
-               self.same_source(source_u[VERSION], item.version):
+            if source_t and same_source(source_t[VERSION], item.version) or \
+               same_source(source_u[VERSION], item.version):
                 excuse.addhtml("Removal request by %s" % (item.user))
                 excuse.addhtml("Trying to remove package, not update it")
                 update_candidate = False
@@ -1467,7 +1337,7 @@ class Britney(object):
             unblock_cmd = "un" + block_cmd
             unblocks = self.hints.search(unblock_cmd, package=src)
 
-            if unblocks and unblocks[0].version is not None and self.same_source(unblocks[0].version, source_u[VERSION]):
+            if unblocks and unblocks[0].version is not None and same_source(unblocks[0].version, source_u[VERSION]):
                 if suite == 'unstable' or block_cmd == 'block-udeb':
                     excuse.addhtml("Ignoring %s request by %s, due to %s request by %s" %
                                    (block_cmd, blocked[block_cmd].user, unblock_cmd, unblocks[0].user))
@@ -1495,14 +1365,14 @@ class Britney(object):
         if suite == 'unstable' and self.dates is not None:
             if src not in self.dates:
                 self.dates[src] = (source_u[VERSION], self.date_now)
-            elif not self.same_source(self.dates[src][0], source_u[VERSION]):
+            elif not same_source(self.dates[src][0], source_u[VERSION]):
                 self.dates[src] = (source_u[VERSION], self.date_now)
 
             days_old = self.date_now - self.dates[src][1]
             min_days = self.MINDAYS[urgency]
 
             for age_days_hint in [ x for x in self.hints.search('age-days', package=src) if \
-               self.same_source(source_u[VERSION], x.version) ]:
+               same_source(source_u[VERSION], x.version) ]:
                 excuse.addhtml("Overriding age needed from %d days to %d by %s" % (min_days,
                     int(age_days_hint.days), age_days_hint.user))
                 min_days = int(age_days_hint.days)
@@ -1510,7 +1380,7 @@ class Britney(object):
             excuse.setdaysold(days_old, min_days)
             if days_old < min_days:
                 urgent_hints = [ x for x in self.hints.search('urgent', package=src) if \
-                   self.same_source(source_u[VERSION], x.version) ]
+                   same_source(source_u[VERSION], x.version) ]
                 if urgent_hints:
                     excuse.addhtml("Too young, but urgency pushed by %s" % (urgent_hints[0].user))
                 else:
@@ -1569,7 +1439,7 @@ class Britney(object):
                 pkgsv = binary_u[SOURCEVER]
 
                 # if it wasn't built by the same source, it is out-of-date
-                if not self.same_source(source_u[VERSION], pkgsv):
+                if not same_source(source_u[VERSION], pkgsv):
                     if pkgsv not in oodbins:
                         oodbins[pkgsv] = []
                     oodbins[pkgsv].append(pkg)
@@ -1653,7 +1523,7 @@ class Britney(object):
                     "though it fixes more than it introduces, whine at debian-release)" % pkg)
 
         # check if there is a `force' hint for this package, which allows it to go in even if it is not updateable
-        forces = [ x for x in self.hints.search('force', package=src) if self.same_source(source_u[VERSION], x.version) ]
+        forces = [ x for x in self.hints.search('force', package=src) if same_source(source_u[VERSION], x.version) ]
         if forces:
             excuse.dontinvalidate = True
         if not update_candidate and forces:
@@ -1728,12 +1598,14 @@ class Britney(object):
                     exclookup[x].is_valid = False
             i = i + 1
 
-    def write_excuses(self):
+    def write_excuses(self, same_source=same_source):
         """Produce and write the update excuses
 
         This method handles the update excuses generation: the packages are
         looked at to determine whether they are valid candidates. For the details
         of this procedure, please refer to the module docstring.
+
+        same_source is an opt to avoid "load global".
         """
 
         self.__log("Update Excuses generation started", type="I")
@@ -1793,7 +1665,7 @@ class Britney(object):
 
             # check if the version specified in the hint is the same as the considered package
             tsrcv = sources['testing'][src][VERSION]
-            if not self.same_source(tsrcv, item.version): continue
+            if not same_source(tsrcv, item.version): continue
 
             # add the removal of the package to upgrade_me and build a new excuse
             upgrade_me.append("-%s" % (src))
@@ -1858,7 +1730,7 @@ class Britney(object):
                              self.hints.search('force', package=adtsrc))
                 forces = [
                     x for x in hints
-                    if self.same_source(adtver, x.version) ]
+                    if same_source(adtver, x.version) ]
                 if forces:
                     e.addhtml(
                         "Should wait for %s %s test, but forced by "
@@ -1870,7 +1742,7 @@ class Britney(object):
                 hints.extend(self.hints.search('force', package=e.name))
                 forces = [
                     x for x in hints
-                    if self.same_source(e.ver[1], x.version) ]
+                    if same_source(e.ver[1], x.version) ]
                 if forces:
                     e.addhtml(
                         "Should wait for tests relating to %s %s, but "
@@ -1939,20 +1811,6 @@ class Britney(object):
     # Upgrade run
     # -----------
 
-    def newlyuninst(self, nuold, nunew):
-        """Return a nuninst statstic with only new uninstallable packages
-
-        This method subtracts the uninstallable packages of the statistic
-        `nunew` from the statistic `nuold`.
-
-        It returns a dictionary with the architectures as keys and the list
-        of uninstallable packages as values.
-        """
-        res = {}
-        for arch in nuold:
-            if arch not in nunew: continue
-            res[arch] = [x for x in nunew[arch] if x not in nuold[arch]]
-        return res
-
     def get_nuninst(self, requested_arch=None, build=False):
         """Return the uninstallability statistic for all the architectures
@@ -1965,10 +1823,13 @@ class Britney(object):
 
         It returns a dictionary with the architectures as keys and the list
         of uninstallable packages as values.
+
+        NB: If build is False, requested_arch is ignored.
         """
         # if we are not asked to build the nuninst, read it from the cache
         if not build:
-            return self.read_nuninst()
+            return read_nuninst(self.options.noninst_status,
+                                self.options.architectures)
 
         nuninst = {}
 
@@ -2033,20 +1894,6 @@ class Britney(object):
             res.append("%s-%d" % (arch[0], n))
         return "%d+%d: %s" % (total, totalbreak, ":".join(res))
 
-    def eval_uninst(self, nuninst):
-        """Return a string which represents the uninstallable packages
-
-        This method returns a string which represents the uninstallable
-        packages reading the uninstallability statistics `nuninst`.
-
-        An example of the output string is:
-         * i386: broken-pkg1, broken-pkg2
-        """
-        parts = []
-        for arch in self.options.architectures:
-            if arch in nuninst and len(nuninst[arch]) > 0:
-                parts.append(" * %s: %s\n" % (arch,", ".join(sorted(nuninst[arch]))))
-        return "".join(parts)
-
     def is_nuninst_asgood_generous(self, old, new):
         diff = 0
@@ -2056,6 +1903,111 @@ class Britney(object):
         return diff <= 0
 
 
+    def find_upgraded_binaries(self, source_name, source_data,
+                               architecture, suite):
+        # XXX: not the best name - really.
+        """Find smooth and non-smooth updatable binaries for upgrades
+
+        This method will compute the binaries that will be replaced in
+        testing and which of them are smooth updatable.
+
+        Parameters:
+        * "source_name" is the name of the source package, whose
+          binaries are migrating.
+        * "source_data" is the fields of that source package from
+          testing.
+        * "architecture" is the architecture determines architecture of
+          the migrating binaries (can be "source" for a
+          "source"-migration, meaning all binaries regardless of
+          architecture).
+        * "suite" is the suite from which the binaries are migrating.
+
+        Returns a tuple (bins, smoothbins).  "bins" is a set of binaries
+        that are not smooth-updatable (or binaries that could be, but
+        there is no reason to let them be smooth updated).
+        "smoothbins" is set of binaries that are to be smooth-updated
+
+        Pre-Conditions: The source package must be in testing and this
+        should only be used when considering to do an upgrade
+        migration from the input suite.  (e.g. do not use this for
+        removals).
+        """
+        bins = set()
+        smoothbins = set()
+        check = []
+
+        binaries_t = self.binaries['testing']
+        # first, build a list of eligible binaries
+        for p in source_data[BINARIES]:
+            binary, parch = p.split("/")
+            if architecture != 'source':
+                # for a binary migration, binaries should not be removed:
+                # - unless they are for the correct architecture
+                if parch != architecture:
+                    continue
+                # - if they are arch:all and the migration is via *pu,
+                #   as the packages will not have been rebuilt and the
+                #   source suite will not contain them
+                if binaries_t[parch][0][binary][ARCHITECTURE] == 'all' and \
+                   suite != 'unstable':
+                    continue
+            # do not remove binaries which have been hijacked by other sources
+            if binaries_t[parch][0][binary][SOURCE] != source_name:
+                continue
+            bins.add(p)
+
+        if suite != 'unstable':
+            # We only allow smooth updates from unstable, so if it we
+            # are not migrating from unstable just exit now.
+            return (bins, smoothbins)
+
+        for p in bins:
+            binary, parch = p.split("/")
+            # if a smooth update is possible for the package, skip it
+            if binary not in self.binaries[suite][parch][0] and \
+               ('ALL' in self.options.smooth_updates or \
+                binaries_t[parch][0][binary][SECTION] in self.options.smooth_updates):
+
+                # if the package has reverse-dependencies which are
+                # built from other sources, it's a valid candidate for
+                # a smooth update.  if not, it may still be a valid
+                # candidate if one if its r-deps is itself a candidate,
+                # so note it for checking later
+                rdeps = binaries_t[parch][0][binary][RDEPENDS]
+
+                # the list of reverse-dependencies may be outdated
+                # if, for example, we're processing a hint and
+                # a new version of one of the apparent reverse-dependencies
+                # migrated earlier in the hint.  walk the list to make
+                # sure that at least one of the entries is still
+                # valid
+                rrdeps = [x for x in rdeps if x not in [y.split("/")[0] for y in bins]]
+                if rrdeps:
+                    for dep in rrdeps:
+                        if dep in binaries_t[parch][0]:
+                            bin = binaries_t[parch][0][dep]
+                            deps = []
+                            if bin[DEPENDS] is not None:
+                                deps.extend(apt_pkg.parse_depends(bin[DEPENDS], False))
+                            if any(binary == entry[0] for deplist in deps for entry in deplist):
+                                smoothbins.add(p)
+                                break
+                else:
+                    check.append(p)
+
+
+        # check whether we should perform a smooth update for
+        # packages which are candidates but do not have r-deps
+        # outside of the current source
+        for p in check:
+            binary, parch = p.split("/")
+            if any(bin for bin in binaries_t[parch][0][binary][RDEPENDS] \
+                   if bin in [y.split("/")[0] for y in smoothbins]):
+                smoothbins.add(p)
+
+        bins -= smoothbins
+        return (bins, smoothbins)
+
     def doop_source(self, item, hint_undo=[]):
         """Apply a change to the testing distribution as requested by `pkg`
 
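Note: the core rule of find_upgraded_binaries is that an old binary may stay
in testing (be "smooth updated") only if some reverse-dependency built from a
different source still needs it. A simplified standalone model of that first
pass, with made-up data; the real method additionally verifies the dependency
via apt_pkg.parse_depends and handles candidates whose only r-deps are other
candidates:

    def select_smoothbins(bins, rdeps_of):
        smoothbins = set()
        names = {p.split("/")[0] for p in bins}
        for p in bins:
            binary = p.split("/")[0]
            # r-deps outside the migrating set keep the old binary alive
            if any(r not in names for r in rdeps_of.get(binary, [])):
                smoothbins.add(p)
        return smoothbins

    bins = {"libfoo1/amd64", "foo-utils/amd64"}
    rdeps_of = {"libfoo1": ["bar"], "foo-utils": []}  # bar built from src:bar
    print(select_smoothbins(bins, rdeps_of))          # {'libfoo1/amd64'}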
@@ -2076,85 +2028,24 @@ class Britney(object):
         # local copies for better performances
         sources = self.sources
         binaries = self.binaries['testing']
+        get_reverse_tree = partial(compute_reverse_tree, self.binaries["testing"])
         # remove all binary packages (if the source already exists)
         if item.architecture == 'source' or not item.is_removal:
             if item.package in sources['testing']:
                 source = sources['testing'][item.package]
 
-                bins = []
-                check = []
-                smoothbins = []
-
-                # remove all the binaries
-
-                # first, build a list of eligible binaries
-                for p in source[BINARIES]:
-                    binary, parch = p.split("/")
-                    if item.architecture != 'source':
-                        # for a binary migration, binaries should not be removed:
-                        # - unless they are for the correct architecture
-                        if parch != item.architecture: continue
-                        # - if they are arch:all and the migration is via *pu,
-                        #   as the packages will not have been rebuilt and the
-                        #   source suite will not contain them
-                        if binaries[parch][0][binary][ARCHITECTURE] == 'all' and \
-                           item.suite != 'unstable':
-                            continue
-                        # do not remove binaries which have been hijacked by other sources
-                        if binaries[parch][0][binary][SOURCE] != item.package: continue
-                    bins.append(p)
-
-                for p in bins:
-                    binary, parch = p.split("/")
-                    # if a smooth update is possible for the package, skip it
-                    if item.suite == 'unstable' and \
-                       binary not in self.binaries[item.suite][parch][0] and \
-                       ('ALL' in self.options.smooth_updates or \
-                        binaries[parch][0][binary][SECTION] in self.options.smooth_updates):
-
-                        # if the package has reverse-dependencies which are
-                        # built from other sources, it's a valid candidate for
-                        # a smooth update.  if not, it may still be a valid
-                        # candidate if one if its r-deps is itself a candidate,
-                        # so note it for checking later
-                        rdeps = binaries[parch][0][binary][RDEPENDS]
-
-                        # the list of reverse-dependencies may be outdated
-                        # if, for example, we're processing a hint and
-                        # a new version of one of the apparent reverse-dependencies
-                        # migrated earlier in the hint.  walk the list to make
-                        # sure that at least one of the entries is still
-                        # valid
-                        rrdeps = [x for x in rdeps if x not in [y.split("/")[0] for y in bins]]
-                        if len(rrdeps) > 0:
-                            for dep in rrdeps:
-                                if dep in binaries[parch][0]:
-                                    bin = binaries[parch][0][dep]
-                                    deps = []
-                                    if bin[DEPENDS] is not None:
-                                        deps.extend(apt_pkg.parse_depends(bin[DEPENDS], False))
-                                    if any(binary == entry[0] for deplist in deps for entry in deplist):
-                                        smoothbins.append(p)
-                                        break
-                        else:
-                            check.append(p)
-
-                # check whether we should perform a smooth update for
-                # packages which are candidates but do not have r-deps
-                # outside of the current source
-                for p in check:
-                    binary, parch = p.split("/")
-                    if any(bin for bin in binaries[parch][0][binary][RDEPENDS] \
-                           if bin in [y.split("/")[0] for y in smoothbins]):
-                        smoothbins.append(p)
+                bins, _ = self.find_upgraded_binaries(item.package,
+                                                      source,
+                                                      item.architecture,
+                                                      item.suite)
 
                 # remove all the binaries which aren't being smooth updated
-                for p in [ bin for bin in bins if bin not in smoothbins ]:
+                for p in bins:
                     binary, parch = p.split("/")
                     # save the old binary for undo
                     undo['binaries'][p] = binaries[parch][0][binary]
                     # all the reverse dependencies are affected by the change
-                    affected.update(self.get_reverse_tree(binary, parch, 'testing'))
+                    affected.update(get_reverse_tree(binary, parch))
                     # remove the provided virtual packages
                     for j in binaries[parch][0][binary][PROVIDES]:
                         key = j + "/" + parch
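Note: functools.partial pre-binds the testing binaries table, so every call
site in doop_source shrinks to get_reverse_tree(pkg, arch). The helper's body
is not in this diff; a sketch matching the removed Britney.get_reverse_tree
below and the (binaries, pkg, arch) call shape implied by the partial, not
necessarily the verbatim britney_util code:

    from functools import partial
    from itertools import chain, repeat
    from consts import RDEPENDS

    def compute_reverse_tree(binaries_by_arch, pkg, arch):
        # transitive closure of reverse dependencies, as (name, arch) pairs
        binaries = binaries_by_arch[arch][0]
        rev_deps = set(binaries[pkg][RDEPENDS])
        seen = set()
        while rev_deps:
            seen |= rev_deps
            nxt = [binaries[x][RDEPENDS] for x in rev_deps if x in binaries]
            rev_deps = {p for p in chain.from_iterable(nxt) if p not in seen}
        return zip(seen, repeat(arch))

    # usage, mirroring doop_source:
    # get_reverse_tree = partial(compute_reverse_tree, self.binaries["testing"])
    # affected.update(get_reverse_tree(binary, parch))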
@@ -2178,7 +2069,7 @@ class Britney(object):
             # updates but not supported as a manual hint
             elif item.package in binaries[item.architecture][0]:
                 undo['binaries'][item.package + "/" + item.architecture] = binaries[item.architecture][0][item.package]
-                affected.update(self.get_reverse_tree(item.package, item.architecture, 'testing'))
+                affected.update(get_reverse_tree(item.package, item.architecture))
                 del binaries[item.architecture][0][item.package]
                 self.systems[item.architecture].remove_binary(item.package)
 
@@ -2199,14 +2090,10 @@ class Britney(object):
                     # save the old binary package
                     undo['binaries'][p] = binaries[parch][0][binary]
                     # all the reverse dependencies are affected by the change
-                    affected.update(self.get_reverse_tree(binary, parch, 'testing'))
+                    affected.update(get_reverse_tree(binary, parch))
                     # all the reverse conflicts and their dependency tree are affected by the change
                     for j in binaries[parch][0][binary][RCONFLICTS]:
-                        key = (j, parch)
-                        if key not in affected: affected.add(key)
-                        for p in self.get_full_tree(j, parch, 'testing'):
-                            key = (p, parch)
-                            if key not in affected: affected.add(key)
+                        affected.update(get_reverse_tree(j, parch))
                     self.systems[parch].remove_binary(binary)
                 else:
                     # the binary isn't in testing, but it may have been at
@@ -2224,8 +2111,7 @@ class Britney(object):
                     if p in tundo['binaries']:
                         for rdep in tundo['binaries'][p][RDEPENDS]:
                             if rdep in binaries[parch][0] and rdep not in source[BINARIES]:
-                                affected.add( (rdep, parch) )
-                                affected.update(self.get_reverse_tree(rdep, parch, 'testing'))
+                                affected.update(get_reverse_tree(rdep, parch))
                 # add/update the binary package
                 binaries[parch][0][binary] = self.binaries[item.suite][parch][0][binary]
                 self.systems[parch].add_binary(binary, binaries[parch][0][binary][:PROVIDES] + \
@@ -2240,13 +2126,15 @@ class Britney(object):
                         undo['virtual'][key] = binaries[parch][1][j][:]
                     binaries[parch][1][j].append(binary)
                 # all the reverse dependencies are affected by the change
-                affected.update(self.get_reverse_tree(binary, parch, 'testing'))
+                affected.update(get_reverse_tree(binary, parch))
 
             # register reverse dependencies and conflicts for the new binary packages
-            for p in source[BINARIES]:
-                binary, parch = p.split("/")
-                if item.architecture not in ['source', parch]: continue
-                self.register_reverses(binary, binaries[parch][0] , binaries[parch][1])
+            if item.architecture == 'source':
+                pkg_iter = (p.split("/")[0] for p in source[BINARIES])
+            else:
+                ext = "/" + item.architecture
+                pkg_iter = (p.split("/")[0] for p in source[BINARIES] if p.endswith(ext))
+            register_reverses(binaries[parch][0], binaries[parch][1], iterator=pkg_iter)
 
             # add/update the source package
             if item.architecture == 'source':
@@ -2255,42 +2143,6 @@ class Britney(object):
         # return the package name, the suite, the list of affected packages and the undo dictionary
         return (item, affected, undo)
 
-    def get_reverse_tree(self, pkg, arch, suite):
-        binaries = self.binaries[suite][arch][0]
-
-        rev_deps = set(binaries[pkg][RDEPENDS])
-        seen = set()
-        while len(rev_deps) > 0:
-            # mark all of the current iteration of packages as affected
-            seen |= rev_deps
-            # generate the next iteration, which is the reverse-dependencies of
-            # the current iteration
-            new_rev_deps = [ binaries[x][RDEPENDS] for x in rev_deps \
-                             if x in binaries ]
-            # flatten the list-of-lists, filtering out already handled packages
-            # in the process
-            rev_deps = set([package for package in chain.from_iterable(new_rev_deps) \
-                            if package not in seen ])
-        return zip(seen, repeat(arch))
-
-    def get_full_tree(self, pkg, arch, suite):
-        """Calculate the full dependency tree for the given package
-
-        This method returns the full dependency tree for the package `pkg`,
-        inside the `arch` architecture for the suite `suite`.
-        """
-        packages = [pkg]
-        binaries = self.binaries[suite][arch][0]
-        if pkg in binaries:
-            l = n = 0
-            while len(packages) > l:
-                l = len(packages)
-                for p in packages[n:]:
-                    packages.extend([x for x in binaries[p][RDEPENDS] if x not in packages and x in binaries])
-                n = l
-            return packages
-        else:
-            return []
-
     def _check_packages(self, binaries, systems, arch, affected, skip_archall, nuninst, pkg):
         broken = nuninst[arch + "+all"]
@@ -2448,7 +2300,7 @@ class Britney(object):
                     skipped.append(pkg)
                     single_undo = [(undo, item)]
                     # (local-scope) binaries is actually self.binaries["testing"] so we cannot use it here.
-                    self.undo_changes(single_undo, systems, sources, self.binaries)
+                    undo_changes(single_undo, systems, sources, self.binaries)
 
             # if we are processing hints, return now
             if hint:
@@ -2457,7 +2309,8 @@ class Britney(object):
         self.output_write(" finish: [%s]\n" % ",".join([ x.uvname for x in selected ]))
         self.output_write("endloop: %s\n" % (self.eval_nuninst(self.nuninst_orig)))
         self.output_write("    now: %s\n" % (self.eval_nuninst(nuninst_comp)))
-        self.output_write(self.eval_uninst(self.newlyuninst(self.nuninst_orig, nuninst_comp)))
+        self.output_write(eval_uninst(self.options.architectures,
+                                      newly_uninst(self.nuninst_orig, nuninst_comp)))
         self.output_write("\n")
 
         return (nuninst_comp, extra)
@@ -2520,7 +2373,8 @@ class Britney(object):
             self.output_write("easy: %s\n" % nuninst_end_str)
 
             if not force:
-                self.output_write(self.eval_uninst(self.newlyuninst(nuninst_start, nuninst_end)) + "\n")
+                self.output_write(eval_uninst(self.options.architectures,
+                                              newly_uninst(nuninst_start, nuninst_end)) + "\n")
 
         if force or self.is_nuninst_asgood_generous(self.nuninst_orig, nuninst_end):
             # Result accepted either by force or by being better than the original result.
@@ -2535,7 +2389,8 @@ class Britney(object):
             self.output_write("  end: %s\n" % nuninst_end_str)
             if force:
                 self.output_write("force breaks:\n")
-                self.output_write(self.eval_uninst(self.newlyuninst(nuninst_start, nuninst_end)) + "\n")
+                self.output_write(eval_uninst(self.options.architectures,
+                                              newly_uninst(nuninst_start, nuninst_end)) + "\n")
             self.output_write("SUCCESS (%d/%d)\n" % (len(actions or self.upgrade_me), len(extra)))
             self.nuninst_orig = nuninst_end
             self.all_selected += [x.uvname for x in selected]
@@ -2550,75 +2405,7 @@ class Britney(object):
         if not lundo: return
         lundo.reverse()
 
-        self.undo_changes(lundo, self.systems, self.sources, self.binaries)
+        undo_changes(lundo, self.systems, self.sources, self.binaries)
 
 
-    def undo_changes(self, lundo, systems, sources, binaries):
-        """Undoes one or more changes to testing
-
-        * lundo is a list of (undo, item)-tuples
-        * systems is the britney-py.c system
-        * sources is the table of all source packages for all suites
-        * binaries is the table of all binary packages for all suites
-          and architectures
-        """
-
-        # We do the undo process in "4 steps" and each step must be
-        # fully completed for each undo-item before starting on the
-        # next.
-        #
-        # see commit:ef71f0e33a7c3d8ef223ec9ad5e9843777e68133 and
-        # #624716 for the issues we had when we did not do this.
-
-
-        # STEP 1
-        # undo all the changes for sources
-        for (undo, item) in lundo:
-            for k in undo['sources'].keys():
-                if k[0] == '-':
-                    del sources["testing"][k[1:]]
-                else:
-                    sources["testing"][k] = undo['sources'][k]
-
-        # STEP 2
-        # undo all new binaries (consequence of the above)
-        for (undo, item) in lundo:
-            if not item.is_removal and item.package in sources[item.suite]:
-                for p in sources[item.suite][item.package][BINARIES]:
-                    binary, arch = p.split("/")
-                    if item.architecture in ['source', arch]:
-                        del binaries["testing"][arch][0][binary]
-                        systems[arch].remove_binary(binary)
-
-
-        # STEP 3
-        # undo all other binary package changes (except virtual packages)
-        for (undo, item) in lundo:
-            for p in undo['binaries'].keys():
-                binary, arch = p.split("/")
-                if binary[0] == "-":
-                    del binaries['testing'][arch][0][binary[1:]]
-                    systems[arch].remove_binary(binary[1:])
-                else:
-                    binaries_t_a = binaries['testing'][arch][0]
-                    binaries_t_a[binary] = undo['binaries'][p]
-                    systems[arch].remove_binary(binary)
-                    systems[arch].add_binary(binary, binaries_t_a[binary][:PROVIDES] + \
-                                             [", ".join(binaries_t_a[binary][PROVIDES]) or None])
-
-        # STEP 4
-        # undo all changes to virtual packages
-        for (undo, item) in lundo:
-            for p in undo['nvirtual']:
-                j, arch = p.split("/")
-                del binaries['testing'][arch][1][j]
-            for p in undo['virtual']:
-                j, arch = p.split("/")
-                if j[0] == '-':
-                    del binaries['testing'][arch][1][j[1:]]
-                else:
-                    binaries['testing'][arch][1][j] = undo['virtual'][p]
-
-
@ -2713,14 +2500,14 @@ class Britney(object):
|
|||||||
removals = self.old_libraries()
|
removals = self.old_libraries()
|
||||||
if len(removals) > 0:
|
if len(removals) > 0:
|
||||||
self.output_write("Removing packages left in testing for smooth updates (%d):\n%s" % \
|
self.output_write("Removing packages left in testing for smooth updates (%d):\n%s" % \
|
||||||
(len(removals), self.old_libraries_format(removals)))
|
(len(removals), old_libraries_format(removals)))
|
||||||
self.do_all(actions=[ MigrationItem(x) for x in removals ])
|
self.do_all(actions=[ MigrationItem(x) for x in removals ])
|
||||||
removals = self.old_libraries()
|
removals = self.old_libraries()
|
||||||
else:
|
else:
|
||||||
removals = ()
|
removals = ()
|
||||||
|
|
||||||
self.output_write("List of old libraries in testing (%d):\n%s" % \
|
self.output_write("List of old libraries in testing (%d):\n%s" % \
|
||||||
(len(removals), self.old_libraries_format(removals)))
|
(len(removals), old_libraries_format(removals)))
|
||||||
|
|
||||||
# output files
|
# output files
|
||||||
if not self.options.dry_run:
|
if not self.options.dry_run:
|
||||||
@ -2733,7 +2520,9 @@ class Britney(object):
                self.write_dates(self.options.testing, self.dates)

                # write HeidiResult
-               self.write_heidi(self.options.heidi_output)
+               self.__log("Writing Heidi results to %s" % self.options.heidi_output)
+               write_heidi(self.options.heidi_output, self.sources["testing"],
+                           self.binaries["testing"])

                # write Delta
                if hasattr(self.options, 'delta_output'):
@ -2744,8 +2533,9 @@ class Britney(object):
    def printuninstchange(self):
        self.__log("Checking for newly uninstallable packages", type="I")
-       text = self.eval_uninst(self.newlyuninst(
+       text = eval_uninst(self.options.architectures, newly_uninst(
                self.nuninst_orig_save, self.nuninst_orig))

        if text != '':
            self.output_write("\nNewly uninstallable packages in testing:\n%s" % \
                (text))
@ -2968,13 +2758,15 @@ class Britney(object):
                if i not in to_skip:
                    self.do_hint("easy", "autohinter", [ HintItem("%s/%s" % (x[0], x[1])) for x in l[i] ])

-    def old_libraries(self):
+    def old_libraries(self, same_source=same_source):
        """Detect old libraries left in testing for smooth transitions

        This method detects old libraries which are in testing but no longer
        built from the source package: they are still there because other
        packages still depend on them, but they should be removed as soon
        as possible.
+
+        same_source is an optimization to avoid "load global".
        """
        sources = self.sources['testing']
        testing = self.binaries['testing']
@ -2984,22 +2776,10 @@ class Britney(object):
        for pkg_name in testing[arch][0]:
            pkg = testing[arch][0][pkg_name]
            if pkg_name not in unstable[arch][0] and \
-               not self.same_source(sources[pkg[SOURCE]][VERSION], pkg[SOURCEVER]):
+               not same_source(sources[pkg[SOURCE]][VERSION], pkg[SOURCEVER]):
                removals.append("-" + pkg_name + "/" + arch)
        return removals

-    def old_libraries_format(self, libs):
-        """Format old libraries in a smart table"""
-        libraries = {}
-        for i in libs:
-            pkg, arch = i.split("/")
-            pkg = pkg[1:]
-            if pkg in libraries:
-                libraries[pkg].append(arch)
-            else:
-                libraries[pkg] = [arch]
-        return "\n".join([" " + k + ": " + " ".join(libraries[k]) for k in libraries]) + "\n"
-
    def nuninst_arch_report(self, nuninst, arch):
        """Print a report of uninstallable packages for one architecture."""
        all = {}
362
britney_util.py
Normal file
@ -0,0 +1,362 @@
# -*- coding: utf-8 -*-

# Refactored parts from britney.py, which is/was:
# Copyright (C) 2001-2008 Anthony Towns <ajt@debian.org>
#                         Andreas Barth <aba@debian.org>
#                         Fabio Tranchitella <kobold@debian.org>
# Copyright (C) 2010-2012 Adam D. Barratt <adsb@debian.org>
# Copyright (C) 2012 Niels Thykier <niels@thykier.net>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.


import apt_pkg
from functools import partial
from itertools import chain, ifilter, ifilterfalse, izip, repeat
import re
import time


from consts import (VERSION, BINARIES, PROVIDES, DEPENDS, CONFLICTS,
                    RDEPENDS, RCONFLICTS, ARCHITECTURE, SECTION)

binnmu_re = re.compile(r'^(.*)\+b\d+$')

def same_source(sv1, sv2, binnmu_re=binnmu_re):
    """Check if two version numbers are built from the same source

    This method returns a boolean value which is true if the two
    version numbers specified as parameters are built from the same
    source.  The main use of this code is to detect binary NMUs.

    binnmu_re is an optimization to avoid "load global".
    """
    if sv1 == sv2:
        return 1

    m = binnmu_re.match(sv1)
    if m: sv1 = m.group(1)
    m = binnmu_re.match(sv2)
    if m: sv2 = m.group(1)

    if sv1 == sv2:
        return 1

    return 0
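
# Illustrative usage sketch (editor's note, not part of the original module):
# the binNMU suffix "+bN" is stripped before comparing, so a rebuild of the
# same source matches while a new source version does not.  Versions made up.
#
#   same_source("2.6-3", "2.6-3+b1")    # -> 1 (binNMU of the same source)
#   same_source("2.6-3", "2.6-4")       # -> 0 (different source versions)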

def ifilter_except(container, iterable=None):
    """Filter out elements in container

    If given an iterable it returns a filtered iterator, otherwise it
    returns a function to generate filtered iterators.  The latter is
    useful if the same filter has to be (re-)used on multiple
    iterators that are not known beforehand.
    """
    if iterable is not None:
        return ifilterfalse(container.__contains__, iterable)
    return partial(ifilterfalse, container.__contains__)

def ifilter_only(container, iterable=None):
    """Filter out elements which are not in container

    If given an iterable it returns a filtered iterator, otherwise it
    returns a function to generate filtered iterators.  The latter is
    useful if the same filter has to be (re-)used on multiple
    iterators that are not known beforehand.
    """
    if iterable is not None:
        return ifilter(container.__contains__, iterable)
    return partial(ifilter, container.__contains__)
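
# Illustrative usage sketch (editor's note, not part of the original module):
# with no iterable, both helpers return reusable filter factories.  The
# values below are made up.
#
#   skip_seen = ifilter_except(set(['a']))
#   list(skip_seen(['a', 'b', 'c']))        # -> ['b', 'c']
#   only_known = ifilter_only({'a': 1, 'b': 2})
#   list(only_known(['a', 'x', 'b']))       # -> ['a', 'b']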

def undo_changes(lundo, systems, sources, binaries,
                 BINARIES=BINARIES, PROVIDES=PROVIDES):
    """Undoes one or more changes to testing

    * lundo is a list of (undo, item)-tuples
    * systems is the britney-py.c system
    * sources is the table of all source packages for all suites
    * binaries is the table of all binary packages for all suites
      and architectures

    The "X=X" parameters are optimizations to avoid "load global"
    in loops.
    """
    # We do the undo process in "4 steps" and each step must be
    # fully completed for each undo-item before starting on the
    # next.
    #
    # see commit:ef71f0e33a7c3d8ef223ec9ad5e9843777e68133 and
    # #624716 for the issues we had when we did not do this.

    # STEP 1
    # undo all the changes for sources
    for (undo, item) in lundo:
        for k in undo['sources']:
            if k[0] == '-':
                del sources["testing"][k[1:]]
            else:
                sources["testing"][k] = undo['sources'][k]

    # STEP 2
    # undo all new binaries (consequence of the above)
    for (undo, item) in lundo:
        if not item.is_removal and item.package in sources[item.suite]:
            for p in sources[item.suite][item.package][BINARIES]:
                binary, arch = p.split("/")
                if item.architecture in ['source', arch]:
                    del binaries["testing"][arch][0][binary]
                    systems[arch].remove_binary(binary)

    # STEP 3
    # undo all other binary package changes (except virtual packages)
    for (undo, item) in lundo:
        for p in undo['binaries']:
            binary, arch = p.split("/")
            if binary[0] == "-":
                del binaries['testing'][arch][0][binary[1:]]
                systems[arch].remove_binary(binary[1:])
            else:
                binaries_t_a = binaries['testing'][arch][0]
                binaries_t_a[binary] = undo['binaries'][p]
                systems[arch].remove_binary(binary)
                systems[arch].add_binary(binary, binaries_t_a[binary][:PROVIDES] + \
                    [", ".join(binaries_t_a[binary][PROVIDES]) or None])

    # STEP 4
    # undo all changes to virtual packages
    for (undo, item) in lundo:
        for p in undo['nvirtual']:
            j, arch = p.split("/")
            del binaries['testing'][arch][1][j]
        for p in undo['virtual']:
            j, arch = p.split("/")
            if j[0] == '-':
                del binaries['testing'][arch][1][j[1:]]
            else:
                binaries['testing'][arch][1][j] = undo['virtual'][p]
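
# Illustrative sketch (editor's note, not part of the original module): the
# rough shape of a single undo entry, with made-up package names.  A key
# prefixed with "-" marks an entry the attempted change added, so undoing
# deletes it again; other keys map back to the saved old entry.
#
#   undo = {'sources':  {'foo': old_source_entry},
#           'binaries': {'foo/i386': old_binary_entry},
#           'virtual':  {'vfoo/i386': old_provider_list},
#           'nvirtual': ['vbar/i386']}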

def old_libraries_format(libs):
    """Format old libraries in a smart table"""
    libraries = {}
    for i in libs:
        pkg, arch = i.split("/")
        pkg = pkg[1:]
        if pkg in libraries:
            libraries[pkg].append(arch)
        else:
            libraries[pkg] = [arch]
    return "\n".join(" " + k + ": " + " ".join(libraries[k]) for k in libraries) + "\n"
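
# Illustrative sketch (editor's note, not part of the original module): each
# entry is a "-<package>/<architecture>" removal string; the leading "-" is
# stripped and architectures are grouped per package.  Names are made up.
#
#   old_libraries_format(["-libfoo1/i386", "-libfoo1/amd64"])
#   # -> " libfoo1: i386 amd64\n"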

def register_reverses(packages, provides, check_doubles=True, iterator=None,
                      parse_depends=apt_pkg.parse_depends,
                      DEPENDS=DEPENDS, CONFLICTS=CONFLICTS,
                      RDEPENDS=RDEPENDS, RCONFLICTS=RCONFLICTS):
    """Register reverse dependencies and conflicts for a given
    sequence of packages

    This method registers the reverse dependencies and conflicts for a
    given sequence of packages.  "packages" is a table of real
    packages and "provides" is a table of virtual packages.

    iterator is the sequence of packages for which the reverse
    relations should be updated.

    The "X=X" parameters are optimizations to avoid "load global" in
    the loops.
    """
    if iterator is None:
        iterator = packages.iterkeys()
    else:
        iterator = ifilter_only(packages, iterator)

    for pkg in iterator:
        # register the list of the dependencies for the depending packages
        dependencies = []
        pkg_data = packages[pkg]
        if pkg_data[DEPENDS]:
            dependencies.extend(parse_depends(pkg_data[DEPENDS], False))
        # go through the list
        for p in dependencies:
            for a in p:
                dep = a[0]
                # register real packages
                if dep in packages and (not check_doubles or pkg not in packages[dep][RDEPENDS]):
                    packages[dep][RDEPENDS].append(pkg)
                # also register packages which provide the package (if any)
                if dep in provides:
                    for i in provides[dep]:
                        if i not in packages: continue
                        if not check_doubles or pkg not in packages[i][RDEPENDS]:
                            packages[i][RDEPENDS].append(pkg)
        # register the list of the conflicts for the conflicting packages
        if pkg_data[CONFLICTS]:
            for p in parse_depends(pkg_data[CONFLICTS], False):
                for a in p:
                    con = a[0]
                    # register real packages
                    if con in packages and (not check_doubles or pkg not in packages[con][RCONFLICTS]):
                        packages[con][RCONFLICTS].append(pkg)
                    # also register packages which provide the package (if any)
                    if con in provides:
                        for i in provides[con]:
                            if i not in packages: continue
                            if not check_doubles or pkg not in packages[i][RCONFLICTS]:
                                packages[i][RCONFLICTS].append(pkg)
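
# Illustrative sketch (editor's note, not part of the original module): the
# two tables are the [0] (real packages) and [1] (provides) halves of a
# suite's per-architecture binary table, so a call could look like:
#
#   binaries_t_a = britney.binaries["testing"]["i386"]
#   register_reverses(binaries_t_a[0], binaries_t_a[1], check_doubles=False)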

def compute_reverse_tree(packages_s, pkg, arch,
                         set=set, flatten=chain.from_iterable,
                         RDEPENDS=RDEPENDS):
    """Calculate the full dependency tree for the given package

    This method returns the full dependency tree for the package
    "pkg", inside the "arch" architecture for a given suite flattened
    as an iterable.  The first argument "packages_s" is the binary
    package table for that given suite (e.g. Britney().binaries["testing"]).

    The tree (or graph) is returned as an iterable of (package, arch)
    tuples and the iterable will contain ("pkg", "arch") if it is
    available on that architecture.

    If "pkg" is not available on that architecture in that suite,
    this returns an empty iterable.

    The method does not promise any ordering of the returned
    elements and the iterable is not reusable.

    The flatten=... and the "X=X" parameters are optimizations to
    avoid "load global" in the loops.
    """
    binaries = packages_s[arch][0]
    if pkg not in binaries:
        return frozenset()
    rev_deps = set(binaries[pkg][RDEPENDS])
    seen = set([pkg])

    binfilt = ifilter_only(binaries)
    revfilt = ifilter_except(seen)

    while rev_deps:
        # mark all of the current iteration of packages as affected
        seen |= rev_deps
        # generate the next iteration, which is the reverse-dependencies of
        # the current iteration
        rev_deps = set(revfilt(flatten( binaries[x][RDEPENDS] for x in binfilt(rev_deps) )))
    return izip(seen, repeat(arch))
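
# Illustrative sketch (editor's note, not part of the original module): the
# while loop is a breadth-first walk over RDEPENDS, so for a chain where
# "app" depends on "libbar1" and "libbar1" depends on "libfoo1" (made-up
# names):
#
#   set(compute_reverse_tree(britney.binaries["testing"], "libfoo1", "i386"))
#   # -> set([("libfoo1", "i386"), ("libbar1", "i386"), ("app", "i386")])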

def write_nuninst(filename, nuninst):
    """Write the non-installable report

    Write the non-installable report derived from "nuninst" to the
    file denoted by "filename".
    """
    with open(filename, 'w') as f:
        # Having two fields with (almost) identical dates seems a bit
        # redundant.
        f.write("Built on: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n")
        f.write("Last update: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n\n")
        f.write("".join([k + ": " + " ".join(nuninst[k]) + "\n" for k in nuninst]))

def read_nuninst(filename, architectures):
    """Read the non-installable report

    Read the non-installable report from the file denoted by
    "filename" and return it.  Only architectures in "architectures"
    will be included in the report.
    """
    nuninst = {}
    with open(filename) as f:
        for r in f:
            if ":" not in r: continue
            arch, packages = r.strip().split(":", 1)
            if arch.split("+", 1)[0] in architectures:
                nuninst[arch] = set(packages.split())
    return nuninst
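
# Illustrative sketch (editor's note, not part of the original module): the
# two functions round-trip the per-architecture sets; the file name and
# contents are made up.  Header lines such as "Built on:" are skipped on
# read because their part before ":" is not a known architecture.
#
#   write_nuninst("nuninst.txt", {"i386": set(["pkg1", "pkg2"])})
#   read_nuninst("nuninst.txt", ["i386"])  # -> {"i386": set(["pkg1", "pkg2"])}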

def newly_uninst(nuold, nunew):
    """Return a nuninst statistic with only new uninstallable packages

    This method subtracts the uninstallable packages listed in the
    statistic "nuold" from those listed in the statistic "nunew".

    It returns a dictionary with the architectures as keys and the list
    of uninstallable packages as values.
    """
    res = {}
    for arch in ifilter_only(nunew, nuold):
        res[arch] = [x for x in nunew[arch] if x not in nuold[arch]]
    return res
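
# Illustrative sketch (editor's note, not part of the original module), with
# made-up package names; an architecture missing from "nuold" is skipped:
#
#   newly_uninst({"i386": ["old"]},
#                {"i386": ["old", "new"], "amd64": ["other"]})
#   # -> {"i386": ["new"]}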

def eval_uninst(architectures, nuninst):
    """Return a string which represents the uninstallable packages

    This method returns a string which represents the uninstallable
    packages reading the uninstallability statistics "nuninst".

    An example of the output string is:
     * i386: broken-pkg1, broken-pkg2
    """
    parts = []
    for arch in architectures:
        if arch in nuninst and nuninst[arch]:
            parts.append(" * %s: %s\n" % (arch, ", ".join(sorted(nuninst[arch]))))
    return "".join(parts)

def write_heidi(filename, sources_t, packages_t,
                VERSION=VERSION, SECTION=SECTION,
                ARCHITECTURE=ARCHITECTURE, sorted=sorted):
    """Write the output HeidiResult

    This method writes the output for Heidi, which contains all the
    binary packages and the source packages in the form:

    <pkg-name> <pkg-version> <pkg-architecture> <pkg-section>
    <src-name> <src-version> source <src-section>

    The file is written to "filename"; it assumes all sources and
    packages in "sources_t" and "packages_t" to be the packages in
    "testing".

    The "X=X" parameters are optimizations to avoid "load global" in
    the loops.
    """
    with open(filename, 'w') as f:

        # write binary packages
        for arch in sorted(packages_t):
            binaries = packages_t[arch][0]
            for pkg_name in sorted(binaries):
                pkg = binaries[pkg_name]
                pkgv = pkg[VERSION]
                pkgarch = pkg[ARCHITECTURE] or 'all'
                pkgsec = pkg[SECTION] or 'faux'
                f.write('%s %s %s %s\n' % (pkg_name, pkgv, pkgarch, pkgsec))

        # write sources
        for src_name in sorted(sources_t):
            src = sources_t[src_name]
            srcv = src[VERSION]
            srcsec = src[SECTION] or 'unknown'
            f.write('%s %s source %s\n' % (src_name, srcv, srcsec))
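
# Illustrative sketch (editor's note, not part of the original module): for a
# made-up package "dpkg" at version 1.16.10 in section "admin", the file
# would gain lines like:
#
#   dpkg 1.16.10 amd64 admin
#   dpkg 1.16.10 source admin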

40
consts.py
Normal file
@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-

# Constants from britney.py
#
# Assuming constants are copyrightable, then they are:
# Copyright (C) 2001-2008 Anthony Towns <ajt@debian.org>
#                         Andreas Barth <aba@debian.org>
#                         Fabio Tranchitella <kobold@debian.org>
# Copyright (C) 2010-2012 Adam D. Barratt <adsb@debian.org>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# source package
VERSION = 0
SECTION = 1
BINARIES = 2
MAINTAINER = 3
FAKESRC = 4

# binary package
SOURCE = 2
SOURCEVER = 3
ARCHITECTURE = 4
MULTIARCH = 5
# PREDEPENDS = 6 - No longer used by the python code
#                - The C-code needs it for alignment reasons and still checks it,
#                  but ignores it if it is None (so keep it None).
DEPENDS = 7
CONFLICTS = 8
PROVIDES = 9
RDEPENDS = 10
RCONFLICTS = 11