Merge branch 'master' into autopkgtest

ubuntu/rebased
Paul Gevers 7 years ago
commit fd03083ea5

@@ -197,7 +197,8 @@ from britney2.excuse import Excuse
from britney2.hints import HintParser
from britney2.installability.builder import build_installability_tester
from britney2.migrationitem import MigrationItem
from britney2.policies.policy import AgePolicy, RCBugPolicy, PiupartsPolicy, PolicyVerdict
from britney2.policies import PolicyVerdict
from britney2.policies.policy import AgePolicy, RCBugPolicy, PiupartsPolicy, BuildDependsPolicy
from britney2.policies.autopkgtest import AutopkgtestPolicy
from britney2.utils import (old_libraries_format, undo_changes,
compute_reverse_tree, possibly_compressed,
@@ -523,6 +524,7 @@ class Britney(object):
if getattr(self.options, 'adt_enable') == 'yes':
self.policies.append(AutopkgtestPolicy(self.options, self.suite_info))
self.policies.append(AgePolicy(self.options, self.suite_info, MINDAYS))
self.policies.append(BuildDependsPolicy(self.options, self.suite_info))
for policy in self.policies:
policy.register_hints(self._hint_parser)
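BuildDependsPolicy slots into the same policy list as AutopkgtestPolicy and AgePolicy, and like them it gets a chance to register its hints before the run starts. A minimal sketch of that pipeline shape, with a stand-in class (the real policies also take options and suite_info):

class StubPolicy:
    """Stand-in for britney2's BasePolicy subclasses, not the real API."""

    def __init__(self, name):
        self.name = name

    def register_hints(self, hint_parser):
        # real policies register their hint types here
        print("registering hints for", self.name)

policies = [StubPolicy('autopkgtest'), StubPolicy('age'), StubPolicy('build-depends')]
for policy in policies:
    policy.register_hints(None)  # Britney passes its HintParser here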
@@ -580,6 +582,7 @@ class Britney(object):
[],
None,
True,
None,
[],
[],
)
@@ -656,6 +659,7 @@ class Britney(object):
[],
None,
True,
None,
[],
[],
)
@@ -859,7 +863,7 @@ class Britney(object):
srcdist[source].binaries.append(pkg_id)
# if the source package doesn't exist, create a fake one
else:
srcdist[source] = SourcePackage(source_version, 'faux', [pkg_id], None, True, [], [])
srcdist[source] = SourcePackage(source_version, 'faux', [pkg_id], None, True, None, [], [])
# add the resulting dictionary to the package list
packages[pkg] = dpkg
@@ -1384,21 +1388,28 @@ class Britney(object):
# at this point, we check the status of the builds on all the supported architectures
# to catch the out-of-date ones
pkgs = {src: ["source"]}
all_binaries = self.all_binaries
for arch in self.options.architectures:
archs_to_consider = list(self.options.architectures)
archs_to_consider.append('all')
for arch in archs_to_consider:
oodbins = {}
uptodatebins = False
# for every binary package produced by this source in the suite for this architecture
for pkg_id in sorted(x for x in source_u.binaries if x.architecture == arch):
if arch == 'all':
consider_binaries = source_u.binaries
else:
consider_binaries = sorted(x for x in source_u.binaries if x.architecture == arch)
for pkg_id in consider_binaries:
pkg = pkg_id.package_name
if pkg not in pkgs: pkgs[pkg] = []
pkgs[pkg].append(arch)
# retrieve the binary package and its source version
binary_u = all_binaries[pkg_id]
pkgsv = binary_u.source_version
# arch:all packages are treated separately from arch:arch
if binary_u.architecture != arch:
continue
# if it wasn't built by the same source, it is out-of-date
# if there is at least one binary on this arch which is
# up-to-date, there is a build on this arch
@@ -1409,10 +1420,7 @@ class Britney(object):
excuse.add_old_binary(pkg, pkgsv)
continue
else:
# if the binary is arch all, it doesn't count as
# up-to-date for this arch
if binary_u.architecture == arch:
uptodatebins = True
uptodatebins = True
# if the package is architecture-dependent or the current arch is `nobreakall'
# find unsatisfied dependencies for the binary package
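The extra 'all' pseudo-architecture gives arch:all binaries their own out-of-date check instead of letting them count as builds on every real architecture. A toy run of the new loop; PkgId is a stand-in for britney2's binary package identifiers:

from collections import namedtuple

PkgId = namedtuple('PkgId', ['package_name', 'architecture'])

binaries = [PkgId('tool', 'amd64'), PkgId('tool-doc', 'all'), PkgId('tool', 'i386')]
archs_to_consider = ['amd64', 'i386', 'all']

for arch in archs_to_consider:
    if arch == 'all':
        consider_binaries = binaries
    else:
        consider_binaries = sorted(b for b in binaries if b.architecture == arch)
    # the `binary_u.architecture != arch: continue` guard above does this filter
    built_here = [b.package_name for b in consider_binaries if b.architecture == arch]
    print(arch, built_here)  # amd64 ['tool'] / i386 ['tool'] / all ['tool-doc']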
@@ -1549,15 +1557,15 @@ class Britney(object):
# this list will contain the packages which are valid candidates;
# if a package is going to be removed, it will have a "-" prefix
upgrade_me = []
upgrade_me_append = upgrade_me.append # Every . in a loop slows it down
upgrade_me = set()
upgrade_me_add = upgrade_me.add # Every . in a loop slows it down
excuses = self.excuses = {}
# for every source package in testing, check if it should be removed
for pkg in testing:
if should_remove_source(pkg):
upgrade_me_append("-" + pkg)
upgrade_me_add("-" + pkg)
# for every source package in unstable check if it should be upgraded
for pkg in unstable:
@@ -1567,11 +1575,11 @@ class Britney(object):
if pkg in testing and not testing[pkg].is_fakesrc:
for arch in architectures:
if should_upgrade_srcarch(pkg, arch, 'unstable'):
upgrade_me_append("%s/%s" % (pkg, arch))
upgrade_me_add("%s/%s" % (pkg, arch))
# check if the source package should be upgraded
if should_upgrade_src(pkg, 'unstable'):
upgrade_me_append(pkg)
upgrade_me_add(pkg)
# for every source package in *-proposed-updates, check if it should be upgraded
for suite in ['pu', 'tpu']:
@@ -1581,11 +1589,11 @@ class Britney(object):
if pkg in testing:
for arch in architectures:
if should_upgrade_srcarch(pkg, arch, suite):
upgrade_me_append("%s/%s_%s" % (pkg, arch, suite))
upgrade_me_add("%s/%s_%s" % (pkg, arch, suite))
# check if the source package should be upgraded
if should_upgrade_src(pkg, suite):
upgrade_me_append("%s_%s" % (pkg, suite))
upgrade_me_add("%s_%s" % (pkg, suite))
# process the `remove' hints, if the given package is not yet in upgrade_me
for hint in self.hints['remove']:
@@ -1600,7 +1608,7 @@ class Britney(object):
continue
# add the removal of the package to upgrade_me and build a new excuse
upgrade_me_append("-%s" % (src))
upgrade_me_add("-%s" % (src))
excuse = Excuse("-%s" % (src))
excuse.set_vers(tsrcv, None)
excuse.addhtml("Removal request by %s" % (hint.user))
@@ -1613,7 +1621,7 @@ class Britney(object):
excuses[excuse.name] = excuse
# extract the not considered packages, which are in the excuses but not in upgrade_me
unconsidered = [ename for ename in excuses if ename not in upgrade_me]
unconsidered = {ename for ename in excuses if ename not in upgrade_me}
# invalidate impossible excuses
for e in excuses.values():
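upgrade_me and unconsidered become sets because the invalidation pass (see invalidate_excuses further down) does many membership tests and moves names between the valid and invalid groups; with lists each move cost an index()/pop(). A small illustration:

upgrade_me = set()
upgrade_me.add('-removeme')   # removal entries keep their '-' prefix
upgrade_me.add('pkg/amd64')

excuses = {'-removeme': None, 'pkg/amd64': None, 'stuck-pkg': None}
unconsidered = {name for name in excuses if name not in upgrade_me}  # O(1) lookups
assert unconsidered == {'stuck-pkg'}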
@@ -2508,7 +2516,8 @@ class Britney(object):
# write HeidiResult
self.log("Writing Heidi results to %s" % self.options.heidi_output)
write_heidi(self.options.heidi_output, self.sources["testing"],
self.binaries["testing"])
self.binaries["testing"],
outofsync_arches=self.options.outofsync_arches)
self.log("Writing delta to %s" % self.options.heidi_delta_output)
write_heidi_delta(self.options.heidi_delta_output,

@@ -9,14 +9,15 @@ SuiteInfo = namedtuple('SuiteInfo', [
class SourcePackage(object):
__slots__ = ['version', 'section', 'binaries', 'maintainer', 'is_fakesrc', 'testsuite', 'testsuite_triggers']
__slots__ = ['version', 'section', 'binaries', 'maintainer', 'is_fakesrc', 'build_deps_arch', 'testsuite', 'testsuite_triggers']
def __init__(self, version, section, binaries, maintainer, is_fakesrc, testsuite, testsuite_triggers):
def __init__(self, version, section, binaries, maintainer, is_fakesrc, build_deps_arch, testsuite, testsuite_triggers):
self.version = version
self.section = section
self.binaries = binaries
self.maintainer = maintainer
self.is_fakesrc = is_fakesrc
self.build_deps_arch = build_deps_arch
self.testsuite = testsuite
self.testsuite_triggers = testsuite_triggers
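SourcePackage uses __slots__ and positional construction, so every call site has to grow an extra argument; that is why the britney.py hunks above insert a literal None before the testsuite fields. A sketch, assuming the class is importable from the britney2 package and with made-up field values:

from britney2 import SourcePackage  # assumed import path

src = SourcePackage(
    '1.0-1',                        # version
    'devel',                        # section
    [],                             # binaries
    'Jane Doe <jane@example.org>',  # maintainer (made up)
    False,                          # is_fakesrc
    'debhelper (>= 10)',            # build_deps_arch (made-up relation)
    [],                             # testsuite
    [],                             # testsuite_triggers
)
assert src.build_deps_arch == 'debhelper (>= 10)'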

@@ -74,12 +74,13 @@ class Excuse(object):
self.forced = False
self._policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
self.invalid_deps = []
self.invalid_deps = set()
self.invalid_build_deps = set()
self.deps = {}
self.arch_build_deps = {}
self.sane_deps = []
self.break_deps = []
self.unsatisfiable_on_archs = []
self.bugs = []
self.newbugs = set()
self.oldbugs = set()
self.reason = {}
@@ -145,9 +146,18 @@ class Excuse(object):
if arch not in self.unsatisfiable_on_archs:
self.unsatisfiable_on_archs.append(arch)
def add_arch_build_dep(self, name, arch):
if name not in self.arch_build_deps:
self.arch_build_deps[name] = []
self.arch_build_deps[name].append(arch)
def invalidate_dep(self, name):
"""Invalidate dependency"""
if name not in self.invalid_deps: self.invalid_deps.append(name)
self.invalid_deps.add(name)
def invalidate_build_dep(self, name):
"""Invalidate build-dependency"""
self.invalid_build_deps.add(name)
def setdaysold(self, daysold, mindays):
"""Set the number of days from the upload and the minimum number of days for the update"""
@@ -219,6 +229,17 @@ class Excuse(object):
for (n,a) in self.break_deps:
if n not in self.deps:
res += "<li>Ignoring %s depends: <a href=\"#%s\">%s</a>\n" % (a, n, n)
lastdep = ""
for x in sorted(self.arch_build_deps, key=lambda x: x.split('/')[0]):
dep = x.split('/')[0]
if dep == lastdep:
continue
lastdep = dep
if x in self.invalid_build_deps:
res = res + "<li>Build-Depends(-Arch): %s <a href=\"#%s\">%s</a> (not ready)\n" % (self.name, dep, dep)
else:
res = res + "<li>Build-Depends(-Arch): %s <a href=\"#%s\">%s</a>\n" % (self.name, dep, dep)
if self.is_valid:
res += "<li>Valid candidate\n"
else:
@@ -268,13 +289,14 @@ class Excuse(object):
'on-architectures': sorted(self.missing_builds),
'on-unimportant-architectures': sorted(self.missing_builds_ood_arch),
}
if self.deps or self.invalid_deps or self.break_deps:
if self.deps or self.invalid_deps or self.arch_build_deps or self.invalid_build_deps or self.break_deps:
excusedata['dependencies'] = dep_data = {}
migrate_after = sorted(x for x in self.deps if x not in self.invalid_deps)
migrate_after = sorted((self.deps.keys() - self.invalid_deps)
| (self.arch_build_deps.keys() - self.invalid_build_deps))
break_deps = [x for x, _ in self.break_deps if x not in self.deps]
if self.invalid_deps:
dep_data['blocked-by'] = sorted(self.invalid_deps)
if self.invalid_deps or self.invalid_build_deps:
dep_data['blocked-by'] = sorted(self.invalid_deps | self.invalid_build_deps)
if migrate_after:
dep_data['migrate-after'] = migrate_after
if break_deps:
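The migrate-after/blocked-by split is plain set arithmetic over the four collections; dict .keys() views support the set operators directly, which is what the new code relies on. With toy data:

deps = {'liba': [], 'libb': []}               # Excuse.deps
arch_build_deps = {'libc/amd64': ['amd64']}   # Excuse.arch_build_deps
invalid_deps = {'libb'}                       # Excuse.invalid_deps
invalid_build_deps = set()                    # Excuse.invalid_build_deps

migrate_after = sorted((deps.keys() - invalid_deps)
                       | (arch_build_deps.keys() - invalid_build_deps))
blocked_by = sorted(invalid_deps | invalid_build_deps)
assert migrate_after == ['liba', 'libc/amd64']
assert blocked_by == ['libb']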

@@ -0,0 +1,53 @@
from enum import Enum, unique
@unique
class PolicyVerdict(Enum):
""""""
"""
The migration item passed the policy.
"""
PASS = 1
"""
The policy was completely overruled by a hint.
"""
PASS_HINTED = 2
"""
The migration item did not pass the policy, but the failure is believed
to be temporary
"""
REJECTED_TEMPORARILY = 3
"""
The migration item is temporarily unable to migrate due to another item. The other item is temporarily blocked.
"""
REJECTED_WAITING_FOR_ANOTHER_ITEM = 4
"""
The migration item is permanently unable to migrate due to another item. The other item is permanently blocked.
"""
REJECTED_BLOCKED_BY_ANOTHER_ITEM = 5
"""
The migration item needs approval to migrate
"""
REJECTED_NEEDS_APPROVAL = 6
"""
The migration item is blocked, but there is not enough information to determine
if this issue is permanent or temporary
"""
REJECTED_CANNOT_DETERMINE_IF_PERMANENT = 7
"""
The migration item did not pass the policy and the failure is believed
to be uncorrectable (i.e. a hint or a new version is needed)
"""
REJECTED_PERMANENTLY = 8
@property
def is_rejected(self):
return True if self.name.startswith('REJECTED') else False
def is_blocked(self):
"""Whether the item (probably) needs a fix or manual assistance to migrate"""
return self in {
PolicyVerdict.REJECTED_BLOCKED_BY_ANOTHER_ITEM,
PolicyVerdict.REJECTED_NEEDS_APPROVAL,
PolicyVerdict.REJECTED_CANNOT_DETERMINE_IF_PERMANENT, # Assuming the worst
PolicyVerdict.REJECTED_PERMANENTLY,
}
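The numeric values encode severity: higher is worse, so call sites keep the worst verdict seen with a plain .value comparison, as BuildDependsPolicy does below. For example:

from britney2.policies import PolicyVerdict

verdict = PolicyVerdict.PASS
for new in (PolicyVerdict.PASS_HINTED, PolicyVerdict.REJECTED_TEMPORARILY):
    if new.value > verdict.value:
        verdict = new
assert verdict is PolicyVerdict.REJECTED_TEMPORARILY
assert verdict.is_rejected        # property: the name starts with 'REJECTED'
assert not verdict.is_blocked()   # temporary failures may clear on their own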

@@ -2,65 +2,13 @@ import json
import os
import time
from abc import abstractmethod
from enum import Enum, unique
from urllib.parse import quote
import apt_pkg
from britney2.hints import Hint, split_into_one_hint_per_package
@unique
class PolicyVerdict(Enum):
""""""
"""
The migration item passed the policy.
"""
PASS = 1
"""
The policy was completely overruled by a hint.
"""
PASS_HINTED = 2
"""
The migration item did not pass the policy, but the failure is believed
to be temporary
"""
REJECTED_TEMPORARILY = 3
"""
The migration item is temporarily unable to migrate due to another item. The other item is temporarily blocked.
"""
REJECTED_WAITING_FOR_ANOTHER_ITEM = 4
"""
The migration item is permanently unable to migrate due to another item. The other item is permanently blocked.
"""
REJECTED_BLOCKED_BY_ANOTHER_ITEM = 5
"""
The migration item needs approval to migrate
"""
REJECTED_NEEDS_APPROVAL = 6
"""
The migration item is blocked, but there is not enough information to determine
if this issue is permanent or temporary
"""
REJECTED_CANNOT_DETERMINE_IF_PERMANENT = 7
"""
The migration item did not pass the policy and the failure is believed
to be uncorrectable (i.e. a hint or a new version is needed)
"""
REJECTED_PERMANENTLY = 8
@property
def is_rejected(self):
return True if self.name.startswith('REJECTED') else False
def is_blocked(self):
"""Whether the item (probably) needs a fix or manual assistance to migrate"""
return self in {
PolicyVerdict.REJECTED_BLOCKED_BY_ANOTHER_ITEM,
PolicyVerdict.REJECTED_NEEDS_APPROVAL,
PolicyVerdict.REJECTED_CANNOT_DETERMINE_IF_PERMANENT, # Assuming the worst
PolicyVerdict.REJECTED_PERMANENTLY,
}
from britney2.policies import PolicyVerdict
from britney2.utils import get_dependency_solvers
class BasePolicy(object):
@@ -695,7 +643,92 @@ class PiupartsPolicy(BasePolicy):
item = next(iter(suite_data.values()))
state, _, url = item
if not keep_url:
keep_url = None
url = None
summary[source] = (state, url)
return summary
class BuildDependsPolicy(BasePolicy):
def __init__(self, options, suite_info):
super().__init__('build-depends', options, suite_info, {'unstable', 'tpu', 'pu'})
self._britney = None
def initialise(self, britney):
super().initialise(britney)
self._britney = britney
def apply_policy_impl(self, build_deps_info, suite, source_name, source_data_tdist, source_data_srcdist, excuse,
get_dependency_solvers=get_dependency_solvers):
verdict = PolicyVerdict.PASS
britney = self._britney
# local copies for better performance
parse_src_depends = apt_pkg.parse_src_depends
# analyze the dependency fields (if present)
deps = source_data_srcdist.build_deps_arch
if not deps:
return verdict
sources_s = None
sources_t = None
unsat_bd = {}
relevant_archs = {binary.architecture for binary in source_data_srcdist.binaries
if britney.all_binaries[binary].architecture != 'all'}
for arch in (arch for arch in self.options.architectures if arch in relevant_archs):
# retrieve the binary package from the specified suite and arch
binaries_s_a, provides_s_a = britney.binaries[suite][arch]
binaries_t_a, provides_t_a = britney.binaries['testing'][arch]
# for every dependency block (formed as conjunction of disjunction)
for block_txt in deps.split(','):
block = parse_src_depends(block_txt, False, arch)
# Unlike regular dependencies, some clauses of the Build-Depends(-Arch|-Indep) can be
# filtered out by (e.g.) architecture restrictions. We need to cope with this while
# keeping block_txt and block aligned.
if not block:
# Relation is not relevant for this architecture.
continue
block = block[0]
# if the block is satisfied in testing, then skip the block
if get_dependency_solvers(block, binaries_t_a, provides_t_a, build_depends=True):
# Satisfied in testing; all ok.
continue
# check if the block can be satisfied in the source suite, and list the solving packages
packages = get_dependency_solvers(block, binaries_s_a, provides_s_a, build_depends=True)
packages = [binaries_s_a[p].source for p in packages]
# if the dependency can be satisfied by the same source package, skip the block:
# obviously both binary packages will enter testing together
if source_name in packages:
continue
# if no package can satisfy the dependency, add this information to the excuse
if not packages:
excuse.addhtml("%s unsatisfiable Build-Depends(-Arch) on %s: %s" % (source_name, arch, block_txt.strip()))
if arch not in unsat_bd:
unsat_bd[arch] = []
unsat_bd[arch].append(block_txt.strip())
if verdict.value < PolicyVerdict.REJECTED_PERMANENTLY.value:
verdict = PolicyVerdict.REJECTED_PERMANENTLY
continue
if not sources_t:
sources_t = britney.sources['testing']
sources_s = britney.sources[suite]
# for the solving packages, update the excuse to add the dependencies
for p in packages:
if arch not in self.options.break_arches:
if p in sources_t and sources_t[p].version == sources_s[p].version:
excuse.add_arch_build_dep("%s/%s" % (p, arch), arch)
else:
excuse.add_arch_build_dep(p, arch)
if unsat_bd:
build_deps_info['unsatisfiable-arch-build-depends'] = unsat_bd
return verdict
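The per-architecture parse is what makes empty blocks possible: apt's parser drops clauses whose architecture restrictions exclude the target architecture, so the policy must skip them to keep block_txt and block aligned. A standalone probe (requires python3-apt; the relation string is made up):

import apt_pkg

apt_pkg.init()
rel = 'gcc-multilib [amd64 i386], debhelper (>= 10)'
for arch in ('amd64', 'armhf'):
    for block_txt in rel.split(','):
        # same call as above: no multi-arch stripping, explicit host arch
        block = apt_pkg.parse_src_depends(block_txt, False, arch)
        print(arch, block_txt.strip(), '->', block)
# on armhf the gcc-multilib clause parses to [], and the policy skips it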

@@ -39,7 +39,7 @@ from britney2.consts import (VERSION, PROVIDES, DEPENDS, CONFLICTS,
SOURCE, MAINTAINER, MULTIARCH,
ESSENTIAL)
from britney2.migrationitem import MigrationItem, UnversionnedMigrationItem
from britney2.policies.policy import PolicyVerdict
from britney2.policies import PolicyVerdict
def ifilter_except(container, iterable=None):
@@ -258,7 +258,7 @@ def eval_uninst(architectures, nuninst):
return "".join(parts)
def write_heidi(filename, sources_t, packages_t, sorted=sorted):
def write_heidi(filename, sources_t, packages_t, *, outofsync_arches=frozenset(), sorted=sorted):
"""Write the output HeidiResult
This method writes the output for Heidi, which contains all the
@@ -271,6 +271,10 @@ def write_heidi(filename, sources_t, packages_t, sorted=sorted):
packages in "sources_t" and "packages_t" to be the packages in
"testing".
outofsync_arches: If given, it is a set of architectures marked
as "out of sync". The output file may exclude some out of date
arch:all packages for those architectures to reduce the noise.
The "X=X" parameters are optimizations to avoid "load global" in
the loops.
"""
@@ -288,7 +292,8 @@ def write_heidi(filename, sources_t, packages_t, sorted=sorted):
# Faux package; not really a part of testing
continue
if pkg.source_version and pkgarch == 'all' and \
pkg.source_version != sources_t[pkg.source].version:
pkg.source_version != sources_t[pkg.source].version and \
arch in outofsync_arches:
# when architectures are marked as "outofsync", their binary
# versions may be lower than those of the associated
# source package in testing. the binary package list for
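The guard only widens: stale arch:all binaries used to be dropped from the Heidi output on every architecture, and are now dropped only for architectures listed in outofsync_arches. Restated as a standalone predicate (names are local to this sketch):

outofsync_arches = frozenset({'powerpc'})  # made-up example value

def keep_line(pkgarch, arch, source_version, testing_source_version):
    if (source_version and pkgarch == 'all'
            and source_version != testing_source_version
            and arch in outofsync_arches):
        return False  # skip: stale arch:all binary on an out-of-sync arch
    return True

assert not keep_line('all', 'powerpc', '1.0-1', '1.0-2')
assert keep_line('all', 'amd64', '1.0-1', '1.0-2')  # still listed elsewhere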
@@ -721,27 +726,41 @@ def read_sources_file(filename, sources=None, intern=sys.intern):
section = get_field('Section')
if section:
section = intern(section.strip())
build_deps_arch = ", ".join(x for x in (get_field('Build-Depends'), get_field('Build-Depends-Arch'))
if x is not None)
if build_deps_arch != '':
build_deps_arch = sys.intern(build_deps_arch)
else:
build_deps_arch = None
sources[intern(pkg)] = SourcePackage(intern(ver),
section,
[],
maint,
False,
build_deps_arch,
get_field('Testsuite', '').split(),
get_field('Testsuite-Triggers', '').replace(',', '').split(),
)
return sources
def get_dependency_solvers(block, binaries_s_a, provides_s_a, *, empty_set=frozenset()):
def get_dependency_solvers(block, binaries_s_a, provides_s_a, *, build_depends=False, empty_set=frozenset()):
"""Find the packages which satisfy a dependency block
This method returns the list of packages which satisfy a dependency
block (as returned by apt_pkg.parse_depends) in a package table
for a given suite and architecture (a la self.binaries[suite][arch])
:param block: The dependency block as parsed by apt_pkg.parse_depends
It can also handle build-dependency relations if the named parameter
"build_depends" is set to True. In this case, block should be based
on the return value from apt_pkg.parse_src_depends.
:param block: The dependency block as parsed by apt_pkg.parse_depends (or apt_pkg.parse_src_depends
if the "build_depends" is True)
:param binaries_s_a: A dict mapping package names to the relevant BinaryPackage
:param provides_s_a: A dict mapping package names to their providers (as generated by parse_provides)
:param build_depends: If True, treat the "block" parameter as a build-dependency relation rather than
a regular dependency relation.
:param empty_set: Internal implementation detail / optimisation
:return a list of package names solving the relation
"""
@@ -760,7 +779,17 @@ def get_dependency_solvers(block, binaries_s_a, provides_s_a, *, empty_set=froze
# check the versioned dependency and architecture qualifier
# (if present)
if (op == '' and version == '') or apt_pkg.check_dep(package.version, op, version):
if archqual is None or (archqual == 'any' and package.multi_arch == 'allowed'):
if archqual is None:
packages.append(name)
elif build_depends:
# Multi-arch handling for build-dependencies
# - :native is ok iff the target is arch:any
if archqual == 'native' and package.architecture != 'all':
packages.append(name)
# Multi-arch handling for both build-dependencies and regular dependencies
# - :any is ok iff the target has "M-A: allowed"
if archqual == 'any' and package.multi_arch == 'allowed':
packages.append(name)
# look for the package in the virtual packages list and loop on them
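The architecture-qualifier rules above, restated as a toy predicate (the real code inspects BinaryPackage objects; the parameter names here are stand-ins):

def satisfies_archqual(archqual, pkg_arch, pkg_multi_arch, *, build_depends):
    if archqual is None:
        return True
    if build_depends and archqual == 'native':
        # foo:native is satisfiable iff the candidate is not arch:all
        return pkg_arch != 'all'
    if archqual == 'any':
        # foo:any needs Multi-Arch: allowed (deps and build-deps alike)
        return pkg_multi_arch == 'allowed'
    return False

assert satisfies_archqual('native', 'amd64', 'no', build_depends=True)
assert not satisfies_archqual('native', 'all', 'no', build_depends=True)
assert satisfies_archqual('any', 'amd64', 'allowed', build_depends=False)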
@@ -785,20 +814,23 @@ def invalidate_excuses(excuses, valid, invalid):
"""Invalidate impossible excuses
This method invalidates the impossible excuses, which depend
on invalid excuses. The two parameters contains the list of
on invalid excuses. The two parameters contain the sets of
`valid' and `invalid' excuses.
"""
# build the reverse dependencies
revdeps = defaultdict(list)
revbuilddeps = defaultdict(list)
for exc in excuses.values():
for d in exc.deps:
revdeps[d].append(exc.name)
for d in exc.arch_build_deps:
revbuilddeps[d].append(exc.name)
# loop on the invalid excuses
for i, ename in enumerate(invalid):
for ename in iter_except(invalid.pop, KeyError):
# if there is no reverse dependency, skip the item
if ename not in revdeps:
if ename not in revdeps and ename not in revbuilddeps:
continue
# if the dependency can be satisfied by a testing-proposed-updates excuse, skip the item
if (ename + "_tpu") in valid:
@@ -809,21 +841,34 @@ def invalidate_excuses(excuses, valid, invalid):
rdep_verdict = PolicyVerdict.REJECTED_BLOCKED_BY_ANOTHER_ITEM
# loop on the reverse dependencies
for x in revdeps[ename]:
if x in valid:
# if the item is valid and it is marked as `forced', skip the item
if excuses[x].forced:
continue
# otherwise, invalidate the dependency and mark as invalidated and
# remove the depending excuses
excuses[x].invalidate_dep(ename)
p = valid.index(x)
invalid.append(valid.pop(p))
excuses[x].addhtml("Invalidated by dependency")
excuses[x].addreason("depends")
if excuses[x].policy_verdict.value < rdep_verdict.value:
excuses[x].policy_verdict = rdep_verdict
if ename in revdeps:
for x in revdeps[ename]:
# if the item is valid and it is not marked as `forced', then we invalidate it
if x in valid and not excuses[x].forced:
# otherwise, invalidate the dependency and mark as invalidated and
# remove the depending excuses
excuses[x].invalidate_dep(ename)
valid.discard(x)
invalid.add(x)
excuses[x].addhtml("Invalidated by dependency")
excuses[x].addreason("depends")
if excuses[x].policy_verdict.value < rdep_verdict.value:
excuses[x].policy_verdict = rdep_verdict
if ename in revbuilddeps:
for x in revbuilddeps[ename]:
# if the item is valid and it is not marked as `forced', then we invalidate it
if x in valid and not excuses[x].forced:
# otherwise, invalidate the dependency and mark as invalidated and
# remove the depending excuses
excuses[x].invalidate_build_dep(ename)
valid.discard(x)
invalid.add(x)
excuses[x].addhtml("Invalidated by build-dependency")
if excuses[x].policy_verdict.value < rdep_verdict.value:
excuses[x].policy_verdict = rdep_verdict
def compile_nuninst(binaries_t, inst_tester, architectures, nobreakall_arches):
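The rewrite turns invalid into a worklist: names discovered during invalidation are added to the set and consumed by the same loop. A self-contained copy of the iter_except idiom (the itertools-recipes helper used above) to show the flow:

def iter_except(func, exception):
    """Yield func() results until `exception` is raised (itertools recipe)."""
    try:
        while True:
            yield func()
    except exception:
        pass

valid, invalid = {'rdep1', 'rdep2'}, {'broken'}
revdeps = {'broken': ['rdep1']}

for ename in iter_except(invalid.pop, KeyError):
    for x in revdeps.get(ename, []):
        if x in valid:
            valid.discard(x)
            invalid.add(x)  # picked up later by this same loop
print(sorted(valid))        # ['rdep2']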

@@ -39,7 +39,7 @@ def create_excuse(name):
def create_source_package(version, section='devel', binaries=None):
if binaries is None:
binaries = []
return SourcePackage(version, section, binaries, 'Random tester', False, '', '')
return SourcePackage(version, section, binaries, 'Random tester', False, None, '', '')
def create_policy_objects(source_name, target_version, source_version):
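With the extra positional None in place, the fixture keeps its old call shape for the tests; a short usage sketch under that assumption:

src = create_source_package('2.0-1', section='python', binaries=[])
assert src.version == '2.0-1'
assert src.build_deps_arch is None  # fixtures carry no build-deps info by default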
