mirror of
https://git.launchpad.net/~ubuntu-release/britney/+git/britney2-ubuntu
synced 2025-04-08 09:41:43 +00:00
Add BuildDependsPolicy to check Build-Depends(-Arch) availability
Add a new "BuildDependsPolicy" that will check the satisfiability of the build-dependencies listed in the Build-Depends and Build-Depends-Arch fields. This enables gating of packages based on missing / broken build-dependencies. There are some limitations: * Build-Depends-Indep is ignored for now. Missing or broken packages listed in Build-Depends-Indep will continue to be silently ignored. * Being a policy check, it does not enforce "self-containedness" as a package can still migrate before a build-dependency. However, this can only happen if the build-dependency is ready to migrate itself. If the build-dependency is not ready (e.g. new RC bugs), then packages build-depending on it cannot migrate either (unless the version in testing satisfies their requirements). Signed-off-by: Niels Thykier <niels@thykier.net>
This commit is contained in:
parent
c537f0554f
commit
ee27d7a67c
@ -198,7 +198,7 @@ from britney2.hints import HintParser
|
||||
from britney2.installability.builder import build_installability_tester
|
||||
from britney2.migrationitem import MigrationItem
|
||||
from britney2.policies import PolicyVerdict
|
||||
from britney2.policies.policy import AgePolicy, RCBugPolicy, PiupartsPolicy
|
||||
from britney2.policies.policy import AgePolicy, RCBugPolicy, PiupartsPolicy, BuildDependsPolicy
|
||||
from britney2.utils import (old_libraries_format, undo_changes,
|
||||
compute_reverse_tree, possibly_compressed,
|
||||
read_nuninst, write_nuninst, write_heidi,
|
||||
@ -511,6 +511,7 @@ class Britney(object):
|
||||
self.policies.append(AgePolicy(self.options, self.suite_info, MINDAYS))
|
||||
self.policies.append(RCBugPolicy(self.options, self.suite_info))
|
||||
self.policies.append(PiupartsPolicy(self.options, self.suite_info))
|
||||
self.policies.append(BuildDependsPolicy(self.options, self.suite_info))
|
||||
|
||||
for policy in self.policies:
|
||||
policy.register_hints(self._hint_parser)
|
||||
@ -568,6 +569,7 @@ class Britney(object):
|
||||
[],
|
||||
None,
|
||||
True,
|
||||
None
|
||||
)
|
||||
|
||||
self.sources['testing'][pkg_name] = src_data
|
||||
@ -642,6 +644,7 @@ class Britney(object):
|
||||
[],
|
||||
None,
|
||||
True,
|
||||
None,
|
||||
)
|
||||
self.sources['testing'][pkg_name] = src_data
|
||||
self.sources['unstable'][pkg_name] = src_data
|
||||
@ -843,7 +846,7 @@ class Britney(object):
|
||||
srcdist[source].binaries.append(pkg_id)
|
||||
# if the source package doesn't exist, create a fake one
|
||||
else:
|
||||
srcdist[source] = SourcePackage(source_version, 'faux', [pkg_id], None, True)
|
||||
srcdist[source] = SourcePackage(source_version, 'faux', [pkg_id], None, True, None)
|
||||
|
||||
# add the resulting dictionary to the package list
|
||||
packages[pkg] = dpkg
|
||||
|
@ -9,14 +9,15 @@ SuiteInfo = namedtuple('SuiteInfo', [
|
||||
|
||||
class SourcePackage(object):
|
||||
|
||||
__slots__ = ['version', 'section', 'binaries', 'maintainer', 'is_fakesrc']
|
||||
__slots__ = ['version', 'section', 'binaries', 'maintainer', 'is_fakesrc', 'build_deps_arch']
|
||||
|
||||
def __init__(self, version, section, binaries, maintainer, is_fakesrc):
|
||||
def __init__(self, version, section, binaries, maintainer, is_fakesrc, build_deps_arch):
|
||||
self.version = version
|
||||
self.section = section
|
||||
self.binaries = binaries
|
||||
self.maintainer = maintainer
|
||||
self.is_fakesrc = is_fakesrc
|
||||
self.build_deps_arch = build_deps_arch
|
||||
|
||||
def __getitem__(self, item):
|
||||
return getattr(self, self.__slots__[item])
|
||||
|
@ -75,7 +75,9 @@ class Excuse(object):
|
||||
self._policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
|
||||
|
||||
self.invalid_deps = set()
|
||||
self.invalid_build_deps = set()
|
||||
self.deps = {}
|
||||
self.arch_build_deps = {}
|
||||
self.sane_deps = []
|
||||
self.break_deps = []
|
||||
self.newbugs = set()
|
||||
@ -135,10 +137,19 @@ class Excuse(object):
|
||||
if (name, arch) not in self.break_deps:
|
||||
self.break_deps.append( (name, arch) )
|
||||
|
||||
def add_arch_build_dep(self, name, arch):
|
||||
if name not in self.arch_build_deps:
|
||||
self.arch_build_deps[name] = []
|
||||
self.arch_build_deps[name].append(arch)
|
||||
|
||||
def invalidate_dep(self, name):
|
||||
"""Invalidate dependency"""
|
||||
self.invalid_deps.add(name)
|
||||
|
||||
def invalidate_build_dep(self, name):
|
||||
"""Invalidate build-dependency"""
|
||||
self.invalid_build_deps.add(name)
|
||||
|
||||
def setdaysold(self, daysold, mindays):
|
||||
"""Set the number of days from the upload and the minimum number of days for the update"""
|
||||
self.daysold = daysold
|
||||
@ -209,6 +220,17 @@ class Excuse(object):
|
||||
for (n,a) in self.break_deps:
|
||||
if n not in self.deps:
|
||||
res += "<li>Ignoring %s depends: <a href=\"#%s\">%s</a>\n" % (a, n, n)
|
||||
lastdep = ""
|
||||
for x in sorted(self.arch_build_deps, key=lambda x: x.split('/')[0]):
|
||||
dep = x.split('/')[0]
|
||||
if dep == lastdep:
|
||||
continue
|
||||
lastdep = dep
|
||||
if x in self.invalid_build_deps:
|
||||
res = res + "<li>Build-Depends(-Arch): %s <a href=\"#%s\">%s</a> (not ready)\n" % (self.name, dep, dep)
|
||||
else:
|
||||
res = res + "<li>Build-Depends(-Arch): %s <a href=\"#%s\">%s</a>\n" % (self.name, dep, dep)
|
||||
|
||||
if self.is_valid:
|
||||
res += "<li>Valid candidate\n"
|
||||
else:
|
||||
@ -258,13 +280,14 @@ class Excuse(object):
|
||||
'on-architectures': sorted(self.missing_builds),
|
||||
'on-unimportant-architectures': sorted(self.missing_builds_ood_arch),
|
||||
}
|
||||
if self.deps or self.invalid_deps or self.break_deps:
|
||||
if self.deps or self.invalid_deps or self.arch_build_deps or self.invalid_build_deps or self.break_deps:
|
||||
excusedata['dependencies'] = dep_data = {}
|
||||
migrate_after = sorted(x for x in self.deps if x not in self.invalid_deps)
|
||||
migrate_after = sorted((self.deps.keys() - self.invalid_deps)
|
||||
| (self.arch_build_deps.keys() - self.invalid_build_deps))
|
||||
break_deps = [x for x, _ in self.break_deps if x not in self.deps]
|
||||
|
||||
if self.invalid_deps:
|
||||
dep_data['blocked-by'] = sorted(self.invalid_deps)
|
||||
if self.invalid_deps or self.invalid_build_deps:
|
||||
dep_data['blocked-by'] = sorted(self.invalid_deps | self.invalid_build_deps)
|
||||
if migrate_after:
|
||||
dep_data['migrate-after'] = migrate_after
|
||||
if break_deps:
|
||||
|
@ -8,6 +8,7 @@ import apt_pkg
|
||||
|
||||
from britney2.hints import Hint, split_into_one_hint_per_package
|
||||
from britney2.policies import PolicyVerdict
|
||||
from britney2.utils import get_dependency_solvers
|
||||
|
||||
|
||||
class BasePolicy(object):
|
||||
@ -616,3 +617,80 @@ class PiupartsPolicy(BasePolicy):
|
||||
summary[source] = (state, url)
|
||||
|
||||
return summary
|
||||
|
||||
|
||||
class BuildDependsPolicy(BasePolicy):
|
||||
|
||||
def __init__(self, options, suite_info):
|
||||
super().__init__('build-depends', options, suite_info, {'unstable', 'tpu', 'pu'})
|
||||
self._britney = None
|
||||
|
||||
def initialise(self, britney):
|
||||
super().initialise(britney)
|
||||
self._britney = britney
|
||||
|
||||
def apply_policy_impl(self, build_deps_info, suite, source_name, source_data_tdist, source_data_srcdist, excuse,
|
||||
get_dependency_solvers=get_dependency_solvers):
|
||||
verdict = PolicyVerdict.PASS
|
||||
britney = self._britney
|
||||
|
||||
# local copies for better performance
|
||||
parse_src_depends = apt_pkg.parse_src_depends
|
||||
|
||||
# analyze the dependency fields (if present)
|
||||
deps = source_data_srcdist.build_deps_arch
|
||||
if not deps:
|
||||
return verdict
|
||||
|
||||
sources_s = None
|
||||
sources_t = None
|
||||
unsat_bd = {}
|
||||
relevant_archs = {binary.architecture for binary in source_data_srcdist.binaries
|
||||
if britney.all_binaries[binary].architecture != 'all'}
|
||||
|
||||
for arch in (arch for arch in self.options.architectures if arch in relevant_archs):
|
||||
# retrieve the binary package from the specified suite and arch
|
||||
binaries_s_a, provides_s_a = britney.binaries[suite][arch]
|
||||
binaries_t_a, provides_t_a = britney.binaries['testing'][arch]
|
||||
# for every dependency block (formed as conjunction of disjunction)
|
||||
for block, block_txt in zip(parse_src_depends(deps, False, arch), deps.split(',')):
|
||||
# if the block is satisfied in testing, then skip the block
|
||||
if get_dependency_solvers(block, binaries_t_a, provides_t_a):
|
||||
# Satisfied in testing; all ok.
|
||||
continue
|
||||
|
||||
# check if the block can be satisfied in the source suite, and list the solving packages
|
||||
packages = get_dependency_solvers(block, binaries_s_a, provides_s_a)
|
||||
packages = [binaries_s_a[p].source for p in packages]
|
||||
|
||||
# if the dependency can be satisfied by the same source package, skip the block:
|
||||
# obviously both binary packages will enter testing together
|
||||
if source_name in packages:
|
||||
continue
|
||||
|
||||
# if no package can satisfy the dependency, add this information to the excuse
|
||||
if not packages:
|
||||
excuse.addhtml("%s unsatisfiable Build-Depends(-Arch) on %s: %s" % (source_name, arch, block_txt.strip()))
|
||||
if arch not in unsat_bd:
|
||||
unsat_bd[arch] = []
|
||||
unsat_bd[arch].append(block_txt.strip())
|
||||
if verdict.value < PolicyVerdict.REJECTED_PERMANENTLY.value:
|
||||
verdict = PolicyVerdict.REJECTED_PERMANENTLY
|
||||
continue
|
||||
|
||||
if not sources_t:
|
||||
sources_t = britney.sources['testing']
|
||||
sources_s = britney.sources[suite]
|
||||
|
||||
# for the solving packages, update the excuse to add the dependencies
|
||||
for p in packages:
|
||||
if arch not in self.options.break_arches:
|
||||
if p in sources_t and sources_t[p].version == sources_s[p].version:
|
||||
excuse.add_arch_build_dep("%s/%s" % (p, arch), arch)
|
||||
else:
|
||||
excuse.add_arch_build_dep(p, arch)
|
||||
if unsat_bd:
|
||||
build_deps_info['unsatisfiable-arch-build-depends'] = unsat_bd
|
||||
|
||||
return verdict
|
||||
|
||||
|
@ -721,11 +721,18 @@ def read_sources_file(filename, sources=None, intern=sys.intern):
|
||||
section = get_field('Section')
|
||||
if section:
|
||||
section = intern(section.strip())
|
||||
build_deps_arch = ", ".join(x for x in (get_field('Build-Depends'), get_field('Build-Depends-Arch'))
|
||||
if x is not None)
|
||||
if build_deps_arch != '':
|
||||
build_deps_arch = sys.intern(build_deps_arch)
|
||||
else:
|
||||
build_deps_arch = None
|
||||
sources[intern(pkg)] = SourcePackage(intern(ver),
|
||||
section,
|
||||
[],
|
||||
maint,
|
||||
False,
|
||||
build_deps_arch,
|
||||
)
|
||||
return sources
|
||||
|
||||
@ -789,14 +796,17 @@ def invalidate_excuses(excuses, valid, invalid):
|
||||
|
||||
# build the reverse dependencies
|
||||
revdeps = defaultdict(list)
|
||||
revbuilddeps = defaultdict(list)
|
||||
for exc in excuses.values():
|
||||
for d in exc.deps:
|
||||
revdeps[d].append(exc.name)
|
||||
for d in exc.arch_build_deps:
|
||||
revbuilddeps[d].append(exc.name)
|
||||
|
||||
# loop on the invalid excuses
|
||||
for ename in iter_except(invalid.pop, KeyError):
|
||||
# if there is no reverse dependency, skip the item
|
||||
if ename not in revdeps:
|
||||
if ename not in revdeps and ename not in revbuilddeps:
|
||||
continue
|
||||
# if the dependency can be satisfied by a testing-proposed-updates excuse, skip the item
|
||||
if (ename + "_tpu") in valid:
|
||||
@ -807,21 +817,34 @@ def invalidate_excuses(excuses, valid, invalid):
|
||||
rdep_verdict = PolicyVerdict.REJECTED_BLOCKED_BY_ANOTHER_ITEM
|
||||
|
||||
# loop on the reverse dependencies
|
||||
for x in revdeps[ename]:
|
||||
if x in valid:
|
||||
# if the item is valid and it is marked as `forced', skip the item
|
||||
if excuses[x].forced:
|
||||
continue
|
||||
if ename in revdeps:
|
||||
for x in revdeps[ename]:
|
||||
# if the item is valid and it is not marked as `forced', then we invalidate it
|
||||
if x in valid and not excuses[x].forced:
|
||||
|
||||
# otherwise, invalidate the dependency and mark as invalidated and
|
||||
# remove the depending excuses
|
||||
excuses[x].invalidate_dep(ename)
|
||||
valid.discard(x)
|
||||
invalid.add(x)
|
||||
excuses[x].addhtml("Invalidated by dependency")
|
||||
excuses[x].addreason("depends")
|
||||
if excuses[x].policy_verdict.value < rdep_verdict.value:
|
||||
excuses[x].policy_verdict = rdep_verdict
|
||||
# otherwise, invalidate the dependency and mark as invalidated and
|
||||
# remove the depending excuses
|
||||
excuses[x].invalidate_dep(ename)
|
||||
valid.discard(x)
|
||||
invalid.add(x)
|
||||
excuses[x].addhtml("Invalidated by dependency")
|
||||
excuses[x].addreason("depends")
|
||||
if excuses[x].policy_verdict.value < rdep_verdict.value:
|
||||
excuses[x].policy_verdict = rdep_verdict
|
||||
|
||||
if ename in revbuilddeps:
|
||||
for x in revbuilddeps[ename]:
|
||||
# if the item is valid and it is not marked as `forced', then we invalidate it
|
||||
if x in valid and not excuses[x].forced:
|
||||
|
||||
# otherwise, invalidate the dependency and mark as invalidated and
|
||||
# remove the depending excuses
|
||||
excuses[x].invalidate_build_dep(ename)
|
||||
valid.discard(x)
|
||||
invalid.add(x)
|
||||
excuses[x].addhtml("Invalidated by build-dependency")
|
||||
if excuses[x].policy_verdict.value < rdep_verdict.value:
|
||||
excuses[x].policy_verdict = rdep_verdict
|
||||
|
||||
|
||||
def compile_nuninst(binaries_t, inst_tester, architectures, nobreakall_arches):
|
||||
|
@ -39,7 +39,7 @@ def create_excuse(name):
|
||||
def create_source_package(version, section='devel', binaries=None):
|
||||
if binaries is None:
|
||||
binaries = []
|
||||
return SourcePackage(version, section, binaries, 'Random tester', False)
|
||||
return SourcePackage(version, section, binaries, 'Random tester', False, None)
|
||||
|
||||
|
||||
def create_policy_objects(source_name, target_version, source_version):
|
||||
|
Loading…
x
Reference in New Issue
Block a user