mirror of
https://git.launchpad.net/~ubuntu-release/britney/+git/britney2-ubuntu
synced 2025-05-18 22:11:30 +00:00
Specify dependencies between excuses based on packages
Rework the dependencies between excuses. Dependencies are now specified in terms of packages (source or binary); from these packages, the dependencies on other excuses (the excuses that contain those packages) are calculated. As with package dependencies, a dependency between excuses can contain alternatives. Each alternative can satisfy the dependency, so the excuse only becomes invalid once all alternatives of a specific dependency have been invalidated. Tracking of the alternatives and their validity is moved to separate objects.
This commit is contained in: 8b6638c566 (parent: b5285f21d7)
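
Editor's note: the core idea of this commit is that a dependency of an excuse is now a set of alternatives, and the dependency (and with it the excuse) only becomes invalid once every alternative has been invalidated. The following is a minimal standalone sketch of that idea; the class and attribute names merely mirror the ones introduced below, and this is illustrative code, not part of britney2.

# Illustrative sketch only -- mirrors the DependencyState/ExcuseDependency
# semantics described in the commit message; not britney2 code.

class DependencyState:
    """Validity of one alternative (one excuse that could satisfy the dep)."""
    def __init__(self, dep):
        self.dep = dep        # name of the excuse we depend on
        self.valid = True

    def invalidate(self):
        self.valid = False


class ExcuseDependency:
    """One dependency, satisfiable by any one of several alternatives."""
    def __init__(self, depstates):
        self.depstates = depstates

    @property
    def valid(self):
        # the dependency holds as long as at least one alternative is valid
        return any(d.valid for d in self.depstates)


alt_a, alt_b = DependencyState("foo"), DependencyState("foo_tpu")
dep = ExcuseDependency([alt_a, alt_b])
alt_a.invalidate()
print(dep.valid)   # True: the foo_tpu alternative still satisfies it
alt_b.invalidate()
print(dep.valid)   # False: every alternative has been invalidated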
@@ -18,6 +18,7 @@ from collections import defaultdict
 import re
 
 from britney2 import DependencyType
+from britney2.excusedeps import DependencySpec, DependencyState, ImpossibleDependencyState
 from britney2.policies.policy import PolicyVerdict
 
 VERDICT2DESC = {
@@ -40,6 +41,87 @@ VERDICT2DESC = {
 }
 
 
+class ExcuseDependency(object):
+    """Object to represent a specific dependecy of an excuse on a package
+    (source or binary) or on other excuses"""
+
+    def __init__(self, spec, depstates):
+        """
+        :param: spec: DependencySpec
+        :param: depstates: list of DependencyState, each of which can satisfy
+        the dependency
+        """
+        self.spec = spec
+        self.depstates = depstates
+
+    @property
+    def deptype(self):
+        return self.spec.deptype
+
+    @property
+    def valid(self):
+        if {d for d in self.depstates if d.valid}:
+            return True
+        else:
+            return False
+
+    @property
+    def deps(self):
+        return {d.dep for d in self.depstates}
+
+    @property
+    def possible(self):
+        if {d for d in self.depstates if d.possible}:
+            return True
+        else:
+            return False
+
+    @property
+    def first_dep(self):
+        """return the first valid dependency, if there is one, otherwise the
+        first possible one
+
+        return None if there are only impossible dependencies
+        """
+        first = None
+        for d in self.depstates:
+            if d.valid:
+                return d.dep
+            elif d.possible and not first:
+                first = d.dep
+        return first
+
+    @property
+    def first_impossible_dep(self):
+        """return the first impossible dependency, if there is one"""
+        first = None
+        for d in self.depstates:
+            if not d.possible:
+                return d.desc
+        return first
+
+    @property
+    def verdict(self):
+        return min({d.verdict for d in self.depstates})
+
+    def invalidate(self, excuse, verdict):
+        """invalidate the dependencies on a specific excuse
+
+        :param excuse: the excuse which is no longer valid
+        :param verdict: the PolicyVerdict causing the invalidation
+        """
+        invalidated_alternative = False
+        valid_alternative_left = False
+        for ds in self.depstates:
+            if ds.dep == excuse:
+                ds.invalidate(verdict)
+                invalidated_alternative = True
+            elif ds.valid:
+                valid_alternative_left = True
+
+        return valid_alternative_left
+
+
 class Excuse(object):
     """Excuse class
 
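
For orientation, first_dep above picks which alternative to report: the first valid one, otherwise the first possible one, and None when only impossible alternatives remain. A standalone sketch of that selection (illustrative only; Alt is a stand-in, not a britney2 class):

# Illustrative sketch of the first_dep selection logic; Alt is a stand-in.

class Alt:
    def __init__(self, dep, valid=True, possible=True):
        self.dep, self.valid, self.possible = dep, valid, possible


def first_dep(depstates):
    first = None
    for d in depstates:
        if d.valid:
            return d.dep               # a valid alternative always wins
        elif d.possible and not first:
            first = d.dep              # otherwise remember the first possible one
    return first                       # None if nothing is even possible


print(first_dep([Alt("foo_tpu", valid=False), Alt("foo")]))   # foo
print(first_dep([Alt("bar", valid=False, possible=False)]))   # None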
@@ -75,8 +157,7 @@ class Excuse(object):
         self.forced = False
         self._policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
 
-        self.all_invalid_deps = {}
-        self.all_deps = {}
+        self.all_deps = []
         self.break_deps = []
         self.unsatisfiable_on_archs = []
         self.unsat_deps = defaultdict(set)
@@ -91,15 +172,14 @@ class Excuse(object):
         self.verdict_info = defaultdict(list)
         self.infoline = []
         self.detailed_info = []
+        self.dep_info_rendered = False
 
         # packages (source and binary) that will migrate to testing if the
        # item from this excuse migrates
         self.packages = defaultdict(set)
 
-        # for each deptype, there is a set that contains
-        # frozensets of PackageIds (of sources or binaries) that can satisfy
-        # the dep
-        self.depends_packages = defaultdict(set)
+        # list of ExcuseDependency, with dependencies on packages
+        self.depends_packages = []
 
         # contains all PackageIds in any over the sets above
         self.depends_packages_flattened = set()
@@ -144,13 +224,27 @@ class Excuse(object):
         """Set the section of the package"""
         self.section = section
 
-    def add_dependency(self, deptype, name, arch):
-        """Add a dependency of type deptype """
-        if name not in self.all_deps:
-            self.all_deps[name] = {}
-        if deptype not in self.all_deps[name]:
-            self.all_deps[name][deptype] = []
-        self.all_deps[name][deptype].append(arch)
+    def add_dependency(self, dep, spec):
+        """Add a dependency of type deptype
+
+        :param dep: set with names of excuses, each of which satisfies the dep
+        :param spec: DependencySpec
+
+        """
+
+        assert dep != frozenset(), "%s: Adding empty list of dependencies" % self.name
+
+        deps = []
+        for d in dep:
+            if isinstance(d, DependencyState):
+                deps.append(d)
+            else:
+                deps.append(DependencyState(d))
+        ed = ExcuseDependency(spec, deps)
+        self.all_deps.append(ed)
+        if not ed.valid:
+            self.do_invalidate(ed)
+        return ed.valid
 
     def get_deps(self):
         # the autohinter uses the excuses data to query dependencies between
@@ -158,9 +252,12 @@ class Excuse(object):
         # the data that was in the old deps set
         """ Get the dependencies of type DEPENDS """
         deps = set()
-        for dep in self.all_deps:
-            if DependencyType.DEPENDS in self.all_deps[dep]:
-                deps.add(dep)
+        for dep in [d for d in self.all_deps if d.deptype == DependencyType.DEPENDS]:
+            # add the first valid dependency
+            for d in dep.depstates:
+                if d.valid:
+                    deps.add(d.dep)
+                    break
         return deps
 
     def add_break_dep(self, name, arch):
@@ -177,9 +274,24 @@ class Excuse(object):
         """Add an unsatisfiable dependency"""
         self.unsat_deps[arch].add(signature)
 
+    def do_invalidate(self, dep):
+        """
+        param: dep: ExcuseDependency
+        """
+        self.addreason(dep.deptype.get_reason())
+        if self.policy_verdict < dep.verdict:
+            self.policy_verdict = dep.verdict
+
     def invalidate_dependency(self, name, verdict):
         """Invalidate dependency"""
-        self.all_invalid_deps[name] = verdict
+        invalidate = False
+
+        for dep in self.all_deps:
+            if not dep.invalidate(name, verdict):
+                invalidate = True
+                self.do_invalidate(dep)
+
+        return not invalidate
 
     def setdaysold(self, daysold, mindays):
         """Set the number of days from the upload and the minimum number of days for the update"""
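
The interplay of ExcuseDependency.invalidate() and Excuse.invalidate_dependency() above is what implements the "all alternatives must fail" rule: invalidating one excuse only kills the matching alternative, and the depending excuse is demoted only when no valid alternative is left. A standalone sketch of that check (stand-in types, not britney2 code):

# Illustrative stand-in for the per-dependency invalidation; not britney2 code.

class Alt:
    def __init__(self, dep):
        self.dep, self.valid = dep, True


def invalidate(depstates, excuse_name):
    """Invalidate the alternative matching excuse_name.

    Returns True while a valid alternative is left, i.e. the dep still holds."""
    valid_alternative_left = False
    for ds in depstates:
        if ds.dep == excuse_name:
            ds.valid = False
        elif ds.valid:
            valid_alternative_left = True
    return valid_alternative_left


alts = [Alt("src-a"), Alt("src-a_tpu")]
print(invalidate(alts, "src-a"))      # True: src-a_tpu can still satisfy it
print(invalidate(alts, "src-a_tpu"))  # False: no alternative left, demote the excuse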
@@ -224,9 +336,21 @@ class Excuse(object):
     def add_package(self, pkg_id):
         self.packages[pkg_id.architecture].add(pkg_id)
 
-    def add_package_depends(self, deptype, depends):
-        """depends is a set of PackageIds (source or binary) that can satisfy the dependency"""
-        self.depends_packages[deptype].add(frozenset(depends))
+    def add_package_depends(self, spec, depends):
+        """Add dependency on a package (source or binary)
+
+        :param spec: DependencySpec
+        :param depends: set of PackageIds (source or binary), each of which can satisfy the dependency
+        """
+
+        assert depends != frozenset(), "%s: Adding empty list of package dependencies" % self.name
+
+        # we use DependencyState for consistency with excuse dependencies, but
+        # package dependencies are never invalidated, they are used to add
+        # excuse dependencies (in invalidate_excuses()), and these are
+        # (potentially) invalidated
+        ed = ExcuseDependency(spec, [DependencyState(d) for d in depends])
+        self.depends_packages.append(ed)
         self.depends_packages_flattened |= depends
 
     def _format_verdict_summary(self):
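
For context on the caller side: policies no longer compute an excuse (item) name; they pass a DependencySpec plus the set of PackageIds that can satisfy the dependency, and the mapping to excuses happens later in invalidate_excuses(). A hedged standalone sketch with simplified stand-in types (this PackageId is a plain namedtuple, not the britney2 class):

# Standalone sketch with simplified stand-ins; the real DependencySpec and
# PackageId live in britney2 and carry more information.
from collections import namedtuple

PackageId = namedtuple("PackageId", ["name", "version", "architecture"])


class DependencySpec:
    def __init__(self, deptype, architecture=None):
        self.deptype = deptype
        self.architecture = architecture


depends_packages = []              # what excuse.depends_packages now holds
depends_packages_flattened = set()


def add_package_depends(spec, depends):
    assert depends, "an empty set of alternatives is not allowed"
    depends_packages.append((spec, frozenset(depends)))
    depends_packages_flattened.update(depends)


# a build-dependency that either binary built from the solving source satisfies
add_package_depends(DependencySpec("build-depends", "amd64"),
                    {PackageId("libfoo-dev", "1.0", "amd64"),
                     PackageId("libfoo0", "1.0", "amd64")})
print(len(depends_packages), sorted(p.name for p in depends_packages_flattened))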
@@ -235,25 +359,32 @@ class Excuse(object):
             return VERDICT2DESC[verdict]
         return "UNKNOWN: Missing description for {0} - Please file a bug against Britney".format(verdict.name)
 
-    def _render_dep_issues(self, dep_issues, invalid_deps):
-        lastdep = ""
-        res = []
-        for x in sorted(dep_issues, key=lambda x: x.split('/')[0]):
-            dep = x.split('/')[0]
-            if dep != lastdep:
-                seen = {}
-            lastdep = dep
-            for deptype in sorted(dep_issues[x], key=lambda y: str(y)):
-                field = deptype
-                if deptype in seen:
-                    continue
-                seen[deptype] = True
-                if x in invalid_deps:
-                    res.append("%s: %s <a href=\"#%s\">%s</a> (not considered)" % (field, self.name, dep, dep))
-                else:
-                    res.append("%s: %s <a href=\"#%s\">%s</a>" % (field, self.name, dep, dep))
-
-        return res
+    def _render_dep_issues(self):
+        if self.dep_info_rendered:
+            return
+
+        dep_issues = defaultdict(set)
+        for d in self.all_deps:
+            dep = d.first_dep
+            info = ""
+            if d.valid:
+                info = "%s: %s <a href=\"#%s\">%s</a>" % (d.deptype, self.name, dep, dep)
+            elif not d.possible:
+                desc = d.first_impossible_dep
+                info = "Impossible %s: %s -> %s" % (d.deptype, self.name, desc)
+            else:
+                info = "%s: %s <a href=\"#%s\">%s</a> (not considered)" % (d.deptype, self.name, dep, dep)
+                dep_issues[d.verdict].add("Invalidated by %s" % d.deptype.get_description())
+            dep_issues[d.verdict].add(info)
+
+        seen = set()
+        for v in sorted(dep_issues.keys(), reverse=True):
+            for i in sorted(dep_issues[v]):
+                if i not in seen:
+                    self.add_verdict_info(v, i)
+                    seen.add(i)
+
+        self.dep_info_rendered = True
 
     def html(self):
         """Render the excuse in HTML"""
@@ -276,6 +407,7 @@ class Excuse(object):
 
     def _text(self):
         """Render the excuse in text"""
+        self._render_dep_issues()
         res = []
         res.append(
             "Migration status for %s (%s to %s): %s" %
@@ -285,10 +417,6 @@ class Excuse(object):
         for v in sorted(self.verdict_info.keys(), reverse=True):
             for x in self.verdict_info[v]:
                 res.append("" + x + "")
-            di = [x for x in self.all_invalid_deps.keys() if self.all_invalid_deps[x] == v]
-            ad = {x: self.all_deps[x] for x in di}
-            for x in self._render_dep_issues(ad, di):
-                res.append("" + x + "")
         if self.infoline:
             res.append("Additional info:")
             for x in self.infoline:
@@ -326,16 +454,21 @@ class Excuse(object):
             'on-architectures': sorted(self.missing_builds),
             'on-unimportant-architectures': sorted(self.missing_builds_ood_arch),
         }
-        if self.all_invalid_deps:
+        if {d for d in self.all_deps if not d.valid and d.possible}:
             excusedata['invalidated-by-other-package'] = True
-        if self.all_deps or self.all_invalid_deps.keys() \
+        if self.all_deps \
                 or self.break_deps or self.unsat_deps:
             excusedata['dependencies'] = dep_data = {}
-            migrate_after = sorted(self.all_deps.keys() - self.all_invalid_deps.keys())
-            break_deps = [x for x, _ in self.break_deps if x not in self.all_deps]
-            if self.all_invalid_deps.keys():
-                dep_data['blocked-by'] = sorted(self.all_invalid_deps.keys())
+            migrate_after = sorted(set(d.first_dep for d in self.all_deps if d.valid))
+            blocked_by = sorted(set(d.first_dep for d in self.all_deps
+                                    if not d.valid and d.possible))
+
+            break_deps = [x for x, _ in self.break_deps if
+                          x not in migrate_after and
+                          x not in blocked_by]
+
+            if blocked_by:
+                dep_data['blocked-by'] = blocked_by
             if migrate_after:
                 dep_data['migrate-after'] = migrate_after
             if break_deps:
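
The excuses output ('migrate-after', 'blocked-by') is now derived from the per-dependency state instead of two parallel dicts. Roughly, and as a standalone sketch only (Dep is a stand-in for ExcuseDependency):

# Standalone sketch only; Dep stands in for ExcuseDependency.

class Dep:
    def __init__(self, first_dep, valid, possible=True):
        self.first_dep, self.valid, self.possible = first_dep, valid, possible


all_deps = [Dep("glib2.0", valid=True),
            Dep("pcre2", valid=False),                        # invalidated, still possible
            Dep("no-such-src", valid=False, possible=False)]  # impossible dependency

migrate_after = sorted({d.first_dep for d in all_deps if d.valid})
blocked_by = sorted({d.first_dep for d in all_deps if not d.valid and d.possible})

print(migrate_after)  # ['glib2.0']
print(blocked_by)     # ['pcre2']; impossible deps are reported separately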
britney2/excusedeps.py (new file, 50 lines)
@@ -0,0 +1,50 @@
+from britney2.policies import PolicyVerdict
+
+
+class DependencySpec(object):
+
+    def __init__(self, deptype, architecture=None):
+        self.deptype = deptype
+        self.architecture = architecture
+        assert self.architecture != 'all', "all not allowed for DependencySpec"
+
+
+class DependencyState(object):
+
+    def __init__(self, dep):
+        """State of a dependency
+
+        :param dep: the excuse that we are depending on
+
+        """
+        self.valid = True
+        self.verdict = PolicyVerdict.PASS
+        self.dep = dep
+
+    @property
+    def possible(self):
+        return True
+
+    def invalidate(self, verdict):
+        self.valid = False
+        if verdict > self.verdict:
+            self.verdict = verdict
+
+
+class ImpossibleDependencyState(DependencyState):
+    """Object tracking an impossible dependency"""
+
+    def __init__(self, verdict, desc):
+        """
+
+        :param desc: description of the impossible dependency
+
+        """
+        self.valid = False
+        self.verdict = verdict
+        self.desc = desc
+        self.dep = None
+
+    @property
+    def possible(self):
+        return False
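
A short standalone sketch of the two state objects defined in this new file: a DependencyState starts out valid and records the worst verdict it is invalidated with, while an ImpossibleDependencyState is created invalid and is never possible. PolicyVerdict is replaced here by a plain IntEnum in which a larger value means a worse verdict; that ordering is an assumption that merely mirrors how the comparisons are used above, and the code is illustrative, not britney2.

# Illustrative sketch; Verdict is a stand-in for PolicyVerdict and assumes
# that a larger value means a worse verdict.
from enum import IntEnum


class Verdict(IntEnum):
    PASS = 1
    REJECTED_BLOCKED_BY_ANOTHER_ITEM = 2
    REJECTED_PERMANENTLY = 3


class DependencyState:
    possible = True

    def __init__(self, dep):
        self.dep, self.valid, self.verdict = dep, True, Verdict.PASS

    def invalidate(self, verdict):
        self.valid = False
        if verdict > self.verdict:    # keep the worst verdict seen so far
            self.verdict = verdict


class ImpossibleDependencyState(DependencyState):
    possible = False

    def __init__(self, verdict, desc):
        self.dep, self.valid = None, False
        self.verdict, self.desc = verdict, desc


ds = DependencyState("foo")
ds.invalidate(Verdict.REJECTED_BLOCKED_BY_ANOTHER_ITEM)
imp = ImpossibleDependencyState(Verdict.REJECTED_PERMANENTLY, "bar (no such package)")
print(ds.valid, ds.verdict.name, imp.possible)  # False REJECTED_BLOCKED_BY_ANOTHER_ITEM False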
@@ -5,8 +5,9 @@ import apt_pkg
 
 from britney2 import DependencyType, PackageId
 from britney2.excuse import Excuse
+from britney2.excusedeps import DependencySpec
 from britney2.policies import PolicyVerdict
-from britney2.utils import (invalidate_excuses, find_smooth_updateable_binaries, compute_item_name,
+from britney2.utils import (invalidate_excuses, find_smooth_updateable_binaries,
                             get_dependency_solvers,
                             )
 
@@ -68,11 +69,11 @@ class ExcuseFinder(object):
 
         # check if the block can be satisfied in the source suite, and list the solving packages
         packages = get_dependency_solvers(block, binaries_s_a, provides_s_a)
-        packages = sorted(p.source for p in packages)
+        sources = sorted(p.source for p in packages)
 
         # if the dependency can be satisfied by the same source package, skip the block:
         # obviously both binary packages will enter testing together
-        if src in packages:
+        if src in sources:
             continue
 
         # if no package can satisfy the dependency, add this information to the excuse
@@ -100,11 +101,10 @@ class ExcuseFinder(object):
             sources_t = target_suite.sources
             sources_s = source_suite.sources
             for p in packages:
-                item_name = compute_item_name(sources_t, sources_s, p, arch)
-                excuse.add_dependency(DependencyType.DEPENDS, item_name, arch)
+                excuse.add_package_depends(DependencyType.DEPENDS, {p.pkg_id})
         else:
             for p in packages:
-                excuse.add_break_dep(p, arch)
+                excuse.add_break_dep(p.source, arch)
 
         return is_all_ok
 
@@ -652,45 +652,6 @@ class ExcuseFinder(object):
         # extract the not considered packages, which are in the excuses but not in upgrade_me
         unconsidered = {ename for ename in excuses if ename not in actionable_items}
 
-        # invalidate impossible excuses
-        for e in excuses.values():
-            # parts[0] == package name
-            # parts[1] == optional architecture
-            parts = e.name.split('/')
-            for d in sorted(e.all_deps):
-                for deptype in e.all_deps[d]:
-                    ok = False
-                    # source -> source dependency; both packages must have
-                    # valid excuses
-                    if d in actionable_items or d in unconsidered:
-                        ok = True
-                    # if the excuse is for a binNMU, also consider d/$arch as a
-                    # valid excuse
-                    elif len(parts) == 2:
-                        bd = '%s/%s' % (d, parts[1])
-                        if bd in actionable_items or bd in unconsidered:
-                            ok = True
-                    # if the excuse is for a source package, check each of the
-                    # architectures on which the excuse lists a dependency on d,
-                    # and consider the excuse valid if it is possible on each
-                    # architecture
-                    else:
-                        arch_ok = True
-                        for arch in e.all_deps[d][deptype]:
-                            bd = '%s/%s' % (d, arch)
-                            if bd not in actionable_items and bd not in unconsidered:
-                                arch_ok = False
-                                break
-                        if arch_ok:
-                            ok = True
-                    if not ok:
-                        # TODO this should actually invalidate the excuse
-                        # would that be correct in all cases?
-                        # - arch all on non-nobreakall arch?
-                        # - pkg in testing already uninstallable?
-                        e.addinfo("Impossible %s: %s -> %s" % (deptype, e.name, d))
-                        e.addreason(deptype.get_reason())
-
         invalidate_excuses(excuses, actionable_items, unconsidered)
 
         mi_factory = self._migration_item_factory
@@ -9,12 +9,13 @@ from urllib.parse import quote
 
 import apt_pkg
 
-from britney2 import SuiteClass
+from britney2 import SuiteClass, PackageId
 from britney2.hints import Hint, split_into_one_hint_per_package
 from britney2.inputs.suiteloader import SuiteContentLoader
 from britney2.policies import PolicyVerdict, ApplySrcPolicy
-from britney2.utils import get_dependency_solvers, compute_item_name
+from britney2.utils import get_dependency_solvers
 from britney2 import DependencyType
+from britney2.excusedeps import DependencySpec
 
 
 class PolicyEngine(object):
@@ -841,8 +842,8 @@ class BuildDependsPolicy(BasePolicy):
         # for the solving packages, update the excuse to add the dependencies
         for p in packages:
             if arch not in self.options.break_arches:
-                item_name = compute_item_name(sources_t, sources_s, p, arch)
-                excuse.add_dependency(dep_type, item_name, arch)
+                spec = DependencySpec(dep_type, arch)
+                excuse.add_package_depends(spec, {p.pkg_id})
 
         if arch in results:
             if results[arch] == BuildDepResult.FAILED:
@@ -915,11 +916,11 @@ class BuildDependsPolicy(BasePolicy):
 
         # check if the block can be satisfied in the source suite, and list the solving packages
         packages = get_dependency_solvers(block, binaries_s_a, provides_s_a, build_depends=True)
-        packages = sorted(p.source for p in packages)
+        sources = sorted(p.source for p in packages)
 
         # if the dependency can be satisfied by the same source package, skip the block:
         # obviously both binary packages will enter the target suite together
-        if source_name in packages:
+        if source_name in sources:
             continue
 
         # if no package can satisfy the dependency, add this information to the excuse
@@ -1016,12 +1017,15 @@ class BuiltUsingPolicy(BasePolicy):
                 s_ver = s_source.version
                 if apt_pkg.version_compare(s_ver, bu_version) >= 0:
                     found = True
-                    item_name = compute_item_name(sources_t, source_suite.sources, bu_source, arch)
+                    dep = PackageId(bu_source, s_ver, "source")
                     if arch in self.options.break_arches:
-                        excuse.add_detailed_info("Ignoring Built-Using for %s/%s on %s" % (pkg_name, arch, item_name))
+                        excuse.add_detailed_info("Ignoring Built-Using for %s/%s on %s" % (
+                            pkg_name, arch, dep.uvname))
                     else:
-                        excuse.add_dependency(DependencyType.BUILT_USING, item_name, arch)
-                        excuse.add_detailed_info("%s/%s has Built-Using on %s" % (pkg_name, arch, item_name))
+                        spec = DependencySpec(DependencyType.BUILT_USING, arch)
+                        excuse.add_package_depends(spec, {dep})
+                        excuse.add_detailed_info("%s/%s has Built-Using on %s" % (
+                            pkg_name, arch, dep.uvname))
 
         return found
 
@@ -35,6 +35,7 @@ from itertools import filterfalse, chain
 import yaml
 
 from britney2 import SourcePackage
+from britney2.excusedeps import DependencySpec, ImpossibleDependencyState
 from britney2.policies import PolicyVerdict
 
 
@@ -658,19 +659,50 @@ def invalidate_excuses(excuses, valid, invalid):
     `valid' and `invalid' excuses.
     """
 
-    # build the reverse dependencies
-    allrevdeps = defaultdict(dict)
-    for exc in excuses.values():
-        for d in exc.all_deps:
-            if exc.name not in allrevdeps[d]:
-                allrevdeps[d][exc.name] = set()
-            for deptype in exc.all_deps[d]:
-                allrevdeps[d][exc.name].add(deptype)
+    # make a list of all packages (source and binary) that are present in the
+    # excuses we have
+    excuses_packages = defaultdict(set)
+    for exc in excuses.values():
+        for arch in exc.packages:
+            for pkg_id in exc.packages[arch]:
+                # note that the same package can be in multiple excuses
+                # eg. when unstable and TPU have the same packages
+                excuses_packages[pkg_id].add(exc.name)
+
+    # create dependencies between excuses based on packages
+    excuses_rdeps = defaultdict(set)
+    for exc in excuses.values():
+        for deptype in exc.all_deps:
+            for d in exc.all_deps[deptype]:
+                excuses_rdeps[d].add(exc.name)
+
+        for pkg_dep in exc.depends_packages:
+            # set of excuses, each of which can satisfy this specific
+            # dependency
+            # if there is a dependency on a package that doesn't exist, the
+            # set will contain an ImpossibleDependencyState
+            dep_exc = set()
+            for pkg_id in pkg_dep.deps:
+                pkg_excuses = excuses_packages[pkg_id]
+                # if the dependency isn't found, we get an empty set
+                if pkg_excuses == frozenset():
+                    imp_dep = ImpossibleDependencyState(
+                        PolicyVerdict.REJECTED_PERMANENTLY,
+                        "%s" % (pkg_id.name))
+                    dep_exc.add(imp_dep)
+
+                else:
+                    dep_exc |= pkg_excuses
+                    for e in pkg_excuses:
+                        excuses_rdeps[e].add(exc.name)
+            if not exc.add_dependency(dep_exc, pkg_dep.spec):
+                valid.discard(exc.name)
+                invalid.add(exc.name)
 
     # loop on the invalid excuses
     for ename in iter_except(invalid.pop, KeyError):
         # if there is no reverse dependency, skip the item
-        if ename not in allrevdeps:
+        if ename not in excuses_rdeps:
             continue
         # if the dependency can be satisfied by a testing-proposed-updates excuse, skip the item
         if (ename + "_tpu") in valid:
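
To visualize the new mapping step above: package-level dependencies are turned into excuse-level dependencies by first indexing which excuse contains which package, then looking every depended-on package up in that index; packages that no excuse ships yield an impossible dependency. A standalone sketch with plain strings standing in for PackageIds and excuses:

# Standalone sketch; plain strings stand in for PackageIds and excuse names.
from collections import defaultdict

# which excuse would ship which packages
excuse_ships = {"foo": {"libfoo1/amd64", "foo/amd64"},
                "bar": {"bar/amd64"}}

# index: package -> excuses that contain it
excuses_packages = defaultdict(set)
for excuse, pkgs in excuse_ships.items():
    for pkg in pkgs:
        excuses_packages[pkg].add(excuse)

# excuse "baz" depends on libfoo1/amd64 (satisfiable via excuse "foo") and on
# ghost/amd64, which no excuse ships -> an impossible dependency
excuses_rdeps = defaultdict(set)
for pkg in ("libfoo1/amd64", "ghost/amd64"):
    satisfiers = excuses_packages[pkg]
    if not satisfiers:
        print("impossible dependency on", pkg)
    for e in satisfiers:
        excuses_rdeps[e].add("baz")

print(dict(excuses_rdeps))   # {'foo': {'baz'}}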
@@ -681,21 +713,19 @@ def invalidate_excuses(excuses, valid, invalid):
                 rdep_verdict = PolicyVerdict.REJECTED_BLOCKED_BY_ANOTHER_ITEM
 
         # loop on the reverse dependencies
-        if ename in allrevdeps:
-            for x in allrevdeps[ename]:
-                # if the item is valid and it is not marked as `forced', then we invalidate it
-                if x in valid and not excuses[x].forced:
-
-                    if excuses[x].policy_verdict < rdep_verdict:
-                        excuses[x].policy_verdict = rdep_verdict
-                    # otherwise, invalidate the dependency and mark as invalidated and
-                    # remove the depending excuses
-                    excuses[x].invalidate_dependency(ename, rdep_verdict)
-                    valid.discard(x)
-                    invalid.add(x)
-                    for deptype in allrevdeps[ename][x]:
-                        excuses[x].add_verdict_info(rdep_verdict, "Invalidated by %s" % deptype.get_description())
-                        excuses[x].addreason(deptype.get_reason())
+        for x in excuses_rdeps[ename]:
+            exc = excuses[x]
+            # if the item is valid and it is not marked as `forced', then we
+            # invalidate this specfic dependency
+            if x in valid and not exc.forced:
+                # mark this specific dependency as invalid
+                still_valid = exc.invalidate_dependency(ename, rdep_verdict)
+
+                # if there are no alternatives left for this dependency,
+                # invalidate the excuse
+                if not still_valid:
+                    valid.discard(x)
+                    invalid.add(x)
 
 
 def compile_nuninst(target_suite, architectures, nobreakall_arches):
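
Finally, invalidation still propagates through reverse dependencies, but an excuse is only demoted once one of its dependencies has lost all alternatives. A standalone sketch of that worklist loop (toy data; the real code iterates with iter_except(invalid.pop, KeyError) and also honours the forced flag and the _tpu special case):

# Standalone sketch with toy data; not britney2 code.

# excuse -> list of dependencies; each dependency is a set of alternatives
deps = {"app": [{"liba", "liba_tpu"}]}
rdeps = {"liba": {"app"}, "liba_tpu": {"app"}}

valid, invalid = {"app", "liba_tpu"}, {"liba"}   # "liba" was rejected by a policy

while invalid:
    ename = invalid.pop()
    for x in rdeps.get(ename, set()):
        if x not in valid:
            continue
        # drop the failed alternative from every dependency of x
        for alternatives in deps.get(x, []):
            alternatives.discard(ename)
        if any(not alternatives for alternatives in deps.get(x, [])):
            valid.discard(x)         # no alternative left: x becomes invalid too
            invalid.add(x)

print(sorted(valid))   # ['app', 'liba_tpu']: app survives via the liba_tpu alternative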
|
Loading…
x
Reference in New Issue
Block a user