Separate inst_tester from universe

Signed-off-by: Niels Thykier <niels@thykier.net>
Branch: ubuntu/rebased
Author: Niels Thykier, 6 years ago
Parent: 35b06ee007
Commit: 0a669461ca
GPG Key ID: A65B78DBE67C7AAC
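
The commit splits the static dependency graph (the "universe") out of the mutable installability tester, so read-only graph queries move from inst_tester to a new pkg_universe object. A rough, hypothetical sketch of that division of labour, using only method names that appear in the hunks below; the Toy* classes are illustrative stand-ins, not britney's real implementations:

class ToyUniverse:
    """Stand-in for BinaryPackageUniverse: immutable graph queries that move to pkg_universe."""
    def __init__(self, deps, rdeps, negdeps):
        self._deps, self._rdeps, self._negdeps = deps, rdeps, negdeps

    def dependencies_of(self, pkg_id):
        return self._deps.get(pkg_id, frozenset())

    def reverse_dependencies_of(self, pkg_id):
        return self._rdeps.get(pkg_id, frozenset())

    def negative_dependencies_of(self, pkg_id):
        return self._negdeps.get(pkg_id, frozenset())

class ToyTester:
    """Stand-in for the installability tester: mutable testing-suite state stays here."""
    def __init__(self, testing_pkg_ids):
        self._testing = set(testing_pkg_ids)

    def remove_testing_binary(self, pkg_id):
        self._testing.discard(pkg_id)

# After this change, build_installability_tester() returns both halves, as the first hunk shows:
#   pkg_universe, inst_tester = build_installability_tester(suite_info, architectures)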

@@ -352,7 +352,7 @@ class Britney(object):
 }
 self.logger.info("Compiling Installability tester")
-_, self._inst_tester = build_installability_tester(self.suite_info, self.options.architectures)
+self.pkg_universe, self._inst_tester = build_installability_tester(self.suite_info, self.options.architectures)
 if not self.options.nuninst_cache:
 self.logger.info("Building the list of non-installable packages for the full archive")
@@ -1830,6 +1830,7 @@ class Britney(object):
 binaries_s = source_suite.binaries
 binaries_t = target_suite.binaries
 inst_tester = self._inst_tester
+pkg_universe = self.pkg_universe
 adds = set()
 rms = set()
@@ -1863,6 +1864,7 @@ class Britney(object):
 if allow_smooth_updates and source_suite.suite_class.is_primary_source:
 smoothbins = find_smooth_updateable_binaries(bins,
 source_suite.sources[source_name],
+pkg_universe,
 inst_tester,
 binaries_t,
 binaries_s,
@@ -1953,6 +1955,7 @@ class Britney(object):
 packages_t = target_suite.binaries
 provides_t = target_suite.provides_table
 inst_tester = self._inst_tester
+pkg_universe = self.pkg_universe
 eqv_set = set()
 updates, rms, _, skip = self._compute_groups(item.package,
@@ -1989,7 +1992,7 @@ class Britney(object):
 key = (binary, parch)
 old_pkg_id = eqv_table.get(key)
 if old_pkg_id is not None:
-if inst_tester.are_equivalent(new_pkg_id, old_pkg_id):
+if pkg_universe.are_equivalent(new_pkg_id, old_pkg_id):
 eqv_set.add(key)
 # remove all the binaries which aren't being smooth updated
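
For the eqv_set bookkeeping above: two package ids are treated as equivalent when replacing one with the other cannot change any installability result, which lets later steps skip recomputation for that binary. A hypothetical sketch of such a check; the criterion below (identical forward and negative dependencies) is an assumption, britney's actual are_equivalent may use a different test:

def are_equivalent_sketch(universe, pkg_id_a, pkg_id_b):
    # Assumption for illustration: same dependency clauses and same
    # conflicts mean the swap cannot affect installability outcomes.
    return (universe.dependencies_of(pkg_id_a) == universe.dependencies_of(pkg_id_b)
            and universe.negative_dependencies_of(pkg_id_a) == universe.negative_dependencies_of(pkg_id_b))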
@@ -2005,8 +2008,8 @@ class Britney(object):
 if pkey not in eqv_set:
 # all the reverse dependencies are affected by
 # the change
-affected_direct.update(inst_tester.reverse_dependencies_of(rm_pkg_id))
-affected_direct.update(inst_tester.negative_dependencies_of(rm_pkg_id))
+affected_direct.update(pkg_universe.reverse_dependencies_of(rm_pkg_id))
+affected_direct.update(pkg_universe.negative_dependencies_of(rm_pkg_id))
 # remove the provided virtual packages
 for provided_pkg, prov_version, _ in pkg_data.provides:
@@ -2055,7 +2058,7 @@ class Britney(object):
 undo['binaries'][key] = old_pkg_id
 if not equivalent_replacement:
 # all the reverse conflicts
-affected_direct.update(inst_tester.reverse_dependencies_of(old_pkg_id))
+affected_direct.update(pkg_universe.reverse_dependencies_of(old_pkg_id))
 inst_tester.remove_testing_binary(old_pkg_id)
 elif hint_undo:
 # the binary isn't in testing, but it may have been at
@@ -2071,7 +2074,7 @@ class Britney(object):
 for (tundo, tpkg) in hint_undo:
 if key in tundo['binaries']:
 tpkg_id = tundo['binaries'][key]
-affected_direct.update(inst_tester.reverse_dependencies_of(tpkg_id))
+affected_direct.update(pkg_universe.reverse_dependencies_of(tpkg_id))
 # add/update the binary package from the source suite
 new_pkg_data = packages_s[parch][binary]
@@ -2089,11 +2092,11 @@ class Britney(object):
 if not equivalent_replacement:
 # all the reverse dependencies are affected by the change
 affected_direct.add(updated_pkg_id)
-affected_direct.update(inst_tester.negative_dependencies_of(updated_pkg_id))
+affected_direct.update(pkg_universe.negative_dependencies_of(updated_pkg_id))
 # Also include the transitive rdeps of the packages found so far
 affected_all = affected_direct.copy()
-compute_reverse_tree(inst_tester, affected_all)
+compute_reverse_tree(pkg_universe, affected_all)
 # return the package name, the suite, the list of affected packages and the undo dictionary
 return (affected_direct, affected_all, undo)

@@ -389,6 +389,7 @@ class AutopkgtestPolicy(BasePolicy):
 return False
 def request_tests_for_source(self, suite, arch, source_name, source_version, pkg_arch_result):
+pkg_universe = self.britney.pkg_universe
 inst_tester = self.britney._inst_tester
 suite_info = self.suite_info
 sources_s = suite_info[suite].sources
@@ -437,7 +438,7 @@ class AutopkgtestPolicy(BasePolicy):
 # We add slightly too much here, because new binaries
 # will also show up, but they are already properly
 # installed. Nevermind.
-depends = inst_tester.dependencies_of(binary)
+depends = pkg_universe.dependencies_of(binary)
 names_testing = set()
 names_unstable = set()
 # depends is a frozenset{frozenset{BinaryPackageId, ..}}
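
The trailing comment spells out the shape returned by pkg_universe.dependencies_of(): a frozenset of dependency clauses, each clause itself a frozenset of BinaryPackageId alternatives. A self-contained sketch of walking that structure to collect package names, much as this policy does (toy data, hypothetical package names):

from collections import namedtuple

BinaryPackageId = namedtuple("BinaryPackageId", ["package_name", "version", "architecture"])

# depends is a frozenset{frozenset{BinaryPackageId, ..}}:
# every outer clause must be satisfied, any one inner alternative satisfies a clause
depends = frozenset({
    frozenset({BinaryPackageId("libfoo1", "1.0-1", "amd64")}),
    frozenset({BinaryPackageId("mta-a", "2.0-1", "amd64"),
               BinaryPackageId("mta-b", "3.1-2", "amd64")}),
})

names = set()
for dep_clause in depends:
    for alternative in dep_clause:
        names.add(alternative.package_name)

print(sorted(names))  # ['libfoo1', 'mta-a', 'mta-b']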
@@ -461,7 +462,7 @@ class AutopkgtestPolicy(BasePolicy):
 bin_broken = set()
 for binary in bin_depends:
 # broken is a frozenset{BinaryPackageId, ..}
-broken = inst_tester.negative_dependencies_of(binary)
+broken = pkg_universe.negative_dependencies_of(binary)
 names_testing = set()
 names_unstable = set()
 for broken_bin in broken:
@@ -573,10 +574,11 @@ class AutopkgtestPolicy(BasePolicy):
 except KeyError:
 pass
+pkg_universe = self.britney.pkg_universe
 # plus all direct reverse dependencies and test triggers of its
 # binaries which have an autopkgtest
 for binary in srcinfo.binaries + extra_bins:
-rdeps = self.britney._inst_tester.reverse_dependencies_of(binary)
+rdeps = pkg_universe.reverse_dependencies_of(binary)
 for rdep in rdeps:
 try:
 rdep_src = binaries_info[rdep.package_name].source

@@ -178,13 +178,12 @@ def log_and_format_old_libraries(logger, libs):
 logger.info(" %s: %s", lib, " ".join(libraries[lib]))
-def compute_reverse_tree(inst_tester, affected):
+def compute_reverse_tree(pkg_universe, affected):
 """Calculate the full dependency tree for a set of packages
 This method returns the full dependency tree for a given set of
-packages. The first argument is an instance of the InstallabilityTester
-and the second argument are a set of packages ids (as defined in
-the constructor of the InstallabilityTester).
+packages. The first argument is an instance of the BinaryPackageUniverse
+and the second argument is a set of BinaryPackageId.
 The set of affected packages will be updated in place and must
 therefore be mutable.
@@ -192,7 +191,7 @@ def compute_reverse_tree(inst_tester, affected):
 remain = list(affected)
 while remain:
 pkg_id = remain.pop()
-new_pkg_ids = inst_tester.reverse_dependencies_of(pkg_id) - affected
+new_pkg_ids = pkg_universe.reverse_dependencies_of(pkg_id) - affected
 affected.update(new_pkg_ids)
 remain.extend(new_pkg_ids)
 return None
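
A usage sketch of the updated helper, assuming the compute_reverse_tree() shown above is in scope; DummyUniverse is an illustrative stand-in exposing only reverse_dependencies_of(), and plain strings stand in for BinaryPackageId:

class DummyUniverse:
    """Minimal stand-in: only the method compute_reverse_tree() actually needs."""
    def __init__(self, rdeps):
        self._rdeps = rdeps

    def reverse_dependencies_of(self, pkg_id):
        return self._rdeps.get(pkg_id, frozenset())

# libx <- app1 <- plugin1 ; libx <- app2
universe = DummyUniverse({
    "libx": frozenset({"app1", "app2"}),
    "app1": frozenset({"plugin1"}),
})

affected = {"libx"}
compute_reverse_tree(universe, affected)   # expands the set in place
print(sorted(affected))  # ['app1', 'app2', 'libx', 'plugin1']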
@@ -946,8 +945,14 @@ def compile_nuninst(binaries_t, inst_tester, architectures, nobreakall_arches):
 return nuninst
-def find_smooth_updateable_binaries(binaries_to_check, source_data,
-inst_tester, binaries_t, binaries_s, removals, smooth_updates):
+def find_smooth_updateable_binaries(binaries_to_check,
+source_data,
+pkg_universe,
+inst_tester,
+binaries_t,
+binaries_s,
+removals,
+smooth_updates):
 check = set()
 smoothbins = set()
@@ -969,7 +974,7 @@ def find_smooth_updateable_binaries(binaries_to_check, source_data,
 # a smooth update. if not, it may still be a valid
 # candidate if one of its r-deps is itself a candidate,
 # so note it for checking later
-rdeps = set(inst_tester.reverse_dependencies_of(pkg_id))
+rdeps = set(pkg_universe.reverse_dependencies_of(pkg_id))
 # We ignore all binaries listed in "removals" as we
 # assume they will leave at the same time as the
 # given package.
@@ -980,7 +985,7 @@ def find_smooth_updateable_binaries(binaries_to_check, source_data,
 combined = set(smoothbins)
 combined.add(pkg_id)
 for rdep in rdeps:
-for dep_clause in inst_tester.dependencies_of(rdep):
+for dep_clause in pkg_universe.dependencies_of(rdep):
 if dep_clause <= combined:
 smooth_update_it = True
 break
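
The subset test above carries the smooth-update logic: each dep_clause is a frozenset of alternatives, so dep_clause <= combined holds exactly when every alternative lies inside the candidate group (the binary being checked plus the current smoothbins). A toy illustration of just that set semantics, with hypothetical package names:

combined = frozenset({"libold1", "libold1-extra"})   # pkg_id plus current smoothbins
clause_a = frozenset({"libold1"})                    # every alternative is inside the group
clause_b = frozenset({"libold1", "libnew2"})         # libnew2 falls outside the group

print(clause_a <= combined)   # True  -> this clause keeps pkg_id a smooth-update candidate
print(clause_b <= combined)   # False -> the rdep still has an alternative outside the group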
@@ -996,7 +1001,7 @@ def find_smooth_updateable_binaries(binaries_to_check, source_data,
 while 1:
 found_any = False
 for pkg_id in check:
-rdeps = pkg_universe.reverse_dependencies_of(pkg_id)
+rdeps = pkg_universe.reverse_dependencies_of(pkg_id)
 if not rdeps.isdisjoint(smoothbins):
 smoothbins.add(pkg_id)
 found_any = True
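
This last hunk sits in the fixed-point pass: binaries noted in check earlier are promoted into smoothbins whenever one of their reverse dependencies is already in smoothbins, and the sweep repeats until a full pass adds nothing. A condensed, self-contained sketch of that propagation (plain strings stand in for package ids; the real loop differs in its bookkeeping details):

def propagate(check, smoothbins, rdeps_of):
    # Keep sweeping until a full pass over the remaining candidates adds nothing.
    smoothbins = set(smoothbins)
    check = set(check)
    while True:
        found_any = False
        for pkg_id in list(check):
            if not rdeps_of(pkg_id).isdisjoint(smoothbins):
                smoothbins.add(pkg_id)
                found_any = True
        check -= smoothbins
        if not found_any:
            break
    return smoothbins

rdeps = {"liba": frozenset({"libb"}), "libb": frozenset({"app"})}
print(sorted(propagate({"liba", "libb"}, {"app"}, lambda p: rdeps.get(p, frozenset()))))
# ['app', 'liba', 'libb']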
