Mirror of https://git.launchpad.net/~ubuntu-release/britney/+git/britney2-ubuntu (synced 2025-05-16 13:01:29 +00:00)
Make migration and undo code suite agnostic
Signed-off-by: Niels Thykier <niels@thykier.net>
parent 382ced2a68
commit 609bb9e960
britney.py (65 lines changed)
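In outline, the change makes the migration helpers (_compute_groups, doop_source) and undo_changes operate on suite objects instead of suite names: rather than indexing the global self.sources / self.binaries tables by name with 'testing' hard-coded as the target, they now read the sources and binaries tables attached to the suite objects and resolve the target suite once via suite_info.target_suite. A minimal sketch of the two calling conventions, using simplified stand-in classes (not the real britney2 types), is:

from dataclasses import dataclass, field


@dataclass
class Suite:
    # Simplified stand-in; the real suite objects carry more state.
    name: str
    sources: dict = field(default_factory=dict)   # source package name -> source data
    binaries: dict = field(default_factory=dict)  # architecture -> (packages, provides)


@dataclass
class SuiteInfo:
    # Simplified stand-in for the object reachable as self.suite_info.
    target_suite: Suite


def lookup_tables_old(all_binaries, source_suite_name):
    # Old convention: helpers take a suite *name* and index global tables,
    # with the target suite hard-coded as 'testing'.
    return all_binaries[source_suite_name], all_binaries['testing']


def lookup_tables_new(suite_info, source_suite):
    # New convention: helpers take a suite *object* and read the tables it
    # carries; the target suite comes from suite_info, not a literal name.
    return source_suite.binaries, suite_info.target_suite.binaries


if __name__ == '__main__':
    testing = Suite('testing')
    unstable = Suite('unstable')
    suite_info = SuiteInfo(target_suite=testing)
    old = lookup_tables_old({'testing': testing.binaries, 'unstable': unstable.binaries}, 'unstable')
    new = lookup_tables_new(suite_info, unstable)
    assert old == new  # same data, reached without name-based lookups

The same idea drives the undo_changes signature change further down: it now receives suite_info and derives sources_t / binaries_t from the target suite instead of taking the global tables as parameters.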
@@ -1270,7 +1270,7 @@ class Britney(object):
         if not (ssrc and source_suite is not primary_source_suite):
             # for every binary package produced by this source in testing for this architecture
             _, _, smoothbins = self._compute_groups(src,
-                                                    primary_source_suite.name,
+                                                    primary_source_suite,
                                                     arch,
                                                     False)

@@ -1777,21 +1777,21 @@ class Britney(object):
             res.append("%s-%d" % (arch[0], n))
         return "%d+%d: %s" % (total, totalbreak, ":".join(res))

-    def _compute_groups(self, source_name, suite, migration_architecture,
+    def _compute_groups(self, source_name, source_suite, migration_architecture,
                         is_removal,
                         allow_smooth_updates=True,
                         removals=frozenset()):
         """Compute the groups of binaries being migrated by item

-        This method will compute the binaries that will be added,
-        replaced in testing and which of them are smooth updatable.
+        This method will compute the binaries that will be added to,
+        replaced in or removed from the target suite and which of
+        the removals are smooth updatable.

         Parameters:
         * "source_name" is the name of the source package, whose
           binaries are migrating.
-        * "suite" is the suite from which the binaries are migrating.
-          [Same as item.suite.name, where available]
+        * "suite" is the source suite from which the binaries are migrating.
+          [Same as item.suite, where available]
         * "migration_architecture" is the architecture determines
           architecture of the migrating binaries (can be "source" for
           a "source"-migration, meaning all binaries regardless of
@@ -1826,9 +1826,9 @@ class Britney(object):
         Unlike doop_source, this will not modify any data structure.
         """
         # local copies for better performances
-        sources = self.sources
-        binaries_s = self.binaries[suite]
-        binaries_t = self.binaries['testing']
+        target_suite = self.suite_info.target_suite
+        binaries_s = source_suite.binaries
+        binaries_t = target_suite.binaries
         inst_tester = self._inst_tester

         adds = set()
@@ -1837,8 +1837,9 @@ class Britney(object):

         # remove all binary packages (if the source already exists)
         if migration_architecture == 'source' or not is_removal:
-            if source_name in sources['testing']:
-                source_data = sources['testing'][source_name]
+            sources_t = target_suite.sources
+            if source_name in sources_t:
+                source_data = sources_t[source_name]

                 bins = []
                 check = set()
@@ -1863,7 +1864,7 @@ class Britney(object):
                 for pkg_id in bins:
                     binary, _, parch = pkg_id
                     # if a smooth update is possible for the package, skip it
-                    if allow_smooth_updates and suite == 'unstable' and \
+                    if allow_smooth_updates and source_suite.suite_class.is_primary_source and \
                             binary not in binaries_s[parch][0] and \
                             ('ALL' in self.options.smooth_updates or \
                              binaries_t[parch][0][binary].section in self.options.smooth_updates):
@@ -1917,7 +1918,7 @@ class Britney(object):
                     # must keep them around; they will not be re-added by the
                     # migration so will end up missing from testing
                     if migration_architecture != 'source' and \
-                            suite != 'unstable' and \
+                            source_suite.suite_class.is_additional_source and \
                             binaries_t[parch][0][binary].architecture == 'all':
                         continue
                     else:
@@ -1932,7 +1933,7 @@ class Britney(object):

         # add the new binary packages (if we are not removing)
         if not is_removal:
-            source_data = sources[suite][source_name]
+            source_data = source_suite.sources[source_name]
             for pkg_id in source_data.binaries:
                 binary, _, parch = pkg_id
                 if migration_architecture not in ['source', parch]:
@@ -1961,7 +1962,7 @@ class Britney(object):
         return (adds, rms, smoothbins)

     def doop_source(self, item, hint_undo=None, removals=frozenset()):
-        """Apply a change to the testing distribution as requested by `pkg`
+        """Apply a change to the target suite as requested by `item`

         An optional list of undo actions related to packages processed earlier
         in a hint may be passed in `hint_undo`.
@@ -1969,8 +1970,7 @@ class Britney(object):
         An optional set of binaries may be passed in "removals". Binaries listed
         in this set will be assumed to be removed at the same time as the "item"
         will migrate. This may change what binaries will be smooth-updated.
-        - Binaries in this set must be ("package-name", "version", "architecture")
-          tuples.
+        - Binaries in this set must be instances of BinaryPackageId.

         This method applies the changes required by the action `item` tracking
         them so it will be possible to revert them.
@@ -1985,30 +1985,32 @@ class Britney(object):
         affected_remain = set()

         # local copies for better performance
-        sources = self.sources
-        packages_t = self.binaries['testing']
+        source_suite = item.suite
+        target_suite = self.suite_info.target_suite
+        packages_t = target_suite.binaries
         inst_tester = self._inst_tester
         eqv_set = set()

         updates, rms, _ = self._compute_groups(item.package,
-                                               item.suite.name,
+                                               source_suite,
                                                item.architecture,
                                                item.is_removal,
                                                removals=removals)

         # Handle the source package
         if item.architecture == 'source':
-            if item.package in sources['testing']:
-                source = sources['testing'][item.package]
+            sources_t = target_suite.sources
+            if item.package in sources_t:
+                source = sources_t[item.package]
                 undo['sources'][item.package] = source
-                del sources['testing'][item.package]
+                del sources_t[item.package]
             else:
                 # the package didn't exist, so we mark it as to-be-removed in case of undo
                 undo['sources']['-' + item.package] = True

             # add/update the source package
             if not item.is_removal:
-                sources['testing'][item.package] = sources[item.suite.name][item.package]
+                sources_t[item.package] = source_suite.sources[item.package]

         # If we are removing *and* updating packages, then check for eqv. packages
         if rms and updates:
@@ -2056,7 +2058,7 @@ class Britney(object):

         # Add/Update binary packages in testing
         if updates:
-            packages_s = self.binaries[item.suite.name]
+            packages_s = source_suite.binaries

             for updated_pkg_id in updates:
                 binary, new_version, parch = updated_pkg_id
@@ -2125,7 +2127,8 @@ class Britney(object):
         is_accepted = True
         affected_architectures = set()
         item = actions
-        packages_t = self.binaries['testing']
+        target_suite = self.suite_info.target_suite
+        packages_t = target_suite.binaries

         nobreakall_arches = self.options.nobreakall_arches
         new_arches = self.options.new_arches
@@ -2147,7 +2150,7 @@ class Britney(object):
             affected_pos = set()
             affected_remain = set()
             for item in actions:
-                _, rms, _ = self._compute_groups(item.package, item.suite.name,
+                _, rms, _ = self._compute_groups(item.package, item.suite,
                                                  item.architecture,
                                                  item.is_removal,
                                                  allow_smooth_updates=False)
@@ -2210,7 +2213,7 @@ class Britney(object):
         # check if the action improved the uninstallability counters
         if not is_accepted and automatic_revert:
             undo_copy = list(reversed(undo_list))
-            undo_changes(undo_copy, self._inst_tester, self.sources, self.binaries, self.all_binaries)
+            undo_changes(undo_copy, self._inst_tester, self.suite_info, self.all_binaries)

         return (is_accepted, nuninst_after, undo_list, arch)

@@ -2228,7 +2231,7 @@ class Britney(object):
         output_logger = self.output_logger

         for y in sorted((y for y in packages), key=attrgetter('uvname')):
-            updates, rms, _ = self._compute_groups(y.package, y.suite.name, y.architecture, y.is_removal)
+            updates, rms, _ = self._compute_groups(y.package, y.suite, y.architecture, y.is_removal)
             result = (y, frozenset(updates), frozenset(rms))
             group_info[y] = result

@@ -2421,7 +2424,7 @@ class Britney(object):
             return
         lundo.reverse()

-        undo_changes(lundo, self._inst_tester, self.sources, self.binaries, self.all_binaries)
+        undo_changes(lundo, self._inst_tester, self.suite_info, self.all_binaries)

         output_logger.info("")

@@ -95,17 +95,14 @@ def iter_except(func, exception, first=None):
         pass


-def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages):
-    """Undoes one or more changes to testing
+def undo_changes(lundo, inst_tester, suite_info, all_binary_packages):
+    """Undoes one or more changes to the target suite

     * lundo is a list of (undo, item)-tuples
     * inst_tester is an InstallabilityTester
-    * sources is the table of all source packages for all suites
-    * binaries is the table of all binary packages for all suites
-      and architectures
-
-    The "X=X" parameters are optimizations to avoid "load global"
-    in loops.
+    * suite_info is the Suites object
+    * all_binary_packages is the table of all binary packages for
+      all suites and architectures
     """

     # We do the undo process in "4 steps" and each step must be
@@ -115,26 +112,29 @@ def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages):
    # see commit:ef71f0e33a7c3d8ef223ec9ad5e9843777e68133 and
    # #624716 for the issues we had when we did not do this.

+    target_suite = suite_info.target_suite
+    sources_t = target_suite.sources
+    binaries_t = target_suite.binaries

     # STEP 1
     # undo all the changes for sources
     for (undo, item) in lundo:
         for k in undo['sources']:
             if k[0] == '-':
-                del sources["testing"][k[1:]]
+                del sources_t[k[1:]]
             else:
-                sources["testing"][k] = undo['sources'][k]
+                sources_t[k] = undo['sources'][k]

     # STEP 2
     # undo all new binaries (consequence of the above)
     for (undo, item) in lundo:
-        if not item.is_removal and item.package in sources[item.suite.name]:
-            source_data = sources[item.suite.name][item.package]
+        if not item.is_removal and item.package in item.suite.sources:
+            source_data = item.suite.sources[item.package]
             for pkg_id in source_data.binaries:
                 binary, _, arch = pkg_id
                 if item.architecture in ['source', arch]:
                     try:
-                        del binaries["testing"][arch][0][binary]
+                        del binaries_t[arch][0][binary]
                     except KeyError:
                         # If this happens, pkg_id must be a cruft item that
                         # was *not* migrated.
@@ -142,13 +142,12 @@ def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages):
                         assert not inst_tester.any_of_these_are_in_testing((pkg_id,))
                     inst_tester.remove_testing_binary(pkg_id)

-
     # STEP 3
     # undo all other binary package changes (except virtual packages)
     for (undo, item) in lundo:
         for p in undo['binaries']:
             binary, arch = p
-            binaries_t_a = binaries['testing'][arch][0]
+            binaries_t_a = binaries_t[arch][0]
             assert binary not in binaries_t_a
             pkgdata = all_binary_packages[undo['binaries'][p]]
             binaries_t_a[binary] = pkgdata
@@ -158,10 +157,10 @@ def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages):
     # undo all changes to virtual packages
     for (undo, item) in lundo:
         for provided_pkg, arch in undo['nvirtual']:
-            del binaries['testing'][arch][1][provided_pkg]
+            del binaries_t[arch][1][provided_pkg]
         for p in undo['virtual']:
             provided_pkg, arch = p
-            binaries['testing'][arch][1][provided_pkg] = undo['virtual'][p]
+            binaries_t[arch][1][provided_pkg] = undo['virtual'][p]


 def log_and_format_old_libraries(logger, libs):
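The suite == 'unstable' / suite != 'unstable' comparisons in _compute_groups are replaced above by source_suite.suite_class.is_primary_source and .is_additional_source. The definition of suite_class is not part of this diff; a hypothetical sketch of how such predicates could look (illustration only, not the real britney2 definition):

from enum import Enum, unique


@unique
class SuiteClass(Enum):
    # Hypothetical values for illustration; the real definitions live
    # elsewhere in the tree and may differ.
    TARGET_SUITE = (False, False)
    PRIMARY_SOURCE_SUITE = (True, False)      # e.g. unstable
    ADDITIONAL_SOURCE_SUITE = (True, True)    # e.g. an extra source suite layered on top

    @property
    def is_source(self):
        return self.value[0]

    @property
    def is_primary_source(self):
        return self.value == (True, False)

    @property
    def is_additional_source(self):
        return self.value == (True, True)


assert SuiteClass.PRIMARY_SOURCE_SUITE.is_primary_source
assert SuiteClass.ADDITIONAL_SOURCE_SUITE.is_additional_source

With predicates like these, adding another source suite does not require touching the migration code, which is the point of making it suite agnostic.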