From 0664c580b08a67f6edb734073854c4d367faa97a Mon Sep 17 00:00:00 2001 From: Niels Thykier Date: Sun, 25 Sep 2016 05:45:36 +0000 Subject: [PATCH] Prefer s.x to s[X] for SourcePackage objects Signed-off-by: Niels Thykier --- britney.py | 129 ++++++++++++++++++++++++--------------------- britney_util.py | 27 +++++----- consts.py | 4 +- policies/policy.py | 21 ++++---- 4 files changed, 92 insertions(+), 89 deletions(-) diff --git a/britney.py b/britney.py index 530da76..307fc03 100755 --- a/britney.py +++ b/britney.py @@ -209,8 +209,9 @@ from britney_util import (old_libraries_format, undo_changes, create_provides_map, ) from policies.policy import AgePolicy, RCBugPolicy, PolicyVerdict -from consts import (VERSION, SECTION, BINARIES, MAINTAINER, FAKESRC, - SOURCE, SOURCEVER, ARCHITECTURE, CONFLICTS, DEPENDS, + +# Check the "check_field_name" reflection before removing an import here. +from consts import (SOURCE, SOURCEVER, ARCHITECTURE, CONFLICTS, DEPENDS, PROVIDES, MULTIARCH) __author__ = 'Fabio Tranchitella and the Debian Release Team' @@ -593,7 +594,7 @@ class Britney(object): pkg_id, ) - src_data[BINARIES].append(pkg_id) + src_data.binaries.append(pkg_id) self.binaries['testing'][arch][0][pkg_name] = bin_data self.binaries['unstable'][arch][0][pkg_name] = bin_data self.all_binaries[pkg_id] = bin_data @@ -681,7 +682,7 @@ class Britney(object): False, pkg_id, ) - src_data[BINARIES].append(pkg_id) + src_data.binaries.append(pkg_id) self.binaries['testing'][arch][0][pkg_name] = bin_data self.binaries['unstable'][arch][0][pkg_name] = bin_data self.all_binaries[pkg_id] = bin_data @@ -800,10 +801,16 @@ class Britney(object): # largest version for migration. if pkg in sources and apt_pkg.version_compare(sources[pkg][0], ver) > 0: continue + maint = get_field('Maintainer') + if maint: + maint = intern(maint.strip()) + section = get_field('Section') + if section: + section = intern(section.strip()) sources[intern(pkg)] = SourcePackage(intern(ver), - intern(get_field('Section')), + section, [], - get_field('Maintainer'), + maint, False, ) return sources @@ -881,7 +888,7 @@ class Britney(object): if apt_pkg.version_compare(old_pkg_data.version, version) > 0: continue old_pkg_id = old_pkg_data.pkg_id - old_src_binaries = srcdist[old_pkg_data[SOURCE]][BINARIES] + old_src_binaries = srcdist[old_pkg_data[SOURCE]].binaries old_src_binaries.remove(old_pkg_id) # This may seem weird at first glance, but the current code rely # on this behaviour to avoid issues like #709460. Admittedly it @@ -950,8 +957,8 @@ class Britney(object): # source -> binary mapping once. It doesn't matter which # of the versions we include as only the package name and # architecture are recorded. 
- if pkg_id not in srcdist[source][BINARIES]: - srcdist[source][BINARIES].append(pkg_id) + if pkg_id not in srcdist[source].binaries: + srcdist[source].binaries.append(pkg_id) # if the source package doesn't exist, create a fake one else: srcdist[source] = SourcePackage(source_version, 'faux', [pkg_id], None, True) @@ -1211,7 +1218,7 @@ class Britney(object): # for the solving packages, update the excuse to add the dependencies for p in packages: if arch not in self.options.break_arches: - if p in self.sources['testing'] and self.sources['testing'][p][VERSION] == self.sources[suite][p][VERSION]: + if p in self.sources['testing'] and self.sources['testing'][p].version == self.sources[suite][p].version: excuse.add_dep("%s/%s" % (p, arch), arch) else: excuse.add_dep(p, arch) @@ -1240,9 +1247,9 @@ class Britney(object): src = self.sources['testing'][pkg] excuse = Excuse("-" + pkg) excuse.addhtml("Package not in unstable, will try to remove") - excuse.set_vers(src[VERSION], None) - src[MAINTAINER] and excuse.set_maint(src[MAINTAINER].strip()) - src[SECTION] and excuse.set_section(src[SECTION].strip()) + excuse.set_vers(src.version, None) + src.maintainer and excuse.set_maint(src.maintainer) + src.section and excuse.set_section(src.section) # if the package is blocked, skip it for hint in self.hints.search('block', package=pkg, removal=True): @@ -1274,15 +1281,15 @@ class Britney(object): # build the common part of the excuse, which will be filled by the code below ref = "%s/%s%s" % (src, arch, suite != 'unstable' and "_" + suite or "") excuse = Excuse(ref) - excuse.set_vers(source_t[VERSION], source_t[VERSION]) - source_u[MAINTAINER] and excuse.set_maint(source_u[MAINTAINER].strip()) - source_u[SECTION] and excuse.set_section(source_u[SECTION].strip()) + excuse.set_vers(source_t.version, source_t.version) + source_u.maintainer and excuse.set_maint(source_u.maintainer) + source_u.section and excuse.set_section(source_u.section) # if there is a `remove' hint and the requested version is the same as the # version in testing, then stop here and return False # (as a side effect, a removal may generate such excuses for both the source # package and its binary packages on each architecture) - for hint in self.hints.search('remove', package=src, version=source_t[VERSION]): + for hint in self.hints.search('remove', package=src, version=source_t.version): excuse.add_hint(hint) excuse.addhtml("Removal request by %s" % (hint.user)) excuse.addhtml("Trying to remove package, not update it") @@ -1297,7 +1304,7 @@ class Britney(object): packages_s_a = self.binaries[suite][arch][0] # for every binary package produced by this source in unstable for this architecture - for pkg_id in sorted(x for x in source_u[BINARIES] if x.architecture == arch): + for pkg_id in sorted(x for x in source_u.binaries if x.architecture == arch): pkg_name = pkg_id.package_name # retrieve the testing (if present) and unstable corresponding binary packages @@ -1309,20 +1316,20 @@ class Britney(object): # if the new binary package is architecture-independent, then skip it if binary_u.architecture == 'all': - if pkg_id not in source_t[BINARIES]: + if pkg_id not in source_t.binaries: # only add a note if the arch:all does not match the expected version excuse.addhtml("Ignoring %s %s (from %s) as it is arch: all" % (pkg_name, binary_u.version, pkgsv)) continue # if the new binary package is not from the same source as the testing one, then skip it # this implies that this binary migration is part of a source migration - if source_u[VERSION] 
== pkgsv and source_t[VERSION] != pkgsv: + if source_u.version == pkgsv and source_t.version != pkgsv: anywrongver = True - excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u.version, pkgsv, source_t[VERSION])) + excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u.version, pkgsv, source_t.version)) continue # cruft in unstable - if source_u[VERSION] != pkgsv and source_t[VERSION] != pkgsv: + if source_u.version != pkgsv and source_t.version != pkgsv: if self.options.ignore_cruft: excuse.addhtml("Old cruft: %s %s (but ignoring cruft, so nevermind)" % (pkg_name, pkgsv)) else: @@ -1332,9 +1339,9 @@ class Britney(object): # if the source package has been updated in unstable and this is a binary migration, skip it # (the binaries are now out-of-date) - if source_t[VERSION] == pkgsv and source_t[VERSION] != source_u[VERSION]: + if source_t.version == pkgsv and source_t.version != source_u.version: anywrongver = True - excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u.version, pkgsv, source_u[VERSION])) + excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u.version, pkgsv, source_u.version)) continue # find unsatisfied dependencies for the new binary package @@ -1363,9 +1370,9 @@ class Britney(object): # if there is nothing wrong and there is something worth doing or the source # package is not fake, then check what packages should be removed - if not anywrongver and (anyworthdoing or not source_u[FAKESRC]): - srcv = source_u[VERSION] - ssrc = source_t[VERSION] == srcv + if not anywrongver and (anyworthdoing or not source_u.is_fakesrc): + srcv = source_u.version + ssrc = source_t.version == srcv # if this is a binary-only migration via *pu, we never want to try # removing binary packages if not (ssrc and suite != 'unstable'): @@ -1375,12 +1382,12 @@ class Britney(object): arch, False) - for pkg_id in sorted(x for x in source_t[BINARIES] if x.architecture == arch): + for pkg_id in sorted(x for x in source_t.binaries if x.architecture == arch): pkg = pkg_id.package_name # if the package is architecture-independent, then ignore it tpkg_data = packages_t_a[pkg] if tpkg_data.version == 'all': - if pkg_id not in source_u[BINARIES]: + if pkg_id not in source_u.binaries: # only add a note if the arch:all does not match the expected version excuse.addhtml("Ignoring removal of %s as it is arch: all" % (pkg)) continue @@ -1427,7 +1434,7 @@ class Britney(object): if src in self.sources['testing']: source_t = self.sources['testing'][src] # if testing and unstable have the same version, then this is a candidate for binary-NMUs only - if apt_pkg.version_compare(source_t[VERSION], source_u[VERSION]) == 0: + if apt_pkg.version_compare(source_t.version, source_u.version) == 0: return False else: source_t = None @@ -1435,30 +1442,30 @@ class Britney(object): # build the common part of the excuse, which will be filled by the code below ref = "%s%s" % (src, suite != 'unstable' and "_" + suite or "") excuse = Excuse(ref) - excuse.set_vers(source_t and source_t[VERSION] or None, source_u[VERSION]) - source_u[MAINTAINER] and excuse.set_maint(source_u[MAINTAINER].strip()) - source_u[SECTION] and excuse.set_section(source_u[SECTION].strip()) + excuse.set_vers(source_t and source_t.version or None, source_u.version) + source_u.maintainer and excuse.set_maint(source_u.maintainer) + source_u.section and excuse.set_section(source_u.section) # the starting point is that we will update the candidate update_candidate = True # if the 
version in unstable is older, then stop here with a warning in the excuse and return False - if source_t and apt_pkg.version_compare(source_u[VERSION], source_t[VERSION]) < 0: - excuse.addhtml("ALERT: %s is newer in testing (%s %s)" % (src, source_t[VERSION], source_u[VERSION])) + if source_t and apt_pkg.version_compare(source_u.version, source_t.version) < 0: + excuse.addhtml("ALERT: %s is newer in testing (%s %s)" % (src, source_t.version, source_u.version)) self.excuses[excuse.name] = excuse excuse.addreason("newerintesting") return False # check if the source package really exists or if it is a fake one - if source_u[FAKESRC]: + if source_u.is_fakesrc: excuse.addhtml("%s source package doesn't exist" % (src)) update_candidate = False # if there is a `remove' hint and the requested version is the same as the # version in testing, then stop here and return False for hint in self.hints.search('remove', package=src): - if source_t and source_t[VERSION] == hint.version or \ - source_u[VERSION] == hint.version: + if source_t and source_t.version == hint.version or \ + source_u.version == hint.version: excuse.add_hint(hint) excuse.addhtml("Removal request by %s" % (hint.user)) excuse.addhtml("Trying to remove package, not update it") @@ -1490,7 +1497,7 @@ class Britney(object): unblock_cmd = "un" + block_cmd unblocks = self.hints.search(unblock_cmd, package=src) - if unblocks and unblocks[0].version is not None and unblocks[0].version == source_u[VERSION]: + if unblocks and unblocks[0].version is not None and unblocks[0].version == source_u.version: excuse.add_hint(unblocks[0]) if block_cmd == 'block-udeb' or not excuse.needs_approval: excuse.addhtml("Ignoring %s request by %s, due to %s request by %s" % @@ -1582,7 +1589,7 @@ class Britney(object): for arch in self.options.architectures: # if the package in testing has no binaries on this # architecture, it can't be out-of-date - if not any(x for x in source_t[BINARIES] + if not any(x for x in source_t.binaries if x.architecture == arch and all_binaries[x].architecture != 'all'): continue @@ -1591,7 +1598,7 @@ class Britney(object): # uploads to (t-)p-u which intentionally drop binary # packages if any(x for x in self.binaries[suite][arch][0].values() \ - if x.source == src and x.source_version == source_u[VERSION] and \ + if x.source == src and x.source_version == source_u.version and \ x.architecture != 'all'): continue @@ -1599,7 +1606,7 @@ class Britney(object): base = 'testing' else: base = 'stable' - text = "Not yet built on %s (relative to testing)" % (quote(arch), quote(src), quote(source_u[VERSION]), base, arch) + text = "Not yet built on %s (relative to testing)" % (quote(arch), quote(src), quote(source_u.version), base, arch) if arch in self.options.fucked_arches: text = text + " (but %s isn't keeping up, so never mind)" % (arch) @@ -1617,7 +1624,7 @@ class Britney(object): oodbins = {} uptodatebins = False # for every binary package produced by this source in the suite for this architecture - for pkg_id in sorted(x for x in source_u[BINARIES] if x.architecture == arch): + for pkg_id in sorted(x for x in source_u.binaries if x.architecture == arch): pkg = pkg_id.package_name if pkg not in pkgs: pkgs[pkg] = [] pkgs[pkg].append(arch) @@ -1629,7 +1636,7 @@ class Britney(object): # if it wasn't built by the same source, it is out-of-date # if there is at least one binary on this arch which is # up-to-date, there is a build on this arch - if source_u[VERSION] != pkgsv: + if source_u.version != pkgsv: if pkgsv not in oodbins: 
oodbins[pkgsv] = [] oodbins[pkgsv].append(pkg) @@ -1661,11 +1668,11 @@ class Britney(object): if uptodatebins: text = "old binaries left on %s: %s" % \ - (quote(arch), quote(src), quote(source_u[VERSION]), arch, oodtxt) + (quote(arch), quote(src), quote(source_u.version), arch, oodtxt) else: text = "missing build on %s: %s" % \ - (quote(arch), quote(src), quote(source_u[VERSION]), arch, oodtxt) + (quote(arch), quote(src), quote(source_u.version), arch, oodtxt) if arch in self.options.fucked_arches: text = text + " (but %s isn't keeping up, so nevermind)" % (arch) @@ -1686,13 +1693,13 @@ class Britney(object): excuse.addhtml(text) # if the source package has no binaries, set update_candidate to False to block the update - if not source_u[BINARIES]: + if not source_u.binaries: excuse.addhtml("%s has no binaries on any arch" % src) excuse.addreason("no-binaries") update_candidate = False # check if there is a `force' hint for this package, which allows it to go in even if it is not updateable - forces = self.hints.search('force', package=src, version=source_u[VERSION]) + forces = self.hints.search('force', package=src, version=source_u.version) if forces: excuse.dontinvalidate = True if not update_candidate and forces: @@ -1793,10 +1800,10 @@ class Britney(object): # for every source package in unstable check if it should be upgraded for pkg in unstable: - if unstable[pkg][FAKESRC]: continue + if unstable[pkg].is_fakesrc: continue # if the source package is already present in testing, # check if it should be upgraded for every binary package - if pkg in testing and not testing[pkg][FAKESRC]: + if pkg in testing and not testing[pkg].is_fakesrc: for arch in architectures: if should_upgrade_srcarch(pkg, arch, 'unstable'): upgrade_me_append("%s/%s" % (pkg, arch)) @@ -1827,7 +1834,7 @@ class Britney(object): if src not in testing: continue # check if the version specified in the hint is the same as the considered package - tsrcv = testing[src][VERSION] + tsrcv = testing[src].version if tsrcv != hint.version: continue @@ -2046,7 +2053,7 @@ class Britney(object): # remove all the binaries # first, build a list of eligible binaries - for pkg_id in source_data[BINARIES]: + for pkg_id in source_data.binaries: binary, _, parch = pkg_id if (migration_architecture != 'source' and parch != migration_architecture): @@ -2133,7 +2140,7 @@ class Britney(object): # add the new binary packages (if we are not removing) if not is_removal: source_data = sources[suite][source_name] - for pkg_id in source_data[BINARIES]: + for pkg_id in source_data.binaries: binary, _, parch = pkg_id if migration_architecture not in ['source', parch]: continue @@ -2152,7 +2159,7 @@ class Britney(object): # Don't add the binary if it is old cruft that is no longer in testing if (parch not in self.options.fucked_arches and - source_data[VERSION] != self.binaries[suite][parch][0][binary].source_version and + source_data.version != self.binaries[suite][parch][0][binary].source_version and binary not in binaries_t[parch][0]): continue @@ -2737,7 +2744,7 @@ class Britney(object): for arch in binaries for binary in binaries[arch][0] ) - removals = [ MigrationItem("-%s/%s" % (source, sources[source][VERSION])) + removals = [ MigrationItem("-%s/%s" % (source, sources[source].version)) for source in sources if source not in used ] if len(removals) > 0: @@ -2875,10 +2882,10 @@ class Britney(object): continue inunstable = pkg.package in self.sources['unstable'] - rightversion = inunstable and 
(apt_pkg.version_compare(self.sources['unstable'][pkg.package][VERSION], pkg.version) == 0) + rightversion = inunstable and (apt_pkg.version_compare(self.sources['unstable'][pkg.package].version, pkg.version) == 0) if pkg.suite == 'unstable' and not rightversion: for suite in ['pu', 'tpu']: - if pkg.package in self.sources[suite] and apt_pkg.version_compare(self.sources[suite][pkg.package][VERSION], pkg.version) == 0: + if pkg.package in self.sources[suite] and apt_pkg.version_compare(self.sources[suite][pkg.package].version, pkg.version) == 0: pkg.suite = suite _pkgvers[idx] = pkg break @@ -2886,15 +2893,15 @@ class Britney(object): # handle *-proposed-updates if pkg.suite in ['pu', 'tpu']: if pkg.package not in self.sources[pkg.suite]: continue - if apt_pkg.version_compare(self.sources[pkg.suite][pkg.package][VERSION], pkg.version) != 0: - self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources[pkg.suite][pkg.package][VERSION])) + if apt_pkg.version_compare(self.sources[pkg.suite][pkg.package].version, pkg.version) != 0: + self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources[pkg.suite][pkg.package].version)) ok = False # does the package exist in unstable? elif not inunstable: self.output_write(" Source %s has no version in unstable\n" % pkg.package) ok = False elif not rightversion: - self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources['unstable'][pkg.package][VERSION])) + self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources['unstable'][pkg.package].version)) ok = False if not ok: self.output_write("Not using hint\n") @@ -2926,7 +2933,7 @@ class Britney(object): # consider only excuses which are valid candidates and still relevant. valid_excuses = frozenset(y.uvname for y in self.upgrade_me - if y not in sources_t or sources_t[y][VERSION] != excuses[y].ver[1]) + if y not in sources_t or sources_t[y].version != excuses[y].ver[1]) excuses_deps = {name: valid_excuses.intersection(excuse.deps) for name, excuse in excuses.items() if name in valid_excuses} excuses_rdeps = defaultdict(set) diff --git a/britney_util.py b/britney_util.py index a8151e5..e5ecd45 100644 --- a/britney_util.py +++ b/britney_util.py @@ -32,7 +32,7 @@ import errno from migrationitem import MigrationItem, UnversionnedMigrationItem -from consts import (VERSION, BINARIES, PROVIDES, DEPENDS, CONFLICTS, +from consts import (VERSION, PROVIDES, DEPENDS, CONFLICTS, ARCHITECTURE, SECTION, SOURCE, MAINTAINER, MULTIARCH, ESSENTIAL) @@ -90,8 +90,7 @@ def iter_except(func, exception, first=None): pass -def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages, - BINARIES=BINARIES): +def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages): """Undoes one or more changes to testing * lundo is a list of (undo, item)-tuples @@ -126,7 +125,7 @@ def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages, for (undo, item) in lundo: if not item.is_removal and item.package in sources[item.suite]: source_data = sources[item.suite][item.package] - for pkg_id in source_data[BINARIES]: + for pkg_id in source_data.binaries: binary, _, arch = pkg_id if item.architecture in ['source', arch]: try: @@ -134,7 +133,7 @@ def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages, except KeyError: # If this happens, pkg_id must be a cruft item that # was *not* migrated. 
- assert source_data[VERSION] != all_binary_packages[pkg_id].version + assert source_data.version != all_binary_packages[pkg_id].version assert not inst_tester.any_of_these_are_in_testing((pkg_id,)) inst_tester.remove_testing_binary(pkg_id) @@ -264,9 +263,7 @@ def eval_uninst(architectures, nuninst): return "".join(parts) -def write_heidi(filename, sources_t, packages_t, - VERSION=VERSION, SECTION=SECTION, - sorted=sorted): +def write_heidi(filename, sources_t, packages_t, sorted=sorted): """Write the output HeidiResult This method write the output for Heidi, which contains all the @@ -296,7 +293,7 @@ def write_heidi(filename, sources_t, packages_t, # Faux package; not really a part of testing continue if pkg.source_version and pkgarch == 'all' and \ - pkg.source_version != sources_t[pkg.source][VERSION]: + pkg.source_version != sources_t[pkg.source].version: # when architectures are marked as "fucked", their binary # versions may be lower than those of the associated # source package in testing. the binary package list for @@ -309,8 +306,8 @@ def write_heidi(filename, sources_t, packages_t, # write sources for src_name in sorted(sources_t): src = sources_t[src_name] - srcv = src[VERSION] - srcsec = src[SECTION] or 'unknown' + srcv = src.version + srcsec = src.section or 'unknown' if srcsec == 'faux' or srcsec.endswith('/faux'): # Faux package; not really a part of testing continue @@ -352,7 +349,7 @@ def make_migrationitem(package, sources, VERSION=VERSION): """ item = UnversionnedMigrationItem(package) - return MigrationItem("%s/%s" % (item.uvname, sources[item.suite][item.package][VERSION])) + return MigrationItem("%s/%s" % (item.uvname, sources[item.suite][item.package].version)) def write_excuses(excuselist, dest_file, output_format="yaml"): @@ -446,8 +443,8 @@ def write_controlfiles(sources, packages, suite, basedir): continue if key == SOURCE: src = bin_data.source - if sources_s[src][MAINTAINER]: - output += ("Maintainer: " + sources_s[src][MAINTAINER] + "\n") + if sources_s[src].maintainer: + output += ("Maintainer: " + sources_s[src].maintainer + "\n") if src == pkg: if bin_data.source_version != bin_data.version: @@ -489,7 +486,7 @@ def old_libraries(sources, packages, fucked_arches=frozenset()): for arch in testing: for pkg_name in testing[arch][0]: pkg = testing[arch][0][pkg_name] - if sources_t[pkg.source][VERSION] != pkg.source_version and \ + if sources_t[pkg.source].version != pkg.source_version and \ (arch not in fucked_arches or pkg_name not in unstable[arch][0]): migration = "-" + "/".join((pkg_name, arch, pkg.source_version)) removals.append(MigrationItem(migration)) diff --git a/consts.py b/consts.py index de5d05b..8f760d1 100644 --- a/consts.py +++ b/consts.py @@ -21,9 +21,9 @@ # source package VERSION = 0 SECTION = 1 -BINARIES = 2 +# BINARIES = 2 - value reserved, but const is no longer used MAINTAINER = 3 -FAKESRC = 4 +# FAKESRC = 4 - value reserved, but const is no longer used # binary package SOURCE = 2 diff --git a/policies/policy.py b/policies/policy.py index bdb8e59..19ece2c 100644 --- a/policies/policy.py +++ b/policies/policy.py @@ -4,7 +4,6 @@ import apt_pkg import os import time -from consts import VERSION, BINARIES from hints import Hint, split_into_one_hint_per_package @@ -245,9 +244,9 @@ class AgePolicy(BasePolicy): urgency = self.options.default_urgency if source_name not in self._dates: - self._dates[source_name] = (source_data_srcdist[VERSION], self._date_now) - elif self._dates[source_name][0] != source_data_srcdist[VERSION]: -
self._dates[source_name] = (source_data_srcdist[VERSION], self._date_now) + self._dates[source_name] = (source_data_srcdist.version, self._date_now) + elif self._dates[source_name][0] != source_data_srcdist.version: + self._dates[source_name] = (source_data_srcdist.version, self._date_now) days_old = self._date_now - self._dates[source_name][1] min_days = self._min_days[urgency] @@ -255,7 +254,7 @@ class AgePolicy(BasePolicy): age_info['current-age'] = days_old for age_days_hint in self.hints.search('age-days', package=source_name, - version=source_data_srcdist[VERSION]): + version=source_data_srcdist.version): new_req = age_days_hint.days age_info['age-requirement-reduced'] = { 'new-requirement': new_req, @@ -265,7 +264,7 @@ class AgePolicy(BasePolicy): if days_old < min_days: urgent_hints = self.hints.search('urgent', package=source_name, - version=source_data_srcdist[VERSION]) + version=source_data_srcdist.version) if urgent_hints: age_info['age-requirement-reduced'] = { 'new-requirement': 0, @@ -342,12 +341,12 @@ class AgePolicy(BasePolicy): # if the package exists in testing and it is more recent, do nothing tsrcv = britney.sources['testing'].get(l[0], None) - if tsrcv and apt_pkg.version_compare(tsrcv[VERSION], l[1]) >= 0: + if tsrcv and apt_pkg.version_compare(tsrcv.version, l[1]) >= 0: continue # if the package doesn't exist in unstable or it is older, do nothing usrcv = britney.sources['unstable'].get(l[0], None) - if not usrcv or apt_pkg.version_compare(usrcv[VERSION], l[1]) < 0: + if not usrcv or apt_pkg.version_compare(usrcv.version, l[1]) < 0: continue # update the urgency for the package @@ -430,11 +429,11 @@ class RCBugPolicy(BasePolicy): if src_key in self._bugs['unstable']: bugs_u.update(self._bugs['unstable'][src_key]) - for pkg, _, _ in source_data_srcdist[BINARIES]: + for pkg, _, _ in source_data_srcdist.binaries: if pkg in self._bugs['unstable']: bugs_u |= self._bugs['unstable'][pkg] if source_data_tdist: - for pkg, _, _ in source_data_tdist[BINARIES]: + for pkg, _, _ in source_data_tdist.binaries: if pkg in self._bugs['testing']: bugs_t |= self._bugs['testing'][pkg] @@ -451,7 +450,7 @@ class RCBugPolicy(BasePolicy): success_verdict = PolicyVerdict.PASS for ignore_hint in self.hints.search('ignore-rc-bugs', package=source_name, - version=source_data_srcdist[VERSION]): + version=source_data_srcdist.version): ignored_bugs = ignore_hint.ignored_rcbugs # Only handle one hint for now
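
The sketch below is not part of the patch; it is a minimal illustration of the access pattern the patch standardises on. It assumes SourcePackage behaves like a collections.namedtuple whose field order matches the old consts.py indices (VERSION=0, SECTION=1, BINARIES=2, MAINTAINER=3, FAKESRC=4), so s[VERSION] and s.version address the same slot. The definition and sample values are illustrative only; the real class is defined in britney.py.

from collections import namedtuple

# Illustrative definition only: field order mirrors the retired index constants
# from consts.py, so positional and named access remain interchangeable.
SourcePackage = namedtuple('SourcePackage',
                           ['version', 'section', 'binaries', 'maintainer', 'is_fakesrc'])

VERSION = 0  # old-style index constant, as kept in consts.py

src = SourcePackage('1.0-1', 'utils', [], 'Jane Doe <jane@example.org>', False)

assert src[VERSION] == '1.0-1'  # old access pattern, via the index constant
assert src.version == '1.0-1'   # named-attribute access preferred by this patch
assert not src.is_fakesrc
src.binaries.append(('foo', '1.0-1', 'amd64'))  # the binaries list stays mutable

Named attribute access is self-documenting and removes the need to import the index constants, which is why consts.py now keeps BINARIES and FAKESRC only as commented-out placeholders so their slots are not reused.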