Mirror of https://git.launchpad.net/~ubuntu-release/britney/+git/britney2-ubuntu (synced 2025-06-01 12:51:43 +00:00)
Prefer s.x to s[X] for SourcePackage objects
Signed-off-by: Niels Thykier <niels@thykier.net>
parent 5a19b4f74d
commit 0664c580b0

britney.py | 129
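The diff below converts every consumer of SourcePackage entries from index access via the consts.py constants (s[VERSION], s[SECTION], s[BINARIES], s[MAINTAINER], s[FAKESRC]) to attribute access (s.version, s.section, s.binaries, s.maintainer, s.is_fakesrc). A minimal sketch of the two styles, assuming SourcePackage is a namedtuple-like record whose field order matches the constructor calls visible in the diff; the real class definition is not part of this commit and the sample values are made up:

    # Sketch only: field names and order inferred from the constructor calls in the diff.
    from collections import namedtuple

    SourcePackage = namedtuple(
        'SourcePackage', ['version', 'section', 'binaries', 'maintainer', 'is_fakesrc'])

    # Old-style index constants as documented in consts.py.
    VERSION, SECTION, BINARIES, MAINTAINER, FAKESRC = 0, 1, 2, 3, 4

    src = SourcePackage('1.0-1', 'utils', [], 'Jane Doe <jane@example.org>', False)

    # Both forms read the same fields; the attribute form needs no constants.
    assert src[VERSION] == src.version
    assert src[BINARIES] is src.binaries
    assert not src.is_fakesrc

    # The binaries list stays mutable inside the (immutable) record, so
    # src_data.binaries.append(pkg_id) in the diff behaves as before.
    src.binaries.append(('gzip', '1.6-4', 'amd64'))

Dropping the index constants is what allows BINARIES and FAKESRC to be retired from consts.py further down.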
britney.py

@@ -209,8 +209,9 @@ from britney_util import (old_libraries_format, undo_changes,
 create_provides_map,
 )
 from policies.policy import AgePolicy, RCBugPolicy, PolicyVerdict
-from consts import (VERSION, SECTION, BINARIES, MAINTAINER, FAKESRC,
-SOURCE, SOURCEVER, ARCHITECTURE, CONFLICTS, DEPENDS,
+# Check the "check_field_name" reflection before removing an import here.
+from consts import (SOURCE, SOURCEVER, ARCHITECTURE, CONFLICTS, DEPENDS,
 PROVIDES, MULTIARCH)

 __author__ = 'Fabio Tranchitella and the Debian Release Team'

@@ -593,7 +594,7 @@ class Britney(object):
 pkg_id,
 )

-src_data[BINARIES].append(pkg_id)
+src_data.binaries.append(pkg_id)
 self.binaries['testing'][arch][0][pkg_name] = bin_data
 self.binaries['unstable'][arch][0][pkg_name] = bin_data
 self.all_binaries[pkg_id] = bin_data

@@ -681,7 +682,7 @@ class Britney(object):
 False,
 pkg_id,
 )
-src_data[BINARIES].append(pkg_id)
+src_data.binaries.append(pkg_id)
 self.binaries['testing'][arch][0][pkg_name] = bin_data
 self.binaries['unstable'][arch][0][pkg_name] = bin_data
 self.all_binaries[pkg_id] = bin_data

@@ -800,10 +801,16 @@ class Britney(object):
 # largest version for migration.
 if pkg in sources and apt_pkg.version_compare(sources[pkg][0], ver) > 0:
 continue
+maint = get_field('Maintainer')
+if maint:
+maint = intern(maint.strip())
+section = get_field('Section')
+if section:
+section = intern(section.strip())
 sources[intern(pkg)] = SourcePackage(intern(ver),
-intern(get_field('Section')),
+section,
 [],
-get_field('Maintainer'),
+maint,
 False,
 )
 return sources

@@ -881,7 +888,7 @@ class Britney(object):
 if apt_pkg.version_compare(old_pkg_data.version, version) > 0:
 continue
 old_pkg_id = old_pkg_data.pkg_id
-old_src_binaries = srcdist[old_pkg_data[SOURCE]][BINARIES]
+old_src_binaries = srcdist[old_pkg_data[SOURCE]].binaries
 old_src_binaries.remove(old_pkg_id)
 # This may seem weird at first glance, but the current code rely
 # on this behaviour to avoid issues like #709460. Admittedly it

@@ -950,8 +957,8 @@ class Britney(object):
 # source -> binary mapping once. It doesn't matter which
 # of the versions we include as only the package name and
 # architecture are recorded.
-if pkg_id not in srcdist[source][BINARIES]:
-srcdist[source][BINARIES].append(pkg_id)
+if pkg_id not in srcdist[source].binaries:
+srcdist[source].binaries.append(pkg_id)
 # if the source package doesn't exist, create a fake one
 else:
 srcdist[source] = SourcePackage(source_version, 'faux', [pkg_id], None, True)

@@ -1211,7 +1218,7 @@ class Britney(object):
 # for the solving packages, update the excuse to add the dependencies
 for p in packages:
 if arch not in self.options.break_arches:
-if p in self.sources['testing'] and self.sources['testing'][p][VERSION] == self.sources[suite][p][VERSION]:
+if p in self.sources['testing'] and self.sources['testing'][p].version == self.sources[suite][p].version:
 excuse.add_dep("%s/%s" % (p, arch), arch)
 else:
 excuse.add_dep(p, arch)

@@ -1240,9 +1247,9 @@ class Britney(object):
 src = self.sources['testing'][pkg]
 excuse = Excuse("-" + pkg)
 excuse.addhtml("Package not in unstable, will try to remove")
-excuse.set_vers(src[VERSION], None)
-src[MAINTAINER] and excuse.set_maint(src[MAINTAINER].strip())
-src[SECTION] and excuse.set_section(src[SECTION].strip())
+excuse.set_vers(src.version, None)
+src.maintainer and excuse.set_maint(src.maintainer)
+src.section and excuse.set_section(src.section)

 # if the package is blocked, skip it
 for hint in self.hints.search('block', package=pkg, removal=True):

@@ -1274,15 +1281,15 @@ class Britney(object):
 # build the common part of the excuse, which will be filled by the code below
 ref = "%s/%s%s" % (src, arch, suite != 'unstable' and "_" + suite or "")
 excuse = Excuse(ref)
-excuse.set_vers(source_t[VERSION], source_t[VERSION])
-source_u[MAINTAINER] and excuse.set_maint(source_u[MAINTAINER].strip())
-source_u[SECTION] and excuse.set_section(source_u[SECTION].strip())
+excuse.set_vers(source_t.version, source_t.version)
+source_u.maintainer and excuse.set_maint(source_u.maintainer)
+source_u.section and excuse.set_section(source_u.section)

 # if there is a `remove' hint and the requested version is the same as the
 # version in testing, then stop here and return False
 # (as a side effect, a removal may generate such excuses for both the source
 # package and its binary packages on each architecture)
-for hint in self.hints.search('remove', package=src, version=source_t[VERSION]):
+for hint in self.hints.search('remove', package=src, version=source_t.version):
 excuse.add_hint(hint)
 excuse.addhtml("Removal request by %s" % (hint.user))
 excuse.addhtml("Trying to remove package, not update it")

@@ -1297,7 +1304,7 @@ class Britney(object):
 packages_s_a = self.binaries[suite][arch][0]

 # for every binary package produced by this source in unstable for this architecture
-for pkg_id in sorted(x for x in source_u[BINARIES] if x.architecture == arch):
+for pkg_id in sorted(x for x in source_u.binaries if x.architecture == arch):
 pkg_name = pkg_id.package_name

 # retrieve the testing (if present) and unstable corresponding binary packages

@@ -1309,20 +1316,20 @@ class Britney(object):

 # if the new binary package is architecture-independent, then skip it
 if binary_u.architecture == 'all':
-if pkg_id not in source_t[BINARIES]:
+if pkg_id not in source_t.binaries:
 # only add a note if the arch:all does not match the expected version
 excuse.addhtml("Ignoring %s %s (from %s) as it is arch: all" % (pkg_name, binary_u.version, pkgsv))
 continue

 # if the new binary package is not from the same source as the testing one, then skip it
 # this implies that this binary migration is part of a source migration
-if source_u[VERSION] == pkgsv and source_t[VERSION] != pkgsv:
+if source_u.version == pkgsv and source_t.version != pkgsv:
 anywrongver = True
-excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u.version, pkgsv, source_t[VERSION]))
+excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u.version, pkgsv, source_t.version))
 continue

 # cruft in unstable
-if source_u[VERSION] != pkgsv and source_t[VERSION] != pkgsv:
+if source_u.version != pkgsv and source_t.version != pkgsv:
 if self.options.ignore_cruft:
 excuse.addhtml("Old cruft: %s %s (but ignoring cruft, so nevermind)" % (pkg_name, pkgsv))
 else:

@@ -1332,9 +1339,9 @@ class Britney(object):

 # if the source package has been updated in unstable and this is a binary migration, skip it
 # (the binaries are now out-of-date)
-if source_t[VERSION] == pkgsv and source_t[VERSION] != source_u[VERSION]:
+if source_t.version == pkgsv and source_t.version != source_u.version:
 anywrongver = True
-excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u.version, pkgsv, source_u[VERSION]))
+excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u.version, pkgsv, source_u.version))
 continue

 # find unsatisfied dependencies for the new binary package

@@ -1363,9 +1370,9 @@ class Britney(object):

 # if there is nothing wrong and there is something worth doing or the source
 # package is not fake, then check what packages should be removed
-if not anywrongver and (anyworthdoing or not source_u[FAKESRC]):
-srcv = source_u[VERSION]
-ssrc = source_t[VERSION] == srcv
+if not anywrongver and (anyworthdoing or not source_u.is_fakesrc):
+srcv = source_u.version
+ssrc = source_t.version == srcv
 # if this is a binary-only migration via *pu, we never want to try
 # removing binary packages
 if not (ssrc and suite != 'unstable'):

@@ -1375,12 +1382,12 @@ class Britney(object):
 arch,
 False)

-for pkg_id in sorted(x for x in source_t[BINARIES] if x.architecture == arch):
+for pkg_id in sorted(x for x in source_t.binaries if x.architecture == arch):
 pkg = pkg_id.package_name
 # if the package is architecture-independent, then ignore it
 tpkg_data = packages_t_a[pkg]
 if tpkg_data.version == 'all':
-if pkg_id not in source_u[BINARIES]:
+if pkg_id not in source_u.binaries:
 # only add a note if the arch:all does not match the expected version
 excuse.addhtml("Ignoring removal of %s as it is arch: all" % (pkg))
 continue

@@ -1427,7 +1434,7 @@ class Britney(object):
 if src in self.sources['testing']:
 source_t = self.sources['testing'][src]
 # if testing and unstable have the same version, then this is a candidate for binary-NMUs only
-if apt_pkg.version_compare(source_t[VERSION], source_u[VERSION]) == 0:
+if apt_pkg.version_compare(source_t.version, source_u.version) == 0:
 return False
 else:
 source_t = None

@@ -1435,30 +1442,30 @@ class Britney(object):
 # build the common part of the excuse, which will be filled by the code below
 ref = "%s%s" % (src, suite != 'unstable' and "_" + suite or "")
 excuse = Excuse(ref)
-excuse.set_vers(source_t and source_t[VERSION] or None, source_u[VERSION])
-source_u[MAINTAINER] and excuse.set_maint(source_u[MAINTAINER].strip())
-source_u[SECTION] and excuse.set_section(source_u[SECTION].strip())
+excuse.set_vers(source_t and source_t.version or None, source_u.version)
+source_u.maintainer and excuse.set_maint(source_u.maintainer)
+source_u.section and excuse.set_section(source_u.section)

 # the starting point is that we will update the candidate
 update_candidate = True

 # if the version in unstable is older, then stop here with a warning in the excuse and return False
-if source_t and apt_pkg.version_compare(source_u[VERSION], source_t[VERSION]) < 0:
-excuse.addhtml("ALERT: %s is newer in testing (%s %s)" % (src, source_t[VERSION], source_u[VERSION]))
+if source_t and apt_pkg.version_compare(source_u.version, source_t.version) < 0:
+excuse.addhtml("ALERT: %s is newer in testing (%s %s)" % (src, source_t.version, source_u.version))
 self.excuses[excuse.name] = excuse
 excuse.addreason("newerintesting")
 return False

 # check if the source package really exists or if it is a fake one
-if source_u[FAKESRC]:
+if source_u.is_fakesrc:
 excuse.addhtml("%s source package doesn't exist" % (src))
 update_candidate = False

 # if there is a `remove' hint and the requested version is the same as the
 # version in testing, then stop here and return False
 for hint in self.hints.search('remove', package=src):
-if source_t and source_t[VERSION] == hint.version or \
-source_u[VERSION] == hint.version:
+if source_t and source_t.version == hint.version or \
+source_u.version == hint.version:
 excuse.add_hint(hint)
 excuse.addhtml("Removal request by %s" % (hint.user))
 excuse.addhtml("Trying to remove package, not update it")

@@ -1490,7 +1497,7 @@ class Britney(object):
 unblock_cmd = "un" + block_cmd
 unblocks = self.hints.search(unblock_cmd, package=src)

-if unblocks and unblocks[0].version is not None and unblocks[0].version == source_u[VERSION]:
+if unblocks and unblocks[0].version is not None and unblocks[0].version == source_u.version:
 excuse.add_hint(unblocks[0])
 if block_cmd == 'block-udeb' or not excuse.needs_approval:
 excuse.addhtml("Ignoring %s request by %s, due to %s request by %s" %

@@ -1582,7 +1589,7 @@ class Britney(object):
 for arch in self.options.architectures:
 # if the package in testing has no binaries on this
 # architecture, it can't be out-of-date
-if not any(x for x in source_t[BINARIES]
+if not any(x for x in source_t.binaries
 if x.architecture == arch and all_binaries[x].architecture != 'all'):
 continue

@@ -1591,7 +1598,7 @@ class Britney(object):
 # uploads to (t-)p-u which intentionally drop binary
 # packages
 if any(x for x in self.binaries[suite][arch][0].values() \
-if x.source == src and x.source_version == source_u[VERSION] and \
+if x.source == src and x.source_version == source_u.version and \
 x.architecture != 'all'):
 continue

@@ -1599,7 +1606,7 @@ class Britney(object):
 base = 'testing'
 else:
 base = 'stable'
-text = "Not yet built on <a href=\"https://buildd.debian.org/status/logs.php?arch=%s&pkg=%s&ver=%s&suite=%s\" target=\"_blank\">%s</a> (relative to testing)" % (quote(arch), quote(src), quote(source_u[VERSION]), base, arch)
+text = "Not yet built on <a href=\"https://buildd.debian.org/status/logs.php?arch=%s&pkg=%s&ver=%s&suite=%s\" target=\"_blank\">%s</a> (relative to testing)" % (quote(arch), quote(src), quote(source_u.version), base, arch)

 if arch in self.options.fucked_arches:
 text = text + " (but %s isn't keeping up, so never mind)" % (arch)

@@ -1617,7 +1624,7 @@ class Britney(object):
 oodbins = {}
 uptodatebins = False
 # for every binary package produced by this source in the suite for this architecture
-for pkg_id in sorted(x for x in source_u[BINARIES] if x.architecture == arch):
+for pkg_id in sorted(x for x in source_u.binaries if x.architecture == arch):
 pkg = pkg_id.package_name
 if pkg not in pkgs: pkgs[pkg] = []
 pkgs[pkg].append(arch)

@@ -1629,7 +1636,7 @@ class Britney(object):
 # if it wasn't built by the same source, it is out-of-date
 # if there is at least one binary on this arch which is
 # up-to-date, there is a build on this arch
-if source_u[VERSION] != pkgsv:
+if source_u.version != pkgsv:
 if pkgsv not in oodbins:
 oodbins[pkgsv] = []
 oodbins[pkgsv].append(pkg)

@@ -1661,11 +1668,11 @@ class Britney(object):
 if uptodatebins:
 text = "old binaries left on <a href=\"https://buildd.debian.org/status/logs.php?" \
 "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>: %s" % \
-(quote(arch), quote(src), quote(source_u[VERSION]), arch, oodtxt)
+(quote(arch), quote(src), quote(source_u.version), arch, oodtxt)
 else:
 text = "missing build on <a href=\"https://buildd.debian.org/status/logs.php?" \
 "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>: %s" % \
-(quote(arch), quote(src), quote(source_u[VERSION]), arch, oodtxt)
+(quote(arch), quote(src), quote(source_u.version), arch, oodtxt)

 if arch in self.options.fucked_arches:
 text = text + " (but %s isn't keeping up, so nevermind)" % (arch)

@@ -1686,13 +1693,13 @@ class Britney(object):
 excuse.addhtml(text)

 # if the source package has no binaries, set update_candidate to False to block the update
-if not source_u[BINARIES]:
+if not source_u.binaries:
 excuse.addhtml("%s has no binaries on any arch" % src)
 excuse.addreason("no-binaries")
 update_candidate = False

 # check if there is a `force' hint for this package, which allows it to go in even if it is not updateable
-forces = self.hints.search('force', package=src, version=source_u[VERSION])
+forces = self.hints.search('force', package=src, version=source_u.version)
 if forces:
 excuse.dontinvalidate = True
 if not update_candidate and forces:

@@ -1793,10 +1800,10 @@ class Britney(object):

 # for every source package in unstable check if it should be upgraded
 for pkg in unstable:
-if unstable[pkg][FAKESRC]: continue
+if unstable[pkg].is_fakesrc: continue
 # if the source package is already present in testing,
 # check if it should be upgraded for every binary package
-if pkg in testing and not testing[pkg][FAKESRC]:
+if pkg in testing and not testing[pkg].is_fakesrc:
 for arch in architectures:
 if should_upgrade_srcarch(pkg, arch, 'unstable'):
 upgrade_me_append("%s/%s" % (pkg, arch))

@@ -1827,7 +1834,7 @@ class Britney(object):
 if src not in testing: continue

 # check if the version specified in the hint is the same as the considered package
-tsrcv = testing[src][VERSION]
+tsrcv = testing[src].version
 if tsrcv != hint.version:
 continue

@@ -2046,7 +2053,7 @@ class Britney(object):
 # remove all the binaries

 # first, build a list of eligible binaries
-for pkg_id in source_data[BINARIES]:
+for pkg_id in source_data.binaries:
 binary, _, parch = pkg_id
 if (migration_architecture != 'source'
 and parch != migration_architecture):

@@ -2133,7 +2140,7 @@ class Britney(object):
 # add the new binary packages (if we are not removing)
 if not is_removal:
 source_data = sources[suite][source_name]
-for pkg_id in source_data[BINARIES]:
+for pkg_id in source_data.binaries:
 binary, _, parch = pkg_id
 if migration_architecture not in ['source', parch]:
 continue

@@ -2152,7 +2159,7 @@ class Britney(object):

 # Don't add the binary if it is old cruft that is no longer in testing
 if (parch not in self.options.fucked_arches and
-source_data[VERSION] != self.binaries[suite][parch][0][binary].source_version and
+source_data.version != self.binaries[suite][parch][0][binary].source_version and
 binary not in binaries_t[parch][0]):
 continue

@@ -2737,7 +2744,7 @@ class Britney(object):
 for arch in binaries
 for binary in binaries[arch][0]
 )
-removals = [ MigrationItem("-%s/%s" % (source, sources[source][VERSION]))
+removals = [ MigrationItem("-%s/%s" % (source, sources[source].version))
 for source in sources if source not in used
 ]
 if len(removals) > 0:

@@ -2875,10 +2882,10 @@ class Britney(object):
 continue

 inunstable = pkg.package in self.sources['unstable']
-rightversion = inunstable and (apt_pkg.version_compare(self.sources['unstable'][pkg.package][VERSION], pkg.version) == 0)
+rightversion = inunstable and (apt_pkg.version_compare(self.sources['unstable'][pkg.package].version, pkg.version) == 0)
 if pkg.suite == 'unstable' and not rightversion:
 for suite in ['pu', 'tpu']:
-if pkg.package in self.sources[suite] and apt_pkg.version_compare(self.sources[suite][pkg.package][VERSION], pkg.version) == 0:
+if pkg.package in self.sources[suite] and apt_pkg.version_compare(self.sources[suite][pkg.package].version, pkg.version) == 0:
 pkg.suite = suite
 _pkgvers[idx] = pkg
 break

@@ -2886,15 +2893,15 @@ class Britney(object):
 # handle *-proposed-updates
 if pkg.suite in ['pu', 'tpu']:
 if pkg.package not in self.sources[pkg.suite]: continue
-if apt_pkg.version_compare(self.sources[pkg.suite][pkg.package][VERSION], pkg.version) != 0:
-self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources[pkg.suite][pkg.package][VERSION]))
+if apt_pkg.version_compare(self.sources[pkg.suite][pkg.package].version, pkg.version) != 0:
+self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources[pkg.suite][pkg.package].version))
 ok = False
 # does the package exist in unstable?
 elif not inunstable:
 self.output_write(" Source %s has no version in unstable\n" % pkg.package)
 ok = False
 elif not rightversion:
-self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources['unstable'][pkg.package][VERSION]))
+self.output_write(" Version mismatch, %s %s != %s\n" % (pkg.package, pkg.version, self.sources['unstable'][pkg.package].version))
 ok = False
 if not ok:
 self.output_write("Not using hint\n")

@@ -2926,7 +2933,7 @@ class Britney(object):

 # consider only excuses which are valid candidates and still relevant.
 valid_excuses = frozenset(y.uvname for y in self.upgrade_me
-if y not in sources_t or sources_t[y][VERSION] != excuses[y].ver[1])
+if y not in sources_t or sources_t[y].version != excuses[y].ver[1])
 excuses_deps = {name: valid_excuses.intersection(excuse.deps)
 for name, excuse in excuses.items() if name in valid_excuses}
 excuses_rdeps = defaultdict(set)
|
@ -32,7 +32,7 @@ import errno
|
|||||||
|
|
||||||
from migrationitem import MigrationItem, UnversionnedMigrationItem
|
from migrationitem import MigrationItem, UnversionnedMigrationItem
|
||||||
|
|
||||||
from consts import (VERSION, BINARIES, PROVIDES, DEPENDS, CONFLICTS,
|
from consts import (VERSION, PROVIDES, DEPENDS, CONFLICTS,
|
||||||
ARCHITECTURE, SECTION,
|
ARCHITECTURE, SECTION,
|
||||||
SOURCE, MAINTAINER, MULTIARCH,
|
SOURCE, MAINTAINER, MULTIARCH,
|
||||||
ESSENTIAL)
|
ESSENTIAL)
|
||||||
@ -90,8 +90,7 @@ def iter_except(func, exception, first=None):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages,
|
def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages):
|
||||||
BINARIES=BINARIES):
|
|
||||||
"""Undoes one or more changes to testing
|
"""Undoes one or more changes to testing
|
||||||
|
|
||||||
* lundo is a list of (undo, item)-tuples
|
* lundo is a list of (undo, item)-tuples
|
||||||
@ -126,7 +125,7 @@ def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages,
|
|||||||
for (undo, item) in lundo:
|
for (undo, item) in lundo:
|
||||||
if not item.is_removal and item.package in sources[item.suite]:
|
if not item.is_removal and item.package in sources[item.suite]:
|
||||||
source_data = sources[item.suite][item.package]
|
source_data = sources[item.suite][item.package]
|
||||||
for pkg_id in source_data[BINARIES]:
|
for pkg_id in source_data.binaries:
|
||||||
binary, _, arch = pkg_id
|
binary, _, arch = pkg_id
|
||||||
if item.architecture in ['source', arch]:
|
if item.architecture in ['source', arch]:
|
||||||
try:
|
try:
|
||||||
@ -134,7 +133,7 @@ def undo_changes(lundo, inst_tester, sources, binaries, all_binary_packages,
|
|||||||
except KeyError:
|
except KeyError:
|
||||||
# If this happens, pkg_id must be a cruft item that
|
# If this happens, pkg_id must be a cruft item that
|
||||||
# was *not* migrated.
|
# was *not* migrated.
|
||||||
assert source_data[VERSION] != all_binary_packages[pkg_id].version
|
assert source_data.version != all_binary_packages[pkg_id].version
|
||||||
assert not inst_tester.any_of_these_are_in_testing((pkg_id,))
|
assert not inst_tester.any_of_these_are_in_testing((pkg_id,))
|
||||||
inst_tester.remove_testing_binary(pkg_id)
|
inst_tester.remove_testing_binary(pkg_id)
|
||||||
|
|
||||||
@ -264,9 +263,7 @@ def eval_uninst(architectures, nuninst):
|
|||||||
return "".join(parts)
|
return "".join(parts)
|
||||||
|
|
||||||
|
|
||||||
def write_heidi(filename, sources_t, packages_t,
|
def write_heidi(filename, sources_t, packages_t, sorted=sorted):
|
||||||
VERSION=VERSION, SECTION=SECTION,
|
|
||||||
sorted=sorted):
|
|
||||||
"""Write the output HeidiResult
|
"""Write the output HeidiResult
|
||||||
|
|
||||||
This method write the output for Heidi, which contains all the
|
This method write the output for Heidi, which contains all the
|
||||||
@ -296,7 +293,7 @@ def write_heidi(filename, sources_t, packages_t,
|
|||||||
# Faux package; not really a part of testing
|
# Faux package; not really a part of testing
|
||||||
continue
|
continue
|
||||||
if pkg.source_version and pkgarch == 'all' and \
|
if pkg.source_version and pkgarch == 'all' and \
|
||||||
pkg.source_version != sources_t[pkg.source][VERSION]:
|
pkg.source_version != sources_t[pkg.source].version:
|
||||||
# when architectures are marked as "fucked", their binary
|
# when architectures are marked as "fucked", their binary
|
||||||
# versions may be lower than those of the associated
|
# versions may be lower than those of the associated
|
||||||
# source package in testing. the binary package list for
|
# source package in testing. the binary package list for
|
||||||
@ -309,8 +306,8 @@ def write_heidi(filename, sources_t, packages_t,
|
|||||||
# write sources
|
# write sources
|
||||||
for src_name in sorted(sources_t):
|
for src_name in sorted(sources_t):
|
||||||
src = sources_t[src_name]
|
src = sources_t[src_name]
|
||||||
srcv = src[VERSION]
|
srcv = src.version
|
||||||
srcsec = src[SECTION] or 'unknown'
|
srcsec = src.section or 'unknown'
|
||||||
if srcsec == 'faux' or srcsec.endswith('/faux'):
|
if srcsec == 'faux' or srcsec.endswith('/faux'):
|
||||||
# Faux package; not really a part of testing
|
# Faux package; not really a part of testing
|
||||||
continue
|
continue
|
||||||
@ -352,7 +349,7 @@ def make_migrationitem(package, sources, VERSION=VERSION):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
item = UnversionnedMigrationItem(package)
|
item = UnversionnedMigrationItem(package)
|
||||||
return MigrationItem("%s/%s" % (item.uvname, sources[item.suite][item.package][VERSION]))
|
return MigrationItem("%s/%s" % (item.uvname, sources[item.suite][item.package].version))
|
||||||
|
|
||||||
|
|
||||||
def write_excuses(excuselist, dest_file, output_format="yaml"):
|
def write_excuses(excuselist, dest_file, output_format="yaml"):
|
||||||
@ -446,8 +443,8 @@ def write_controlfiles(sources, packages, suite, basedir):
|
|||||||
continue
|
continue
|
||||||
if key == SOURCE:
|
if key == SOURCE:
|
||||||
src = bin_data.source
|
src = bin_data.source
|
||||||
if sources_s[src][MAINTAINER]:
|
if sources_s[src].maintainer:
|
||||||
output += ("Maintainer: " + sources_s[src][MAINTAINER] + "\n")
|
output += ("Maintainer: " + sources_s[src].maintainer + "\n")
|
||||||
|
|
||||||
if src == pkg:
|
if src == pkg:
|
||||||
if bin_data.source_version != bin_data.version:
|
if bin_data.source_version != bin_data.version:
|
||||||
@ -489,7 +486,7 @@ def old_libraries(sources, packages, fucked_arches=frozenset()):
|
|||||||
for arch in testing:
|
for arch in testing:
|
||||||
for pkg_name in testing[arch][0]:
|
for pkg_name in testing[arch][0]:
|
||||||
pkg = testing[arch][0][pkg_name]
|
pkg = testing[arch][0][pkg_name]
|
||||||
if sources_t[pkg.source][VERSION] != pkg.source_version and \
|
if sources_t[pkg.source].version != pkg.source_version and \
|
||||||
(arch not in fucked_arches or pkg_name not in unstable[arch][0]):
|
(arch not in fucked_arches or pkg_name not in unstable[arch][0]):
|
||||||
migration = "-" + "/".join((pkg_name, arch, pkg.source_version))
|
migration = "-" + "/".join((pkg_name, arch, pkg.source_version))
|
||||||
removals.append(MigrationItem(migration))
|
removals.append(MigrationItem(migration))
|
||||||
|
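The signature changes to undo_changes() and write_heidi() above drop the extra keyword parameters (BINARIES=BINARIES, VERSION=VERSION, SECTION=SECTION). Binding a module-level constant as a default argument is a common CPython micro-optimization that turns a global lookup into a local one inside a hot loop; once the loop bodies use attribute access, those constants are unused and the parameters can go. A simplified before/after sketch with hypothetical helper names, not the real britney_util functions:

    # Before: the index constants ride along as default arguments.
    def summarize_sources_old(sources_t, VERSION=0, SECTION=1):
        lines = []
        for name in sorted(sources_t):
            src = sources_t[name]
            lines.append("%s %s %s" % (name, src[VERSION], src[SECTION] or 'unknown'))
        return lines

    # After: attribute access needs no index constants, so the signature shrinks.
    def summarize_sources_new(sources_t):
        lines = []
        for name in sorted(sources_t):
            src = sources_t[name]
            lines.append("%s %s %s" % (name, src.version, src.section or 'unknown'))
        return lines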
consts.py

@@ -21,9 +21,9 @@
 # source package
 VERSION = 0
 SECTION = 1
-BINARIES = 2
+# BINARIES = 2 - value reserved, but const is no longer used
 MAINTAINER = 3
-FAKESRC = 4
+# FAKESRC = 4 - value reserved, but const is no longer used

 # binary package
 SOURCE = 2
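The retired constants keep their numeric slots rather than being renumbered, presumably so the remaining indexes (VERSION = 0, SECTION = 1, MAINTAINER = 3) keep their documented positions and the freed values 2 and 4 are not reused for something unrelated. A hypothetical sanity check, not part of the commit, showing how the attribute order of the SourcePackage sketch above still lines up with the old layout:

    from collections import namedtuple

    # Same sketch as above; field order inferred from the constructor calls in the diff.
    SourcePackage = namedtuple(
        'SourcePackage', ['version', 'section', 'binaries', 'maintainer', 'is_fakesrc'])

    fields = SourcePackage._fields
    assert fields.index('version') == 0     # VERSION
    assert fields.index('section') == 1     # SECTION
    assert fields.index('binaries') == 2    # BINARIES (value reserved, const retired)
    assert fields.index('maintainer') == 3  # MAINTAINER
    assert fields.index('is_fakesrc') == 4  # FAKESRC (value reserved, const retired)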
policies/policy.py

@@ -4,7 +4,6 @@ import apt_pkg
 import os
 import time

-from consts import VERSION, BINARIES
 from hints import Hint, split_into_one_hint_per_package


@@ -245,9 +244,9 @@ class AgePolicy(BasePolicy):
 urgency = self.options.default_urgency

 if source_name not in self._dates:
-self._dates[source_name] = (source_data_srcdist[VERSION], self._date_now)
-elif self._dates[source_name][0] != source_data_srcdist[VERSION]:
-self._dates[source_name] = (source_data_srcdist[VERSION], self._date_now)
+self._dates[source_name] = (source_data_srcdist.version, self._date_now)
+elif self._dates[source_name][0] != source_data_srcdist.version:
+self._dates[source_name] = (source_data_srcdist.version, self._date_now)

 days_old = self._date_now - self._dates[source_name][1]
 min_days = self._min_days[urgency]

@@ -255,7 +254,7 @@ class AgePolicy(BasePolicy):
 age_info['current-age'] = days_old

 for age_days_hint in self.hints.search('age-days', package=source_name,
-version=source_data_srcdist[VERSION]):
+version=source_data_srcdist.version):
 new_req = age_days_hint.days
 age_info['age-requirement-reduced'] = {
 'new-requirement': new_req,

@@ -265,7 +264,7 @@ class AgePolicy(BasePolicy):

 if days_old < min_days:
 urgent_hints = self.hints.search('urgent', package=source_name,
-version=source_data_srcdist[VERSION])
+version=source_data_srcdist.version)
 if urgent_hints:
 age_info['age-requirement-reduced'] = {
 'new-requirement': 0,

@@ -342,12 +341,12 @@ class AgePolicy(BasePolicy):

 # if the package exists in testing and it is more recent, do nothing
 tsrcv = britney.sources['testing'].get(l[0], None)
-if tsrcv and apt_pkg.version_compare(tsrcv[VERSION], l[1]) >= 0:
+if tsrcv and apt_pkg.version_compare(tsrcv.version, l[1]) >= 0:
 continue

 # if the package doesn't exist in unstable or it is older, do nothing
 usrcv = britney.sources['unstable'].get(l[0], None)
-if not usrcv or apt_pkg.version_compare(usrcv[VERSION], l[1]) < 0:
+if not usrcv or apt_pkg.version_compare(usrcv.version, l[1]) < 0:
 continue

 # update the urgency for the package

@@ -430,11 +429,11 @@ class RCBugPolicy(BasePolicy):
 if src_key in self._bugs['unstable']:
 bugs_u.update(self._bugs['unstable'][src_key])

-for pkg, _, _ in source_data_srcdist[BINARIES]:
+for pkg, _, _ in source_data_srcdist.binaries:
 if pkg in self._bugs['unstable']:
 bugs_u |= self._bugs['unstable'][pkg]
 if source_data_tdist:
-for pkg, _, _ in source_data_tdist[BINARIES]:
+for pkg, _, _ in source_data_tdist.binaries:
 if pkg in self._bugs['testing']:
 bugs_t |= self._bugs['testing'][pkg]

@@ -451,7 +450,7 @@ class RCBugPolicy(BasePolicy):
 success_verdict = PolicyVerdict.PASS

 for ignore_hint in self.hints.search('ignore-rc-bugs', package=source_name,
-version=source_data_srcdist[VERSION]):
+version=source_data_srcdist.version):
 ignored_bugs = ignore_hint.ignored_rcbugs

 # Only handle one hint for now