Rename fucked_arches to outofsync_arches

To avoid getting in trouble when bringing up new commercially-supported
architectures.

Signed-off-by: Niels Thykier <niels@thykier.net>
Branch: master
Author: Colin Watson (11 years ago), committed by Niels Thykier
Parent: 0664c580b0
Commit: fe7cc466e1

@@ -31,7 +31,7 @@ ARCHITECTURES = i386 amd64 arm64 armel armhf mips mipsel mips64el powerpc pp
NOBREAKALL_ARCHES = i386 amd64
# if you're in this list, your packages may not stay in sync with the source
-FUCKED_ARCHES = mips64el
+OUTOFSYNC_ARCHES = mips64el
# if you're in this list, your uninstallability count may increase
BREAK_ARCHES = mips64el

@@ -493,14 +493,14 @@ class Britney(object):
self.options.heidi_delta_output = self.options.heidi_output + "Delta"
self.options.nobreakall_arches = self.options.nobreakall_arches.split()
-self.options.fucked_arches = self.options.fucked_arches.split()
+self.options.outofsync_arches = self.options.outofsync_arches.split()
self.options.break_arches = self.options.break_arches.split()
self.options.new_arches = self.options.new_arches.split()
# Sort the architecture list
allarches = sorted(self.options.architectures.split())
arches = [x for x in allarches if x in self.options.nobreakall_arches]
-arches += [x for x in allarches if x not in arches and x not in self.options.fucked_arches]
+arches += [x for x in allarches if x not in arches and x not in self.options.outofsync_arches]
arches += [x for x in allarches if x not in arches and x not in self.options.break_arches]
arches += [x for x in allarches if x not in arches and x not in self.options.new_arches]
arches += [x for x in allarches if x not in arches]
@@ -1608,7 +1608,7 @@ class Britney(object):
base = 'stable'
text = "Not yet built on <a href=\"https://buildd.debian.org/status/logs.php?arch=%s&pkg=%s&ver=%s&suite=%s\" target=\"_blank\">%s</a> (relative to testing)" % (quote(arch), quote(src), quote(source_u.version), base, arch)
-if arch in self.options.fucked_arches:
+if arch in self.options.outofsync_arches:
text = text + " (but %s isn't keeping up, so never mind)" % (arch)
excuse.missing_build_on_ood_arch(arch)
else:
@@ -1657,7 +1657,7 @@ class Britney(object):
# if there are out-of-date packages, warn about them in the excuse and set update_candidate
# to False to block the update; if the architecture where the package is out-of-date is
-# in the `fucked_arches' list, then do not block the update
+# in the `outofsync_arches' list, then do not block the update
if oodbins:
oodtxt = ""
for v in oodbins.keys():
@@ -1674,7 +1674,7 @@ class Britney(object):
"arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>: %s" % \
(quote(arch), quote(src), quote(source_u.version), arch, oodtxt)
-if arch in self.options.fucked_arches:
+if arch in self.options.outofsync_arches:
text = text + " (but %s isn't keeping up, so nevermind)" % (arch)
if not uptodatebins:
excuse.missing_build_on_ood_arch(arch)
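
The comment in the preceding hunk spells out the blocking rule: out-of-date binaries normally invalidate the update candidate, but on an architecture listed in `outofsync_arches` they only produce a warning. A rough standalone illustration of that rule follows; the `Excuse` class below is a hypothetical, simplified stand-in, not britney's actual excuse object.

```python
# Hypothetical, simplified stand-in for britney's excuse object; it only
# illustrates the "warn but do not block" rule for out-of-sync architectures.
class Excuse:
    def __init__(self):
        self.notes = []
        self.is_valid = True  # stays True while the package remains a candidate


def note_out_of_date(excuse, arch, outofsync_arches):
    if arch in outofsync_arches:
        # The architecture is allowed to lag behind: warn, keep the candidate.
        excuse.notes.append(
            "out of date on %s (but %s isn't keeping up, so never mind)"
            % (arch, arch))
    else:
        # Out of date on a regular architecture: block the update.
        excuse.notes.append("out of date on %s" % arch)
        excuse.is_valid = False
```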
@@ -2158,7 +2158,7 @@ class Britney(object):
continue
# Don't add the binary if it is old cruft that is no longer in testing
-if (parch not in self.options.fucked_arches and
+if (parch not in self.options.outofsync_arches and
source_data.version != self.binaries[suite][parch][0][binary].source_version and
binary not in binaries_t[parch][0]):
continue
@@ -2752,14 +2752,14 @@ class Britney(object):
self.do_all(actions=removals)
# smooth updates
-removals = old_libraries(self.sources, self.binaries, self.options.fucked_arches)
+removals = old_libraries(self.sources, self.binaries, self.options.outofsync_arches)
if self.options.smooth_updates:
self.log("> Removing old packages left in testing from smooth updates", type="I")
if removals:
self.output_write("Removing packages left in testing for smooth updates (%d):\n%s" % \
(len(removals), old_libraries_format(removals)))
self.do_all(actions=removals)
-removals = old_libraries(self.sources, self.binaries, self.options.fucked_arches)
+removals = old_libraries(self.sources, self.binaries, self.options.outofsync_arches)
else:
self.log("> Not removing old packages left in testing from smooth updates (smooth-updates disabled)",
type="I")

@@ -31,7 +31,7 @@ ARCHITECTURES = i386 amd64 arm64 armel armhf mips mipsel mips64el powerpc pp
NOBREAKALL_ARCHES = i386 amd64 arm64 armel armhf mips mipsel mips64el powerpc ppc64el s390x
# if you're in this list, your packages may not stay in sync with the source
-FUCKED_ARCHES = mips64el
+OUTOFSYNC_ARCHES = mips64el
# if you're in this list, your uninstallability count may increase
BREAK_ARCHES = mips64el

@@ -294,7 +294,7 @@ def write_heidi(filename, sources_t, packages_t, sorted=sorted):
continue
if pkg.source_version and pkgarch == 'all' and \
pkg.source_version != sources_t[pkg.source].version:
# when architectures are marked as "fucked", their binary
# when architectures are marked as "outofsync", their binary
# versions may be lower than those of the associated
# source package in testing. the binary package list for
# such architectures will include arch:all packages
@@ -467,7 +467,7 @@ def write_controlfiles(sources, packages, suite, basedir):
write_sources(sources_s, os.path.join(basedir, 'Sources'))
-def old_libraries(sources, packages, fucked_arches=frozenset()):
+def old_libraries(sources, packages, outofsync_arches=frozenset()):
"""Detect old libraries left in testing for smooth transitions
This method detects old libraries which are in testing but no
@@ -475,7 +475,7 @@ def old_libraries(sources, packages, fucked_arches=frozenset()):
other packages still depend on them, but they should be removed as
soon as possible.
For "fucked" architectures, outdated binaries are allowed to be in
For "outofsync" architectures, outdated binaries are allowed to be in
testing, so they are only added to the removal list if they are no longer
in unstable.
"""
@@ -487,7 +487,7 @@ def old_libraries(sources, packages, fucked_arches=frozenset()):
for pkg_name in testing[arch][0]:
pkg = testing[arch][0][pkg_name]
if sources_t[pkg.source].version != pkg.source_version and \
-(arch not in fucked_arches or pkg_name not in unstable[arch][0]):
+(arch not in outofsync_arches or pkg_name not in unstable[arch][0]):
migration = "-" + "/".join((pkg_name, arch, pkg.source_version))
removals.append(MigrationItem(migration))
return removals
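
The last two hunks (the docstring and the version check in `old_libraries`) are easier to follow outside diff context. The following is a minimal standalone sketch of that rule only; the data structures (`sources_t`, `testing`, `unstable`) are deliberately flattened into plain dicts and do not match britney's real per-architecture tuples.

```python
# Illustrative sketch only -- not britney's code. Assumed, simplified shapes:
#   sources_t: {source_name: version_currently_in_testing}
#   testing / unstable: {arch: {binary_name: (source_name, source_version)}}
def stale_binaries(sources_t, testing, unstable, outofsync_arches=frozenset()):
    removals = []
    for arch, binaries in testing.items():
        for name, (src, src_ver) in binaries.items():
            # Up to date: the binary was built from the source now in testing.
            if sources_t[src] == src_ver:
                continue
            # On an out-of-sync architecture an outdated binary is tolerated
            # as long as the same binary still exists in unstable.
            if arch in outofsync_arches and name in unstable.get(arch, {}):
                continue
            removals.append((name, arch, src_ver))
    return removals

# Example: with mips64el listed in OUTOFSYNC_ARCHES, an outdated binary that
# is still present in unstable on mips64el is kept, while the same situation
# on amd64 puts the binary on the removal list.
```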
