Use f-strings

pylint complains about C0209: Formatting a regular string which could be
a f-string (consider-using-f-string)
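
For illustration, a minimal sketch of the kind of conversion this commit applies
throughout the tree (the values below are made up for the example, not taken from
the changed files):

    package, version, release = "hello", "2.10-3", "jammy"

    # Before: percent formatting and str.format() trigger pylint C0209.
    old_style = "%s_%s.orig.tar.gz" % (package, version)
    old_format = "Package {} {} not found".format(package, version)

    # After: equivalent f-strings; arbitrary expressions go inside the braces.
    new_style = f"{package}_{version}.orig.tar.gz"
    new_format = f"Package {package} {version} not found"
    backports = f"{release}-backports"

    assert old_style == new_style and old_format == new_format
    print(backports)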

Signed-off-by: Benjamin Drung <benjamin.drung@canonical.com>
Benjamin Drung 2023-01-31 19:32:58 +01:00
parent 069a6926c0
commit aa556af89d
29 changed files with 258 additions and 339 deletions

View File

@@ -202,7 +202,7 @@ def find_release_package(mirror, workdir, package, version, source_release, conf
 distribution = system_distribution()
 mirrors = [mirror] if mirror else []
-mirrors.append(config.get_value("%s_MIRROR" % distribution.upper()))
+mirrors.append(config.get_value(f"{distribution.upper()}_MIRROR"))
 if not version:
 archive = Distribution(distribution.lower()).getArchive()
@@ -259,13 +259,13 @@ def get_backport_version(version, suffix, upload, release):
 release_version = debian_distro_info.version(release)
 if not release_version:
 error("Can't find the release version for %s", release)
-backport_version = "{}~bpo{}+1".format(version, release_version)
+backport_version = f"{version}~bpo{release_version}+1"
 else:
 error("%s is not a supported release (%s)", release, debian_codenames)
 elif distribution == "Ubuntu":
 series = Distribution(distribution.lower()).getSeries(name_or_version=release)
-backport_version = version + ("~bpo%s.1" % (series.version))
+backport_version = f"{version}~bpo{series.version}.1"
 else:
 error("Unknown distribution «%s» for release «%s»", distribution, release)
 if suffix is not None:
@@ -291,7 +291,7 @@ def get_old_version(source, release):
 def get_backport_dist(release, release_pocket):
 if release_pocket:
 return release
-return "%s-backports" % release
+return f"{release}-backports"
 def do_build(workdir, dsc, release, builder, update):
@@ -309,9 +309,9 @@ def do_build(workdir, dsc, release, builder, update):
 def do_upload(workdir, package, bp_version, changes, upload, prompt):
-print("Please check %s %s in file://%s carefully!" % (package, bp_version, workdir))
+print(f"Please check {package} {bp_version} in file://{workdir} carefully!")
 if prompt or upload == "ubuntu":
-question = "Do you want to upload the package to %s" % upload
+question = f"Do you want to upload the package to {upload}"
 answer = YesNoQuestion().ask(question, "yes")
 if answer == "no":
 return
@@ -329,13 +329,11 @@ def orig_needed(upload, workdir, pkg):
 version = pkg.version.upstream_version
 http = Http()
-for filename in glob.glob(os.path.join(workdir, "%s_%s.orig*" % (pkg.source, version))):
+for filename in glob.glob(os.path.join(workdir, f"{pkg.source}_{version}.orig*")):
-url = "https://launchpad.net/~%s/+archive/%s/+sourcefiles/%s/%s/%s" % (
-quote(user),
-quote(ppa),
-quote(pkg.source),
-quote(pkg.version.full_version),
-quote(os.path.basename(filename)),
+url = (
+f"https://launchpad.net/~{quote(user)}/+archive/{quote(ppa)}/+sourcefiles"
+f"/{quote(pkg.source)}/{quote(pkg.version.full_version)}"
+f"/{quote(os.path.basename(filename))}"
 )
 try:
 headers = http.request(url, "HEAD")[0]
@@ -364,11 +362,11 @@ def do_backport(
 keyid,
 prompt,
 ):
-dirname = "%s-%s" % (pkg.source, release)
+dirname = f"{pkg.source}-{release}"
 srcdir = os.path.join(workdir, dirname)
 if os.path.exists(srcdir):
-question = "Working directory %s already exists. Delete it?" % srcdir
+question = f"Working directory {srcdir} already exists. Delete it?"
 if YesNoQuestion().ask(question, "no") == "no":
 sys.exit(1)
 shutil.rmtree(srcdir)
@@ -379,9 +377,9 @@ def do_backport(
 old_version = get_old_version(pkg.source, release)
 bp_dist = get_backport_dist(release, release_pocket)
-changelog = "%s backport to %s." % (message, release)
+changelog = f"{message} backport to {release}."
 if close:
-changelog += " (LP: #%s)" % (close,)
+changelog += f" (LP: #{close})"
 check_call(
 [
 "dch",
@@ -403,7 +401,7 @@ def do_backport(
 else:
 cmd.append("-sd")
 if old_version:
-cmd.append("-v%s" % old_version)
+cmd.append(f"-v{old_version}")
 env = os.environ.copy()
 # An ubuntu.com e-mail address would make dpkg-buildpackage fail if there
 # wasn't an Ubuntu maintainer for an ubuntu-versioned package. LP: #1007042

View File

@@ -37,7 +37,7 @@ def main():
 parser.add_argument("filename", help=argparse.SUPPRESS)
 args = parser.parse_args()
 if not os.path.isfile(args.filename):
-parser.error("File %s does not exist" % args.filename)
+parser.error(f"File {args.filename} does not exist")
 if "UDT_EDIT_WRAPPER_EDITOR" in os.environ:
 os.environ["EDITOR"] = os.environ["UDT_EDIT_WRAPPER_EDITOR"]

View File

@@ -55,7 +55,7 @@ def main():
 "multiverse-manual",
 ):
-url = "https://merges.ubuntu.com/%s.json" % component
+url = f"https://merges.ubuntu.com/{component}.json"
 try:
 headers, page = Http().request(url)
 except HttpLib2Error as e:
@@ -71,10 +71,10 @@ def main():
 if merge.get("user"):
 author = merge["user"]
 if merge.get("uploader"):
-uploader = "(%s)" % merge["uploader"]
+uploader = f"({merge['uploader']})"
 teams = merge.get("teams", [])
-pretty_uploader = "{} {}".format(author, uploader)
+pretty_uploader = f"{author} {uploader}"
 if (
 args.string is None
 or args.string in package

View File

@@ -129,10 +129,7 @@ def main():
 err = True
 continue
-description = "Imported from Debian bug http://bugs.debian.org/%d:\n\n%s" % (
-bug_num,
-summary,
-)
+description = f"Imported from Debian bug http://bugs.debian.org/{bug_num}:\n\n{summary}"
 # LP limits descriptions to 50K chars
 description = (description[:49994] + " [...]") if len(description) > 50000 else description

View File

@@ -154,7 +154,7 @@ class PbuilderDist:
 # should work nevertheless.
 if distro not in self._debian_distros:
 question = (
-'Warning: Unknown distribution "%s". ' "Do you want to continue" % distro
+f'Warning: Unknown distribution "{distro}". ' "Do you want to continue"
 )
 answer = YesNoQuestion().ask(question, "no")
 if answer == "no":
@@ -208,7 +208,7 @@ class PbuilderDist:
 prefix = os.path.join(self.base, self.chroot_string)
 if "--buildresult" not in remaining_arguments:
-result = os.path.normpath("%s_result/" % prefix)
+result = os.path.normpath(f"{prefix}_result/")
 else:
 location_of_arg = remaining_arguments.index("--buildresult")
 result = os.path.normpath(remaining_arguments[location_of_arg + 1])
@@ -241,7 +241,7 @@ class PbuilderDist:
 sys.exit(1)
 arguments = [
-"--%s" % self.operation,
+f"--{self.operation}",
 "--distribution",
 self.target_distro,
 "--buildresult",
@@ -282,9 +282,9 @@ class PbuilderDist:
 arguments += ["--mirror", mirror]
 othermirrors = []
-localrepo = "/var/cache/archive/" + self.target_distro
+localrepo = f"/var/cache/archive/{self.target_distro}"
 if os.path.exists(localrepo):
-repo = "deb file:///var/cache/archive/ %s/" % self.target_distro
+repo = f"deb file:///var/cache/archive/ {self.target_distro}/"
 othermirrors.append(repo)
 if self.target_distro in self._debian_distros:
@@ -307,21 +307,17 @@ class PbuilderDist:
 if float(debian_info.version(codename)) < 11.0:
 pocket = "/updates"
 othermirrors.append(
-"deb %s %s%s %s"
-% (config.get_value("DEBSEC_MIRROR"), self.target_distro, pocket, components)
+f"deb {config.get_value('DEBSEC_MIRROR')}"
+f" {self.target_distro}{pocket} {components}"
 )
 if self.enable_updates:
-othermirrors.append(
-"deb %s %s-updates %s" % (mirror, self.target_distro, components)
-)
+othermirrors.append(f"deb {mirror} {self.target_distro}-updates {components}")
 if self.enable_proposed:
 othermirrors.append(
-"deb %s %s-proposed-updates %s" % (mirror, self.target_distro, components)
+f"deb {mirror} {self.target_distro}-proposed-updates {components}"
 )
 if self.enable_backports:
-othermirrors.append(
-"deb %s %s-backports %s" % (mirror, self.target_distro, components)
-)
+othermirrors.append(f"deb {mirror} {self.target_distro}-backports {components}")
 aptcache = os.path.join(self.base, "aptcache", "debian")
 else:
@@ -336,17 +332,11 @@ class PbuilderDist:
 self.enable_updates = False
 if self.enable_security:
-othermirrors.append(
-"deb %s %s-security %s" % (mirror, self.target_distro, components)
-)
+othermirrors.append(f"deb {mirror} {self.target_distro}-security {components}")
 if self.enable_updates:
-othermirrors.append(
-"deb %s %s-updates %s" % (mirror, self.target_distro, components)
-)
+othermirrors.append(f"deb {mirror} {self.target_distro}-updates {components}")
 if self.enable_proposed:
-othermirrors.append(
-"deb %s %s-proposed %s" % (mirror, self.target_distro, components)
-)
+othermirrors.append(f"deb {mirror} {self.target_distro}-proposed {components}")
 aptcache = os.path.join(self.base, "aptcache", "ubuntu")
@@ -392,7 +382,7 @@ class PbuilderDist:
 if self.build_architecture != self.system_architecture:
 arguments += ["--debootstrapopts", "--arch=" + self.build_architecture]
-apt_conf_dir = os.path.join(self.base, "etc/%s/apt.conf" % self.target_distro)
+apt_conf_dir = os.path.join(self.base, f"etc/{self.target_distro}/apt.conf")
 if os.path.exists(apt_conf_dir):
 arguments += ["--aptconfdir", apt_conf_dir]

View File

@@ -36,7 +36,7 @@ Logger = getLogger()
 def previous_version(package, version, distance):
 "Given an (extracted) package, determine the version distance versions ago"
 upver = Version(version).upstream_version
-filename = "%s-%s/debian/changelog" % (package, upver)
+filename = f"{package}-{upver}/debian/changelog"
 changelog_file = open(filename, "r", encoding="utf-8")
 changelog = debian.changelog.Changelog(changelog_file.read())
 changelog_file.close()

View File

@@ -41,11 +41,11 @@ def determine_destinations(source, destination):
 destination = ubuntu_info.lts()
 if source not in ubuntu_info.all:
-raise DestinationException("Source release %s does not exist" % source)
+raise DestinationException(f"Source release {source} does not exist")
 if destination not in ubuntu_info.all:
-raise DestinationException("Destination release %s does not exist" % destination)
+raise DestinationException(f"Destination release {destination} does not exist")
 if destination not in ubuntu_info.supported():
-raise DestinationException("Destination release %s is not supported" % destination)
+raise DestinationException(f"Destination release {destination} is not supported")
 found = False
 destinations = []
@@ -137,9 +137,9 @@ def find_rdepends(releases, published_binaries):
 for binpkg, rdeps in intermediate.items():
 output += ["", binpkg, "-" * len(binpkg)]
 for pkg, appearences in rdeps.items():
-output += ["* %s" % pkg]
+output += [f"* {pkg}"]
 for release, relationship in appearences:
-output += [" [ ] %s (%s)" % (release, relationship)]
+output += [f" [ ] {release} ({relationship})"]
 found_any = sum(len(rdeps) for rdeps in intermediate.values())
 if found_any:
@@ -200,11 +200,9 @@ def request_backport(package_spph, source, destinations):
 testing = ["[Testing]", ""]
 for dest in destinations:
-testing += [" * %s:" % dest.capitalize()]
+testing += [f" * {dest.capitalize()}:"]
 testing += [" [ ] Package builds without modification"]
-testing += [
-" [ ] %s installs cleanly and runs" % binary for binary in published_binaries
-]
+testing += [f" [ ] {binary} installs cleanly and runs" for binary in published_binaries]
 subst = {
 "package": package_spph.getPackageName(),

View File

@@ -299,17 +299,14 @@ def main():
 check_existing_reports(srcpkg)
 # Generate bug report
-pkg_to_sync = "%s %s (%s) from Debian %s (%s)" % (
-srcpkg,
-debian_version,
-ubuntu_component,
-distro,
-debian_component,
+pkg_to_sync = (
+f"{srcpkg} {debian_version} ({ubuntu_component})"
+f" from Debian {distro} ({debian_component})"
 )
-title = "Sync %s" % pkg_to_sync
+title = f"Sync {pkg_to_sync}"
 if ffe:
 title = "FFe: " + title
-report = "Please sync %s\n\n" % pkg_to_sync
+report = f"Please sync {pkg_to_sync}\n\n"
 if "ubuntu" in str(ubuntu_version):
 need_interaction = True
@@ -318,9 +315,8 @@ def main():
 Logger.info("Please edit the report and give an explanation.")
 Logger.info("Not saving the report file will abort the request.")
 report += (
-"Explanation of the Ubuntu delta and why it can be "
-"dropped:\n%s\n>>> ENTER_EXPLANATION_HERE <<<\n\n"
-% get_ubuntu_delta_changelog(ubuntu_srcpkg)
+f"Explanation of the Ubuntu delta and why it can be dropped:\n"
+f"{get_ubuntu_delta_changelog(ubuntu_srcpkg)}\n>>> ENTER_EXPLANATION_HERE <<<\n\n"
 )
 if ffe:
@@ -339,10 +335,7 @@ def main():
 if newsource:
 report += "All changelog entries:\n\n"
 else:
-report += "Changelog entries since current %s version %s:\n\n" % (
-args.release,
-ubuntu_version,
-)
+report += f"Changelog entries since current {args.release} version {ubuntu_version}:\n\n"
 changelog = debian_srcpkg.getChangelog(since_version=base_version)
 if not changelog:
 if not args.missing_changelog_ok:

View File

@@ -114,7 +114,7 @@ def main():
 # Convert unstable/testing aliases to codenames:
 distribution = codename_to_distribution(options.release)
 if not distribution:
-parser.error("Unknown release codename %s" % options.release)
+parser.error(f"Unknown release codename {options.release}")
 distro_info = vendor_to_distroinfo(distribution)()
 try:
 options.release = distro_info.codename(options.release, default=options.release)
@@ -184,13 +184,13 @@ def display_verbose(package, values):
 return
 def log_package(values, package, arch, dependency, offset=0):
-line = " " * offset + "* %s" % package
+line = f"{' ' * offset}* {package}"
 if all_archs and set(arch) != all_archs:
-line += " [%s]" % " ".join(sorted(arch))
+line += f" [{' '.join(sorted(arch))}]"
 if dependency:
 if len(line) < 30:
 line += " " * (30 - len(line))
-line += " (for %s)" % dependency
+line += f" (for {dependency})"
 Logger.info(line)
 data = values.get(package)
 if data:

View File

@@ -81,7 +81,7 @@ def present_on(appearences):
 for flavor, types in present.items():
 if len(types) > 1:
 types.discard("supported")
-output = [" %s: %s" % (flavor, ", ".join(sorted(types))) for flavor, types in present.items()]
+output = [f" {flavor}: {', '.join(sorted(types))}" for flavor, types in present.items()]
 output.sort()
 return "\n".join(output)

View File

@@ -38,7 +38,7 @@ def parse(script_name):
 "%(prog)s [options] <bug number>\n"
 "One of --upload, --workdir, or --sponsor must be specified."
 )
-epilog = "See %s(1) for more info." % (script_name)
+epilog = f"See {script_name}(1) for more info."
 parser = argparse.ArgumentParser(usage=usage, epilog=epilog)
 parser.add_argument(

View File

@@ -93,7 +93,7 @@ def gen_debdiff(tmpdir, changelog):
 newver = next(changelog_it).version
 oldver = next(changelog_it).version
-debdiff = os.path.join(tmpdir, "%s_%s.debdiff" % (pkg, newver))
+debdiff = os.path.join(tmpdir, f"{pkg}_{newver}.debdiff")
 diff_cmd = ["bzr", "diff", "-r", "tag:" + str(oldver)]
 if call(diff_cmd, stdout=DEVNULL, stderr=DEVNULL) == 1:
@@ -104,8 +104,8 @@ def gen_debdiff(tmpdir, changelog):
 if newver.epoch is not None:
 newver = str(newver)[str(newver).index(":") + 1 :]
-olddsc = "../%s_%s.dsc" % (pkg, oldver)
-newdsc = "../%s_%s.dsc" % (pkg, newver)
+olddsc = f"../{pkg}_{oldver}.dsc"
+newdsc = f"../{pkg}_{newver}.dsc"
 check_file(olddsc)
 check_file(newdsc)
@@ -176,7 +176,7 @@ def submit_bugreport(body, debdiff, deb_version, changelog):
 "--pseudo-header",
 "User: ubuntu-devel@lists.ubuntu.com",
 "--pseudo-header",
-"Usertags: origin-ubuntu %s ubuntu-patch" % devel,
+f"Usertags: origin-ubuntu {devel} ubuntu-patch",
 "--tag",
 "patch",
 "--bts",
@@ -197,15 +197,14 @@ def check_reportbug_config():
 if os.path.exists(reportbugrc_filename):
 return
 email = ubu_email()[1]
-reportbugrc = (
-"""# Reportbug configuration generated by submittodebian(1)
+reportbugrc = f"""# Reportbug configuration generated by submittodebian(1)
 # See reportbug.conf(5) for the configuration file format.
 # Use Debian's reportbug SMTP Server:
 # Note: it's limited to 5 connections per hour, and cannot CC you at submission
 # time. See /usr/share/doc/reportbug/README.Users.gz for more details.
 smtphost reportbug.debian.org:587
-header "X-Debbugs-CC: %s"
+header "X-Debbugs-CC: {email}"
 no-cc
 # Use GMail's SMTP Server:
@@ -213,8 +212,6 @@ no-cc
 #smtpuser "<your address>@gmail.com"
 #smtptls
 """
-% email
-)
 with open(reportbugrc_filename, "w", encoding="utf-8") as f:
 f.write(reportbugrc)

View File

@@ -85,11 +85,11 @@ def add_fixed_bugs(changes, bugs):
 for i, change in enumerate(changes):
 if change.startswith("Launchpad-Bugs-Fixed:"):
 bugs.update(changes[i][22:].strip().split(" "))
-changes[i] = "Launchpad-Bugs-Fixed: %s" % (" ".join(bugs))
+changes[i] = f"Launchpad-Bugs-Fixed: {' '.join(bugs)}"
 break
 if i == len(changes) - 1:
 # Launchpad-Bugs-Fixed entry does not exist in changes file
-line = "Launchpad-Bugs-Fixed: %s" % (" ".join(bugs))
+line = f"Launchpad-Bugs-Fixed: {' '.join(bugs)}"
 changes.append(line)
 return "\n".join(changes + [""])
@@ -194,7 +194,7 @@ def sync_dsc(
 if not fakesync:
 # create the changes file
-changes_filename = "%s_%s_source.changes" % (src_pkg.source, new_ver.strip_epoch())
+changes_filename = f"{src_pkg.source}_{new_ver.strip_epoch()}_source.changes"
 cmd = [
 "dpkg-genchanges",
 "-S",
@@ -237,11 +237,10 @@ def sync_dsc(
 else:
 # Create fakesync changelog entry
 new_ver = Version(new_ver.full_version + "fakesync1")
-changes_filename = "%s_%s_source.changes" % (src_pkg.source, new_ver.strip_epoch())
+changes_filename = f"{src_pkg.source}_{new_ver.strip_epoch()}_source.changes"
 if len(bugs) > 0:
-message = "Fake sync due to mismatching orig tarball (LP: %s)." % (
-", ".join(["#" + str(b) for b in bugs])
-)
+bug_numbers = [f"#{b}" for b in bugs]
+message = f"Fake sync due to mismatching orig tarball (LP: {', '.join(bug_numbers)})."
 else:
 message = "Fake sync due to mismatching orig tarball."
 cmd = ["dch", "-v", new_ver.full_version, "--force-distribution", "-D", release, message]
@@ -441,12 +440,8 @@ def is_blacklisted(query):
 lp_comments = series.getDifferenceComments(source_package_name=query)
 blacklisted = False
 comments = [
-"%s\n -- %s %s"
-% (
-c.body_text,
-c.comment_author.name,
-c.comment_date.strftime("%a, %d %b %Y %H:%M:%S +0000"),
-)
+f"{c.body_text}\n -- {c.comment_author.name}"
+f" {c.comment_date.strftime('%a, %d %b %Y %H:%M:%S +0000')}"
 for c in lp_comments
 ]
@@ -482,9 +477,9 @@ def is_blacklisted(query):
 def close_bugs(bugs, package, version, changes, sponsoree):
 """Close the correct task on all bugs, with changes"""
 ubuntu = Launchpad.distributions["ubuntu"]
-message = "This bug was fixed in the package %s - %s" % (package, version)
+message = f"This bug was fixed in the package {package} - {version}"
 if sponsoree:
-message += "\nSponsored for %s (%s)" % (sponsoree.display_name, sponsoree.name)
+message += f"\nSponsored for {sponsoree.display_name} ({sponsoree.name})"
 if changes:
 message += "\n\n---------------\n" + changes
 for bug in bugs:
@@ -510,7 +505,7 @@ def parse():
 """Parse given command-line parameters."""
 usage = "%(prog)s [options] <.dsc URL/path or package name>"
-epilog = "See %s(1) for more info." % os.path.basename(sys.argv[0])
+epilog = f"See {os.path.basename(sys.argv[0])}(1) for more info."
 parser = argparse.ArgumentParser(usage=usage, epilog=epilog)
 parser.add_argument("-d", "--distribution", help="Debian distribution to sync from.")
@@ -599,13 +594,13 @@ def parse():
 "-D",
 "--debian-mirror",
 metavar="DEBIAN_MIRROR",
-help="Preferred Debian mirror (default: %s)" % UDTConfig.defaults["DEBIAN_MIRROR"],
+help=f"Preferred Debian mirror (default: {UDTConfig.defaults['DEBIAN_MIRROR']})",
 )
 no_lp.add_argument(
 "-U",
 "--ubuntu-mirror",
 metavar="UBUNTU_MIRROR",
-help="Preferred Ubuntu mirror (default: %s)" % UDTConfig.defaults["UBUNTU_MIRROR"],
+help=f"Preferred Ubuntu mirror (default: {UDTConfig.defaults['UBUNTU_MIRROR']})",
 )
 parser.add_argument("package", help=argparse.SUPPRESS)
 args = parser.parse_args()
@@ -620,8 +615,8 @@ def parse():
 if args.component not in (None, "main", "contrib", "non-free"):
 parser.error(
-"%s is not a valid Debian component. "
-"It should be one of main, contrib, or non-free." % args.component
+f"{args.component} is not a valid Debian component. "
+f"It should be one of main, contrib, or non-free."
 )
 if args.lp and args.uploader_name:
@@ -668,7 +663,7 @@ def main():
 if args.release is None:
 ubuntu = Launchpad.distributions["ubuntu"]
-args.release = "%s-proposed" % ubuntu.current_series.name
+args.release = f"{ubuntu.current_series.name}-proposed"
 if not args.fakesync and not args.lp:
 Logger.warning(

View File

@@ -80,9 +80,8 @@ def main():
 "--arch",
 action="append",
 dest="architecture",
-help="Rebuild or rescore a specific "
-"architecture. Valid architectures "
-"include: %s." % ", ".join(valid_archs),
+help=f"Rebuild or rescore a specific architecture. Valid architectures "
+f"include: {', '.join(valid_archs)}.",
 )
 # Batch processing options
@@ -115,8 +114,8 @@ def main():
 "--arch2",
 action="append",
 dest="architecture",
-help="Affect only 'architecture' (can be used "
-"several times). Valid architectures are: %s." % ", ".join(valid_archs),
+help=f"Affect only 'architecture' (can be used several times)."
+f" Valid architectures are: {', '.join(valid_archs)}.",
 )
 parser.add_argument("packages", metavar="package", nargs="+", help=argparse.SUPPRESS)

View File

@@ -54,7 +54,7 @@ def main():
 for iso in args.isos:
 if len(args.isos) > 1:
-prefix = "%s:" % iso
+prefix = f"{iso}:"
 else:
 prefix = ""

View File

@@ -647,7 +647,7 @@ class DebianSourcePackage(SourcePackage):
 version = self.version.full_version
 srcpkg = Snapshot.getSourcePackage(self.source, version=version)
 if not srcpkg:
-msg = "Package {} {} not found".format(self.source, version)
+msg = f"Package {self.source} {version} not found"
 raise PackageNotFoundException(msg)
 self._snapshot_package = srcpkg
 else:
@@ -657,7 +657,7 @@ class DebianSourcePackage(SourcePackage):
 params = {"series": series} if series else {}
 srcpkg = Madison(self.distribution).getSourcePackage(self.source, **params)
 if not srcpkg:
-raise PackageNotFoundException("Package {} not found".format(self.source))
+raise PackageNotFoundException(f"Package {self.source} not found")
 if self.source != srcpkg.name:
 self.binary = self.source
 self.source = srcpkg.name
@@ -688,7 +688,7 @@ class PersonalPackageArchiveSourcePackage(UbuntuSourcePackage):
 assert "ppa" in kwargs
 ppa = kwargs["ppa"].split("/")
 if len(ppa) != 2:
-raise ValueError('Invalid PPA value "%s",' 'must be "<USER>/<PPA>"' % kwargs["ppa"])
+raise ValueError(f'Invalid PPA value "{kwargs["ppa"]}",' 'must be "<USER>/<PPA>"')
 self._teamname = ppa[0]
 self._ppaname = ppa[1]
 self.masters = []
@@ -960,13 +960,13 @@ class Madison(_WebJSON):
 return self.urls[self._distro]
 def getSourcePackage(self, name, series="unstable"):
-url = "?f=json&package={name}&s={series}".format(name=name, series=series)
+url = f"?f=json&package={name}&s={series}"
 try:
 result = self.load(url)
 except HTTPError:
 result = None
 if not result:
-msg = "Package {} not found in '{}'".format(name, series)
+msg = f"Package {name} not found in '{series}'"
 raise PackageNotFoundException(msg)
 versions = list(result[0][name].values())[0]
 latest = versions[sorted(versions.keys(), reverse=True)[0]]
@@ -985,17 +985,17 @@ class _Snapshot(_WebJSON):
 # unfortunately there is no (easy) way to find the component for older
 # package versions (madison only lists the most recent versions).
 # so we have to parse the file path to determine the component :(
-url = "/mr/package/{}/{}/srcfiles".format(name, version)
+url = f"/mr/package/{name}/{version}/srcfiles"
 try:
-response = self.load("{}?fileinfo=1".format(url))
+response = self.load(f"{url}?fileinfo=1")
 except HTTPError as error:
-msg = "Package {} version {} not found"
-raise PackageNotFoundException(msg.format(name, version)) from error
+msg = f"Package {name} version {version} not found"
+raise PackageNotFoundException(msg) from error
 result = response.get("result")
 info = response.get("fileinfo")
 if len(result) < 1:
-msg = "No source files for package {} version {}"
-raise PackageNotFoundException(msg.format(name, version))
+msg = f"No source files for package {name} version {version}"
+raise PackageNotFoundException(msg)
 path = info[result[0]["hash"]][0]["path"]
 # this expects the 'component' to follow 'pool[-*]' in the path
 found_pool = False
@@ -1015,15 +1015,15 @@ class _Snapshot(_WebJSON):
 def _get_package(self, name, url, pkginit, version, sort_key):
 try:
-results = self.load("/mr/{}/{}/".format(url, name))["result"]
+results = self.load(f"/mr/{url}/{name}/")["result"]
 except HTTPError as error:
-raise PackageNotFoundException("Package {} not found.".format(name)) from error
+raise PackageNotFoundException(f"Package {name} not found.") from error
 results = sorted(results, key=lambda r: r[sort_key], reverse=True)
 results = [pkginit(r) for r in results if version == r["version"]]
 if not results:
-msg = "Package {name} version {version} not found."
-raise PackageNotFoundException(msg.format(name=name, version=version))
+msg = f"Package {name} version {version} not found."
+raise PackageNotFoundException(msg)
 return results
 def getSourcePackages(self, name, version):
@@ -1080,8 +1080,8 @@ class SnapshotSourcePackage(SnapshotPackage):
 def getBinaryFiles(self, arch=None, name=None, ext=None):
 if not self._binary_files:
-url = "/mr/package/{}/{}/allfiles".format(self.name, self.version)
-response = Snapshot.load("{}?fileinfo=1".format(url))
+url = f"/mr/package/{self.name}/{self.version}/allfiles"
+response = Snapshot.load(f"{url}?fileinfo=1")
 info = response["fileinfo"]
 files = [
 SnapshotBinaryFile(
@@ -1108,8 +1108,8 @@ class SnapshotSourcePackage(SnapshotPackage):
 def getFiles(self):
 if not self._files:
-url = "/mr/package/{}/{}/srcfiles".format(self.name, self.version)
-response = Snapshot.load("{}?fileinfo=1".format(url))
+url = f"/mr/package/{self.name}/{self.version}/srcfiles"
+response = Snapshot.load(f"{url}?fileinfo=1")
 info = response["fileinfo"]
 self._files = [
 SnapshotSourceFile(
@@ -1144,8 +1144,8 @@ class SnapshotBinaryPackage(SnapshotPackage):
 def getFiles(self, arch=None):
 if not self._files:
-url = "/mr/binary/{}/{}/binfiles".format(self.name, self.version)
-response = Snapshot.load("{}?fileinfo=1".format(url))
+url = f"/mr/binary/{self.name}/{self.version}/binfiles"
+response = Snapshot.load(f"{url}?fileinfo=1")
 info = response["fileinfo"]
 self._files = [
 SnapshotBinaryFile(
@@ -1209,10 +1209,10 @@ class SnapshotFile:
 return self._hash
 def getUrl(self):
-return "{}/file/{}".format(Snapshot.getHostUrl(), self.getHash())
+return f"{Snapshot.getHostUrl()}/file/{self.getHash()}"
 def __repr__(self):
-return "{}/{} {} bytes {}".format(self.path, self.name, self.size, self.date)
+return f"{self.path}/{self.name} {self.size} bytes {self.date}"
 class SnapshotSourceFile(SnapshotFile):
@@ -1260,7 +1260,7 @@ class SnapshotSPPH:
 @property
 def display_name(self):
-return "{name} {version}".format(name=self.getPackageName(), version=self.getVersion())
+return f"{self.getPackageName()} {self.getVersion()}"
 @property
 def pocket(self):
@@ -1329,7 +1329,7 @@ class SnapshotSPPH:
 if self._changelog is None:
 name = self.getPackageName()
 if name.startswith("lib"):
-subdir = "lib%s" % name[3]
+subdir = f"lib{name[3]}"
 else:
 subdir = name[0]
 pkgversion = Version(self.getVersion()).strip_epoch()
@@ -1394,7 +1394,7 @@ class SnapshotBPPH: # pylint: disable=too-many-public-methods
 @property
 def display_name(self):
-return "{name} {version}".format(name=self.getPackageName(), version=self.getVersion())
+return f"{self.getPackageName()} {self.getVersion()}"
 @property
 def pocket(self):

View File

@@ -130,7 +130,7 @@ class MetaWrapper(type):
 def __init__(cls, name, bases, attrd):
 super(MetaWrapper, cls).__init__(name, bases, attrd)
 if "resource_type" not in attrd:
-raise TypeError('Class "%s" needs an associated resource type' % name)
+raise TypeError(f'Class "{name}" needs an associated resource type')
 cls._cache = {}
@@ -173,13 +173,13 @@ class BaseWrapper(metaclass=MetaWrapper):
 if isinstance(cache, collections.abc.Callable):
 cache(cached)
 return cached
-raise TypeError("'%s' is not a '%s' object" % (str(data), str(cls.resource_type)))
+raise TypeError(f"'{data}' is not a '{cls.resource_type}' object")
 # not a LP API representation, let the specific class handle it
 fetch = getattr(cls, "fetch", None)
 if isinstance(fetch, collections.abc.Callable):
 return fetch(data)
-raise NotImplementedError("Don't know how to fetch '%s' from LP" % str(data))
+raise NotImplementedError(f"Don't know how to fetch '{data}' from LP")
 def __call__(self):
 return self._lpobject
@@ -188,9 +188,7 @@ class BaseWrapper(metaclass=MetaWrapper):
 return getattr(self._lpobject, attr)
 def __repr__(self):
-if hasattr(str, "format"):
-return "<{0}: {1!r}>".format(self.__class__.__name__, self._lpobject)
-return "<%s: %r>" % (self.__class__.__name__, self._lpobject)
+return f"<{self.__class__.__name__}: {self._lpobject!r}>"
 class Distribution(BaseWrapper):
@@ -225,7 +223,7 @@ class Distribution(BaseWrapper):
 Fetch the distribution object identified by 'dist' from LP.
 """
 if not isinstance(dist, str):
-raise TypeError("Don't know what do with '%r'" % dist)
+raise TypeError(f"Don't know what do with '{dist!r}'")
 cached = cls._cache.get(dist)
 if not cached:
 cached = Distribution(Launchpad.distributions[dist])
@@ -250,7 +248,7 @@ class Distribution(BaseWrapper):
 if res:
 return res
-message = "The Archive '%s' doesn't exist in %s" % (archive, self.display_name)
+message = f"The Archive '{archive}' doesn't exist in {self.display_name}"
 raise ArchiveNotFoundException(message)
 if self._main_archive is None:
@@ -271,7 +269,7 @@ class Distribution(BaseWrapper):
 try:
 series = DistroSeries(self().getSeries(name_or_version=name_or_version))
 except HTTPError as error:
-message = "Release '%s' is unknown in '%s'." % (name_or_version, self.display_name)
+message = f"Release '{name_or_version}' is unknown in '{self.display_name}'."
 raise SeriesNotFoundException(message) from error
 self._cache_series(series)
@@ -299,10 +297,7 @@ class Distribution(BaseWrapper):
 allseries = filter(lambda s: s.active, self._series.values())
 allseries = sorted(allseries, key=lambda s: float(s.version), reverse=True)
-Logger.debug(
-"Found series: %s",
-", ".join(map(lambda s: "%s (%s)" % (s.name, s.version), allseries)),
-)
+Logger.debug("Found series: %s", ", ".join([f"{s.name} ({s.version})" for s in allseries]))
 return collections.OrderedDict((s.name, s) for s in allseries)
@@ -346,7 +341,7 @@ class DistroSeries(BaseWrapper):
 architecture = DistroArchSeries(self().getDistroArchSeries(archtag=archtag))
 self._architectures[architecture.architecture_tag] = architecture
 except HTTPError as error:
-message = "Architecture %s is unknown." % archtag
+message = f"Architecture {archtag} is unknown."
 raise ArchSeriesNotFoundException(message) from error
 return self._architectures[archtag]
@@ -584,7 +579,7 @@ class Archive(BaseWrapper):
 for pocket_ in pockets:
 if pocket_ not in POCKETS:
-raise PocketDoesNotExistError("Pocket '%s' does not exist." % pocket_)
+raise PocketDoesNotExistError(f"Pocket '{pocket_}' does not exist.")
 if not status:
 if version:
@@ -600,7 +595,7 @@ class Archive(BaseWrapper):
 for status_ in statuses:
 if status_ not in STATUSES:
-raise ValueError("Status '%s' is not valid." % status_)
+raise ValueError(f"Status '{status_}' is not valid.")
 dist = Distribution(self.distribution_link)
@@ -662,32 +657,30 @@ class Archive(BaseWrapper):
 params["version"] = version
 Logger.debug(
-"Calling %s(%s)",
-function,
-", ".join(["%s=%s" % (k, v) for (k, v) in params.items()]),
+"Calling %s(%s)", function, ", ".join([f"{k}={v}" for (k, v) in params.items()])
 )
 records = getattr(self, function)(**params)
-err_msg = "does not exist in the %s %s archive" % (dist.display_name, self.name)
+err_msg = f"does not exist in the {dist.display_name} {self.name} archive"
 for record in records:
 if binary:
 rversion = getattr(record, "binary_package_version", None)
 else:
 rversion = getattr(record, "source_package_version", None)
-skipmsg = "Skipping version %s: " % rversion
+skipmsg = f"Skipping version {rversion}: "
 if record.pocket not in pockets:
-err_msg = "pocket %s not in (%s)" % (record.pocket, ",".join(pockets))
+err_msg = f"pocket {record.pocket} not in ({','.join(pockets)})"
 Logger.debug(skipmsg + err_msg)
 continue
 if record.status not in statuses:
-err_msg = "status %s not in (%s)" % (record.status, ",".join(statuses))
+err_msg = f"status {record.status} not in ({','.join(statuses)})"
 Logger.debug(skipmsg + err_msg)
 continue
 release = wrapper(record)
 if binary and archtag and archtag != release.arch:
-err_msg = "arch %s does not match requested arch %s" % (release.arch, archtag)
+err_msg = f"arch {release.arch} does not match requested arch {archtag}"
 Logger.debug(skipmsg + err_msg)
 continue
 # results are ordered so first is latest
@@ -713,30 +706,30 @@ class Archive(BaseWrapper):
 package_type = "source package"
 else:
 package_type = "package"
-msg = "The %s '%s' " % (package_type, name)
+msg = f"The {package_type} '{name}' "
 if version:
-msg += "version %s " % version
+msg += f"version {version} "
 msg += err_msg
 if binary and archtag:
-msg += " for architecture %s" % archtag
+msg += f" for architecture {archtag}"
 if len(series_to_check) > 1:
 msg += " in any release"
 if len(pockets) == 1:
-msg += " for pocket %s" % pockets[0]
+msg += f" for pocket {pockets[0]}"
 elif len(pockets) != len(POCKETS):
-msg += " for pockets " + ", ".join(pockets)
+msg += f" for pockets {', '.join(pockets)}"
 elif series:
-msg += " in %s" % series.name
+msg += f" in {series.name}"
 if len(pockets) == 1:
-msg += "-%s" % pockets[0]
+msg += f"-{pockets[0]}"
 elif len(pockets) != len(POCKETS):
-msg += " for pockets " + ", ".join(pockets)
+msg += f" for pockets {', '.join(pockets)}"
 if len(statuses) == 1:
-msg += " with status %s" % statuses[0]
+msg += f" with status {statuses[0]}"
 elif len(statuses) != len(STATUSES):
-msg += " with status in " + ", ".join(statuses)
+msg += f" with status in {', '.join(statuses)}"
 if version_with_epoch:
-msg += " (did you forget the epoch? try %s)" % version_with_epoch
+msg += f" (did you forget the epoch? try {version_with_epoch})"
 raise PackageNotFoundException(msg)
 def copyPackage(
@@ -1113,8 +1106,9 @@ class SourcePackagePublishingHistory(BaseWrapper):
 for arch in archs:
 build = self._builds.get(arch)
 if build:
-res.append(" %s" % build)
+res.append(f" {build}")
-return "Build state(s) for '%s':\n%s" % (self.getPackageName(), "\n".join(res))
+msg = "\n".join(res)
+return f"Build state(s) for '{self.getPackageName()}':\n{msg}"
 def rescoreBuilds(self, archs, score):
 res = []
@@ -1126,14 +1120,11 @@ class SourcePackagePublishingHistory(BaseWrapper):
 build = self._builds.get(arch)
 if build:
 if build.rescore(score):
-res.append(" %s: done" % arch)
+res.append(f" {arch}: done")
 else:
-res.append(" %s: failed" % arch)
+res.append(f" {arch}: failed")
-return "Rescoring builds of '%s' to %i:\n%s" % (
-self.getPackageName(),
-score,
-"\n".join(res),
-)
+msg = "\n".join(res)
+return f"Rescoring builds of '{self.getPackageName()}' to {score}:\n{msg}"
 def retryBuilds(self, archs):
 res = []
@@ -1145,10 +1136,11 @@ class SourcePackagePublishingHistory(BaseWrapper):
 build = self._builds.get(arch)
 if build:
 if build.retry():
-res.append(" %s: done" % arch)
+res.append(f" {arch}: done")
 else:
-res.append(" %s: failed" % arch)
+res.append(f" {arch}: failed")
-return "Retrying builds of '%s':\n%s" % (self.getPackageName(), "\n".join(res))
+msg = "\n".join(res)
+return f"Retrying builds of '{self.getPackageName()}':\n{msg}"
 class BinaryPackagePublishingHistory(BaseWrapper):
@@ -1297,9 +1289,7 @@ class BinaryPackagePublishingHistory(BaseWrapper):
 """
 Returns the original build URL of the binary package.
 """
-return "{build}/+files/{filename}".format(
-build=self.getBuild().getUrl(), filename=self.getFileName()
-)
+return f"{self.getBuild().getUrl()}/+files/{self.getFileName()}"
 def getFileVersion(self):
 """
@@ -1351,11 +1341,9 @@ class BinaryPackagePublishingHistory(BaseWrapper):
 """
 Returns the filename for this binary package.
 """
-return "{name}_{version}_{arch}.{ext}".format(
-name=self.getPackageName(),
-version=self.getFileVersion(),
-arch=self.getFileArch(),
-ext=self.getFileExt(),
+return (
+f"{self.getPackageName()}_{self.getFileVersion()}"
+f"_{self.getFileArch()}.{self.getFileExt()}"
 )
@@ -1392,7 +1380,7 @@ class PersonTeam(BaseWrapper, metaclass=MetaPersonTeam):
 self._upload = {}
 def __str__(self):
-return "%s (%s)" % (self.display_name, self.name)
+return f"{self.display_name} ({self.name})"
 def cache(self):
 self._cache[self.name] = self
@@ -1403,7 +1391,7 @@ class PersonTeam(BaseWrapper, metaclass=MetaPersonTeam):
 Fetch the person or team object identified by 'url' from LP.
 """
 if not isinstance(person_or_team, str):
-raise TypeError("Don't know what do with '%r'" % person_or_team)
+raise TypeError(f"Don't know what do with '{person_or_team!r}'")
 cached = cls._cache.get(person_or_team)
 if not cached:
 cached = PersonTeam(Launchpad.people[person_or_team])
@@ -1426,9 +1414,9 @@ class PersonTeam(BaseWrapper, metaclass=MetaPersonTeam):
 'distroseries' has to be an DistroSeries object.
 """
 if not isinstance(archive, Archive):
-raise TypeError("'%r' is not an Archive object." % archive)
+raise TypeError(f"'{archive!r}' is not an Archive object.")
 if not isinstance(distroseries, DistroSeries):
-raise TypeError("'%r' is not a DistroSeries object." % distroseries)
+raise TypeError(f"'{distroseries!r}' is not a DistroSeries object.")
 if package is not None and not isinstance(package, str):
 raise TypeError("A source package name expected.")
 if component is not None and not isinstance(component, str):
@@ -1436,7 +1424,7 @@ class PersonTeam(BaseWrapper, metaclass=MetaPersonTeam):
 if package is None and component is None:
 raise ValueError("Either a source package name or a component has to be specified.")
 if pocket not in POCKETS:
-raise PocketDoesNotExistError("Pocket '%s' does not exist." % pocket)
+raise PocketDoesNotExistError(f"Pocket '{pocket}' does not exist.")
 can_upload = self._upload.get((archive, distroseries, pocket, package, component))
@@ -1504,7 +1492,7 @@ class Project(BaseWrapper):
 Fetch the project object identified by 'project' from LP.
 """
 if not isinstance(project, str):
-raise TypeError("Don't know what do with '%r'" % project)
+raise TypeError(f"Don't know what do with '{project!r}'")
 return Project(Launchpad.projects(project))
@@ -1524,7 +1512,7 @@ class Build(BaseWrapper):
 resource_type = "build"
 def __str__(self):
-return "%s: %s" % (self.arch_tag, self.buildstate)
+return f"{self.arch_tag}: {self.buildstate}"
 def getSourcePackagePublishingHistory(self):
 link = self._lpobject.current_source_publication_link

View File

@@ -173,7 +173,7 @@ def split_release_pocket(release, default="Release"):
 pocket = pocket.capitalize()
 if pocket not in POCKETS:
-raise PocketDoesNotExistError("Pocket '%s' does not exist." % pocket)
+raise PocketDoesNotExistError(f"Pocket '{pocket}' does not exist.")
 return (release, pocket)
@@ -406,7 +406,7 @@ def _download(fsrc, fdst, size, *, blocksize):
 parsed = urlparse(url)
 filename = Path(parsed.path).name
 hostname = parsed.hostname
-sizemb = " (%0.3f MiB)" % (size / 1024.0 / 1024) if size else ""
+sizemb = f" ({size / 1024.0 / 1024:0.3f} MiB)" if size else ""
 Logger.info("Downloading %s from %s%s", filename, hostname, sizemb)
 # Don't show progress if:

View File

@ -133,12 +133,12 @@ class PullPkg:
help_default_pull = "What to pull: " + ", ".join(VALID_PULLS) help_default_pull = "What to pull: " + ", ".join(VALID_PULLS)
if self._default_pull: if self._default_pull:
help_default_pull += " (default: %s)" % self._default_pull help_default_pull += f" (default: {self._default_pull})"
help_default_distro = "Pull from: " + ", ".join(VALID_DISTROS) help_default_distro = "Pull from: " + ", ".join(VALID_DISTROS)
if self._default_distro: if self._default_distro:
help_default_distro += " (default: %s)" % self._default_distro help_default_distro += f" (default: {self._default_distro})"
help_default_arch = "Get binary packages for arch" help_default_arch = "Get binary packages for arch"
help_default_arch += "(default: %s)" % self._default_arch help_default_arch += f"(default: {self._default_arch})"
# use add_help=False because we do parse_known_args() below, and if # use add_help=False because we do parse_known_args() below, and if
# that sees --help then it exits immediately # that sees --help then it exits immediately
@ -233,7 +233,7 @@ class PullPkg:
pull = PULL_DEBS pull = PULL_DEBS
# verify pull action is valid # verify pull action is valid
if pull not in VALID_PULLS: if pull not in VALID_PULLS:
raise InvalidPullValueError("Invalid pull action '%s'" % pull) raise InvalidPullValueError(f"Invalid pull action '{pull}'")
return pull return pull
@ -254,7 +254,7 @@ class PullPkg:
distro = DISTRO_UCA distro = DISTRO_UCA
# verify distro is valid # verify distro is valid
if distro not in VALID_DISTROS: if distro not in VALID_DISTROS:
raise InvalidDistroValueError("Invalid distro '%s'" % distro) raise InvalidDistroValueError(f"Invalid distro '{distro}'")
return distro return distro
@ -397,11 +397,10 @@ class PullPkg:
elif options["status"][0].capitalize() in UPLOAD_QUEUE_STATUSES: elif options["status"][0].capitalize() in UPLOAD_QUEUE_STATUSES:
params["status"] = options["status"][0].capitalize() params["status"] = options["status"][0].capitalize()
else: else:
msg = "Invalid upload queue status '%s': valid values are %s" % ( raise ValueError(
options["status"][0], f"Invalid upload queue status '{options['status'][0]}':"
", ".join(UPLOAD_QUEUE_STATUSES), f" valid values are {', '.join(UPLOAD_QUEUE_STATUSES)}"
) )
raise ValueError(msg)
return params return params
@ -492,7 +491,7 @@ class PullPkg:
Logger.error("No %s found for %s %s", pull, package, spph.getVersion()) Logger.error("No %s found for %s %s", pull, package, spph.getVersion())
else: else:
Logger.error("Internal error: invalid pull value after parse_pull()") Logger.error("Internal error: invalid pull value after parse_pull()")
raise InvalidPullValueError("Invalid pull value '%s'" % pull) raise InvalidPullValueError(f"Invalid pull value '{pull}'")
def pull_upload_queue( def pull_upload_queue(
self, self,
@ -543,22 +542,18 @@ class PullPkg:
] ]
if not packages: if not packages:
msg = "Package %s not found in %s upload queue for %s" % ( msg = f"Package {package} not found in {queuetype} upload queue for {series.name}"
package,
queuetype,
series.name,
)
if version: if version:
msg += " with version/id %s" % version msg += f" with version/id {version}"
if pull in VALID_BINARY_PULLS: if pull in VALID_BINARY_PULLS:
msg += " for arch %s" % arch msg += f" for arch {arch}"
raise PackageNotFoundException(msg) raise PackageNotFoundException(msg)
if pull == PULL_LIST: if pull == PULL_LIST:
for pkg in packages: for pkg in packages:
msg = "Found %s %s (ID %s)" % (pkg.package_name, pkg.package_version, pkg.id) msg = f"Found {pkg.package_name} {pkg.package_version} (ID {pkg.id})"
if pkg.display_arches: if pkg.display_arches:
msg += " arch %s" % pkg.display_arches msg += f" arch {pkg.display_arches}"
Logger.info(msg) Logger.info(msg)
url = pkg.changesFileUrl() url = pkg.changesFileUrl()
if url: if url:
@ -591,7 +586,7 @@ class PullPkg:
if len(packages) > 1: if len(packages) > 1:
msg = "Found multiple packages" msg = "Found multiple packages"
if version: if version:
msg += " with version %s, please specify the ID instead" % version msg += f" with version {version}, please specify the ID instead"
else: else:
msg += ", please specify the version" msg += ", please specify the version"
Logger.error("Available package versions/ids are:") Logger.error("Available package versions/ids are:")
@ -634,13 +629,13 @@ class PullPkg:
else: else:
name = ".*" name = ".*"
if pull == PULL_DEBS: if pull == PULL_DEBS:
name = r"{}(?<!-di)(?<!-dbgsym)$".format(name) name = rf"{name}(?<!-di)(?<!-dbgsym)$"
elif pull == PULL_DDEBS: elif pull == PULL_DDEBS:
name += "-dbgsym$" name += "-dbgsym$"
elif pull == PULL_UDEBS: elif pull == PULL_UDEBS:
name += "-di$" name += "-di$"
else: else:
raise InvalidPullValueError("Invalid pull value %s" % pull) raise InvalidPullValueError(f"Invalid pull value {pull}")
urls |= set(pkg.binaryFileUrls()) urls |= set(pkg.binaryFileUrls())
if not urls: if not urls:
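
The rf prefix above keeps the pattern a raw string while interpolating name, so the negative lookbehinds behave exactly as before. A quick sketch using the catch-all ".*" case and invented package names:

import re

name = ".*"  # the "match any binary" case from the code above
pattern = rf"{name}(?<!-di)(?<!-dbgsym)$"
assert re.match(pattern, "libfoo1")              # plain deb is accepted (invented name)
assert not re.match(pattern, "libfoo1-dbgsym")   # debug symbols are filtered out
assert not re.match(pattern, "libfoo1-di")       # installer udebs are filtered out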

@ -78,7 +78,7 @@ class YesNoQuestion(Question):
def input_number(question, min_number, max_number, default=None): def input_number(question, min_number, max_number, default=None):
if default: if default:
question += " [%i]? " % (default) question += f" [{default}]? "
else: else:
question += "? " question += "? "
selected = None selected = None
@ -94,7 +94,7 @@ def input_number(question, min_number, max_number, default=None):
try: try:
selected = int(selected) selected = int(selected)
if selected < min_number or selected > max_number: if selected < min_number or selected > max_number:
print("Please input a number between %i and %i." % (min_number, max_number)) print(f"Please input a number between {min_number} and {max_number}.")
except ValueError: except ValueError:
print("Please input a number.") print("Please input a number.")
assert isinstance(selected, int) assert isinstance(selected, int)
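
Worth noting for the prompt above: "%i" demanded a numeric default, while the f-string simply calls str() on whatever it receives, so the converted code is slightly more permissive. A small sketch:

default = 3
assert " [%i]? " % default == f" [{default}]? " == " [3]? "
try:
    " [%i]? " % "three"  # the old form rejected non-numeric defaults
except TypeError:
    pass
assert f" [{'three'}]? " == " [three]? "  # the new form renders them anyway
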
@ -108,7 +108,7 @@ def confirmation_prompt(message=None, action=None):
if message is None: if message is None:
if action is None: if action is None:
action = "continue" action = "continue"
message = "Press [Enter] to %s. Press [Ctrl-C] to abort now." % action message = f"Press [Enter] to {action}. Press [Ctrl-C] to abort now."
try: try:
input(message) input(message)
except (EOFError, KeyboardInterrupt): except (EOFError, KeyboardInterrupt):
@ -126,7 +126,7 @@ class EditFile:
def edit(self, optional=False): def edit(self, optional=False):
if optional: if optional:
print("\n\nCurrently the %s looks like:" % self.description) print(f"\n\nCurrently the {self.description} looks like:")
with open(self.filename, "r", encoding="utf-8") as f: with open(self.filename, "r", encoding="utf-8") as f:
print(f.read()) print(f.read())
if YesNoQuestion().ask("Edit", "no") == "no": if YesNoQuestion().ask("Edit", "no") == "no":
@ -147,12 +147,12 @@ class EditFile:
if placeholders_present: if placeholders_present:
print( print(
"Placeholders still present in the %s. " f"Placeholders still present in the {self.description}. "
"Please replace them with useful information." % self.description f"Please replace them with useful information."
) )
confirmation_prompt(action="edit again") confirmation_prompt(action="edit again")
elif not modified: elif not modified:
print("The %s was not modified" % self.description) print(f"The {self.description} was not modified")
if YesNoQuestion().ask("Edit again", "yes") == "no": if YesNoQuestion().ask("Edit again", "yes") == "no":
done = True done = True
elif self.check_edit(): elif self.check_edit():
@ -172,9 +172,7 @@ class EditBugReport(EditFile):
def __init__(self, subject, body, placeholders=None): def __init__(self, subject, body, placeholders=None):
prefix = os.path.basename(sys.argv[0]) + "_" prefix = os.path.basename(sys.argv[0]) + "_"
tmpfile = tempfile.NamedTemporaryFile(prefix=prefix, suffix=".txt", delete=False) tmpfile = tempfile.NamedTemporaryFile(prefix=prefix, suffix=".txt", delete=False)
tmpfile.write( tmpfile.write((f"Summary (one line):\n{subject}\n\nDescription:\n{body}").encode("utf-8"))
("Summary (one line):\n%s\n\nDescription:\n%s" % (subject, body)).encode("utf-8")
)
tmpfile.close() tmpfile.close()
super().__init__(tmpfile.name, "bug report", placeholders) super().__init__(tmpfile.name, "bug report", placeholders)
@ -184,8 +182,7 @@ class EditBugReport(EditFile):
if self.split_re.match(report) is None: if self.split_re.match(report) is None:
print( print(
"The %s doesn't start with 'Summary:' and 'Description:' " f"The {self.description} doesn't start with 'Summary:' and 'Description:' blocks"
"blocks" % self.description
) )
confirmation_prompt("edit again") confirmation_prompt("edit again")
return False return False

@ -105,13 +105,11 @@ def check_existing_reports(srcpkg):
# Search bug list for other sync requests. # Search bug list for other sync requests.
for bug in pkg_bug_list: for bug in pkg_bug_list:
# check for Sync or sync and the package name # check for Sync or sync and the package name
if not bug.is_complete and "ync %s" % srcpkg in bug.title: if not bug.is_complete and f"ync {srcpkg}" in bug.title:
print( print(
"The following bug could be a possible duplicate sync bug " f"The following bug could be a possible duplicate sync bug on Launchpad:\n"
"on Launchpad:\n" f" * {bug.title} ({bug.web_link})\n"
" * %s (%s)\n" f"Please check the above URL to verify this before continuing."
"Please check the above URL to verify this before "
"continuing." % (bug.title, bug.web_link)
) )
confirmation_prompt() confirmation_prompt()
@ -126,10 +124,9 @@ def get_ubuntu_delta_changelog(srcpkg):
source_name=srcpkg.getPackageName(), exact_match=True, pocket="Release" source_name=srcpkg.getPackageName(), exact_match=True, pocket="Release"
) )
debian_info = DebianDistroInfo() debian_info = DebianDistroInfo()
name_chars = "[-+0-9a-z.]"
topline = re.compile( topline = re.compile(
r"^(\w%(name_chars)s*) \(([^\(\) \t]+)\)" rf"^(\w%({name_chars})s*) \(([^\(\) \t]+)\)((\s+%({name_chars})s+)+)\;", re.IGNORECASE
r"((\s+%(name_chars)s+)+)\;" % {"name_chars": "[-+0-9a-z.]"},
re.IGNORECASE,
) )
delta = [] delta = []
for record in spph: for record in spph:
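
With name_chars pulled out into a variable, the rf"" literal substitutes it directly and keeps backslashes raw; the pattern contains no literal braces, so nothing needs doubling. A minimal sketch against an invented changelog first line:

import re

name_chars = "[-+0-9a-z.]"
topline = re.compile(
    rf"^(\w{name_chars}*) \(([^\(\) \t]+)\)((\s+{name_chars}+)+)\;", re.IGNORECASE
)
match = topline.match("base-files (12ubuntu4) jammy; urgency=medium")  # invented line
assert match and match.group(2) == "12ubuntu4"
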
@ -168,14 +165,12 @@ def post_bug(srcpkg, subscribe, status, bugtitle, bugtext):
Use the LP API to file the sync request. Use the LP API to file the sync request.
""" """
print("The final report is:\nSummary: %s\nDescription:\n%s\n" % (bugtitle, bugtext)) print(f"The final report is:\nSummary: {bugtitle}\nDescription:\n{bugtext}\n")
confirmation_prompt() confirmation_prompt()
if srcpkg: if srcpkg:
# pylint: disable=protected-access # pylint: disable=protected-access
bug_target = DistributionSourcePackage( bug_target = DistributionSourcePackage(f"{Launchpad._root_uri}ubuntu/+source/{srcpkg}")
"%subuntu/+source/%s" % (Launchpad._root_uri, srcpkg)
)
else: else:
# new source package # new source package
bug_target = Distribution("ubuntu") bug_target = Distribution("ubuntu")
@ -193,4 +188,4 @@ def post_bug(srcpkg, subscribe, status, bugtitle, bugtext):
bug.subscribe(person=PersonTeam(subscribe)()) bug.subscribe(person=PersonTeam(subscribe)())
print("Sync request filed as bug #%i: %s" % (bug.id, bug.web_link)) print(f"Sync request filed as bug #{bug.id}: {bug.web_link}")

@ -73,8 +73,8 @@ def need_sponsorship(name, component, release):
""" """
val = YesNoQuestion().ask( val = YesNoQuestion().ask(
"Do you have upload permissions for the '%s' component or " f"Do you have upload permissions for the '{component}' component or "
"the package '%s' in Ubuntu %s?\nIf in doubt answer 'n'." % (component, name, release), f"the package '{name}' in Ubuntu {release}?\nIf in doubt answer 'n'.",
"no", "no",
) )
return val == "no" return val == "no"
@ -85,9 +85,8 @@ def check_existing_reports(srcpkg):
Point the user to the URL to manually check for duplicate bug reports. Point the user to the URL to manually check for duplicate bug reports.
""" """
print( print(
"Please check on " f"Please check on https://bugs.launchpad.net/ubuntu/+source/{srcpkg}/+bugs\n"
"https://bugs.launchpad.net/ubuntu/+source/%s/+bugs\n" f"for duplicate sync requests before continuing."
"for duplicate sync requests before continuing." % srcpkg
) )
confirmation_prompt() confirmation_prompt()
@ -129,29 +128,25 @@ def mail_bug(
Submit the sync request per email. Submit the sync request per email.
""" """
to = "new@" + bug_mail_domain to = f"new@{bug_mail_domain}"
# generate mailbody # generate mailbody
if srcpkg: if srcpkg:
mailbody = " affects ubuntu/%s\n" % srcpkg mailbody = f" affects ubuntu/{srcpkg}\n"
else: else:
mailbody = " affects ubuntu\n" mailbody = " affects ubuntu\n"
mailbody += """\ mailbody += f"""\
status %s status {status}
importance wishlist importance wishlist
subscribe %s subscribe {subscribe}
done done
%s""" % ( {bugtext}"""
status,
subscribe,
bugtext,
)
# prepare sign command # prepare sign command
gpg_command = None gpg_command = None
for cmd in ("gnome-gpg", "gpg2", "gpg"): for cmd in ("gnome-gpg", "gpg2", "gpg"):
if os.access("/usr/bin/%s" % cmd, os.X_OK): if os.access(f"/usr/bin/{cmd}", os.X_OK):
gpg_command = [cmd] gpg_command = [cmd]
break break
@ -173,20 +168,15 @@ def mail_bug(
sys.exit(1) sys.exit(1)
# generate email # generate email
mail = """\ mail = f"""\
From: %s From: {myemailaddr}
To: %s To: {to}
Subject: %s Subject: {bugtitle}
Content-Type: text/plain; charset=UTF-8 Content-Type: text/plain; charset=UTF-8
%s""" % ( {signed_report}"""
myemailaddr,
to,
bugtitle,
signed_report,
)
print("The final report is:\n%s" % mail) print(f"The final report is:\n{mail}")
confirmation_prompt() confirmation_prompt()
# save mail in temporary file # save mail in temporary file
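
The triple-quoted templates convert the same way: the f prefix combines with the trailing backslash continuation, so the layout survives while the headers are interpolated in place. A minimal sketch with invented values:

myemailaddr = "Jane Dev <jane@example.com>"  # invented values, not from the patch
to = "new@bugs.launchpad.net"
bugtitle = "Sync foo 1.2-3 (universe) from Debian unstable (main)"
signed_report = "<signed report body>"

mail = f"""\
From: {myemailaddr}
To: {to}
Subject: {bugtitle}
Content-Type: text/plain; charset=UTF-8

{signed_report}"""
assert mail.startswith("From: Jane Dev")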

@ -80,7 +80,7 @@ class BugTask:
dsc_file = os.path.join(os.getcwd(), filename) dsc_file = os.path.join(os.getcwd(), filename)
else: else:
urlretrieve(url, filename) urlretrieve(url, filename)
assert os.path.isfile(dsc_file), "%s does not exist." % (dsc_file) assert os.path.isfile(dsc_file), f"{dsc_file} does not exist."
return dsc_file return dsc_file
def get_branch_link(self): def get_branch_link(self):

@ -91,10 +91,9 @@ class SourcePackage:
msg = "Sync request ACK'd." msg = "Sync request ACK'd."
if self._build_log: if self._build_log:
msg = ("%s %s builds on %s. " + msg) % ( msg = (
self._package, f"{self._package} {self._version} builds"
self._version, f" on {self._builder.get_architecture()}. {msg}"
self._builder.get_architecture(),
) )
bug.newMessage(content=msg, subject="sponsor-patch") bug.newMessage(content=msg, subject="sponsor-patch")
Logger.debug("Acknowledged sync request bug #%i.", bug.id) Logger.debug("Acknowledged sync request bug #%i.", bug.id)
@ -136,7 +135,7 @@ class SourcePackage:
else: else:
target = upload target = upload
question = Question(["yes", "edit", "no"]) question = Question(["yes", "edit", "no"])
answer = question.ask("Do you want to upload the package to %s" % target, "no") answer = question.ask(f"Do you want to upload the package to {target}", "no")
if answer == "edit": if answer == "edit":
return False return False
if answer == "no": if answer == "no":
@ -177,8 +176,9 @@ class SourcePackage:
if dist is None: if dist is None:
dist = re.sub("-.*$", "", self._changelog.distributions) dist = re.sub("-.*$", "", self._changelog.distributions)
build_name = "{}_{}_{}.build".format( build_name = (
self._package, strip_epoch(self._version), self._builder.get_architecture() f"{self._package}_{strip_epoch(self._version)}"
f"_{self._builder.get_architecture()}.build"
) )
self._build_log = os.path.join(self._buildresult, build_name) self._build_log = os.path.join(self._buildresult, build_name)
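
Adjacent f-string literals are still concatenated at compile time, so splitting the build name across two lines produces a single string. A quick sketch with invented values standing in for the instance attributes:

package, version, arch = "hello", "2.10-3ubuntu1", "amd64"  # invented stand-ins
build_name = (
    f"{package}_{version}"
    f"_{arch}.build"
)
assert build_name == "hello_2.10-3ubuntu1_amd64.build"
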
@ -255,7 +255,7 @@ class SourcePackage:
def _changes_file(self): def _changes_file(self):
"""Returns the file name of the .changes file.""" """Returns the file name of the .changes file."""
return os.path.join( return os.path.join(
self._workdir, "{}_{}_source.changes".format(self._package, strip_epoch(self._version)) self._workdir, f"{self._package}_{ strip_epoch(self._version)}_source.changes"
) )
def check_target(self, upload, launchpad): def check_target(self, upload, launchpad):
@ -310,8 +310,8 @@ class SourcePackage:
bug title.""" bug title."""
if not task.title_contains(self._version): if not task.title_contains(self._version):
print("Bug #%i title: %s" % (bug_number, task.get_bug_title())) print(f"Bug #{bug_number} title: {task.get_bug_title()}")
msg = "Is %s %s the version that should be synced" % (self._package, self._version) msg = f"Is {self._package} {self._version} the version that should be synced"
answer = YesNoQuestion().ask(msg, "no") answer = YesNoQuestion().ask(msg, "no")
if answer == "no": if answer == "no":
user_abort() user_abort()
@ -319,22 +319,20 @@ class SourcePackage:
@property @property
def _debdiff_filename(self): def _debdiff_filename(self):
"""Returns the file name of the .debdiff file.""" """Returns the file name of the .debdiff file."""
debdiff_name = "{}_{}.debdiff".format(self._package, strip_epoch(self._version)) debdiff_name = f"{self._package}_{strip_epoch(self._version)}.debdiff"
return os.path.join(self._workdir, debdiff_name) return os.path.join(self._workdir, debdiff_name)
@property @property
def _dsc_file(self): def _dsc_file(self):
"""Returns the file name of the .dsc file.""" """Returns the file name of the .dsc file."""
return os.path.join( return os.path.join(self._workdir, f"{self._package}_{strip_epoch(self._version)}.dsc")
self._workdir, "{}_{}.dsc".format(self._package, strip_epoch(self._version))
)
def generate_debdiff(self, dsc_file): def generate_debdiff(self, dsc_file):
"""Generates a debdiff between the given .dsc file and this source """Generates a debdiff between the given .dsc file and this source
package.""" package."""
assert os.path.isfile(dsc_file), "%s does not exist." % (dsc_file) assert os.path.isfile(dsc_file), f"{dsc_file} does not exist."
assert os.path.isfile(self._dsc_file), "%s does not exist." % (self._dsc_file) assert os.path.isfile(self._dsc_file), f"{self._dsc_file} does not exist."
cmd = ["debdiff", dsc_file, self._dsc_file] cmd = ["debdiff", dsc_file, self._dsc_file]
if not Logger.isEnabledFor(logging.DEBUG): if not Logger.isEnabledFor(logging.DEBUG):
cmd.insert(1, "-q") cmd.insert(1, "-q")
@ -350,7 +348,7 @@ class SourcePackage:
change something. change something.
""" """
assert os.path.isfile(self._changes_file), "%s does not exist." % (self._changes_file) assert os.path.isfile(self._changes_file), f"{self._changes_file} does not exist."
changes = debian.deb822.Changes(open(self._changes_file, encoding="utf-8")) changes = debian.deb822.Changes(open(self._changes_file, encoding="utf-8"))
fixed_bugs = [] fixed_bugs = []
if "Launchpad-Bugs-Fixed" in changes: if "Launchpad-Bugs-Fixed" in changes:
@ -371,7 +369,7 @@ class SourcePackage:
"""Print things that should be checked before uploading a package.""" """Print things that should be checked before uploading a package."""
lintian_filename = self._run_lintian() lintian_filename = self._run_lintian()
print("\nPlease check %s %s carefully:" % (self._package, self._version)) print(f"\nPlease check {self._package} {self._version} carefully:")
if os.path.isfile(self._debdiff_filename): if os.path.isfile(self._debdiff_filename):
print("file://" + self._debdiff_filename) print("file://" + self._debdiff_filename)
print("file://" + lintian_filename) print("file://" + lintian_filename)
@ -430,7 +428,7 @@ class SourcePackage:
changes_for_lintian = self._changes_file changes_for_lintian = self._changes_file
# Check lintian # Check lintian
assert os.path.isfile(changes_for_lintian), "%s does not exist." % (changes_for_lintian) assert os.path.isfile(changes_for_lintian), f"{changes_for_lintian} does not exist."
cmd = ["lintian", "-IE", "--pedantic", "-q", "--profile", "ubuntu", changes_for_lintian] cmd = ["lintian", "-IE", "--pedantic", "-q", "--profile", "ubuntu", changes_for_lintian]
lintian_filename = os.path.join( lintian_filename = os.path.join(
self._workdir, self._package + "_" + strip_epoch(self._version) + ".lintian" self._workdir, self._package + "_" + strip_epoch(self._version) + ".lintian"

@ -83,12 +83,11 @@ def edit_source():
cmd = [get_user_shell()] cmd = [get_user_shell()]
Logger.debug(" ".join(cmd)) Logger.debug(" ".join(cmd))
print( print(
"""An interactive shell was launched in f"""An interactive shell was launched in
file://%s file://{os.getcwd()}
Edit your files. When you are done, exit the shell. If you wish to abort the Edit your files. When you are done, exit the shell. If you wish to abort the
process, exit the shell such that it returns an exit code other than zero. process, exit the shell such that it returns an exit code other than zero.
""" """,
% (os.getcwd()),
end=" ", end=" ",
) )
returncode = subprocess.call(cmd) returncode = subprocess.call(cmd)
@ -101,35 +100,25 @@ def ask_for_patch_or_branch(bug, attached_patches, linked_branches):
patch = None patch = None
branch = None branch = None
if len(attached_patches) == 0: if len(attached_patches) == 0:
msg = "https://launchpad.net/bugs/%i has %i branches linked:" % ( msg = f"{len(linked_branches)} branches linked:"
bug.id,
len(linked_branches),
)
elif len(linked_branches) == 0: elif len(linked_branches) == 0:
msg = "https://launchpad.net/bugs/%i has %i patches attached:" % ( msg = f"{len(attached_patches)} patches attached:"
bug.id,
len(attached_patches),
)
else: else:
branches = "%i branch" % len(linked_branches) branches = f"{len(linked_branches)} branch"
if len(linked_branches) > 1: if len(linked_branches) > 1:
branches += "es" branches += "es"
patches = "%i patch" % len(attached_patches) patches = f"{len(attached_patches)} patch"
if len(attached_patches) > 1: if len(attached_patches) > 1:
patches += "es" patches += "es"
msg = "https://launchpad.net/bugs/%i has %s linked and %s attached:" % ( msg = f"{branches} linked and {patches} attached:"
bug.id, Logger.info("https://launchpad.net/bugs/%i has %s", bug.id, msg)
branches,
patches,
)
Logger.info(msg)
i = 0 i = 0
for linked_branch in linked_branches: for linked_branch in linked_branches:
i += 1 i += 1
print("%i) %s" % (i, linked_branch.display_name)) print(f"{i}) {linked_branch.display_name}")
for attached_patch in attached_patches: for attached_patch in attached_patches:
i += 1 i += 1
print("%i) %s" % (i, attached_patch.title)) print(f"{i}) {attached_patch.title}")
selected = input_number("Which branch or patch do you want to download", 1, i, i) selected = input_number("Which branch or patch do you want to download", 1, i, i)
if selected <= len(linked_branches): if selected <= len(linked_branches):
branch = linked_branches[selected - 1].bzr_identity branch = linked_branches[selected - 1].bzr_identity
@ -246,7 +235,7 @@ def get_open_ubuntu_bug_task(launchpad, bug, branch=None):
"https://launchpad.net/bugs/%i has %i Ubuntu tasks:", bug_id, len(ubuntu_tasks) "https://launchpad.net/bugs/%i has %i Ubuntu tasks:", bug_id, len(ubuntu_tasks)
) )
for i, ubuntu_task in enumerate(ubuntu_tasks): for i, ubuntu_task in enumerate(ubuntu_tasks):
print("%i) %s" % (i + 1, ubuntu_task.get_package_and_series())) print(f"{i + 1}) {ubuntu_task.get_package_and_series()}")
selected = input_number( selected = input_number(
"To which Ubuntu task does the patch belong", 1, len(ubuntu_tasks) "To which Ubuntu task does the patch belong", 1, len(ubuntu_tasks)
) )

@ -36,7 +36,7 @@ class ConfigTestCase(unittest.TestCase):
os.path.expanduser("~/.devscripts"): self._config_files["user"], os.path.expanduser("~/.devscripts"): self._config_files["user"],
} }
if filename not in files: if filename not in files:
raise IOError("No such file or directory: '%s'" % filename) raise IOError(f"No such file or directory: '{filename}'")
return StringIO(files[filename]) return StringIO(files[filename])
def setUp(self): def setUp(self):
@ -209,7 +209,7 @@ class UbuEmailTestCase(unittest.TestCase):
os.environ["DEBEMAIL"] = "joe@debian.org" os.environ["DEBEMAIL"] = "joe@debian.org"
name = "Joe Ubuntunista" name = "Joe Ubuntunista"
email = "joe@ubuntu.com" email = "joe@ubuntu.com"
os.environ["UBUMAIL"] = "%s <%s>" % (name, email) os.environ["UBUMAIL"] = f"{name} <{email}>"
self.assertEqual(ubu_email(), (name, email)) self.assertEqual(ubu_email(), (name, email))
self.assertEqual(os.environ["DEBFULLNAME"], name) self.assertEqual(os.environ["DEBFULLNAME"], name)
self.assertEqual(os.environ["DEBEMAIL"], email) self.assertEqual(os.environ["DEBEMAIL"], email)
@ -217,7 +217,7 @@ class UbuEmailTestCase(unittest.TestCase):
def test_debemail_with_name(self): def test_debemail_with_name(self):
name = "Joe Developer" name = "Joe Developer"
email = "joe@example.net" email = "joe@example.net"
os.environ["DEBEMAIL"] = orig = "%s <%s>" % (name, email) os.environ["DEBEMAIL"] = orig = f"{name} <{email}>"
self.assertEqual(ubu_email(), (name, email)) self.assertEqual(ubu_email(), (name, email))
self.assertEqual(os.environ["DEBEMAIL"], orig) self.assertEqual(os.environ["DEBEMAIL"], orig)

@ -216,7 +216,7 @@ class UpdateMaintainerTestCase(unittest.TestCase):
or base not in self._files or base not in self._files
or (mode == "r" and self._files[base] is None) or (mode == "r" and self._files[base] is None)
): ):
raise IOError("No such file or directory: '%s'" % filename) raise IOError(f"No such file or directory: '{filename}'")
if mode == "w": if mode == "w":
self._files[base] = StringIO() self._files[base] = StringIO()
self._files[base].close = lambda: None self._files[base].close = lambda: None

@ -41,7 +41,7 @@ class Control:
"""Represents a debian/control file""" """Represents a debian/control file"""
def __init__(self, filename): def __init__(self, filename):
assert os.path.isfile(filename), "%s does not exist." % (filename) assert os.path.isfile(filename), f"{filename} does not exist."
self._filename = filename self._filename = filename
self._content = open(filename, encoding="utf-8").read() self._content = open(filename, encoding="utf-8").read()
@ -114,9 +114,9 @@ def _find_files(debian_directory, verbose):
# Make sure that a changelog and control file is available # Make sure that a changelog and control file is available
if len(control_files) == 0: if len(control_files) == 0:
raise MaintainerUpdateException("No control file found in %s." % debian_directory) raise MaintainerUpdateException(f"No control file found in {debian_directory}.")
if not os.path.isfile(changelog_file): if not os.path.isfile(changelog_file):
raise MaintainerUpdateException("No changelog file found in %s." % debian_directory) raise MaintainerUpdateException(f"No changelog file found in {debian_directory}.")
# If the rules file accounts for XSBC-Original-Maintainer, we should not # If the rules file accounts for XSBC-Original-Maintainer, we should not
# touch it in this package (e.g. the python package). # touch it in this package (e.g. the python package).
@ -160,8 +160,8 @@ def update_maintainer(debian_directory, verbose=False):
if original_maintainer.strip().lower() in _PREVIOUS_UBUNTU_MAINTAINER: if original_maintainer.strip().lower() in _PREVIOUS_UBUNTU_MAINTAINER:
if verbose: if verbose:
print("The old maintainer was: %s" % original_maintainer) print(f"The old maintainer was: {original_maintainer}")
print("Resetting as: %s" % _UBUNTU_MAINTAINER) print(f"Resetting as: {_UBUNTU_MAINTAINER}")
control.set_maintainer(_UBUNTU_MAINTAINER) control.set_maintainer(_UBUNTU_MAINTAINER)
control.save() control.save()
continue continue
@ -182,8 +182,8 @@ def update_maintainer(debian_directory, verbose=False):
) )
if verbose: if verbose:
print("The original maintainer is: %s" % original_maintainer) print(f"The original maintainer is: {original_maintainer}")
print("Resetting as: %s" % _UBUNTU_MAINTAINER) print(f"Resetting as: {_UBUNTU_MAINTAINER}")
control.set_original_maintainer(original_maintainer) control.set_original_maintainer(original_maintainer)
control.set_maintainer(_UBUNTU_MAINTAINER) control.set_maintainer(_UBUNTU_MAINTAINER)
control.save() control.save()
@ -205,7 +205,7 @@ def restore_maintainer(debian_directory, verbose=False):
if not orig_maintainer: if not orig_maintainer:
continue continue
if verbose: if verbose:
print("Restoring original maintainer: %s" % orig_maintainer) print(f"Restoring original maintainer: {orig_maintainer}")
control.set_maintainer(orig_maintainer) control.set_maintainer(orig_maintainer)
control.remove_original_maintainer() control.remove_original_maintainer()
control.save() control.save()

@ -36,7 +36,7 @@ def find_debian_dir(depth=6):
:returns: a path to an existing debian/ directory, or None :returns: a path to an existing debian/ directory, or None
""" """
for path in ["../" * n or "./" for n in list(range(0, depth + 1))]: for path in ["../" * n or "./" for n in list(range(0, depth + 1))]:
debian_path = "{}debian".format(path) debian_path = f"{path}debian"
if os.path.exists(os.path.join(debian_path, "control")) and os.path.exists( if os.path.exists(os.path.join(debian_path, "control")) and os.path.exists(
os.path.join(debian_path, "changelog") os.path.join(debian_path, "changelog")
): ):
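
For context on the expression feeding the new f-string: "../" * n is empty (and therefore falsy) for n == 0, so the or falls back to "./", and each further step climbs one directory. A quick illustration:

depth = 6
paths = ["../" * n or "./" for n in range(depth + 1)]
assert [f"{path}debian" for path in paths][:3] == ["./debian", "../debian", "../../debian"]
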
@ -46,7 +46,7 @@ def find_debian_dir(depth=6):
def main(): def main():
script_name = os.path.basename(sys.argv[0]) script_name = os.path.basename(sys.argv[0])
epilog = "See %s(1) for more info." % (script_name) epilog = f"See {script_name}(1) for more info."
parser = argparse.ArgumentParser(epilog=epilog) parser = argparse.ArgumentParser(epilog=epilog)
parser.add_argument( parser.add_argument(
"-d", "-d",