Format code with black and isort

```
isort pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
black -C pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
```
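Here `-C` is black's short form of `--skip-magic-trailing-comma`, which lets black collapse calls that were only kept multi-line by a trailing comma; isort is usually pointed at `profile = "black"` in the project configuration so the two tools agree on import wrapping. A minimal check-only variant of the same commands (e.g. for CI or a pre-commit hook; an assumption, not part of this commit) would be:

```
# Verify formatting without rewriting any files.
isort --check-only --diff pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
black --check --diff -C pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
```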
Author: Benjamin Drung
Date:   2024-11-02 17:21:30 +01:00
Parent: 017941ad70
Commit: c7a855ff20
9 changed files with 158 additions and 143 deletions


@@ -294,7 +294,9 @@ class PbuilderDist:
         if self.target_distro in self._debian_distros:
             try:
-                codename = self.debian_distro_info.codename(self.target_distro, default=self.target_distro)
+                codename = self.debian_distro_info.codename(
+                    self.target_distro, default=self.target_distro
+                )
             except DistroDataOutdated as error:
                 Logger.warning(error)
             if codename in (self.debian_distro_info.devel(), "experimental"):


@@ -15,32 +15,30 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.

 import lzma
-from argparse import ArgumentParser
 import sys
 import webbrowser
-import yaml
+from argparse import ArgumentParser

+import yaml
 from launchpadlib.launchpad import Launchpad

 from ubuntutools.utils import get_url

 # proposed-migration is only concerned with the devel series; unlike other
 # tools, don't make this configurable
-excuses_url = 'https://ubuntu-archive-team.ubuntu.com/proposed-migration/' \
-    + 'update_excuses.yaml.xz'
+excuses_url = "https://ubuntu-archive-team.ubuntu.com/proposed-migration/update_excuses.yaml.xz"


 def get_proposed_version(excuses, package):
-    for k in excuses['sources']:
-        if k['source'] == package:
-            return k.get('new-version')
+    for k in excuses["sources"]:
+        if k["source"] == package:
+            return k.get("new-version")
     return None


 def claim_excuses_bug(launchpad, bug, package):
     print("LP: #%d: %s" % (bug.id, bug.title))
-    ubuntu = launchpad.distributions['ubuntu']
+    ubuntu = launchpad.distributions["ubuntu"]
     series = ubuntu.current_series.fullseriesname

     for task in bug.bug_tasks:
@@ -58,10 +56,10 @@ def claim_excuses_bug(launchpad, bug, package):
     elif our_task.assignee:
         print("Currently assigned to %s" % our_task.assignee.name)

-    print('''Do you want to claim this bug? [yN] ''', end="")
+    print("""Do you want to claim this bug? [yN] """, end="")
     sys.stdout.flush()
     response = sys.stdin.readline()
-    if response.strip().lower().startswith('y'):
+    if response.strip().lower().startswith("y"):
         our_task.assignee = launchpad.me
         our_task.lp_save()
         return True
@@ -72,10 +70,10 @@ def claim_excuses_bug(launchpad, bug, package):
 def create_excuses_bug(launchpad, package, version):
     print("Will open a new bug")
     bug = launchpad.bugs.createBug(
-        title = 'proposed-migration for %s %s' % (package, version),
-        tags = ('update-excuse'),
-        target = 'https://api.launchpad.net/devel/ubuntu/+source/%s' % package,
-        description = '%s %s is stuck in -proposed.' % (package, version)
+        title="proposed-migration for %s %s" % (package, version),
+        tags=("update-excuse"),
+        target="https://api.launchpad.net/devel/ubuntu/+source/%s" % package,
+        description="%s %s is stuck in -proposed." % (package, version),
     )

     task = bug.bug_tasks[0]
@@ -88,12 +86,12 @@ def create_excuses_bug(launchpad, package, version):
 def has_excuses_bugs(launchpad, package):
-    ubuntu = launchpad.distributions['ubuntu']
+    ubuntu = launchpad.distributions["ubuntu"]
     pkg = ubuntu.getSourcePackage(name=package)
     if not pkg:
         raise ValueError(f"No such source package: {package}")

-    tasks = pkg.searchTasks(tags=['update-excuse'], order_by=['id'])
+    tasks = pkg.searchTasks(tags=["update-excuse"], order_by=["id"])
     bugs = [task.bug for task in tasks]

     if not bugs:
@@ -102,8 +100,7 @@ def has_excuses_bugs(launchpad, package):
     if len(bugs) == 1:
         print("There is 1 open update-excuse bug against %s" % package)
     else:
-        print("There are %d open update-excuse bugs against %s" \
-              % (len(bugs), package))
+        print("There are %d open update-excuse bugs against %s" % (len(bugs), package))

     for bug in bugs:
         if claim_excuses_bug(launchpad, bug, package):
@@ -114,17 +111,14 @@ def has_excuses_bugs(launchpad, package):
 def main():
     parser = ArgumentParser()
-    parser.add_argument(
-        "-l", "--launchpad", dest="launchpad_instance", default="production")
-    parser.add_argument(
-        "-v", "--verbose", default=False, action="store_true",
-        help="be more verbose")
-    parser.add_argument(
-        'package', nargs='?', help="act on this package only")
+    parser.add_argument("-l", "--launchpad", dest="launchpad_instance", default="production")
+    parser.add_argument(
+        "-v", "--verbose", default=False, action="store_true", help="be more verbose"
+    )
+    parser.add_argument("package", nargs="?", help="act on this package only")
     args = parser.parse_args()
-    args.launchpad = Launchpad.login_with(
-        "pm-helper", args.launchpad_instance, version="devel")
+    args.launchpad = Launchpad.login_with("pm-helper", args.launchpad_instance, version="devel")

     f = get_url(excuses_url, False)
     with lzma.open(f) as lzma_f:
@@ -137,13 +131,12 @@ def main():
                 if not proposed_version:
                     print("Package %s not found in -proposed." % args.package)
                     sys.exit(1)
-                create_excuses_bug(args.launchpad, args.package,
-                                   proposed_version)
+                create_excuses_bug(args.launchpad, args.package, proposed_version)
         except ValueError as e:
             sys.stderr.write(f"{e}\n")
     else:
         pass  # for now


-if __name__ == '__main__':
+if __name__ == "__main__":
     sys.exit(main())


@@ -23,7 +23,12 @@
 __example__ = """
 Display first listed test running on amd64 hardware:
 $ running-autopkgtests | grep amd64 | head -n1
-R 0:01:40 systemd-upstream - focal amd64 upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb', 'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true', 'UPSTREAM_PULL_REQUEST=23153', 'GITHUB_STATUSES_URL=https://api.github.com/repos/systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
+R 0:01:40 systemd-upstream - focal amd64\
+    upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb',\
+    'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true',\
+    'UPSTREAM_PULL_REQUEST=23153',\
+    'GITHUB_STATUSES_URL=https://api.github.com/repos/\
+systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
 """

 import sys
@@ -46,16 +51,10 @@ def parse_args():
         formatter_class=RawDescriptionHelpFormatter,
     )
     parser.add_argument(
-        "-r",
-        "--running",
-        action="store_true",
-        help="Print runnning autopkgtests (default: true)",
+        "-r", "--running", action="store_true", help="Print runnning autopkgtests (default: true)"
     )
     parser.add_argument(
-        "-q",
-        "--queued",
-        action="store_true",
-        help="Print queued autopkgtests (default: true)",
+        "-q", "--queued", action="store_true", help="Print queued autopkgtests (default: true)"
     )

     options = parser.parse_args()


@@ -28,9 +28,9 @@
 import argparse
 import sys

+import lazr.restfulclient.errors
 from launchpadlib.credentials import TokenAuthorizationException
 from launchpadlib.launchpad import Launchpad
-import lazr.restfulclient.errors

 from ubuntutools import getLogger
 from ubuntutools.lp.udtexceptions import PocketDoesNotExistError
@@ -48,6 +48,7 @@ def getBuildStates(pkg, archs):
     msg = "\n".join(res)
     return f"Build state(s) for '{pkg.source_package_name}':\n{msg}"

+
 def rescoreBuilds(pkg, archs, score):
     res = []
@@ -61,17 +62,18 @@ def rescoreBuilds(pkg, archs, score):
             res.append(f" {arch}: done")
         except lazr.restfulclient.errors.Unauthorized:
             Logger.error(
-                "You don't have the permissions to rescore builds. Ignoring your rescore request."
+                "You don't have the permissions to rescore builds."
+                " Ignoring your rescore request."
             )
             return None
         except lazr.restfulclient.errors.BadRequest:
-            Logger.info("Cannot rescore build of %s on %s.",
-                        build.source_package_name, arch)
+            Logger.info("Cannot rescore build of %s on %s.", build.source_package_name, arch)
             res.append(f" {arch}: failed")

     msg = "\n".join(res)
     return f"Rescoring builds of '{pkg.source_package_name}' to {score}:\n{msg}"

+
 def retryBuilds(pkg, archs):
     res = []
     for build in pkg.getBuilds():
@@ -94,16 +96,7 @@ def main():
     # Valid architectures.
     valid_archs = set(
-        [
-            "armhf",
-            "arm64",
-            "amd64",
-            "i386",
-            "powerpc",
-            "ppc64el",
-            "riscv64",
-            "s390x",
-        ]
+        ["armhf", "arm64", "amd64", "i386", "powerpc", "ppc64el", "riscv64", "s390x"]
     )

     # Prepare our option parser.
@@ -118,8 +111,7 @@ def main():
         f"include: {', '.join(valid_archs)}.",
     )

-    parser.add_argument("-A", "--archive", help="operate on ARCHIVE",
-                        default="ubuntu")
+    parser.add_argument("-A", "--archive", help="operate on ARCHIVE", default="ubuntu")

     # Batch processing options
     batch_options = parser.add_argument_group(
@@ -148,7 +140,9 @@ def main():
         help="Rescore builds to <priority>.",
     )
     batch_options.add_argument(
-        "--state", action="store", dest="state",
+        "--state",
+        action="store",
+        dest="state",
         help="Act on builds that are in the specified state",
     )
@@ -157,11 +151,10 @@ def main():
     # Parse our options.
     args = parser.parse_args()

-    launchpad = Launchpad.login_with("ubuntu-dev-tools", "production",
-                                     version="devel")
+    launchpad = Launchpad.login_with("ubuntu-dev-tools", "production", version="devel")
     me = launchpad.me

-    ubuntu = launchpad.distributions['ubuntu']
+    ubuntu = launchpad.distributions["ubuntu"]

     if args.batch:
         release = args.series
@@ -169,7 +162,7 @@ def main():
             # ppas don't have a proposed pocket so just use the release pocket;
             # but for the main archive we default to -proposed
             release = ubuntu.getDevelopmentSeries()[0].name
-            if args.archive == 'ubuntu':
+            if args.archive == "ubuntu":
                 release = release + "-proposed"
         try:
             (release, pocket) = split_release_pocket(release)
@@ -227,7 +220,8 @@ def main():
                 exact_match=True,
                 pocket=pocket,
                 source_name=package,
-                status='Published')[0]
+                status="Published",
+            )[0]
         except IndexError as error:
             Logger.error("No publication found for package %s", package)
             sys.exit(1)
@@ -288,7 +282,8 @@ def main():
                     build.rescore(score=priority)
                 except lazr.restfulclient.errors.Unauthorized:
                     Logger.error(
-                        "You don't have the permissions to rescore builds. Ignoring your rescore request."
+                        "You don't have the permissions to rescore builds."
+                        " Ignoring your rescore request."
                     )
                     break
             else:
@@ -325,21 +320,19 @@ def main():
         if not args.state:
             if args.retry:
-                args.state='Failed to build'
+                args.state = "Failed to build"
             elif args.priority:
-                args.state='Needs building'
+                args.state = "Needs building"

         # there is no equivalent to series.getBuildRecords() for a ppa.
         # however, we don't want to have to traverse all build records for
         # all series when working on the main archive, so we use
         # series.getBuildRecords() for ubuntu and handle ppas separately
         series = ubuntu.getSeries(name_or_version=release)
-        if args.archive == 'ubuntu':
-            builds = series.getBuildRecords(build_state=args.state,
-                                            pocket=pocket)
+        if args.archive == "ubuntu":
+            builds = series.getBuildRecords(build_state=args.state, pocket=pocket)
         else:
             builds = []
-            for build in archive.getBuildRecords(build_state=args.state,
-                                                 pocket=pocket):
+            for build in archive.getBuildRecords(build_state=args.state, pocket=pocket):
                 if not build.current_source_publication:
                     continue
                 if build.current_source_publication.distro_series == series:
@@ -361,9 +354,8 @@ def main():
             )
             if args.retry and not can_retry:
                 Logger.error(
-                    "You don't have the permissions to retry the "
-                    "build of '%s', skipping.",
-                    build.source_package_name
+                    "You don't have the permissions to retry the build of '%s', skipping.",
+                    build.source_package_name,
                 )
                 continue
             Logger.info(
@@ -371,18 +363,22 @@ def main():
                 build.source_package_name,
                 release,
                 pocket,
-                build.source_package_version
+                build.source_package_version,
             )
             if args.retry and build.can_be_retried:
-                Logger.info("Retrying build of %s on %s...",
-                            build.source_package_name, build.arch_tag)
+                Logger.info(
+                    "Retrying build of %s on %s...", build.source_package_name, build.arch_tag
+                )
                 try:
                     build.retry()
                     retry_count += 1
                 except lazr.restfulclient.errors.BadRequest:
-                    Logger.info("Failed to retry build of %s on %s",
-                                build.source_package_name, build.arch_tag)
+                    Logger.info(
+                        "Failed to retry build of %s on %s",
+                        build.source_package_name,
+                        build.arch_tag,
+                    )

             if args.priority and can_rescore:
                 if build.can_be_rescored:
@@ -390,19 +386,22 @@ def main():
                         build.rescore(score=args.priority)
                     except lazr.restfulclient.errors.Unauthorized:
                         Logger.error(
-                            "You don't have the permissions to rescore builds. Ignoring your rescore request."
+                            "You don't have the permissions to rescore builds."
+                            " Ignoring your rescore request."
                         )
                         can_rescore = False
                     except lazr.restfulclient.errors.BadRequest:
-                        Logger.info("Cannot rescore build of %s on %s.",
-                                    build.source_package_name, build.arch_tag)
+                        Logger.info(
+                            "Cannot rescore build of %s on %s.",
+                            build.source_package_name,
+                            build.arch_tag,
+                        )

             Logger.info("")
         if args.retry:
             Logger.info("%d package builds retried", retry_count)
         sys.exit(0)

     for pkg in args.packages:
         try:
             pkg = archive.getPublishedSources(
@@ -410,7 +409,8 @@ def main():
                 exact_match=True,
                 pocket=pocket,
                 source_name=pkg,
-                status='Published')[0]
+                status="Published",
+            )[0]
         except IndexError as error:
             Logger.error("No publication found for package %s", pkg)
             continue
@@ -450,4 +450,3 @@ def main():
-

 if __name__ == "__main__":
     main()


@@ -462,9 +462,11 @@ class PullPkg:
                 uri = srcpkg.dsc[original_key]
-                Logger.warning("\nNOTICE: '%s' packaging is maintained in "
-                               "the '%s' version control system at:\n"
-                               " %s\n" % (package, vcs, uri))
+                Logger.warning(
+                    "\nNOTICE: '%s' packaging is maintained in "
+                    "the '%s' version control system at:\n"
+                    " %s\n" % (package, vcs, uri)
+                )

                 if vcs == "Bazaar":
                     vcscmd = " $ bzr branch " + uri
@@ -472,9 +474,11 @@ class PullPkg:
                     vcscmd = " $ git clone " + uri

                 if vcscmd:
-                    Logger.info(f"Please use:\n{vcscmd}\n"
-                                "to retrieve the latest (possibly unreleased) "
-                                "updates to the package.\n")
+                    Logger.info(
+                        f"Please use:\n{vcscmd}\n"
+                        "to retrieve the latest (possibly unreleased) "
+                        "updates to the package.\n"
+                    )

             if pull == PULL_LIST:
                 Logger.info("Source files:")


@@ -26,10 +26,7 @@ URL_QUEUED = "http://autopkgtest.ubuntu.com/queues.json"

 def _get_jobs(url: str) -> dict:
-    request = urllib.request.Request(
-        url,
-        headers={"Cache-Control": "max-age-0"},
-    )
+    request = urllib.request.Request(url, headers={"Cache-Control": "max-age-0"})
     with urllib.request.urlopen(request) as response:
         data = response.read()
         jobs = json.loads(data.decode("utf-8"))
@@ -52,7 +49,10 @@ def get_running():
                 env = jobinfo[0].get("env", "-")
                 time = str(datetime.timedelta(seconds=jobinfo[1]))
                 try:
-                    line = f"R {time:6} {pkg:30} {'-':10} {series:8} {arch:8} {ppas:31} {triggers} {env}\n"
+                    line = (
+                        f"R {time:6} {pkg:30} {'-':10} {series:8} {arch:8}"
+                        f" {ppas:31} {triggers} {env}\n"
+                    )
                     running.append((jobinfo[1], line))
                 except BrokenPipeError:
                     sys.exit(1)
@@ -86,7 +86,10 @@ def get_queued():
                     n = n + 1
                     try:
-                        output += f"Q{n:04d} {'-:--':>6} {pkg:30} {origin:10} {series:8} {arch:8} {ppas:31} {triggers}\n"
+                        output += (
+                            f"Q{n:04d} {'-:--':>6} {pkg:30} {origin:10} {series:8} {arch:8}"
+                            f" {ppas:31} {triggers}\n"
+                        )
                     except BrokenPipeError:
                         sys.exit(1)
     return output


@@ -30,6 +30,4 @@ class BinaryTests(unittest.TestCase):
         try:
             import keyring  # noqa: F401
         except ModuleNotFoundError:
-            raise ModuleNotFoundError(
-                "package python3-keyring is not installed"
-            )
+            raise ModuleNotFoundError("package python3-keyring is not installed")


@@ -32,8 +32,17 @@ from ubuntutools.running_autopkgtests import (
 )

 # Cached binary response data from autopkgtest server
-RUN_DATA = b'{"pyatem": { "submit-time_2024-01-19 19:37:36;triggers_[\'python3-defaults/3.12.1-0ubuntu1\'];": {"noble": {"arm64": [{"triggers": ["python3-defaults/3.12.1-0ubuntu1"], "submit-time": "2024-01-19 19:37:36"}, 380, "<omitted log>"]}}}}'
-QUEUED_DATA = b'{"ubuntu": {"noble": {"arm64": ["libobject-accessor-perl {\\"requester\\": \\"someone\\", \\"submit-time\\": \\"2024-01-18 01:08:55\\", \\"triggers\\": [\\"perl/5.38.2-3\\", \\"liblocale-gettext-perl/1.07-6build1\\"]}"]}}}'
+RUN_DATA = (
+    b'{"pyatem": {'
+    b" \"submit-time_2024-01-19 19:37:36;triggers_['python3-defaults/3.12.1-0ubuntu1'];\":"
+    b' {"noble": {"arm64": [{"triggers": ["python3-defaults/3.12.1-0ubuntu1"],'
+    b' "submit-time": "2024-01-19 19:37:36"}, 380, "<omitted log>"]}}}}'
+)
+QUEUED_DATA = (
+    b'{"ubuntu": {"noble": {"arm64": ["libobject-accessor-perl {\\"requester\\": \\"someone\\",'
+    b' \\"submit-time\\": \\"2024-01-18 01:08:55\\",'
+    b' \\"triggers\\": [\\"perl/5.38.2-3\\", \\"liblocale-gettext-perl/1.07-6build1\\"]}"]}}}'
+)

 # Expected result(s) of parsing the above JSON data
 RUNNING_JOB = {
@@ -57,7 +66,9 @@ QUEUED_JOB = {
     "ubuntu": {
         "noble": {
             "arm64": [
-                'libobject-accessor-perl {"requester": "someone", "submit-time": "2024-01-18 01:08:55", "triggers": ["perl/5.38.2-3", "liblocale-gettext-perl/1.07-6build1"]}',
+                'libobject-accessor-perl {"requester": "someone",'
+                ' "submit-time": "2024-01-18 01:08:55",'
+                ' "triggers": ["perl/5.38.2-3", "liblocale-gettext-perl/1.07-6build1"]}'
             ]
         }
     }
@@ -68,9 +79,18 @@ PRIVATE_JOB = {"ppa": {"noble": {"arm64": ["private job"]}}}

 # Expected textual output of the program based on the above data
-RUNNING_OUTPUT = "R 0:06:20 pyatem - noble arm64 - python3-defaults/3.12.1-0ubuntu1 -\n"
-QUEUED_OUTPUT = "Q0001 -:-- libobject-accessor-perl ubuntu noble arm64 - perl/5.38.2-3,liblocale-gettext-perl/1.07-6build1\n"
-PRIVATE_OUTPUT = "Q0001 -:-- private job ppa noble arm64 private job private job\n"
+RUNNING_OUTPUT = (
+    "R 0:06:20 pyatem - noble arm64"
+    " - python3-defaults/3.12.1-0ubuntu1 -\n"
+)
+QUEUED_OUTPUT = (
+    "Q0001 -:-- libobject-accessor-perl ubuntu noble arm64"
+    " - perl/5.38.2-3,liblocale-gettext-perl/1.07-6build1\n"
+)
+PRIVATE_OUTPUT = (
+    "Q0001 -:-- private job ppa noble arm64"
+    " private job private job\n"
+)


 class RunningAutopkgtestTestCase(unittest.TestCase):


@@ -15,47 +15,44 @@
 """Portions of archive related code that is re-used by various tools."""

-from datetime import datetime
 import os
 import re
 import urllib.request
+from datetime import datetime

 import dateutil.parser
 from dateutil.tz import tzutc


 def get_cache_dir():
-    cache_dir = os.environ.get('XDG_CACHE_HOME',
-                               os.path.expanduser(os.path.join('~', '.cache')))
-    uat_cache = os.path.join(cache_dir, 'ubuntu-archive-tools')
+    cache_dir = os.environ.get("XDG_CACHE_HOME", os.path.expanduser(os.path.join("~", ".cache")))
+    uat_cache = os.path.join(cache_dir, "ubuntu-archive-tools")
     os.makedirs(uat_cache, exist_ok=True)
     return uat_cache


 def get_url(url, force_cached):
-    ''' Return file to the URL, possibly caching it
-    '''
+    """Return file to the URL, possibly caching it"""
     cache_file = None

     # ignore bileto urls wrt caching, they're usually too small to matter
     # and we don't do proper cache expiry
-    m = re.search('ubuntu-archive-team.ubuntu.com/proposed-migration/'
-                  '([^/]*)/([^/]*)',
-                  url)
+    m = re.search("ubuntu-archive-team.ubuntu.com/proposed-migration/([^/]*)/([^/]*)", url)
     if m:
         cache_dir = get_cache_dir()
-        cache_file = os.path.join(cache_dir, '%s_%s' % (m.group(1), m.group(2)))
+        cache_file = os.path.join(cache_dir, "%s_%s" % (m.group(1), m.group(2)))
     else:
         # test logs can be cached, too
         m = re.search(
-            'https://autopkgtest.ubuntu.com/results/autopkgtest-[^/]*/([^/]*)/([^/]*)'
-            '/[a-z0-9]*/([^/]*)/([_a-f0-9]*)@/log.gz',
-            url)
+            "https://autopkgtest.ubuntu.com/results/autopkgtest-[^/]*/([^/]*)/([^/]*)"
+            "/[a-z0-9]*/([^/]*)/([_a-f0-9]*)@/log.gz",
+            url,
+        )
         if m:
             cache_dir = get_cache_dir()
             cache_file = os.path.join(
-                cache_dir, '%s_%s_%s_%s.gz' % (
-                    m.group(1), m.group(2), m.group(3), m.group(4)))
+                cache_dir, "%s_%s_%s_%s.gz" % (m.group(1), m.group(2), m.group(3), m.group(4))
+            )

     if cache_file:
         try:
@@ -65,18 +62,18 @@ def get_url(url, force_cached):
         prev_timestamp = datetime.fromtimestamp(prev_mtime, tz=tzutc())
         new_timestamp = datetime.now(tz=tzutc()).timestamp()
         if force_cached:
-            return open(cache_file, 'rb')
+            return open(cache_file, "rb")

     f = urllib.request.urlopen(url)
     if cache_file:
-        remote_ts = dateutil.parser.parse(f.headers['last-modified'])
+        remote_ts = dateutil.parser.parse(f.headers["last-modified"])
         if remote_ts > prev_timestamp:
-            with open('%s.new' % cache_file, 'wb') as new_cache:
+            with open("%s.new" % cache_file, "wb") as new_cache:
                 for line in f:
                     new_cache.write(line)
-            os.rename('%s.new' % cache_file, cache_file)
+            os.rename("%s.new" % cache_file, cache_file)
             os.utime(cache_file, times=(new_timestamp, new_timestamp))
         f.close()
-        f = open(cache_file, 'rb')
+        f = open(cache_file, "rb")
     return f