running-autopkgtests: Changelog entry, ArgumentParser, refactor, tests

Created a new changelog entry covering the addition of the running-autopkgtests
script. This includes a refactor of the original script into a new module in
ubuntutools, test cases, and the addition of an argument parser that allows
printing just the queued tests, just the running tests, or both (the default).
Chris Peterson 2024-01-18 22:39:05 -08:00
parent cb7464cf61
commit a9eb902b83
4 changed files with 258 additions and 60 deletions
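For orientation, a minimal usage sketch of how the pieces described in the commit message fit together. It is not part of the commit: get_running() and get_queued() come from the new ubuntutools.running_autopkgtests module shown below, while the report() helper is hypothetical and simply mirrors the "default to both" flag handling in the rewritten script.

    # Sketch only: get_running()/get_queued() return preformatted report strings.
    from ubuntutools.running_autopkgtests import get_queued, get_running


    def report(running=True, queued=True):
        # Hypothetical helper; prints both reports by default, like the script
        # does when neither --running nor --queued is passed.
        if running:
            print(get_running())
        if queued:
            print(get_queued())


    report()              # running and queued (the default)
    report(queued=False)  # roughly equivalent to `running-autopkgtests --running`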

debian/changelog

@@ -1,3 +1,9 @@
+ubuntu-dev-tools (0.200) UNRELEASED; urgency=medium
+
+  * Add support to see currently running autopkgtests (running-autopkgtests)
+
+ -- Chris Peterson <chris.peterson@canonical.com>  Wed, 14 Feb 2024 14:58:30 -0800
+
ubuntu-dev-tools (0.199) unstable; urgency=medium

  [ Simon Quigley ]

running-autopkgtests

@@ -5,76 +5,65 @@
# Andy P. Whitcroft
# Christian Ehrhardt
-'''Dumps a list of currently running tests in Autopkgtest'''
+"""Dumps a list of currently running tests in Autopkgtest"""
-__example__ = '''
+__example__ = """
Display first listed test running on amd64 hardware:
$ running-autopkgtests | grep amd64 | head -n1
R 0:01:40 systemd-upstream - focal amd64 upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb', 'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true', 'UPSTREAM_PULL_REQUEST=23153', 'GITHUB_STATUSES_URL=https://api.github.com/repos/systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
-'''
+"""
-import datetime
import sys
+from argparse import ArgumentParser, RawDescriptionHelpFormatter
-import urllib.request
-import json
-URL_RUNNING = 'http://autopkgtest.ubuntu.com/static/running.json'
-request = urllib.request.Request(URL_RUNNING)
-request.add_header('Cache-Control', 'max-age=0')
-with urllib.request.urlopen(request) as response:
-    data = response.read()
-jobs = json.loads(data.decode('utf-8'))
+from ubuntutools.running_autopkgtests import get_queued, get_running
-running = []
-for pkg in jobs:
-    for handle in jobs[pkg]:
-        for series in jobs[pkg][handle]:
-            for arch in jobs[pkg][handle][series]:
-                jobinfo = jobs[pkg][handle][series][arch]
-                triggers = ','.join(jobinfo[0].get('triggers', '-'))
-                ppas = ','.join(jobinfo[0].get('ppas', '-'))
-                time = jobinfo[1]
-                env = jobinfo[0].get('env', '-')
-                time = str(datetime.timedelta(seconds=jobinfo[1]))
-                try:
-                    fmt = "R {6:6} {0:30} {5:10} {1:8} {2:8} {3:31} {4} {7}"
-                    line = fmt.format(pkg, series, arch, ppas, triggers, '-', time, env)
-                    running.append((jobinfo[1], line))
-                except BrokenPipeError:
-                    sys.exit(1)
+def parse_args():
+    description = (
+        "Dumps a list of currently running and queued tests in Autopkgtest. "
+        "Pass --running to only see running tests, or --queued to only see "
+        "queued tests. Passing both will print both, which is the default behavior. "
+    )
-for (time, row) in sorted(running, reverse=True):
-    print(row)
+    parser = ArgumentParser(
+        prog="running-autopkgtests",
+        description=description,
+        epilog=f"example: {__example__}",
+        formatter_class=RawDescriptionHelpFormatter,
+    )
+    parser.add_argument(
+        "-r",
+        "--running",
+        action="store_true",
+        help="Print runnning autopkgtests (default: true)",
+    )
+    parser.add_argument(
+        "-q",
+        "--queued",
+        action="store_true",
+        help="Print queued autopkgtests (default: true)",
+    )
-request = urllib.request.Request('http://autopkgtest.ubuntu.com/queues.json')
-request.add_header('Cache-Control', 'max-age=0')
-with urllib.request.urlopen(request) as response:
-    data = response.read()
-queues = json.loads(data.decode('utf-8'))
+    options = parser.parse_args()
-for origin in queues:
-    for series in queues[origin]:
-        for arch in queues[origin][series]:
-            n = 0
-            for key in queues[origin][series][arch]:
-                if key == "private job":
-                    pkg = triggers = ppas = "private job"
-                else:
-                    (pkg, json_data) = key.split(maxsplit=1)
-                    try:
-                        jobinfo = json.loads(json_data)
-                        triggers = ','.join(jobinfo.get('triggers', '-'))
-                        ppas = ','.join(jobinfo.get('ppas', '-'))
-                    except json.decoder.JSONDecodeError:
-                        pkg = triggers = ppas = "failed to parse"
-                        continue
+    # If neither flag was specified, default to both not neither
+    if not options.running and not options.queued:
+        options.running = True
+        options.queued = True
-                n = n + 1
-                try:
-                    fmt = "Q{5:04d} {7:>6} {0:30} {6:10} {1:8} {2:8} {3:31} {4}"
-                    print(fmt.format(pkg, series, arch, ppas, triggers, n, origin, '-:--'))
-                except BrokenPipeError:
-                    sys.exit(1)
+    return options
+def main() -> int:
+    args = parse_args()
+    if args.running:
+        print(get_running())
+    if args.queued:
+        print(get_queued())
+    return 0
+if __name__ == "__main__":
+    sys.exit(main())

ubuntutools/running_autopkgtests.py

@@ -0,0 +1,94 @@
# Copyright (C) 2024 Canonical Ltd.
# Author: Chris Peterson <chris.peterson@canonical.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import datetime
import json
import sys
import urllib
import urllib.request

URL_RUNNING = "http://autopkgtest.ubuntu.com/static/running.json"
URL_QUEUED = "http://autopkgtest.ubuntu.com/queues.json"


def _get_jobs(url: str) -> dict:
    request = urllib.request.Request(
        url,
        headers={"Cache-Control": "max-age-0"},
    )
    with urllib.request.urlopen(request) as response:
        data = response.read()
        jobs = json.loads(data.decode("utf-8"))

    return jobs


def get_running():
    jobs = _get_jobs(URL_RUNNING)

    running = []
    for pkg in jobs:
        for handle in jobs[pkg]:
            for series in jobs[pkg][handle]:
                for arch in jobs[pkg][handle][series]:
                    jobinfo = jobs[pkg][handle][series][arch]
                    triggers = ",".join(jobinfo[0].get("triggers", "-"))
                    ppas = ",".join(jobinfo[0].get("ppas", "-"))
                    time = jobinfo[1]
                    env = jobinfo[0].get("env", "-")
                    time = str(datetime.timedelta(seconds=jobinfo[1]))
                    try:
                        fmt = "R {6:6} {0:30} {5:10} {1:8} {2:8} {3:31} {4} {7}"
                        line = fmt.format(pkg, series, arch, ppas, triggers, "-", time, env)
                        running.append((jobinfo[1], line))
                    except BrokenPipeError:
                        sys.exit(1)

    output = ""
    for time, row in sorted(running, reverse=True):
        output += f"{row}\n"

    return output


def get_queued():
    queues = _get_jobs(URL_QUEUED)

    output = ""
    for origin in queues:
        for series in queues[origin]:
            for arch in queues[origin][series]:
                n = 0
                for key in queues[origin][series][arch]:
                    if key == "private job":
                        pkg = triggers = ppas = "private job"
                    else:
                        (pkg, json_data) = key.split(maxsplit=1)
                        try:
                            jobinfo = json.loads(json_data)
                            triggers = ",".join(jobinfo.get("triggers", "-"))
                            ppas = ",".join(jobinfo.get("ppas", "-"))
                        except json.decoder.JSONDecodeError:
                            pkg = triggers = ppas = "failed to parse"
                            continue

                    n = n + 1
                    try:
                        fmt = "Q{5:04d} {7:>6} {0:30} {6:10} {1:8} {2:8} {3:31} {4}"
                        line = fmt.format(pkg, series, arch, ppas, triggers, n, origin, "-:--")
                        output += f"{line}\n"
                    except BrokenPipeError:
                        sys.exit(1)

    return output

ubuntutools/test/test_running_autopkgtests.py

@@ -0,0 +1,109 @@
# Copyright (C) 2024 Canonical Ltd.
# Author: Chris Peterson <chris.peterson@canonical.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

""" Tests for running_autopkgtests
Tests using cached data from autopkgtest servers.
These tests only ensure code changes don't change parsing behavior
of the response data. If the response format changes, then the cached
responses will need to change as well.
"""

import unittest
from unittest.mock import patch

from ubuntutools.running_autopkgtests import (
    URL_QUEUED,
    URL_RUNNING,
    _get_jobs,
    get_queued,
    get_running,
)

# Cached binary response data from autopkgtest server
RUN_DATA = b'{"pyatem": { "submit-time_2024-01-19 19:37:36;triggers_[\'python3-defaults/3.12.1-0ubuntu1\'];": {"noble": {"arm64": [{"triggers": ["python3-defaults/3.12.1-0ubuntu1"], "submit-time": "2024-01-19 19:37:36"}, 380, "<omitted log>"]}}}}'
QUEUED_DATA = b'{"ubuntu": {"noble": {"arm64": ["libobject-accessor-perl {\\"requester\\": \\"someone\\", \\"submit-time\\": \\"2024-01-18 01:08:55\\", \\"triggers\\": [\\"perl/5.38.2-3\\", \\"liblocale-gettext-perl/1.07-6build1\\"]}"]}}}'

# Expected result(s) of parsing the above JSON data
RUNNING_JOB = {
    "pyatem": {
        "submit-time_2024-01-19 19:37:36;triggers_['python3-defaults/3.12.1-0ubuntu1'];": {
            "noble": {
                "arm64": [
                    {
                        "triggers": ["python3-defaults/3.12.1-0ubuntu1"],
                        "submit-time": "2024-01-19 19:37:36",
                    },
                    380,
                    "<omitted log>",
                ]
            }
        }
    }
}

QUEUED_JOB = {
    "ubuntu": {
        "noble": {
            "arm64": [
                'libobject-accessor-perl {"requester": "someone", "submit-time": "2024-01-18 01:08:55", "triggers": ["perl/5.38.2-3", "liblocale-gettext-perl/1.07-6build1"]}',
            ]
        }
    }
}

PRIVATE_JOB = {"ppa": {"noble": {"arm64": ["private job"]}}}

# Expected textual output of the program based on the above data
RUNNING_OUTPUT = "R 0:06:20 pyatem - noble arm64 - python3-defaults/3.12.1-0ubuntu1 -\n"
QUEUED_OUTPUT = "Q0001 -:-- libobject-accessor-perl ubuntu noble arm64 - perl/5.38.2-3,liblocale-gettext-perl/1.07-6build1\n"
PRIVATE_OUTPUT = "Q0001 -:-- private job ppa noble arm64 private job private job\n"


class RunningAutopkgtestTestCase(unittest.TestCase):
    """Assert helper functions parse data correctly"""

    maxDiff = None

    @patch("urllib.request.urlopen")
    def test_get_running_jobs(self, mock_response):
        """Test: Correctly parse autopkgtest json data for running tests"""
        mock_response.return_value.__enter__.return_value.read.return_value = RUN_DATA
        jobs = _get_jobs(URL_RUNNING)

        self.assertEqual(RUNNING_JOB, jobs)

    @patch("urllib.request.urlopen")
    def test_get_queued_jobs(self, mock_response):
        """Test: Correctly parse autopkgtest json data for queued tests"""
        mock_response.return_value.__enter__.return_value.read.return_value = QUEUED_DATA
        jobs = _get_jobs(URL_QUEUED)

        self.assertEqual(QUEUED_JOB, jobs)

    def test_get_running_output(self):
        """Test: Correctly print running tests"""
        with patch("ubuntutools.running_autopkgtests._get_jobs", return_value=RUNNING_JOB):
            self.assertEqual(get_running(), RUNNING_OUTPUT)

    def test_get_queued_output(self):
        """Test: Correctly print queued tests"""
        with patch("ubuntutools.running_autopkgtests._get_jobs", return_value=QUEUED_JOB):
            self.assertEqual(get_queued(), QUEUED_OUTPUT)

    def test_private_queued_job(self):
        """Test: Correctly print queued private job"""
        with patch("ubuntutools.running_autopkgtests._get_jobs", return_value=PRIVATE_JOB):
            self.assertEqual(get_queued(), PRIVATE_OUTPUT)
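
The new tests are plain unittest cases, so they can be driven with the stock test loader. A minimal sketch, assuming the module is importable as ubuntutools.test.test_running_autopkgtests (the exact path is not shown in this view):

    import unittest

    # Assumption: the dotted path below matches where the test module lives;
    # only the TestCase and test names come from the diff above.
    suite = unittest.defaultTestLoader.loadTestsFromName(
        "ubuntutools.test.test_running_autopkgtests.RunningAutopkgtestTestCase"
    )
    unittest.TextTestRunner(verbosity=2).run(suite)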