Mirror of https://git.launchpad.net/ubuntu-dev-tools, synced 2025-03-12 15:41:09 +00:00
Format code with black and isort
```
isort pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
black -C pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
```
This commit is contained in:
parent 017941ad70
commit c7a855ff20
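For reference, the same formatting can be verified without rewriting any files by running both tools in check mode. This is a minimal sketch, not part of the commit; `--check-only`, `--check`, and `--diff` are the standard isort/black verification options, and `-C` is black's `--skip-magic-trailing-comma` flag used above:

```
# Report files whose imports or formatting would change, without modifying them;
# both commands exit non-zero when a change would be required.
isort --check-only --diff pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
black -C --check --diff pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
```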
@@ -294,7 +294,9 @@ class PbuilderDist:
         if self.target_distro in self._debian_distros:
             try:
-                codename = self.debian_distro_info.codename(self.target_distro, default=self.target_distro)
+                codename = self.debian_distro_info.codename(
+                    self.target_distro, default=self.target_distro
+                )
             except DistroDataOutdated as error:
                 Logger.warning(error)
             if codename in (self.debian_distro_info.devel(), "experimental"):
pm-helper (53 lines changed)
@@ -15,32 +15,30 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 import lzma
-from argparse import ArgumentParser
 import sys
 import webbrowser
-import yaml
+from argparse import ArgumentParser
+
+import yaml
 from launchpadlib.launchpad import Launchpad
 
 from ubuntutools.utils import get_url
 
 
 # proposed-migration is only concerned with the devel series; unlike other
 # tools, don't make this configurable
-excuses_url = 'https://ubuntu-archive-team.ubuntu.com/proposed-migration/' \
-    + 'update_excuses.yaml.xz'
+excuses_url = "https://ubuntu-archive-team.ubuntu.com/proposed-migration/update_excuses.yaml.xz"
 
 
 def get_proposed_version(excuses, package):
-    for k in excuses['sources']:
-        if k['source'] == package:
-            return k.get('new-version')
+    for k in excuses["sources"]:
+        if k["source"] == package:
+            return k.get("new-version")
     return None
 
 
 def claim_excuses_bug(launchpad, bug, package):
     print("LP: #%d: %s" % (bug.id, bug.title))
-    ubuntu = launchpad.distributions['ubuntu']
+    ubuntu = launchpad.distributions["ubuntu"]
     series = ubuntu.current_series.fullseriesname
 
     for task in bug.bug_tasks:
@@ -58,10 +56,10 @@ def claim_excuses_bug(launchpad, bug, package):
     elif our_task.assignee:
         print("Currently assigned to %s" % our_task.assignee.name)
 
-    print('''Do you want to claim this bug? [yN] ''', end="")
+    print("""Do you want to claim this bug? [yN] """, end="")
     sys.stdout.flush()
     response = sys.stdin.readline()
-    if response.strip().lower().startswith('y'):
+    if response.strip().lower().startswith("y"):
         our_task.assignee = launchpad.me
         our_task.lp_save()
         return True
@@ -72,10 +70,10 @@ def claim_excuses_bug(launchpad, bug, package):
 def create_excuses_bug(launchpad, package, version):
     print("Will open a new bug")
     bug = launchpad.bugs.createBug(
-        title = 'proposed-migration for %s %s' % (package, version),
-        tags = ('update-excuse'),
-        target = 'https://api.launchpad.net/devel/ubuntu/+source/%s' % package,
-        description = '%s %s is stuck in -proposed.' % (package, version)
+        title="proposed-migration for %s %s" % (package, version),
+        tags=("update-excuse"),
+        target="https://api.launchpad.net/devel/ubuntu/+source/%s" % package,
+        description="%s %s is stuck in -proposed." % (package, version),
     )
 
     task = bug.bug_tasks[0]
@@ -88,12 +86,12 @@ def create_excuses_bug(launchpad, package, version):
 
 
 def has_excuses_bugs(launchpad, package):
-    ubuntu = launchpad.distributions['ubuntu']
+    ubuntu = launchpad.distributions["ubuntu"]
     pkg = ubuntu.getSourcePackage(name=package)
     if not pkg:
         raise ValueError(f"No such source package: {package}")
 
-    tasks = pkg.searchTasks(tags=['update-excuse'], order_by=['id'])
+    tasks = pkg.searchTasks(tags=["update-excuse"], order_by=["id"])
 
     bugs = [task.bug for task in tasks]
     if not bugs:
@@ -102,8 +100,7 @@ def has_excuses_bugs(launchpad, package):
     if len(bugs) == 1:
         print("There is 1 open update-excuse bug against %s" % package)
     else:
-        print("There are %d open update-excuse bugs against %s" \
-            % (len(bugs), package))
+        print("There are %d open update-excuse bugs against %s" % (len(bugs), package))
 
     for bug in bugs:
         if claim_excuses_bug(launchpad, bug, package):
@@ -114,17 +111,14 @@ def has_excuses_bugs(launchpad, package):
 
 def main():
     parser = ArgumentParser()
-    parser.add_argument(
-        "-l", "--launchpad", dest="launchpad_instance", default="production")
-    parser.add_argument(
-        "-v", "--verbose", default=False, action="store_true",
-        help="be more verbose")
-    parser.add_argument(
-        'package', nargs='?', help="act on this package only")
+    parser.add_argument("-l", "--launchpad", dest="launchpad_instance", default="production")
+    parser.add_argument(
+        "-v", "--verbose", default=False, action="store_true", help="be more verbose"
+    )
+    parser.add_argument("package", nargs="?", help="act on this package only")
     args = parser.parse_args()
 
-    args.launchpad = Launchpad.login_with(
-        "pm-helper", args.launchpad_instance, version="devel")
+    args.launchpad = Launchpad.login_with("pm-helper", args.launchpad_instance, version="devel")
 
     f = get_url(excuses_url, False)
     with lzma.open(f) as lzma_f:
@@ -137,13 +131,12 @@ def main():
         if not proposed_version:
             print("Package %s not found in -proposed." % args.package)
             sys.exit(1)
-        create_excuses_bug(args.launchpad, args.package,
-                           proposed_version)
+        create_excuses_bug(args.launchpad, args.package, proposed_version)
     except ValueError as e:
         sys.stderr.write(f"{e}\n")
     else:
         pass  # for now
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     sys.exit(main())
@@ -23,7 +23,12 @@
 __example__ = """
 Display first listed test running on amd64 hardware:
 $ running-autopkgtests | grep amd64 | head -n1
-R 0:01:40 systemd-upstream - focal amd64 upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb', 'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true', 'UPSTREAM_PULL_REQUEST=23153', 'GITHUB_STATUSES_URL=https://api.github.com/repos/systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
+R 0:01:40 systemd-upstream - focal amd64\
+upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb',\
+'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true',\
+'UPSTREAM_PULL_REQUEST=23153',\
+'GITHUB_STATUSES_URL=https://api.github.com/repos/\
+systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
 """
 
 import sys
@@ -46,16 +51,10 @@ def parse_args():
         formatter_class=RawDescriptionHelpFormatter,
     )
     parser.add_argument(
-        "-r",
-        "--running",
-        action="store_true",
-        help="Print runnning autopkgtests (default: true)",
+        "-r", "--running", action="store_true", help="Print runnning autopkgtests (default: true)"
     )
     parser.add_argument(
-        "-q",
-        "--queued",
-        action="store_true",
-        help="Print queued autopkgtests (default: true)",
+        "-q", "--queued", action="store_true", help="Print queued autopkgtests (default: true)"
     )
 
     options = parser.parse_args()
ubuntu-build (89 lines changed)
@@ -28,9 +28,9 @@
 import argparse
 import sys
 
+import lazr.restfulclient.errors
 from launchpadlib.credentials import TokenAuthorizationException
 from launchpadlib.launchpad import Launchpad
-import lazr.restfulclient.errors
 
 from ubuntutools import getLogger
 from ubuntutools.lp.udtexceptions import PocketDoesNotExistError
@@ -48,6 +48,7 @@ def getBuildStates(pkg, archs):
     msg = "\n".join(res)
     return f"Build state(s) for '{pkg.source_package_name}':\n{msg}"
 
+
 def rescoreBuilds(pkg, archs, score):
     res = []
 
@@ -61,17 +62,18 @@ def rescoreBuilds(pkg, archs, score):
             res.append(f" {arch}: done")
         except lazr.restfulclient.errors.Unauthorized:
             Logger.error(
-                "You don't have the permissions to rescore builds. Ignoring your rescore request."
+                "You don't have the permissions to rescore builds."
+                " Ignoring your rescore request."
             )
             return None
         except lazr.restfulclient.errors.BadRequest:
-            Logger.info("Cannot rescore build of %s on %s.",
-                        build.source_package_name, arch)
+            Logger.info("Cannot rescore build of %s on %s.", build.source_package_name, arch)
             res.append(f" {arch}: failed")
 
     msg = "\n".join(res)
     return f"Rescoring builds of '{pkg.source_package_name}' to {score}:\n{msg}"
 
+
 def retryBuilds(pkg, archs):
     res = []
     for build in pkg.getBuilds():
@@ -94,16 +96,7 @@ def main():
 
     # Valid architectures.
     valid_archs = set(
-        [
-            "armhf",
-            "arm64",
-            "amd64",
-            "i386",
-            "powerpc",
-            "ppc64el",
-            "riscv64",
-            "s390x",
-        ]
+        ["armhf", "arm64", "amd64", "i386", "powerpc", "ppc64el", "riscv64", "s390x"]
     )
 
     # Prepare our option parser.
@@ -118,8 +111,7 @@ def main():
         f"include: {', '.join(valid_archs)}.",
     )
 
-    parser.add_argument("-A", "--archive", help="operate on ARCHIVE",
-                        default="ubuntu")
+    parser.add_argument("-A", "--archive", help="operate on ARCHIVE", default="ubuntu")
 
     # Batch processing options
     batch_options = parser.add_argument_group(
@@ -148,7 +140,9 @@ def main():
         help="Rescore builds to <priority>.",
     )
     batch_options.add_argument(
-        "--state", action="store", dest="state",
+        "--state",
+        action="store",
+        dest="state",
         help="Act on builds that are in the specified state",
     )
 
@@ -157,11 +151,10 @@ def main():
     # Parse our options.
    args = parser.parse_args()
 
-    launchpad = Launchpad.login_with("ubuntu-dev-tools", "production",
-                                     version="devel")
+    launchpad = Launchpad.login_with("ubuntu-dev-tools", "production", version="devel")
     me = launchpad.me
 
-    ubuntu = launchpad.distributions['ubuntu']
+    ubuntu = launchpad.distributions["ubuntu"]
 
     if args.batch:
         release = args.series
@@ -169,7 +162,7 @@ def main():
             # ppas don't have a proposed pocket so just use the release pocket;
             # but for the main archive we default to -proposed
             release = ubuntu.getDevelopmentSeries()[0].name
-            if args.archive == 'ubuntu':
+            if args.archive == "ubuntu":
                 release = release + "-proposed"
         try:
             (release, pocket) = split_release_pocket(release)
@@ -227,7 +220,8 @@ def main():
                     exact_match=True,
                     pocket=pocket,
                     source_name=package,
-                    status='Published')[0]
+                    status="Published",
+                )[0]
             except IndexError as error:
                 Logger.error("No publication found for package %s", package)
                 sys.exit(1)
@@ -288,7 +282,8 @@ def main():
                         build.rescore(score=priority)
                     except lazr.restfulclient.errors.Unauthorized:
                         Logger.error(
-                            "You don't have the permissions to rescore builds. Ignoring your rescore request."
+                            "You don't have the permissions to rescore builds."
+                            " Ignoring your rescore request."
                        )
                         break
                 else:
@@ -325,24 +320,22 @@ def main():
 
        if not args.state:
            if args.retry:
-                args.state='Failed to build'
+                args.state = "Failed to build"
            elif args.priority:
-                args.state='Needs building'
+                args.state = "Needs building"
        # there is no equivalent to series.getBuildRecords() for a ppa.
        # however, we don't want to have to traverse all build records for
        # all series when working on the main archive, so we use
        # series.getBuildRecords() for ubuntu and handle ppas separately
        series = ubuntu.getSeries(name_or_version=release)
-        if args.archive == 'ubuntu':
-            builds = series.getBuildRecords(build_state=args.state,
-                                            pocket=pocket)
+        if args.archive == "ubuntu":
+            builds = series.getBuildRecords(build_state=args.state, pocket=pocket)
        else:
            builds = []
-            for build in archive.getBuildRecords(build_state=args.state,
-                                                 pocket=pocket):
+            for build in archive.getBuildRecords(build_state=args.state, pocket=pocket):
                if not build.current_source_publication:
                    continue
-                if build.current_source_publication.distro_series==series:
+                if build.current_source_publication.distro_series == series:
                    builds.append(build)
        for build in builds:
            if build.arch_tag not in archs:
@@ -361,9 +354,8 @@ def main():
            )
            if args.retry and not can_retry:
                Logger.error(
-                    "You don't have the permissions to retry the "
-                    "build of '%s', skipping.",
-                    build.source_package_name
+                    "You don't have the permissions to retry the build of '%s', skipping.",
+                    build.source_package_name,
                )
                continue
            Logger.info(
@@ -371,18 +363,22 @@ def main():
                build.source_package_name,
                release,
                pocket,
-                build.source_package_version
+                build.source_package_version,
            )
 
            if args.retry and build.can_be_retried:
-                Logger.info("Retrying build of %s on %s...",
-                            build.source_package_name, build.arch_tag)
+                Logger.info(
+                    "Retrying build of %s on %s...", build.source_package_name, build.arch_tag
+                )
                try:
                    build.retry()
                    retry_count += 1
                except lazr.restfulclient.errors.BadRequest:
-                    Logger.info("Failed to retry build of %s on %s",
-                                build.source_package_name, build.arch_tag)
+                    Logger.info(
+                        "Failed to retry build of %s on %s",
+                        build.source_package_name,
+                        build.arch_tag,
+                    )
 
            if args.priority and can_rescore:
                if build.can_be_rescored:
@@ -390,19 +386,22 @@ def main():
                        build.rescore(score=args.priority)
                    except lazr.restfulclient.errors.Unauthorized:
                        Logger.error(
-                            "You don't have the permissions to rescore builds. Ignoring your rescore request."
+                            "You don't have the permissions to rescore builds."
+                            " Ignoring your rescore request."
                        )
                        can_rescore = False
                    except lazr.restfulclient.errors.BadRequest:
-                        Logger.info("Cannot rescore build of %s on %s.",
-                                    build.source_package_name, build.arch_tag)
+                        Logger.info(
+                            "Cannot rescore build of %s on %s.",
+                            build.source_package_name,
+                            build.arch_tag,
+                        )
 
        Logger.info("")
        if args.retry:
            Logger.info("%d package builds retried", retry_count)
        sys.exit(0)
 
-
    for pkg in args.packages:
        try:
            pkg = archive.getPublishedSources(
@@ -410,7 +409,8 @@ def main():
                exact_match=True,
                pocket=pocket,
                source_name=pkg,
-                status='Published')[0]
+                status="Published",
+            )[0]
        except IndexError as error:
            Logger.error("No publication found for package %s", pkg)
            continue
@@ -450,4 +450,3 @@ def main():
 
 if __name__ == "__main__":
     main()
-
@@ -462,9 +462,11 @@ class PullPkg:
 
             uri = srcpkg.dsc[original_key]
 
-            Logger.warning("\nNOTICE: '%s' packaging is maintained in "
-                           "the '%s' version control system at:\n"
-                           " %s\n" % (package, vcs, uri))
+            Logger.warning(
+                "\nNOTICE: '%s' packaging is maintained in "
+                "the '%s' version control system at:\n"
+                " %s\n" % (package, vcs, uri)
+            )
 
             if vcs == "Bazaar":
                 vcscmd = " $ bzr branch " + uri
@@ -472,9 +474,11 @@ class PullPkg:
                 vcscmd = " $ git clone " + uri
 
             if vcscmd:
-                Logger.info(f"Please use:\n{vcscmd}\n"
-                            "to retrieve the latest (possibly unreleased) "
-                            "updates to the package.\n")
+                Logger.info(
+                    f"Please use:\n{vcscmd}\n"
+                    "to retrieve the latest (possibly unreleased) "
+                    "updates to the package.\n"
+                )
 
         if pull == PULL_LIST:
             Logger.info("Source files:")
@@ -26,10 +26,7 @@ URL_QUEUED = "http://autopkgtest.ubuntu.com/queues.json"
 
 
 def _get_jobs(url: str) -> dict:
-    request = urllib.request.Request(
-        url,
-        headers={"Cache-Control": "max-age-0"},
-    )
+    request = urllib.request.Request(url, headers={"Cache-Control": "max-age-0"})
     with urllib.request.urlopen(request) as response:
         data = response.read()
     jobs = json.loads(data.decode("utf-8"))
@@ -52,7 +49,10 @@ def get_running():
             env = jobinfo[0].get("env", "-")
             time = str(datetime.timedelta(seconds=jobinfo[1]))
             try:
-                line = f"R {time:6} {pkg:30} {'-':10} {series:8} {arch:8} {ppas:31} {triggers} {env}\n"
+                line = (
+                    f"R {time:6} {pkg:30} {'-':10} {series:8} {arch:8}"
+                    f" {ppas:31} {triggers} {env}\n"
+                )
                 running.append((jobinfo[1], line))
             except BrokenPipeError:
                 sys.exit(1)
@@ -86,7 +86,10 @@ def get_queued():
 
                 n = n + 1
                 try:
-                    output += f"Q{n:04d} {'-:--':>6} {pkg:30} {origin:10} {series:8} {arch:8} {ppas:31} {triggers}\n"
+                    output += (
+                        f"Q{n:04d} {'-:--':>6} {pkg:30} {origin:10} {series:8} {arch:8}"
+                        f" {ppas:31} {triggers}\n"
+                    )
                 except BrokenPipeError:
                     sys.exit(1)
     return output
@@ -30,6 +30,4 @@ class BinaryTests(unittest.TestCase):
         try:
             import keyring  # noqa: F401
         except ModuleNotFoundError:
-            raise ModuleNotFoundError(
-                "package python3-keyring is not installed"
-            )
+            raise ModuleNotFoundError("package python3-keyring is not installed")
@@ -32,8 +32,17 @@ from ubuntutools.running_autopkgtests import (
 )
 
 # Cached binary response data from autopkgtest server
-RUN_DATA = b'{"pyatem": { "submit-time_2024-01-19 19:37:36;triggers_[\'python3-defaults/3.12.1-0ubuntu1\'];": {"noble": {"arm64": [{"triggers": ["python3-defaults/3.12.1-0ubuntu1"], "submit-time": "2024-01-19 19:37:36"}, 380, "<omitted log>"]}}}}'
-QUEUED_DATA = b'{"ubuntu": {"noble": {"arm64": ["libobject-accessor-perl {\\"requester\\": \\"someone\\", \\"submit-time\\": \\"2024-01-18 01:08:55\\", \\"triggers\\": [\\"perl/5.38.2-3\\", \\"liblocale-gettext-perl/1.07-6build1\\"]}"]}}}'
+RUN_DATA = (
+    b'{"pyatem": {'
+    b" \"submit-time_2024-01-19 19:37:36;triggers_['python3-defaults/3.12.1-0ubuntu1'];\":"
+    b' {"noble": {"arm64": [{"triggers": ["python3-defaults/3.12.1-0ubuntu1"],'
+    b' "submit-time": "2024-01-19 19:37:36"}, 380, "<omitted log>"]}}}}'
+)
+QUEUED_DATA = (
+    b'{"ubuntu": {"noble": {"arm64": ["libobject-accessor-perl {\\"requester\\": \\"someone\\",'
+    b' \\"submit-time\\": \\"2024-01-18 01:08:55\\",'
+    b' \\"triggers\\": [\\"perl/5.38.2-3\\", \\"liblocale-gettext-perl/1.07-6build1\\"]}"]}}}'
+)
 
 # Expected result(s) of parsing the above JSON data
 RUNNING_JOB = {
@@ -57,7 +66,9 @@ QUEUED_JOB = {
     "ubuntu": {
         "noble": {
             "arm64": [
-                'libobject-accessor-perl {"requester": "someone", "submit-time": "2024-01-18 01:08:55", "triggers": ["perl/5.38.2-3", "liblocale-gettext-perl/1.07-6build1"]}',
+                'libobject-accessor-perl {"requester": "someone",'
+                ' "submit-time": "2024-01-18 01:08:55",'
+                ' "triggers": ["perl/5.38.2-3", "liblocale-gettext-perl/1.07-6build1"]}'
             ]
         }
     }
@@ -68,9 +79,18 @@ PRIVATE_JOB = {"ppa": {"noble": {"arm64": ["private job"]}}}
 
 
 # Expected textual output of the program based on the above data
-RUNNING_OUTPUT = "R 0:06:20 pyatem - noble arm64 - python3-defaults/3.12.1-0ubuntu1 -\n"
-QUEUED_OUTPUT = "Q0001 -:-- libobject-accessor-perl ubuntu noble arm64 - perl/5.38.2-3,liblocale-gettext-perl/1.07-6build1\n"
-PRIVATE_OUTPUT = "Q0001 -:-- private job ppa noble arm64 private job private job\n"
+RUNNING_OUTPUT = (
+    "R 0:06:20 pyatem - noble arm64"
+    " - python3-defaults/3.12.1-0ubuntu1 -\n"
+)
+QUEUED_OUTPUT = (
+    "Q0001 -:-- libobject-accessor-perl ubuntu noble arm64"
+    " - perl/5.38.2-3,liblocale-gettext-perl/1.07-6build1\n"
+)
+PRIVATE_OUTPUT = (
+    "Q0001 -:-- private job ppa noble arm64"
+    " private job private job\n"
+)
 
 
 class RunningAutopkgtestTestCase(unittest.TestCase):
@@ -15,47 +15,44 @@
 
 """Portions of archive related code that is re-used by various tools."""
 
-from datetime import datetime
 import os
 import re
 import urllib.request
+from datetime import datetime
 
 import dateutil.parser
 from dateutil.tz import tzutc
 
 
 def get_cache_dir():
-    cache_dir = os.environ.get('XDG_CACHE_HOME',
-                               os.path.expanduser(os.path.join('~', '.cache')))
-    uat_cache = os.path.join(cache_dir, 'ubuntu-archive-tools')
+    cache_dir = os.environ.get("XDG_CACHE_HOME", os.path.expanduser(os.path.join("~", ".cache")))
+    uat_cache = os.path.join(cache_dir, "ubuntu-archive-tools")
     os.makedirs(uat_cache, exist_ok=True)
     return uat_cache
 
 
 def get_url(url, force_cached):
-    ''' Return file to the URL, possibly caching it
-    '''
+    """Return file to the URL, possibly caching it"""
     cache_file = None
 
     # ignore bileto urls wrt caching, they're usually too small to matter
     # and we don't do proper cache expiry
-    m = re.search('ubuntu-archive-team.ubuntu.com/proposed-migration/'
-                  '([^/]*)/([^/]*)',
-                  url)
+    m = re.search("ubuntu-archive-team.ubuntu.com/proposed-migration/([^/]*)/([^/]*)", url)
     if m:
         cache_dir = get_cache_dir()
-        cache_file = os.path.join(cache_dir, '%s_%s' % (m.group(1), m.group(2)))
+        cache_file = os.path.join(cache_dir, "%s_%s" % (m.group(1), m.group(2)))
     else:
         # test logs can be cached, too
         m = re.search(
-            'https://autopkgtest.ubuntu.com/results/autopkgtest-[^/]*/([^/]*)/([^/]*)'
-            '/[a-z0-9]*/([^/]*)/([_a-f0-9]*)@/log.gz',
-            url)
+            "https://autopkgtest.ubuntu.com/results/autopkgtest-[^/]*/([^/]*)/([^/]*)"
+            "/[a-z0-9]*/([^/]*)/([_a-f0-9]*)@/log.gz",
+            url,
+        )
        if m:
            cache_dir = get_cache_dir()
-            cache_file = os.path.join(
-                cache_dir, '%s_%s_%s_%s.gz' % (
-                    m.group(1), m.group(2), m.group(3), m.group(4)))
+            cache_file = os.path.join(
+                cache_dir, "%s_%s_%s_%s.gz" % (m.group(1), m.group(2), m.group(3), m.group(4))
+            )
 
     if cache_file:
         try:
@@ -65,18 +62,18 @@ def get_url(url, force_cached):
             prev_timestamp = datetime.fromtimestamp(prev_mtime, tz=tzutc())
             new_timestamp = datetime.now(tz=tzutc()).timestamp()
             if force_cached:
-                return open(cache_file, 'rb')
+                return open(cache_file, "rb")
 
     f = urllib.request.urlopen(url)
 
     if cache_file:
-        remote_ts = dateutil.parser.parse(f.headers['last-modified'])
+        remote_ts = dateutil.parser.parse(f.headers["last-modified"])
         if remote_ts > prev_timestamp:
-            with open('%s.new' % cache_file, 'wb') as new_cache:
+            with open("%s.new" % cache_file, "wb") as new_cache:
                 for line in f:
                     new_cache.write(line)
-            os.rename('%s.new' % cache_file, cache_file)
+            os.rename("%s.new" % cache_file, cache_file)
             os.utime(cache_file, times=(new_timestamp, new_timestamp))
         f.close()
-        f = open(cache_file, 'rb')
+        f = open(cache_file, "rb")
     return f