Compare commits

..

56 Commits

Author SHA1 Message Date
Simon Quigley
466e2784de Upload to Unstable 2025-03-04 13:43:32 -06:00
Simon Quigley
ba3f0511f9 syncpackage: Catch exceptions cleanly, simply skipping to the next package (erring on the side of caution) if there is an error doing the download (LP: #1943286). 2025-03-04 13:42:50 -06:00
Simon Quigley
2e550ceff2 syncpackage: Cache the sync blocklist in-memory, so it's not fetched multiple times when syncing more than one package. 2025-03-04 13:39:07 -06:00
Simon Quigley
6c8a5d74bd syncpackage: s/syncblacklist/syncblocklist/g 2025-03-04 13:29:02 -06:00
Simon Quigley
3d11516599 mk-sbuild: default to using UTC for schroots (LP: #2097159). 2025-03-04 13:22:40 -06:00
Simon Quigley
5a20308ab1 Read ~/.devscripts in a more robust way, to ideally pick up multi-line variables (Closes: #725418). 2025-03-04 13:17:30 -06:00
Simon Quigley
b551877651 Add a changelog entry 2025-03-04 13:10:04 -06:00
ferbraher
4a4c4e0a27 Parsing arch parameter to getBinaryPackage() 2025-03-04 13:08:59 -06:00
Simon Quigley
865c1c97bc Add a changelog entry 2025-03-04 13:07:42 -06:00
Shengjing Zhu
d09718e976 import-bug-from-debian: package option is overridden and not used 2025-03-04 13:07:11 -06:00
Simon Quigley
bff7baecc9 Add a changelog entry 2025-03-04 13:06:38 -06:00
Dan Bungert
45fbbb5bd1 mk-sbuild: enable pkgmaintainermangler
mk-sbuild installs pkgbinarymangler into the schroot.  Of of the
provided tools in pkgbinarymangler is pkgmaintainermangler.
pkgmaintainermangler is disabled by default, and enabled with
configuration.

A difference between launchpad builds of a synced package and an sbuild
is that the maintainer information will be different.

Enable pkgmaintainermangler to close this difference.
2025-03-04 13:05:57 -06:00
Simon Quigley
ca217c035e Add a new changelog entry 2025-03-04 13:04:49 -06:00
Simon Quigley
b5e117788b Upload to Unstable 2025-03-01 11:30:18 -06:00
Simon Quigley
ddba2d1e98 Update Standards-Version to 4.7.2, no changes needed. 2025-03-01 11:29:53 -06:00
Simon Quigley
02d65a5804 [syncpackage] Do not use exit(1) on an error or exception unless it applies to all packages, instead return None so we can continue to the next package. 2025-03-01 11:26:59 -06:00
Simon Quigley
bda85fa6a8 [syncpackage] Add support for -y or --yes, noted that it should be used with care. 2025-03-01 11:22:52 -06:00
Simon Quigley
86a83bf74d [syncpackage] Within fetch_source_pkg, do not exit(1) on an error or exception, simply return None so we can continue to the next package. 2025-03-01 11:17:02 -06:00
Simon Quigley
162e758671 [syncpackage] When syncing multiple packages, if one of the packages is in the sync blocklist, do not exit, simply continue. 2025-03-01 11:12:49 -06:00
Simon Quigley
049425adb7 Add debian/files to .gitignore 2025-03-01 11:11:34 -06:00
Simon Quigley
f6ca6cad92 Add a new changelog entry 2025-03-01 11:11:17 -06:00
Simon Quigley
3dc17934d6 Upload to Unstable 2025-02-24 19:55:03 -06:00
Simon Quigley
10a176567a Remove mail line from default ~/.sbuildrc, to resolve the undeclared dependency on sendmail (Closes: #1074632). 2025-02-24 19:52:59 -06:00
Simon Quigley
86b366c6c5 Add a large warning at the top of mk-sbuild encouraging the use of the unshare backend. This is to provide ample warning to users. 2025-02-24 19:15:55 -06:00
Simon Quigley
50b580b30e Add a manpage for running-autopkgtests. 2025-02-24 18:51:12 -06:00
Simon Quigley
6ba0641f63 Rename bitesize to lp-bitesize (Closes: #1076224). 2025-02-24 18:51:10 -06:00
Simon Quigley
1e815db9d2 Add my name to the copyright file. 2025-02-24 18:35:20 -06:00
Simon Quigley
e2f43318bd Add several Lintian overrides related to .pyc files. 2025-02-24 18:34:18 -06:00
Julien Plissonneau Duquène
cdd81232d9 Fix reverse-depends -b crash on packages that b-d on themselves (Closes: #1087760). 2025-02-24 18:31:33 -06:00
Simon Quigley
65044d84d9 Update Standards-Version to 4.7.1, no changes needed. 2025-02-24 18:26:59 -06:00
Mattia Rizzolo
19e40b49c2
Fix minor typo in pbuilder-dist(1)
LP: #2096956
Thanks: Rolf Leggewie for the patch
Signed-off-by: Mattia Rizzolo <mattia@debian.org>
2025-01-30 07:52:22 +01:00
Benjamin Drung
55eb521461 Release 0.203 2024-11-02 18:20:32 +01:00
Benjamin Drung
983bb3b70e Depend on python3-yaml for pm-helper 2024-11-02 18:09:16 +01:00
Benjamin Drung
85f2e46f7d conform to snake_case naming style 2024-11-02 18:07:23 +01:00
Benjamin Drung
649c3db767 ubuntu-build: fix used-before-assignment
```
ubuntu-build:244:40: E0601: Using variable 'necessary_privs' before assignment (used-before-assignment)
```
2024-11-02 17:56:47 +01:00
Benjamin Drung
e7ba650414 Avoid unnecessary "elif" after "continue"
Address pylint's no-else-continue.
2024-11-02 17:55:33 +01:00
Benjamin Drung
3bc802a209 Use lazy % formatting in logging functions 2024-11-02 17:55:20 +01:00
Benjamin Drung
92c80d7bb7 ubuntu-build: remove unused code/imports 2024-11-02 17:54:06 +01:00
Benjamin Drung
d7362d9ed8 Use Python f-strings
```
flynt -ll 99 -tc -tj -a pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
```
2024-11-02 17:49:20 +01:00
Benjamin Drung
c7a855ff20 Format code with black and isort
```
isort pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
black -C pbuilder-dist pm-helper running-autopkgtests ubuntu-build ubuntutools
```
2024-11-02 17:21:30 +01:00
Benjamin Drung
017941ad70 setup.py: add pm-helper 2024-11-02 16:41:44 +01:00
Benjamin Drung
69914f861e add missing files to debian/copyright 2024-11-02 16:35:31 +01:00
Benjamin Drung
454f1e30c8 Bump year in copyright 2024-11-02 15:57:19 +01:00
Benjamin Drung
55bc403a95 Bump Standards-Version to 4.7.0 2024-11-02 15:56:01 +01:00
Benjamin Drung
c9339aeae4 import-bug-from-debian: add type hints 2024-11-02 15:34:59 +01:00
Benjamin Drung
c205ee0381 import-bug-from-debian: avoid type change of bug_num
The variable `bug_num` has the type `str`. Do not reuse the name for
type `int` to ease mypy.
2024-11-02 15:33:15 +01:00
Benjamin Drung
7577e10f13 import-bug-from-debian: reuse message variable
`log[0]["message"]` was already queried.
2024-11-02 15:32:19 +01:00
Florent 'Skia' Jacquet
e328dc05c2 import-bug-from-debian: split big main function into smaller ones
This allows better understanding of the various parts of the code, by
naming important parts and defining boundaries on the used variables.
2024-11-02 15:08:09 +01:00
Florent 'Skia' Jacquet
9a94c9dea1 import-bug-from-debian: handle multipart messages
With multipart messages, like #1073996, `import-bug-from-debian` would
produce bug description with this:
```
[<email.message.Message object at 0x7fbe14096fa0>, <email.message.Message object at 0x7fbe15143820>]
```
For that kind of bug, it now produces a correct description with the
plain text parts concatenated in the description, the attachments added
as attachments, and the inline images converted to attachments with an
inline message placeholder.

See #981577 for a particularly weird case now gracefully handled.
If something weirder happens, then the tool will now abort with a clear
message instead of producing garbage.

Closes: #969510
2024-11-02 14:57:01 +01:00
Florent 'Skia' Jacquet
47ab7b608b Add gitignore 2024-10-30 17:31:54 +01:00
Steve Langasek
56044d8eac Recommend sbuild over pbuilder. sbuild is the tool recommended by Ubuntu developers whose behavior most closely approximates Launchpad builds. 2024-05-26 13:04:55 -07:00
Steve Langasek
c523b4cfc4 open new version 2024-05-26 13:01:23 -07:00
Steve Langasek
3df40f6392 Handle exceptions on retry
The "can be retried" value from launchpad may have been cached.  Avoid an
exception when we race someone else retrying a build.
2024-05-26 12:57:14 -07:00
Simon Quigley
6ebffe3f4a Consolidate Ubuntu changelog entries, upload to Unstable 2024-04-12 23:35:08 -05:00
Chris Peterson
f01234e8a5 update debian/copyright
- Correctly add ISC licenses to new files in ubuntutools/tests/*
  as specified in debian/copyright
- Add GPL-3 licenses and correct attribution for:
    - running-autopkgtests
    - ubuntutools/running_autopkgtests.py
2024-03-13 09:21:30 -07:00
Chris Peterson
43891eda88 depends: python3-launchpadlib-desktop
Replace the dependency on python3-launchpadlib with
python3-launchpadlib-desktop. This package is the same as python3-launchpadlib
except that it also includes python3-keyring, which is a requirement for
some of the desktop-centric code-paths. In the case, requestsync has a
path for logging in via a web browser which also requires python3-keyring
to be installed. This had caused a ModuleNotFoundError when
python3-launchpadlib dropped python3-keyring from Recommends to Suggests
(LP: #2049217).
2024-03-13 09:17:49 -07:00
35 changed files with 693 additions and 368 deletions

2
.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
__pycache__
*.egg-info

1
debian/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
files

88
debian/changelog vendored
View File

@ -1,10 +1,83 @@
ubuntu-dev-tools (0.201ubuntu2~22.04.4) jammy; urgency=medium ubuntu-dev-tools (0.206) unstable; urgency=medium
* Backport current ubuntu-dev-tools to jammy. LP: #2057716. [ Dan Bungert ]
* mk-sbuild: enable pkgmaintainermangler
-- Steve Langasek <steve.langasek@ubuntu.com> Tue, 12 Mar 2024 17:39:42 -0700 [ Shengjing Zhu ]
* import-bug-from-debian: package option is overridden and not used
ubuntu-dev-tools (0.201ubuntu2) noble; urgency=medium [ Fernando Bravo Hernández ]
* Parsing arch parameter to getBinaryPackage() (LP: #2081861)
[ Simon Quigley ]
* Read ~/.devscripts in a more robust way, to ideally pick up multi-line
variables (Closes: #725418).
* mk-sbuild: default to using UTC for schroots (LP: #2097159).
* syncpackage: s/syncblacklist/syncblocklist/g
* syncpackage: Cache the sync blocklist in-memory, so it's not fetched
multiple times when syncing more than one package.
* syncpackage: Catch exceptions cleanly, simply skipping to the next
package (erring on the side of caution) if there is an error doing the
download (LP: #1943286).
-- Simon Quigley <tsimonq2@debian.org> Tue, 04 Mar 2025 13:43:15 -0600
ubuntu-dev-tools (0.205) unstable; urgency=medium
* [syncpackage] When syncing multiple packages, if one of the packages is in
the sync blocklist, do not exit, simply continue.
* [syncpackage] Do not use exit(1) on an error or exception unless it
applies to all packages, instead return None so we can continue to the
next package.
* [syncpackage] Add support for -y or --yes, noted that it should be used
with care.
* Update Standards-Version to 4.7.2, no changes needed.
-- Simon Quigley <tsimonq2@debian.org> Sat, 01 Mar 2025 11:29:54 -0600
ubuntu-dev-tools (0.204) unstable; urgency=medium
[ Simon Quigley ]
* Update Standards-Version to 4.7.1, no changes needed.
* Add several Lintian overrides related to .pyc files.
* Add my name to the copyright file.
* Rename bitesize to lp-bitesize (Closes: #1076224).
* Add a manpage for running-autopkgtests.
* Add a large warning at the top of mk-sbuild encouraging the use of the
unshare backend. This is to provide ample warning to users.
* Remove mail line from default ~/.sbuildrc, to resolve the undeclared
dependency on sendmail (Closes: #1074632).
[ Julien Plissonneau Duquène ]
* Fix reverse-depends -b crash on packages that b-d on themselves
(Closes: #1087760).
-- Simon Quigley <tsimonq2@debian.org> Mon, 24 Feb 2025 19:54:39 -0600
ubuntu-dev-tools (0.203) unstable; urgency=medium
[ Steve Langasek ]
* ubuntu-build: handle TOCTOU issue with the "can be retried" value on
builds.
* Recommend sbuild over pbuilder. sbuild is the tool recommended by
Ubuntu developers whose behavior most closely approximates Launchpad
builds.
[ Florent 'Skia' Jacquet ]
* import-bug-from-debian: handle multipart message (Closes: #969510)
[ Benjamin Drung ]
* import-bug-from-debian: add type hints
* Bump Standards-Version to 4.7.0
* Bump year and add missing files to copyright
* setup.py: add pm-helper
* Format code with black and isort
* Address several issues pointed out by Pylint
* Depend on python3-yaml for pm-helper
-- Benjamin Drung <bdrung@debian.org> Sat, 02 Nov 2024 18:19:24 +0100
ubuntu-dev-tools (0.202) unstable; urgency=medium
[ Steve Langasek ] [ Steve Langasek ]
* ubuntu-build: support --batch with no package names to retry all * ubuntu-build: support --batch with no package names to retry all
@ -15,14 +88,11 @@ ubuntu-dev-tools (0.201ubuntu2) noble; urgency=medium
* ubuntu-build: Handling of proposed vs release pocket default for ppas * ubuntu-build: Handling of proposed vs release pocket default for ppas
* ubuntu-build: update manpage * ubuntu-build: update manpage
-- Steve Langasek <steve.langasek@ubuntu.com> Tue, 12 Mar 2024 17:03:43 -0700 [ Chris Peterson ]
ubuntu-dev-tools (0.201ubuntu1) noble; urgency=medium
* Replace Depends on python3-launchpadlib with Depends on * Replace Depends on python3-launchpadlib with Depends on
python3-launchpadlib-desktop (LP: #2049217) python3-launchpadlib-desktop (LP: #2049217)
-- Chris Peterson <chris.peterson@canonical.com> Fri, 01 Mar 2024 14:08:07 -0800 -- Simon Quigley <tsimonq2@ubuntu.com> Fri, 12 Apr 2024 23:33:14 -0500
ubuntu-dev-tools (0.201) unstable; urgency=medium ubuntu-dev-tools (0.201) unstable; urgency=medium

15
debian/control vendored
View File

@ -1,8 +1,7 @@
Source: ubuntu-dev-tools Source: ubuntu-dev-tools
Section: devel Section: devel
Priority: optional Priority: optional
Maintainer: Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com> Maintainer: Ubuntu Developers <ubuntu-dev-tools@packages.debian.org>
XSBC-Original-Maintainer: Ubuntu Developers <ubuntu-dev-tools@packages.debian.org>
Uploaders: Uploaders:
Benjamin Drung <bdrung@debian.org>, Benjamin Drung <bdrung@debian.org>,
Stefano Rivera <stefanor@debian.org>, Stefano Rivera <stefanor@debian.org>,
@ -27,11 +26,12 @@ Build-Depends:
python3-debianbts, python3-debianbts,
python3-distro-info, python3-distro-info,
python3-httplib2, python3-httplib2,
python3-launchpadlib, python3-launchpadlib-desktop,
python3-pytest, python3-pytest,
python3-requests <!nocheck>, python3-requests <!nocheck>,
python3-setuptools, python3-setuptools,
Standards-Version: 4.6.2 python3-yaml <!nocheck>,
Standards-Version: 4.7.2
Rules-Requires-Root: no Rules-Requires-Root: no
Vcs-Git: https://git.launchpad.net/ubuntu-dev-tools Vcs-Git: https://git.launchpad.net/ubuntu-dev-tools
Vcs-Browser: https://git.launchpad.net/ubuntu-dev-tools Vcs-Browser: https://git.launchpad.net/ubuntu-dev-tools
@ -54,9 +54,10 @@ Depends:
python3-debianbts, python3-debianbts,
python3-distro-info, python3-distro-info,
python3-httplib2, python3-httplib2,
python3-launchpadlib, python3-launchpadlib-desktop,
python3-lazr.restfulclient, python3-lazr.restfulclient,
python3-ubuntutools (= ${binary:Version}), python3-ubuntutools (= ${binary:Version}),
python3-yaml,
sensible-utils, sensible-utils,
sudo, sudo,
tzdata, tzdata,
@ -71,7 +72,7 @@ Recommends:
genisoimage, genisoimage,
lintian, lintian,
patch, patch,
pbuilder | cowbuilder | sbuild, sbuild | pbuilder | cowbuilder,
python3-dns, python3-dns,
quilt, quilt,
reportbug (>= 3.39ubuntu1), reportbug (>= 3.39ubuntu1),
@ -141,7 +142,7 @@ Depends:
python3-debian, python3-debian,
python3-distro-info, python3-distro-info,
python3-httplib2, python3-httplib2,
python3-launchpadlib, python3-launchpadlib-desktop,
python3-lazr.restfulclient, python3-lazr.restfulclient,
python3-requests, python3-requests,
sensible-utils, sensible-utils,

20
debian/copyright vendored
View File

@ -11,6 +11,7 @@ Files: backportpackage
doc/check-symbols.1 doc/check-symbols.1
doc/requestsync.1 doc/requestsync.1
doc/ubuntu-iso.1 doc/ubuntu-iso.1
doc/running-autopkgtests.1
GPL-2 GPL-2
README.updates README.updates
requestsync requestsync
@ -19,12 +20,13 @@ Files: backportpackage
ubuntu-iso ubuntu-iso
ubuntutools/requestsync/*.py ubuntutools/requestsync/*.py
Copyright: 2007, Albert Damen <albrt@gmx.net> Copyright: 2007, Albert Damen <albrt@gmx.net>
2010-2022, Benjamin Drung <bdrung@ubuntu.com> 2010-2024, Benjamin Drung <bdrung@ubuntu.com>
2007-2023, Canonical Ltd. 2007-2023, Canonical Ltd.
2006-2007, Daniel Holbach <daniel.holbach@ubuntu.com> 2006-2007, Daniel Holbach <daniel.holbach@ubuntu.com>
2010, Evan Broder <evan@ebroder.net> 2010, Evan Broder <evan@ebroder.net>
2006-2007, Luke Yelavich <themuso@ubuntu.com> 2006-2007, Luke Yelavich <themuso@ubuntu.com>
2009-2010, Michael Bienia <geser@ubuntu.com> 2009-2010, Michael Bienia <geser@ubuntu.com>
2024-2025, Simon Quigley <tsimonq2@debian.org>
2010-2011, Stefano Rivera <stefanor@ubuntu.com> 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
2008, Stephan Hermann <sh@sourcecode.de> 2008, Stephan Hermann <sh@sourcecode.de>
2007, Steve Kowalik <stevenk@ubuntu.com> 2007, Steve Kowalik <stevenk@ubuntu.com>
@ -72,23 +74,28 @@ License: GPL-2+
On Debian systems, the complete text of the GNU General Public License On Debian systems, the complete text of the GNU General Public License
version 2 can be found in the /usr/share/common-licenses/GPL-2 file. version 2 can be found in the /usr/share/common-licenses/GPL-2 file.
Files: doc/bitesize.1 Files: doc/lp-bitesize.1
doc/check-mir.1 doc/check-mir.1
doc/grab-merge.1 doc/grab-merge.1
doc/merge-changelog.1 doc/merge-changelog.1
doc/pm-helper.1
doc/setup-packaging-environment.1 doc/setup-packaging-environment.1
doc/syncpackage.1 doc/syncpackage.1
bitesize lp-bitesize
check-mir check-mir
GPL-3 GPL-3
grab-merge grab-merge
merge-changelog merge-changelog
pm-helper
pyproject.toml pyproject.toml
run-linters run-linters
running-autopkgtests
setup-packaging-environment setup-packaging-environment
syncpackage syncpackage
Copyright: 2010, Benjamin Drung <bdrung@ubuntu.com> ubuntutools/running_autopkgtests.py
2007-2023, Canonical Ltd. ubuntutools/utils.py
Copyright: 2010-2024, Benjamin Drung <bdrung@ubuntu.com>
2007-2024, Canonical Ltd.
2008, Jonathan Patrick Davies <jpds@ubuntu.com> 2008, Jonathan Patrick Davies <jpds@ubuntu.com>
2008-2010, Martin Pitt <martin.pitt@canonical.com> 2008-2010, Martin Pitt <martin.pitt@canonical.com>
2009, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com> 2009, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
@ -177,11 +184,12 @@ Files: doc/pull-debian-debdiff.1
ubuntutools/version.py ubuntutools/version.py
update-maintainer update-maintainer
.pylintrc .pylintrc
Copyright: 2009-2023, Benjamin Drung <bdrung@ubuntu.com> Copyright: 2009-2024, Benjamin Drung <bdrung@ubuntu.com>
2010, Evan Broder <evan@ebroder.net> 2010, Evan Broder <evan@ebroder.net>
2008, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com> 2008, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
2010-2011, Stefano Rivera <stefanor@ubuntu.com> 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
2017-2021, Dan Streetman <ddstreet@canonical.com> 2017-2021, Dan Streetman <ddstreet@canonical.com>
2024, Canonical Ltd.
License: ISC License: ISC
Permission to use, copy, modify, and/or distribute this software for any Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above purpose with or without fee is hereby granted, provided that the above

3
debian/source/lintian-overrides vendored Normal file
View File

@ -0,0 +1,3 @@
# pyc files are machine-generated; they're expected to have long lines and have unstated copyright
source: file-without-copyright-information *.pyc [debian/copyright]
source: very-long-line-length-in-source-file * > 512 [*.pyc:*]

View File

@ -1,21 +1,21 @@
.TH bitesize "1" "May 9 2010" "ubuntu-dev-tools" .TH lp-bitesize "1" "May 9 2010" "ubuntu-dev-tools"
.SH NAME .SH NAME
bitesize \- Add \fBbitesize\fR tag to bugs and add a comment. lp-bitesize \- Add \fBbitesize\fR tag to bugs and add a comment.
.SH SYNOPSIS .SH SYNOPSIS
.B bitesize \fR<\fIbug number\fR> .B lp-bitesize \fR<\fIbug number\fR>
.br .br
.B bitesize \-\-help .B lp-bitesize \-\-help
.SH DESCRIPTION .SH DESCRIPTION
\fBbitesize\fR adds a bitesize tag to the bug, if it's not there yet. It \fBlp-bitesize\fR adds a bitesize tag to the bug, if it's not there yet. It
also adds a comment to the bug indicating that you are willing to help with also adds a comment to the bug indicating that you are willing to help with
fixing it. fixing it.
It checks for permission to operate on a given bug first, It checks for permission to operate on a given bug first,
then perform required tasks on Launchpad. then perform required tasks on Launchpad.
.SH OPTIONS .SH OPTIONS
Listed below are the command line options for \fBbitesize\fR: Listed below are the command line options for \fBlp-bitesize\fR:
.TP .TP
.BR \-h ", " \-\-help .BR \-h ", " \-\-help
Display a help message and exit. Display a help message and exit.
@ -48,7 +48,7 @@ The default value for \fB--lpinstance\fR.
.BR ubuntu\-dev\-tools (5) .BR ubuntu\-dev\-tools (5)
.SH AUTHORS .SH AUTHORS
\fBbitesize\fR and this manual page were written by Daniel Holbach \fBlp-bitesize\fR and this manual page were written by Daniel Holbach
<daniel.holbach@canonical.com>. <daniel.holbach@canonical.com>.
.PP .PP
Both are released under the terms of the GNU General Public License, version 3. Both are released under the terms of the GNU General Public License, version 3.

View File

@ -20,7 +20,7 @@ like for example \fBpbuilder\-feisty\fP, \fBpbuilder\-sid\fP, \fBpbuilder\-gutsy
.PP .PP
The same applies to \fBcowbuilder\-dist\fP, which uses cowbuilder. The main The same applies to \fBcowbuilder\-dist\fP, which uses cowbuilder. The main
difference between both is that pbuilder compresses the created chroot as a difference between both is that pbuilder compresses the created chroot as a
a tarball, thus using less disc space but needing to uncompress (and possibly tarball, thus using less disc space but needing to uncompress (and possibly
compress) its contents again on each run, and cowbuilder doesn't do this. compress) its contents again on each run, and cowbuilder doesn't do this.
.SH USAGE .SH USAGE

View File

@ -0,0 +1,15 @@
.TH running\-autopkgtests "1" "18 January 2024" "ubuntu-dev-tools"
.SH NAME
running\-autopkgtests \- dumps a list of currently running autopkgtests
.SH SYNOPSIS
.B running\-autopkgtests
.SH DESCRIPTION
Dumps a list of currently running and queued tests in Autopkgtest.
Pass --running to only see running tests, or --queued to only see
queued tests. Passing both will print both, which is the default behavior.
.SH AUTHOR
.B running\-autopkgtests
was written by Chris Peterson <chris.peterson@canonical.com>.

View File

@ -58,7 +58,7 @@ Display more progress information.
\fB\-F\fR, \fB\-\-fakesync\fR \fB\-F\fR, \fB\-\-fakesync\fR
Perform a fakesync, to work around a tarball mismatch between Debian and Perform a fakesync, to work around a tarball mismatch between Debian and
Ubuntu. Ubuntu.
This option ignores blacklisting, and performs a local sync. This option ignores blocklisting, and performs a local sync.
It implies \fB\-\-no\-lp\fR, and will leave a signed \fB.changes\fR file It implies \fB\-\-no\-lp\fR, and will leave a signed \fB.changes\fR file
for you to upload. for you to upload.
.TP .TP

View File

@ -29,6 +29,8 @@ import logging
import re import re
import sys import sys
import webbrowser import webbrowser
from collections.abc import Iterable
from email.message import EmailMessage
import debianbts import debianbts
from launchpadlib.launchpad import Launchpad from launchpadlib.launchpad import Launchpad
@ -37,11 +39,10 @@ from ubuntutools import getLogger
from ubuntutools.config import UDTConfig from ubuntutools.config import UDTConfig
Logger = getLogger() Logger = getLogger()
ATTACHMENT_MAX_SIZE = 2000
def main(): def parse_args() -> argparse.Namespace:
bug_re = re.compile(r"bug=(\d+)")
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument( parser.add_argument(
"-b", "-b",
@ -71,28 +72,15 @@ def main():
"--no-conf", action="store_true", help="Don't read config files or environment variables." "--no-conf", action="store_true", help="Don't read config files or environment variables."
) )
parser.add_argument("bugs", nargs="+", help="Bug number(s) or URL(s)") parser.add_argument("bugs", nargs="+", help="Bug number(s) or URL(s)")
options = parser.parse_args() return parser.parse_args()
config = UDTConfig(options.no_conf)
if options.lpinstance is None:
options.lpinstance = config.get_value("LPINSTANCE")
if options.dry_run: def get_bug_numbers(bug_list: Iterable[str]) -> list[int]:
launchpad = Launchpad.login_anonymously("ubuntu-dev-tools") bug_re = re.compile(r"bug=(\d+)")
options.verbose = True
else:
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
if options.verbose:
Logger.setLevel(logging.DEBUG)
debian = launchpad.distributions["debian"]
ubuntu = launchpad.distributions["ubuntu"]
lp_debbugs = launchpad.bug_trackers.getByName(name="debbugs")
bug_nums = [] bug_nums = []
for bug_num in options.bugs: for bug_num in bug_list:
if bug_num.startswith("http"): if bug_num.startswith("http"):
# bug URL # bug URL
match = bug_re.search(bug_num) match = bug_re.search(bug_num)
@ -101,24 +89,81 @@ def main():
sys.exit(1) sys.exit(1)
bug_num = match.groups()[0] bug_num = match.groups()[0]
bug_num = bug_num.lstrip("#") bug_num = bug_num.lstrip("#")
bug_num = int(bug_num) bug_nums.append(int(bug_num))
bug_nums.append(bug_num)
bugs = debianbts.get_status(bug_nums) return bug_nums
if not bugs:
Logger.error("Cannot find any of the listed bugs") def walk_multipart_message(message: EmailMessage) -> tuple[str, list[tuple[int, EmailMessage]]]:
sys.exit(1) summary = ""
attachments = []
i = 1
for part in message.walk():
content_type = part.get_content_type()
if content_type.startswith("multipart/"):
# we're already iterating on multipart items
# let's just skip the multipart extra metadata
continue
if content_type == "application/pgp-signature":
# we're not interested in importing pgp signatures
continue
if part.is_attachment():
attachments.append((i, part))
elif content_type.startswith("image/"):
# images here are not attachment, they are inline, but Launchpad can't handle that,
# so let's add them as attachments
summary += f"Message part #{i}\n"
summary += f"[inline image '{part.get_filename()}']\n\n"
attachments.append((i, part))
elif content_type.startswith("text/html"):
summary += f"Message part #{i}\n"
summary += "[inline html]\n\n"
attachments.append((i, part))
elif content_type == "text/plain":
summary += f"Message part #{i}\n"
summary += part.get_content() + "\n"
else:
raise RuntimeError(
f"""Unknown message part
Your Debian bug is too weird to be imported in Launchpad, sorry.
You can fix that by patching this script in ubuntu-dev-tools.
Faulty message part:
{part}"""
)
i += 1
return summary, attachments
def process_bugs(
bugs: Iterable[debianbts.Bugreport],
launchpad: Launchpad,
package: str,
dry_run: bool = True,
browserless: bool = False,
) -> bool:
debian = launchpad.distributions["debian"]
ubuntu = launchpad.distributions["ubuntu"]
lp_debbugs = launchpad.bug_trackers.getByName(name="debbugs")
err = False err = False
for bug in bugs: for bug in bugs:
ubupackage = package = bug.source ubupackage = bug.source
if options.package: if package:
ubupackage = options.package ubupackage = package
bug_num = bug.bug_num bug_num = bug.bug_num
subject = bug.subject subject = bug.subject
log = debianbts.get_bug_log(bug_num) log = debianbts.get_bug_log(bug_num)
summary = log[0]["message"].get_payload() message = log[0]["message"]
assert isinstance(message, EmailMessage)
attachments: list[tuple[int, EmailMessage]] = []
if message.is_multipart():
summary, attachments = walk_multipart_message(message)
else:
summary = str(message.get_payload())
target = ubuntu.getSourcePackage(name=ubupackage) target = ubuntu.getSourcePackage(name=ubupackage)
if target is None: if target is None:
Logger.error( Logger.error(
@ -137,24 +182,73 @@ def main():
Logger.debug("Subject: %s", subject) Logger.debug("Subject: %s", subject)
Logger.debug("Description: ") Logger.debug("Description: ")
Logger.debug(description) Logger.debug(description)
for i, attachment in attachments:
Logger.debug("Attachment #%s (%s)", i, attachment.get_filename() or "inline")
Logger.debug("Content:")
if attachment.get_content_type() == "text/plain":
content = attachment.get_content()
if len(content) > ATTACHMENT_MAX_SIZE:
content = (
content[:ATTACHMENT_MAX_SIZE]
+ f" [attachment cropped after {ATTACHMENT_MAX_SIZE} characters...]"
)
Logger.debug(content)
else:
Logger.debug("[data]")
if options.dry_run: if dry_run:
Logger.info("Dry-Run: not creating Ubuntu bug.") Logger.info("Dry-Run: not creating Ubuntu bug.")
continue continue
u_bug = launchpad.bugs.createBug(target=target, title=subject, description=description) u_bug = launchpad.bugs.createBug(target=target, title=subject, description=description)
for i, attachment in attachments:
name = f"#{i}-{attachment.get_filename() or "inline"}"
content = attachment.get_content()
if isinstance(content, str):
# Launchpad only wants bytes
content = content.encode()
u_bug.addAttachment(
filename=name,
data=content,
comment=f"Imported from Debian bug http://bugs.debian.org/{bug_num}",
)
d_sp = debian.getSourcePackage(name=package) d_sp = debian.getSourcePackage(name=package)
if d_sp is None and options.package: if d_sp is None and package:
d_sp = debian.getSourcePackage(name=options.package) d_sp = debian.getSourcePackage(name=package)
d_task = u_bug.addTask(target=d_sp) d_task = u_bug.addTask(target=d_sp)
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs) d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
d_task.bug_watch = d_watch d_task.bug_watch = d_watch
d_task.lp_save() d_task.lp_save()
Logger.info("Opened %s", u_bug.web_link) Logger.info("Opened %s", u_bug.web_link)
if not options.browserless: if not browserless:
webbrowser.open(u_bug.web_link) webbrowser.open(u_bug.web_link)
if err: return err
def main() -> None:
options = parse_args()
config = UDTConfig(options.no_conf)
if options.lpinstance is None:
options.lpinstance = config.get_value("LPINSTANCE")
if options.dry_run:
launchpad = Launchpad.login_anonymously("ubuntu-dev-tools")
options.verbose = True
else:
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
if options.verbose:
Logger.setLevel(logging.DEBUG)
bugs = debianbts.get_status(get_bug_numbers(options.bugs))
if not bugs:
Logger.error("Cannot find any of the listed bugs")
sys.exit(1)
if process_bugs(bugs, launchpad, options.package, options.dry_run, options.browserless):
sys.exit(1) sys.exit(1)

View File

@ -155,6 +155,7 @@ proxy="_unset_"
DEBOOTSTRAP_NO_CHECK_GPG=0 DEBOOTSTRAP_NO_CHECK_GPG=0
EATMYDATA=1 EATMYDATA=1
CCACHE=0 CCACHE=0
USE_PKGBINARYMANGLER=0
while :; do while :; do
case "$1" in case "$1" in
@ -303,10 +304,26 @@ if [ ! -w /var/lib/sbuild ]; then
# Prepare a usable default .sbuildrc # Prepare a usable default .sbuildrc
if [ ! -e ~/.sbuildrc ]; then if [ ! -e ~/.sbuildrc ]; then
cat > ~/.sbuildrc <<EOM cat > ~/.sbuildrc <<EOM
# *** VERIFY AND UPDATE \$mailto and \$maintainer_name BELOW *** # *** THIS COMMAND IS DEPRECATED ***
#
# In sbuild 0.87.0 and later, the unshare backend is available. This is
# expected to become the default in a future release.
#
# This is the new preferred way of building Debian packages, making the manual
# creation of schroots no longer necessary. To retain the default behavior,
# you may remove this comment block and continue.
#
# To test the unshare backend while retaining the default settings, run sbuild
# with --chroot-mode=unshare like this:
# $ sbuild --chroot-mode=unshare --dist=unstable hello
#
# To switch to the unshare backend by default (recommended), uncomment the
# following lines and delete the rest of the file (with the exception of the
# last two lines):
#\$chroot_mode = 'unshare';
#\$unshare_mmdebstrap_keep_tarball = 1;
# Mail address where logs are sent to (mandatory, no default!) # *** VERIFY AND UPDATE \$mailto and \$maintainer_name BELOW ***
\$mailto = '$USER';
# Name to use as override in .changes files for the Maintainer: field # Name to use as override in .changes files for the Maintainer: field
#\$maintainer_name='$USER <$USER@localhost>'; #\$maintainer_name='$USER <$USER@localhost>';
@ -651,6 +668,7 @@ ubuntu)
if ubuntu_dist_ge "$RELEASE" "edgy"; then if ubuntu_dist_ge "$RELEASE" "edgy"; then
# Add pkgbinarymangler (edgy and later) # Add pkgbinarymangler (edgy and later)
BUILD_PKGS="$BUILD_PKGS pkgbinarymangler" BUILD_PKGS="$BUILD_PKGS pkgbinarymangler"
USE_PKGBINARYMANGLER=1
# Disable recommends for a smaller chroot (gutsy and later only) # Disable recommends for a smaller chroot (gutsy and later only)
if ubuntu_dist_ge "$RELEASE" "gutsy"; then if ubuntu_dist_ge "$RELEASE" "gutsy"; then
BUILD_PKGS="--no-install-recommends $BUILD_PKGS" BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
@ -910,8 +928,8 @@ if [ -n "$TEMP_PREFERENCES" ]; then
sudo mv "$TEMP_PREFERENCES" $MNT/etc/apt/preferences.d/proposed.pref sudo mv "$TEMP_PREFERENCES" $MNT/etc/apt/preferences.d/proposed.pref
fi fi
# Copy the timezone (comment this out if you want to leave the chroot at UTC) # Copy the timezone (uncomment this if you want to use your local time zone)
sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/ #sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/
# Create a schroot entry for this chroot # Create a schroot entry for this chroot
TEMP_SCHROOTCONF=`mktemp -t schrootconf-XXXXXX` TEMP_SCHROOTCONF=`mktemp -t schrootconf-XXXXXX`
TEMPLATE_SCHROOTCONF=~/.mk-sbuild.schroot.conf TEMPLATE_SCHROOTCONF=~/.mk-sbuild.schroot.conf
@ -1030,6 +1048,25 @@ EOF
EOM EOM
fi fi
if [ "$USE_PKGBINARYMANGLER" = 1 ]; then
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
mkdir -p /etc/pkgbinarymangler/
cat > /etc/pkgbinarymangler/maintainermangler.conf <<EOF
# pkgmaintainermangler configuration file
# pkgmaintainermangler will do nothing unless enable is set to "true"
enable: true
# Configure what happens if /CurrentlyBuilding is present, but invalid
# (i. e. it does not contain a Package: field). If "ignore" (default),
# the file is ignored (i. e. the Maintainer field is mangled) and a
# warning is printed. If "fail" (or any other value), pkgmaintainermangler
# exits with an error, which causes a package build to fail.
invalid_currentlybuilding: ignore
EOF
EOM
fi
if [ -n "$TARGET_ARCH" ]; then if [ -n "$TARGET_ARCH" ]; then
sudo bash -c "cat >> $MNT/finish.sh" <<EOM sudo bash -c "cat >> $MNT/finish.sh" <<EOM
# Configure target architecture # Configure target architecture
@ -1048,7 +1085,7 @@ apt-get update || true
echo set debconf/frontend Noninteractive | debconf-communicate echo set debconf/frontend Noninteractive | debconf-communicate
echo set debconf/priority critical | debconf-communicate echo set debconf/priority critical | debconf-communicate
# Install basic build tool set, trying to match buildd # Install basic build tool set, trying to match buildd
apt-get -y --force-yes install $BUILD_PKGS apt-get -y --force-yes -o Dpkg::Options::="--force-confold" install $BUILD_PKGS
# Set up expected /dev entries # Set up expected /dev entries
if [ ! -r /dev/stdin ]; then ln -s /proc/self/fd/0 /dev/stdin; fi if [ ! -r /dev/stdin ]; then ln -s /proc/self/fd/0 /dev/stdin; fi
if [ ! -r /dev/stdout ]; then ln -s /proc/self/fd/1 /dev/stdout; fi if [ ! -r /dev/stdout ]; then ln -s /proc/self/fd/1 /dev/stdout; fi

View File

@ -294,7 +294,9 @@ class PbuilderDist:
if self.target_distro in self._debian_distros: if self.target_distro in self._debian_distros:
try: try:
codename = self.debian_distro_info.codename(self.target_distro, default=self.target_distro) codename = self.debian_distro_info.codename(
self.target_distro, default=self.target_distro
)
except DistroDataOutdated as error: except DistroDataOutdated as error:
Logger.warning(error) Logger.warning(error)
if codename in (self.debian_distro_info.devel(), "experimental"): if codename in (self.debian_distro_info.devel(), "experimental"):

View File

@ -15,53 +15,51 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
import lzma import lzma
from argparse import ArgumentParser
import sys import sys
import webbrowser import webbrowser
import yaml from argparse import ArgumentParser
import yaml
from launchpadlib.launchpad import Launchpad from launchpadlib.launchpad import Launchpad
from ubuntutools.utils import get_url from ubuntutools.utils import get_url
# proposed-migration is only concerned with the devel series; unlike other # proposed-migration is only concerned with the devel series; unlike other
# tools, don't make this configurable # tools, don't make this configurable
excuses_url = 'https://ubuntu-archive-team.ubuntu.com/proposed-migration/' \ excuses_url = "https://ubuntu-archive-team.ubuntu.com/proposed-migration/update_excuses.yaml.xz"
+ 'update_excuses.yaml.xz'
def get_proposed_version(excuses, package): def get_proposed_version(excuses, package):
for k in excuses['sources']: for k in excuses["sources"]:
if k['source'] == package: if k["source"] == package:
return k.get('new-version') return k.get("new-version")
return None return None
def claim_excuses_bug(launchpad, bug, package): def claim_excuses_bug(launchpad, bug, package):
print("LP: #%d: %s" % (bug.id, bug.title)) print(f"LP: #{bug.id}: {bug.title}")
ubuntu = launchpad.distributions['ubuntu'] ubuntu = launchpad.distributions["ubuntu"]
series = ubuntu.current_series.fullseriesname series = ubuntu.current_series.fullseriesname
for task in bug.bug_tasks: for task in bug.bug_tasks:
# targeting to a series doesn't make the default task disappear, # targeting to a series doesn't make the default task disappear,
# it just makes it useless # it just makes it useless
if task.bug_target_name == "%s (%s)" % (package, series): if task.bug_target_name == f"{package} ({series})":
our_task = task our_task = task
break break
elif task.bug_target_name == "%s (Ubuntu)" % package: if task.bug_target_name == f"{package} (Ubuntu)":
our_task = task our_task = task
if our_task.assignee == launchpad.me: if our_task.assignee == launchpad.me:
print("Bug already assigned to you.") print("Bug already assigned to you.")
return True return True
elif our_task.assignee: if our_task.assignee:
print("Currently assigned to %s" % our_task.assignee.name) print(f"Currently assigned to {our_task.assignee.name}")
print('''Do you want to claim this bug? [yN] ''', end="") print("""Do you want to claim this bug? [yN] """, end="")
sys.stdout.flush() sys.stdout.flush()
response = sys.stdin.readline() response = sys.stdin.readline()
if response.strip().lower().startswith('y'): if response.strip().lower().startswith("y"):
our_task.assignee = launchpad.me our_task.assignee = launchpad.me
our_task.lp_save() our_task.lp_save()
return True return True
@ -72,38 +70,37 @@ def claim_excuses_bug(launchpad, bug, package):
def create_excuses_bug(launchpad, package, version): def create_excuses_bug(launchpad, package, version):
print("Will open a new bug") print("Will open a new bug")
bug = launchpad.bugs.createBug( bug = launchpad.bugs.createBug(
title = 'proposed-migration for %s %s' % (package, version), title=f"proposed-migration for {package} {version}",
tags = ('update-excuse'), tags=("update-excuse"),
target = 'https://api.launchpad.net/devel/ubuntu/+source/%s' % package, target=f"https://api.launchpad.net/devel/ubuntu/+source/{package}",
description = '%s %s is stuck in -proposed.' % (package, version) description=f"{package} {version} is stuck in -proposed.",
) )
task = bug.bug_tasks[0] task = bug.bug_tasks[0]
task.assignee = launchpad.me task.assignee = launchpad.me
task.lp_save() task.lp_save()
print("Opening %s in browser" % bug.web_link) print(f"Opening {bug.web_link} in browser")
webbrowser.open(bug.web_link) webbrowser.open(bug.web_link)
return bug return bug
def has_excuses_bugs(launchpad, package): def has_excuses_bugs(launchpad, package):
ubuntu = launchpad.distributions['ubuntu'] ubuntu = launchpad.distributions["ubuntu"]
pkg = ubuntu.getSourcePackage(name=package) pkg = ubuntu.getSourcePackage(name=package)
if not pkg: if not pkg:
raise ValueError(f"No such source package: {package}") raise ValueError(f"No such source package: {package}")
tasks = pkg.searchTasks(tags=['update-excuse'], order_by=['id']) tasks = pkg.searchTasks(tags=["update-excuse"], order_by=["id"])
bugs = [task.bug for task in tasks] bugs = [task.bug for task in tasks]
if not bugs: if not bugs:
return False return False
if len(bugs) == 1: if len(bugs) == 1:
print("There is 1 open update-excuse bug against %s" % package) print(f"There is 1 open update-excuse bug against {package}")
else: else:
print("There are %d open update-excuse bugs against %s" \ print(f"There are {len(bugs)} open update-excuse bugs against {package}")
% (len(bugs), package))
for bug in bugs: for bug in bugs:
if claim_excuses_bug(launchpad, bug, package): if claim_excuses_bug(launchpad, bug, package):
@ -114,17 +111,14 @@ def has_excuses_bugs(launchpad, package):
def main(): def main():
parser = ArgumentParser() parser = ArgumentParser()
parser.add_argument("-l", "--launchpad", dest="launchpad_instance", default="production")
parser.add_argument( parser.add_argument(
"-l", "--launchpad", dest="launchpad_instance", default="production") "-v", "--verbose", default=False, action="store_true", help="be more verbose"
parser.add_argument( )
"-v", "--verbose", default=False, action="store_true", parser.add_argument("package", nargs="?", help="act on this package only")
help="be more verbose")
parser.add_argument(
'package', nargs='?', help="act on this package only")
args = parser.parse_args() args = parser.parse_args()
args.launchpad = Launchpad.login_with( args.launchpad = Launchpad.login_with("pm-helper", args.launchpad_instance, version="devel")
"pm-helper", args.launchpad_instance, version="devel")
f = get_url(excuses_url, False) f = get_url(excuses_url, False)
with lzma.open(f) as lzma_f: with lzma.open(f) as lzma_f:
@ -135,15 +129,14 @@ def main():
if not has_excuses_bugs(args.launchpad, args.package): if not has_excuses_bugs(args.launchpad, args.package):
proposed_version = get_proposed_version(excuses, args.package) proposed_version = get_proposed_version(excuses, args.package)
if not proposed_version: if not proposed_version:
print("Package %s not found in -proposed." % args.package) print(f"Package {args.package} not found in -proposed.")
sys.exit(1) sys.exit(1)
create_excuses_bug(args.launchpad, args.package, create_excuses_bug(args.launchpad, args.package, proposed_version)
proposed_version)
except ValueError as e: except ValueError as e:
sys.stderr.write(f"{e}\n") sys.stderr.write(f"{e}\n")
else: else:
pass # for now pass # for now
if __name__ == '__main__': if __name__ == "__main__":
sys.exit(main()) sys.exit(main())

View File

@ -183,7 +183,7 @@ def display_verbose(package, values):
Logger.info("No reverse dependencies found") Logger.info("No reverse dependencies found")
return return
def log_package(values, package, arch, dependency, offset=0): def log_package(values, package, arch, dependency, visited, offset=0):
line = f"{' ' * offset}* {package}" line = f"{' ' * offset}* {package}"
if all_archs and set(arch) != all_archs: if all_archs and set(arch) != all_archs:
line += f" [{' '.join(sorted(arch))}]" line += f" [{' '.join(sorted(arch))}]"
@ -192,6 +192,9 @@ def display_verbose(package, values):
line += " " * (30 - len(line)) line += " " * (30 - len(line))
line += f" (for {dependency})" line += f" (for {dependency})"
Logger.info(line) Logger.info(line)
if package in visited:
return
visited = visited.copy().add(package)
data = values.get(package) data = values.get(package)
if data: if data:
offset = offset + 1 offset = offset + 1
@ -202,6 +205,7 @@ def display_verbose(package, values):
rdep["Package"], rdep["Package"],
rdep.get("Architectures", all_archs), rdep.get("Architectures", all_archs),
rdep.get("Dependency"), rdep.get("Dependency"),
visited,
offset, offset,
) )
@ -223,6 +227,7 @@ def display_verbose(package, values):
rdep["Package"], rdep["Package"],
rdep.get("Architectures", all_archs), rdep.get("Architectures", all_archs),
rdep.get("Dependency"), rdep.get("Dependency"),
{package},
) )
Logger.info("") Logger.info("")

View File

@ -4,13 +4,31 @@
# Authors: # Authors:
# Andy P. Whitcroft # Andy P. Whitcroft
# Christian Ehrhardt # Christian Ehrhardt
# Chris Peterson <chris.peterson@canonical.com>
#
# Copyright (C) 2024 Canonical Ltd.
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Dumps a list of currently running tests in Autopkgtest""" """Dumps a list of currently running tests in Autopkgtest"""
__example__ = """ __example__ = """
Display first listed test running on amd64 hardware: Display first listed test running on amd64 hardware:
$ running-autopkgtests | grep amd64 | head -n1 $ running-autopkgtests | grep amd64 | head -n1
R 0:01:40 systemd-upstream - focal amd64 upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb', 'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true', 'UPSTREAM_PULL_REQUEST=23153', 'GITHUB_STATUSES_URL=https://api.github.com/repos/systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e'] R 0:01:40 systemd-upstream - focal amd64\
upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb',\
'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true',\
'UPSTREAM_PULL_REQUEST=23153',\
'GITHUB_STATUSES_URL=https://api.github.com/repos/\
systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
""" """
import sys import sys
@ -33,16 +51,10 @@ def parse_args():
formatter_class=RawDescriptionHelpFormatter, formatter_class=RawDescriptionHelpFormatter,
) )
parser.add_argument( parser.add_argument(
"-r", "-r", "--running", action="store_true", help="Print runnning autopkgtests (default: true)"
"--running",
action="store_true",
help="Print runnning autopkgtests (default: true)",
) )
parser.add_argument( parser.add_argument(
"-q", "-q", "--queued", action="store_true", help="Print queued autopkgtests (default: true)"
"--queued",
action="store_true",
help="Print queued autopkgtests (default: true)",
) )
options = parser.parse_args() options = parser.parse_args()

View File

@ -32,17 +32,18 @@ def make_pep440_compliant(version: str) -> str:
scripts = [ scripts = [
"backportpackage", "backportpackage",
"bitesize",
"check-mir", "check-mir",
"check-symbols", "check-symbols",
"dch-repeat", "dch-repeat",
"grab-merge", "grab-merge",
"grep-merges", "grep-merges",
"import-bug-from-debian", "import-bug-from-debian",
"lp-bitesize",
"merge-changelog", "merge-changelog",
"mk-sbuild", "mk-sbuild",
"pbuilder-dist", "pbuilder-dist",
"pbuilder-dist-simple", "pbuilder-dist-simple",
"pm-helper",
"pull-pkg", "pull-pkg",
"pull-debian-debdiff", "pull-debian-debdiff",
"pull-debian-source", "pull-debian-source",

View File

@ -49,6 +49,7 @@ from ubuntutools.requestsync.mail import get_debian_srcpkg as requestsync_mail_g
from ubuntutools.version import Version from ubuntutools.version import Version
Logger = getLogger() Logger = getLogger()
cached_sync_blocklist = None
def remove_signature(dscname): def remove_signature(dscname):
@ -143,7 +144,7 @@ def sync_dsc(
if ubuntu_ver.is_modified_in_ubuntu(): if ubuntu_ver.is_modified_in_ubuntu():
if not force: if not force:
Logger.error("--force is required to discard Ubuntu changes.") Logger.error("--force is required to discard Ubuntu changes.")
sys.exit(1) return None
Logger.warning( Logger.warning(
"Overwriting modified Ubuntu version %s, setting current version to %s", "Overwriting modified Ubuntu version %s, setting current version to %s",
@ -157,7 +158,7 @@ def sync_dsc(
src_pkg.pull() src_pkg.pull()
except DownloadError as e: except DownloadError as e:
Logger.error("Failed to download: %s", str(e)) Logger.error("Failed to download: %s", str(e))
sys.exit(1) return None
src_pkg.unpack() src_pkg.unpack()
needs_fakesync = not (need_orig or ubu_pkg.verify_orig()) needs_fakesync = not (need_orig or ubu_pkg.verify_orig())
@ -166,13 +167,13 @@ def sync_dsc(
Logger.warning("Performing a fakesync") Logger.warning("Performing a fakesync")
elif not needs_fakesync and fakesync: elif not needs_fakesync and fakesync:
Logger.error("Fakesync not required, aborting.") Logger.error("Fakesync not required, aborting.")
sys.exit(1) return None
elif needs_fakesync and not fakesync: elif needs_fakesync and not fakesync:
Logger.error( Logger.error(
"The checksums of the Debian and Ubuntu packages " "The checksums of the Debian and Ubuntu packages "
"mismatch. A fake sync using --fakesync is required." "mismatch. A fake sync using --fakesync is required."
) )
sys.exit(1) return None
if fakesync: if fakesync:
# Download Ubuntu files (override Debian source tarballs) # Download Ubuntu files (override Debian source tarballs)
@ -180,7 +181,7 @@ def sync_dsc(
ubu_pkg.pull() ubu_pkg.pull()
except DownloadError as e: except DownloadError as e:
Logger.error("Failed to download: %s", str(e)) Logger.error("Failed to download: %s", str(e))
sys.exit(1) return None
# change into package directory # change into package directory
directory = src_pkg.source + "-" + new_ver.upstream_version directory = src_pkg.source + "-" + new_ver.upstream_version
@ -265,7 +266,7 @@ def sync_dsc(
returncode = subprocess.call(cmd) returncode = subprocess.call(cmd)
if returncode != 0: if returncode != 0:
Logger.error("Source-only build with debuild failed. Please check build log above.") Logger.error("Source-only build with debuild failed. Please check build log above.")
sys.exit(1) return None
def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror): def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
@ -295,7 +296,7 @@ def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
udtexceptions.SeriesNotFoundException, udtexceptions.SeriesNotFoundException,
) as e: ) as e:
Logger.error(str(e)) Logger.error(str(e))
sys.exit(1) return None
if version is None: if version is None:
version = Version(debian_srcpkg.getVersion()) version = Version(debian_srcpkg.getVersion())
try: try:
@ -306,7 +307,7 @@ def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
ubuntu_version = Version("~") ubuntu_version = Version("~")
except udtexceptions.SeriesNotFoundException as e: except udtexceptions.SeriesNotFoundException as e:
Logger.error(str(e)) Logger.error(str(e))
sys.exit(1) return None
if ubuntu_version >= version: if ubuntu_version >= version:
# The LP importer is maybe out of date # The LP importer is maybe out of date
debian_srcpkg = requestsync_mail_get_debian_srcpkg(package, dist) debian_srcpkg = requestsync_mail_get_debian_srcpkg(package, dist)
@ -320,7 +321,7 @@ def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
ubuntu_version, ubuntu_version,
ubuntu_release, ubuntu_release,
) )
sys.exit(1) return None
if component is None: if component is None:
component = debian_srcpkg.getComponent() component = debian_srcpkg.getComponent()
@ -329,7 +330,7 @@ def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
return DebianSourcePackage(package, version.full_version, component, mirrors=mirrors) return DebianSourcePackage(package, version.full_version, component, mirrors=mirrors)
def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False): def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False, yes=False):
"""Copy a source package from Debian to Ubuntu using the Launchpad API.""" """Copy a source package from Debian to Ubuntu using the Launchpad API."""
ubuntu = Distribution("ubuntu") ubuntu = Distribution("ubuntu")
debian_archive = Distribution("debian").getArchive() debian_archive = Distribution("debian").getArchive()
@ -352,7 +353,7 @@ def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
"Debian version %s has not been picked up by LP yet. Please try again later.", "Debian version %s has not been picked up by LP yet. Please try again later.",
src_pkg.version, src_pkg.version,
) )
sys.exit(1) return None
try: try:
ubuntu_spph = get_ubuntu_srcpkg(src_pkg.source, ubuntu_series, ubuntu_pocket) ubuntu_spph = get_ubuntu_srcpkg(src_pkg.source, ubuntu_series, ubuntu_pocket)
@ -373,7 +374,7 @@ def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
base_version = ubuntu_version.get_related_debian_version() base_version = ubuntu_version.get_related_debian_version()
if not force and ubuntu_version.is_modified_in_ubuntu(): if not force and ubuntu_version.is_modified_in_ubuntu():
Logger.error("--force is required to discard Ubuntu changes.") Logger.error("--force is required to discard Ubuntu changes.")
sys.exit(1) return None
# Check whether a fakesync would be required. # Check whether a fakesync would be required.
if not src_pkg.dsc.compare_dsc(ubuntu_pkg.dsc): if not src_pkg.dsc.compare_dsc(ubuntu_pkg.dsc):
@ -381,7 +382,7 @@ def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
"The checksums of the Debian and Ubuntu packages " "The checksums of the Debian and Ubuntu packages "
"mismatch. A fake sync using --fakesync is required." "mismatch. A fake sync using --fakesync is required."
) )
sys.exit(1) return None
except udtexceptions.PackageNotFoundException: except udtexceptions.PackageNotFoundException:
base_version = Version("~") base_version = Version("~")
Logger.info( Logger.info(
@ -402,9 +403,10 @@ def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
if sponsoree: if sponsoree:
Logger.info("Sponsoring this sync for %s (%s)", sponsoree.display_name, sponsoree.name) Logger.info("Sponsoring this sync for %s (%s)", sponsoree.display_name, sponsoree.name)
answer = YesNoQuestion().ask("Sync this package", "no") if not yes:
if answer != "yes": answer = YesNoQuestion().ask("Sync this package", "no")
return if answer != "yes":
return
try: try:
ubuntu_archive.copyPackage( ubuntu_archive.copyPackage(
@ -419,26 +421,29 @@ def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
except HTTPError as error: except HTTPError as error:
Logger.error("HTTP Error %s: %s", error.response.status, error.response.reason) Logger.error("HTTP Error %s: %s", error.response.status, error.response.reason)
Logger.error(error.content) Logger.error(error.content)
sys.exit(1) return None
Logger.info("Request succeeded; you should get an e-mail once it is processed.") Logger.info("Request succeeded; you should get an e-mail once it is processed.")
bugs = sorted(set(bugs)) bugs = sorted(set(bugs))
if bugs: if bugs:
Logger.info("Launchpad bugs to be closed: %s", ", ".join(str(bug) for bug in bugs)) Logger.info("Launchpad bugs to be closed: %s", ", ".join(str(bug) for bug in bugs))
Logger.info("Please wait for the sync to be successful before closing bugs.") Logger.info("Please wait for the sync to be successful before closing bugs.")
answer = YesNoQuestion().ask("Close bugs", "yes") if yes:
if answer == "yes":
close_bugs(bugs, src_pkg.source, src_pkg.version.full_version, changes, sponsoree) close_bugs(bugs, src_pkg.source, src_pkg.version.full_version, changes, sponsoree)
else:
answer = YesNoQuestion().ask("Close bugs", "yes")
if answer == "yes":
close_bugs(bugs, src_pkg.source, src_pkg.version.full_version, changes, sponsoree)
def is_blacklisted(query): def is_blocklisted(query):
"""Determine if package "query" is in the sync blacklist """Determine if package "query" is in the sync blocklist
Returns tuple of (blacklisted, comments) Returns tuple of (blocklisted, comments)
blacklisted is one of False, 'CURRENT', 'ALWAYS' blocklisted is one of False, 'CURRENT', 'ALWAYS'
""" """
series = Launchpad.distributions["ubuntu"].current_series series = Launchpad.distributions["ubuntu"].current_series
lp_comments = series.getDifferenceComments(source_package_name=query) lp_comments = series.getDifferenceComments(source_package_name=query)
blacklisted = False blocklisted = False
comments = [ comments = [
f"{c.body_text}\n -- {c.comment_author.name}" f"{c.body_text}\n -- {c.comment_author.name}"
f" {c.comment_date.strftime('%a, %d %b %Y %H:%M:%S +0000')}" f" {c.comment_date.strftime('%a, %d %b %Y %H:%M:%S +0000')}"
@ -446,32 +451,38 @@ def is_blacklisted(query):
] ]
for diff in series.getDifferencesTo(source_package_name_filter=query): for diff in series.getDifferencesTo(source_package_name_filter=query):
if diff.status == "Blacklisted current version" and blacklisted != "ALWAYS": if diff.status == "Blacklisted current version" and blocklisted != "ALWAYS":
blacklisted = "CURRENT" blocklisted = "CURRENT"
if diff.status == "Blacklisted always": if diff.status == "Blacklisted always":
blacklisted = "ALWAYS" blocklisted = "ALWAYS"
# Old blacklist: global cached_sync_blocklist
url = "https://ubuntu-archive-team.ubuntu.com/sync-blacklist.txt" if not cached_sync_blocklist:
with urllib.request.urlopen(url) as f: url = "https://ubuntu-archive-team.ubuntu.com/sync-blocklist.txt"
applicable_lines = [] try:
for line in f: with urllib.request.urlopen(url) as f:
line = line.decode("utf-8") cached_sync_blocklist = f.read().decode("utf-8")
if not line.strip(): except:
applicable_lines = [] print("WARNING: unable to download the sync blocklist. Erring on the side of caution.")
continue return ("ALWAYS", "INTERNAL ERROR: Unable to fetch sync blocklist")
applicable_lines.append(line)
try:
line = line[: line.index("#")]
except ValueError:
pass
source = line.strip()
if source and fnmatch.fnmatch(query, source):
comments += ["From sync-blacklist.txt:"] + applicable_lines
blacklisted = "ALWAYS"
break
return (blacklisted, comments) applicable_lines = []
for line in cached_sync_blocklist.splitlines():
if not line.strip():
applicable_lines = []
continue
applicable_lines.append(line)
try:
line = line[:line.index("#")]
except ValueError:
pass
source = line.strip()
if source and fnmatch.fnmatch(query, source):
comments += ["From sync-blocklist.txt:"] + applicable_lines
blocklisted = "ALWAYS"
break
return (blocklisted, comments)
def close_bugs(bugs, package, version, changes, sponsoree): def close_bugs(bugs, package, version, changes, sponsoree):
@ -508,6 +519,12 @@ def parse():
epilog = f"See {os.path.basename(sys.argv[0])}(1) for more info." epilog = f"See {os.path.basename(sys.argv[0])}(1) for more info."
parser = argparse.ArgumentParser(usage=usage, epilog=epilog) parser = argparse.ArgumentParser(usage=usage, epilog=epilog)
parser.add_argument(
"-y",
"--yes",
action="store_true",
help="Automatically sync without prompting. Use with caution and care."
)
parser.add_argument("-d", "--distribution", help="Debian distribution to sync from.") parser.add_argument("-d", "--distribution", help="Debian distribution to sync from.")
parser.add_argument("-r", "--release", help="Specify target Ubuntu release.") parser.add_argument("-r", "--release", help="Specify target Ubuntu release.")
parser.add_argument("-V", "--debian-version", help="Specify the version to sync from.") parser.add_argument("-V", "--debian-version", help="Specify the version to sync from.")
@ -712,36 +729,38 @@ def main():
args.release, args.release,
args.debian_mirror, args.debian_mirror,
) )
if not src_pkg:
continue
blacklisted, comments = is_blacklisted(src_pkg.source) blocklisted, comments = is_blocklisted(src_pkg.source)
blacklist_fail = False blocklist_fail = False
if blacklisted: if blocklisted:
messages = [] messages = []
if blacklisted == "CURRENT": if blocklisted == "CURRENT":
Logger.debug( Logger.debug(
"Source package %s is temporarily blacklisted " "Source package %s is temporarily blocklisted "
"(blacklisted_current). " "(blocklisted_current). "
"Ubuntu ignores these for now. " "Ubuntu ignores these for now. "
"See also LP: #841372", "See also LP: #841372",
src_pkg.source, src_pkg.source,
) )
else: else:
if args.fakesync: if args.fakesync:
messages += ["Doing a fakesync, overriding blacklist."] messages += ["Doing a fakesync, overriding blocklist."]
else: else:
blacklist_fail = True blocklist_fail = True
messages += [ messages += [
"If this package needs a fakesync, use --fakesync", "If this package needs a fakesync, use --fakesync",
"If you think this package shouldn't be " "If you think this package shouldn't be "
"blacklisted, please file a bug explaining your " "blocklisted, please file a bug explaining your "
"reasoning and subscribe ~ubuntu-archive.", "reasoning and subscribe ~ubuntu-archive.",
] ]
if blacklist_fail: if blocklist_fail:
Logger.error("Source package %s is blacklisted.", src_pkg.source) Logger.error("Source package %s is blocklisted.", src_pkg.source)
elif blacklisted == "ALWAYS": elif blocklisted == "ALWAYS":
Logger.info("Source package %s is blacklisted.", src_pkg.source) Logger.info("Source package %s is blocklisted.", src_pkg.source)
if messages: if messages:
for message in messages: for message in messages:
for line in textwrap.wrap(message): for line in textwrap.wrap(message):
@ -753,14 +772,15 @@ def main():
for line in textwrap.wrap(comment): for line in textwrap.wrap(comment):
Logger.info(" %s", line) Logger.info(" %s", line)
if blacklist_fail: if blocklist_fail:
sys.exit(1) continue
if args.lp: if args.lp:
copy(src_pkg, args.release, args.bugs, sponsoree, args.simulate, args.force) if not copy(src_pkg, args.release, args.bugs, sponsoree, args.simulate, args.force, args.yes):
continue
else: else:
os.environ["DEB_VENDOR"] = "Ubuntu" os.environ["DEB_VENDOR"] = "Ubuntu"
sync_dsc( if not sync_dsc(
src_pkg, src_pkg,
args.distribution, args.distribution,
args.release, args.release,
@ -772,7 +792,8 @@ def main():
args.simulate, args.simulate,
args.force, args.force,
args.fakesync, args.fakesync,
) ):
continue
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -28,9 +28,8 @@
import argparse import argparse
import sys import sys
from launchpadlib.credentials import TokenAuthorizationException
from launchpadlib.launchpad import Launchpad
import lazr.restfulclient.errors import lazr.restfulclient.errors
from launchpadlib.launchpad import Launchpad
from ubuntutools import getLogger from ubuntutools import getLogger
from ubuntutools.lp.udtexceptions import PocketDoesNotExistError from ubuntutools.lp.udtexceptions import PocketDoesNotExistError
@ -39,7 +38,7 @@ from ubuntutools.misc import split_release_pocket
Logger = getLogger() Logger = getLogger()
def getBuildStates(pkg, archs): def get_build_states(pkg, archs):
res = [] res = []
for build in pkg.getBuilds(): for build in pkg.getBuilds():
@ -48,7 +47,8 @@ def getBuildStates(pkg, archs):
msg = "\n".join(res) msg = "\n".join(res)
return f"Build state(s) for '{pkg.source_package_name}':\n{msg}" return f"Build state(s) for '{pkg.source_package_name}':\n{msg}"
def rescoreBuilds(pkg, archs, score):
def rescore_builds(pkg, archs, score):
res = [] res = []
for build in pkg.getBuilds(): for build in pkg.getBuilds():
@ -61,18 +61,19 @@ def rescoreBuilds(pkg, archs, score):
res.append(f" {arch}: done") res.append(f" {arch}: done")
except lazr.restfulclient.errors.Unauthorized: except lazr.restfulclient.errors.Unauthorized:
Logger.error( Logger.error(
"You don't have the permissions to rescore builds. Ignoring your rescore request." "You don't have the permissions to rescore builds."
" Ignoring your rescore request."
) )
return None return None
except lazr.restfulclient.errors.BadRequest: except lazr.restfulclient.errors.BadRequest:
Logger.info("Cannot rescore build of %s on %s.", Logger.info("Cannot rescore build of %s on %s.", build.source_package_name, arch)
build.source_package_name, arch)
res.append(f" {arch}: failed") res.append(f" {arch}: failed")
msg = "\n".join(res) msg = "\n".join(res)
return f"Rescoring builds of '{pkg.source_package_name}' to {score}:\n{msg}" return f"Rescoring builds of '{pkg.source_package_name}' to {score}:\n{msg}"
def retryBuilds(pkg, archs):
def retry_builds(pkg, archs):
res = [] res = []
for build in pkg.getBuilds(): for build in pkg.getBuilds():
arch = build.arch_tag arch = build.arch_tag
@ -94,16 +95,7 @@ def main():
# Valid architectures. # Valid architectures.
valid_archs = set( valid_archs = set(
[ ["armhf", "arm64", "amd64", "i386", "powerpc", "ppc64el", "riscv64", "s390x"]
"armhf",
"arm64",
"amd64",
"i386",
"powerpc",
"ppc64el",
"riscv64",
"s390x",
]
) )
# Prepare our option parser. # Prepare our option parser.
@ -118,8 +110,7 @@ def main():
f"include: {', '.join(valid_archs)}.", f"include: {', '.join(valid_archs)}.",
) )
parser.add_argument("-A", "--archive", help="operate on ARCHIVE", parser.add_argument("-A", "--archive", help="operate on ARCHIVE", default="ubuntu")
default="ubuntu")
# Batch processing options # Batch processing options
batch_options = parser.add_argument_group( batch_options = parser.add_argument_group(
@ -148,7 +139,9 @@ def main():
help="Rescore builds to <priority>.", help="Rescore builds to <priority>.",
) )
batch_options.add_argument( batch_options.add_argument(
"--state", action="store", dest="state", "--state",
action="store",
dest="state",
help="Act on builds that are in the specified state", help="Act on builds that are in the specified state",
) )
@ -157,11 +150,8 @@ def main():
# Parse our options. # Parse our options.
args = parser.parse_args() args = parser.parse_args()
launchpad = Launchpad.login_with("ubuntu-dev-tools", "production", launchpad = Launchpad.login_with("ubuntu-dev-tools", "production", version="devel")
version="devel") ubuntu = launchpad.distributions["ubuntu"]
me = launchpad.me
ubuntu = launchpad.distributions['ubuntu']
if args.batch: if args.batch:
release = args.series release = args.series
@ -169,8 +159,8 @@ def main():
# ppas don't have a proposed pocket so just use the release pocket; # ppas don't have a proposed pocket so just use the release pocket;
# but for the main archive we default to -proposed # but for the main archive we default to -proposed
release = ubuntu.getDevelopmentSeries()[0].name release = ubuntu.getDevelopmentSeries()[0].name
if args.archive == 'ubuntu': if args.archive == "ubuntu":
release = release + "-proposed" release = f"{release}-proposed"
try: try:
(release, pocket) = split_release_pocket(release) (release, pocket) = split_release_pocket(release)
except PocketDoesNotExistError as error: except PocketDoesNotExistError as error:
@ -223,12 +213,13 @@ def main():
# Get list of published sources for package in question. # Get list of published sources for package in question.
try: try:
sources = archive.getPublishedSources( sources = archive.getPublishedSources(
distro_series=distroseries, distro_series=distroseries,
exact_match=True, exact_match=True,
pocket=pocket, pocket=pocket,
source_name=package, source_name=package,
status='Published')[0] status="Published",
except IndexError as error: )[0]
except IndexError:
Logger.error("No publication found for package %s", package) Logger.error("No publication found for package %s", package)
sys.exit(1) sys.exit(1)
# Get list of builds for that package. # Get list of builds for that package.
@ -243,21 +234,20 @@ def main():
# are in place. # are in place.
if operation == "retry": if operation == "retry":
necessary_privs = archive.checkUpload( necessary_privs = archive.checkUpload(
component=sources.getComponent(), component=sources.getComponent(),
distroseries=distroseries, distroseries=distroseries,
person=launchpad.me, person=launchpad.me,
pocket=pocket, pocket=pocket,
sourcepackagename=sources.getPackageName(), sourcepackagename=sources.getPackageName(),
) )
if not necessary_privs:
if operation == "retry" and not necessary_privs: Logger.error(
Logger.error( "You cannot perform the %s operation on a %s package as you"
"You cannot perform the %s operation on a %s package as you" " do not have the permissions to do this action.",
" do not have the permissions to do this action.", operation,
operation, component,
component, )
) sys.exit(1)
sys.exit(1)
# Output details. # Output details.
Logger.info( Logger.info(
@ -288,7 +278,8 @@ def main():
build.rescore(score=priority) build.rescore(score=priority)
except lazr.restfulclient.errors.Unauthorized: except lazr.restfulclient.errors.Unauthorized:
Logger.error( Logger.error(
"You don't have the permissions to rescore builds. Ignoring your rescore request." "You don't have the permissions to rescore builds."
" Ignoring your rescore request."
) )
break break
else: else:
@ -325,24 +316,22 @@ def main():
if not args.state: if not args.state:
if args.retry: if args.retry:
args.state='Failed to build' args.state = "Failed to build"
elif args.priority: elif args.priority:
args.state='Needs building' args.state = "Needs building"
# there is no equivalent to series.getBuildRecords() for a ppa. # there is no equivalent to series.getBuildRecords() for a ppa.
# however, we don't want to have to traverse all build records for # however, we don't want to have to traverse all build records for
# all series when working on the main archive, so we use # all series when working on the main archive, so we use
# series.getBuildRecords() for ubuntu and handle ppas separately # series.getBuildRecords() for ubuntu and handle ppas separately
series = ubuntu.getSeries(name_or_version=release) series = ubuntu.getSeries(name_or_version=release)
if args.archive == 'ubuntu': if args.archive == "ubuntu":
builds = series.getBuildRecords(build_state=args.state, builds = series.getBuildRecords(build_state=args.state, pocket=pocket)
pocket=pocket)
else: else:
builds = [] builds = []
for build in archive.getBuildRecords(build_state=args.state, for build in archive.getBuildRecords(build_state=args.state, pocket=pocket):
pocket=pocket):
if not build.current_source_publication: if not build.current_source_publication:
continue continue
if build.current_source_publication.distro_series==series: if build.current_source_publication.distro_series == series:
builds.append(build) builds.append(build)
for build in builds: for build in builds:
if build.arch_tag not in archs: if build.arch_tag not in archs:
@ -361,24 +350,31 @@ def main():
) )
if args.retry and not can_retry: if args.retry and not can_retry:
Logger.error( Logger.error(
"You don't have the permissions to retry the " "You don't have the permissions to retry the build of '%s', skipping.",
"build of '%s', skipping.", build.source_package_name,
build.source_package_name
) )
continue continue
Logger.info( Logger.info(
"The source version for '%s' in '%s' (%s) is: %s", "The source version for '%s' in '%s' (%s) is: %s",
build.source_package_name, build.source_package_name,
release, release,
pocket, pocket,
build.source_package_version build.source_package_version,
) )
if args.retry and build.can_be_retried: if args.retry and build.can_be_retried:
Logger.info("Retrying build of %s on %s...", Logger.info(
build.source_package_name, build.arch_tag) "Retrying build of %s on %s...", build.source_package_name, build.arch_tag
build.retry() )
retry_count += 1 try:
build.retry()
retry_count += 1
except lazr.restfulclient.errors.BadRequest:
Logger.info(
"Failed to retry build of %s on %s",
build.source_package_name,
build.arch_tag,
)
if args.priority and can_rescore: if args.priority and can_rescore:
if build.can_be_rescored: if build.can_be_rescored:
@ -386,28 +382,32 @@ def main():
build.rescore(score=args.priority) build.rescore(score=args.priority)
except lazr.restfulclient.errors.Unauthorized: except lazr.restfulclient.errors.Unauthorized:
Logger.error( Logger.error(
"You don't have the permissions to rescore builds. Ignoring your rescore request." "You don't have the permissions to rescore builds."
" Ignoring your rescore request."
) )
can_rescore = False can_rescore = False
except lazr.restfulclient.errors.BadRequest: except lazr.restfulclient.errors.BadRequest:
Logger.info("Cannot rescore build of %s on %s.", Logger.info(
build.source_package_name, build.arch_tag) "Cannot rescore build of %s on %s.",
build.source_package_name,
build.arch_tag,
)
Logger.info("") Logger.info("")
if args.retry: if args.retry:
Logger.info("%d package builds retried", retry_count) Logger.info("%d package builds retried", retry_count)
sys.exit(0) sys.exit(0)
for pkg in args.packages: for pkg in args.packages:
try: try:
pkg = archive.getPublishedSources( pkg = archive.getPublishedSources(
distro_series=distroseries, distro_series=distroseries,
exact_match=True, exact_match=True,
pocket=pocket, pocket=pocket,
source_name=pkg, source_name=pkg,
status='Published')[0] status="Published",
except IndexError as error: )[0]
except IndexError:
Logger.error("No publication found for package %s", pkg) Logger.error("No publication found for package %s", pkg)
continue continue
@ -435,15 +435,14 @@ def main():
pkg.source_package_version, pkg.source_package_version,
) )
Logger.info(getBuildStates(pkg, archs)) Logger.info(get_build_states(pkg, archs))
if can_retry: if can_retry:
Logger.info(retryBuilds(pkg, archs)) Logger.info(retry_builds(pkg, archs))
if args.priority: if args.priority:
Logger.info(rescoreBuilds(pkg, archs, args.priority)) Logger.info(rescore_builds(pkg, archs, args.priority))
Logger.info("") Logger.info("")
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@ -165,6 +165,7 @@ class SourcePackage(ABC):
series = kwargs.get("series") series = kwargs.get("series")
pocket = kwargs.get("pocket") pocket = kwargs.get("pocket")
status = kwargs.get("status") status = kwargs.get("status")
arch = kwargs.get("arch")
verify_signature = kwargs.get("verify_signature", False) verify_signature = kwargs.get("verify_signature", False)
try_binary = kwargs.get("try_binary", True) try_binary = kwargs.get("try_binary", True)
@ -184,6 +185,7 @@ class SourcePackage(ABC):
self._series = series self._series = series
self._pocket = pocket self._pocket = pocket
self._status = status self._status = status
self._arch = arch
# dscfile can be either a path or an URL. misc.py's download() will # dscfile can be either a path or an URL. misc.py's download() will
# later fiture it out # later fiture it out
self._dsc_source = dscfile self._dsc_source = dscfile
@ -252,6 +254,7 @@ class SourcePackage(ABC):
) )
try: try:
params["archtag"] = self._arch
bpph = archive.getBinaryPackage(self.source, **params) bpph = archive.getBinaryPackage(self.source, **params)
except PackageNotFoundException as bpnfe: except PackageNotFoundException as bpnfe:
# log binary lookup failure, in case it provides hints # log binary lookup failure, in case it provides hints
@ -543,7 +546,7 @@ class SourcePackage(ABC):
Return the debdiff filename. Return the debdiff filename.
""" """
cmd = ["debdiff", self.dsc_name, newpkg.dsc_name] cmd = ["debdiff", self.dsc_name, newpkg.dsc_name]
difffn = newpkg.dsc_name[:-3] + "debdiff" difffn = f"{newpkg.dsc_name[:-3]}debdiff"
Logger.debug("%s > %s", " ".join(cmd), difffn) Logger.debug("%s > %s", " ".join(cmd), difffn)
with open(difffn, "w", encoding="utf-8") as f: with open(difffn, "w", encoding="utf-8") as f:
if subprocess.call(cmd, stdout=f, cwd=str(self.workdir)) > 2: if subprocess.call(cmd, stdout=f, cwd=str(self.workdir)) > 2:
@ -1342,7 +1345,7 @@ class SnapshotSPPH:
self.getComponent(), self.getComponent(),
subdir, subdir,
name, name,
name + "_" + pkgversion, f"{name}_{pkgversion}",
"changelog.txt", "changelog.txt",
) )
try: try:

View File

@ -71,8 +71,8 @@ class Pbuilder(Builder):
cmd = [ cmd = [
"sudo", "sudo",
"-E", "-E",
"ARCH=" + self.architecture, f"ARCH={self.architecture}",
"DIST=" + dist, f"DIST={dist}",
self.name, self.name,
"--build", "--build",
"--architecture", "--architecture",
@ -91,8 +91,8 @@ class Pbuilder(Builder):
cmd = [ cmd = [
"sudo", "sudo",
"-E", "-E",
"ARCH=" + self.architecture, f"ARCH={self.architecture}",
"DIST=" + dist, f"DIST={dist}",
self.name, self.name,
"--update", "--update",
"--architecture", "--architecture",
@ -140,7 +140,7 @@ class Sbuild(Builder):
workdir = os.getcwd() workdir = os.getcwd()
Logger.debug("cd %s", result_directory) Logger.debug("cd %s", result_directory)
os.chdir(result_directory) os.chdir(result_directory)
cmd = ["sbuild", "--arch-all", "--dist=" + dist, "--arch=" + self.architecture, dsc_file] cmd = ["sbuild", "--arch-all", f"--dist={dist}", f"--arch={self.architecture}", dsc_file]
Logger.debug(" ".join(cmd)) Logger.debug(" ".join(cmd))
returncode = subprocess.call(cmd) returncode = subprocess.call(cmd)
Logger.debug("cd %s", workdir) Logger.debug("cd %s", workdir)

View File

@ -68,21 +68,19 @@ class UDTConfig:
config = {} config = {}
for filename in ("/etc/devscripts.conf", "~/.devscripts"): for filename in ("/etc/devscripts.conf", "~/.devscripts"):
try: try:
f = open(os.path.expanduser(filename), "r", encoding="utf-8") with open(os.path.expanduser(filename), "r", encoding="utf-8") as f:
content = f.read()
except IOError: except IOError:
continue continue
for line in f: try:
parsed = shlex.split(line, comments=True) tokens = shlex.split(content, comments=True)
if len(parsed) > 1: except ValueError as e:
Logger.warning( Logger.error("Error parsing %s: %s", filename, e)
"Cannot parse variable assignment in %s: %s", continue
getattr(f, "name", "<config>"), for token in tokens:
line, if "=" in token:
) key, value = token.split("=", 1)
if len(parsed) >= 1 and "=" in parsed[0]:
key, value = parsed[0].split("=", 1)
config[key] = value config[key] = value
f.close()
return config return config
def get_value(self, key, default=None, boolean=False, compat_keys=()): def get_value(self, key, default=None, boolean=False, compat_keys=()):
@ -99,9 +97,9 @@ class UDTConfig:
if default is None and key in self.defaults: if default is None and key in self.defaults:
default = self.defaults[key] default = self.defaults[key]
keys = [self.prefix + "_" + key] keys = [f"{self.prefix}_{key}"]
if key in self.defaults: if key in self.defaults:
keys.append("UBUNTUTOOLS_" + key) keys.append(f"UBUNTUTOOLS_{key}")
keys += compat_keys keys += compat_keys
for k in keys: for k in keys:
@ -114,9 +112,9 @@ class UDTConfig:
else: else:
continue continue
if k in compat_keys: if k in compat_keys:
replacements = self.prefix + "_" + key replacements = f"{self.prefix}_{key}"
if key in self.defaults: if key in self.defaults:
replacements += "or UBUNTUTOOLS_" + key replacements += f"or UBUNTUTOOLS_{key}"
Logger.warning( Logger.warning(
"Using deprecated configuration variable %s. You should use %s.", "Using deprecated configuration variable %s. You should use %s.",
k, k,
@ -180,7 +178,7 @@ def ubu_email(name=None, email=None, export=True):
mailname = socket.getfqdn() mailname = socket.getfqdn()
if os.path.isfile("/etc/mailname"): if os.path.isfile("/etc/mailname"):
mailname = open("/etc/mailname", "r", encoding="utf-8").read().strip() mailname = open("/etc/mailname", "r", encoding="utf-8").read().strip()
email = pwd.getpwuid(os.getuid()).pw_name + "@" + mailname email = f"{pwd.getpwuid(os.getuid()).pw_name}@{mailname}"
if export: if export:
os.environ["DEBFULLNAME"] = name os.environ["DEBFULLNAME"] = name

View File

@ -883,7 +883,7 @@ class SourcePackagePublishingHistory(BaseWrapper):
""" """
release = self.getSeriesName() release = self.getSeriesName()
if self.pocket != "Release": if self.pocket != "Release":
release += "-" + self.pocket.lower() release += f"-{self.pocket.lower()}"
return release return release
def getArchive(self): def getArchive(self):

View File

@ -385,7 +385,7 @@ class _StderrProgressBar:
pctstr = f"{pct:>3}%" pctstr = f"{pct:>3}%"
barlen = self.width * pct // 100 barlen = self.width * pct // 100
barstr = "=" * barlen barstr = "=" * barlen
barstr = barstr[:-1] + ">" barstr = f"{barstr[:-1]}>"
barstr = barstr.ljust(self.width) barstr = barstr.ljust(self.width)
fullstr = f"\r[{barstr}]{pctstr}" fullstr = f"\r[{barstr}]{pctstr}"
sys.stderr.write(fullstr) sys.stderr.write(fullstr)

View File

@ -340,6 +340,7 @@ class PullPkg:
params = {} params = {}
params["package"] = options["package"] params["package"] = options["package"]
params["arch"] = options["arch"]
if options["release"]: if options["release"]:
(release, version, pocket) = self.parse_release_and_version( (release, version, pocket) = self.parse_release_and_version(
@ -453,7 +454,7 @@ class PullPkg:
if key.startswith("vcs-"): if key.startswith("vcs-"):
if key == "vcs-browser": if key == "vcs-browser":
continue continue
elif key == "vcs-git": if key == "vcs-git":
vcs = "Git" vcs = "Git"
elif key == "vcs-bzr": elif key == "vcs-bzr":
vcs = "Bazaar" vcs = "Bazaar"
@ -462,9 +463,13 @@ class PullPkg:
uri = srcpkg.dsc[original_key] uri = srcpkg.dsc[original_key]
Logger.warning("\nNOTICE: '%s' packaging is maintained in " Logger.warning(
"the '%s' version control system at:\n" "\nNOTICE: '%s' packaging is maintained in "
" %s\n" % (package, vcs, uri)) "the '%s' version control system at:\n %s\n",
package,
vcs,
uri,
)
if vcs == "Bazaar": if vcs == "Bazaar":
vcscmd = " $ bzr branch " + uri vcscmd = " $ bzr branch " + uri
@ -472,9 +477,11 @@ class PullPkg:
vcscmd = " $ git clone " + uri vcscmd = " $ git clone " + uri
if vcscmd: if vcscmd:
Logger.info(f"Please use:\n{vcscmd}\n" Logger.info(
"to retrieve the latest (possibly unreleased) " "Please use:\n%s\n"
"updates to the package.\n") "to retrieve the latest (possibly unreleased) updates to the package.\n",
vcscmd,
)
if pull == PULL_LIST: if pull == PULL_LIST:
Logger.info("Source files:") Logger.info("Source files:")

View File

@ -31,9 +31,9 @@ class Question:
def get_options(self): def get_options(self):
if len(self.options) == 2: if len(self.options) == 2:
options = self.options[0] + " or " + self.options[1] options = f"{self.options[0]} or {self.options[1]}"
else: else:
options = ", ".join(self.options[:-1]) + ", or " + self.options[-1] options = f"{', '.join(self.options[:-1])}, or {self.options[-1]}"
return options return options
def ask(self, question, default=None): def ask(self, question, default=None):
@ -67,7 +67,7 @@ class Question:
if selected == option[0]: if selected == option[0]:
selected = option selected = option
if selected not in self.options: if selected not in self.options:
print("Please answer the question with " + self.get_options() + ".") print(f"Please answer the question with {self.get_options()}.")
return selected return selected
@ -170,7 +170,7 @@ class EditBugReport(EditFile):
split_re = re.compile(r"^Summary.*?:\s+(.*?)\s+Description:\s+(.*)$", re.DOTALL | re.UNICODE) split_re = re.compile(r"^Summary.*?:\s+(.*?)\s+Description:\s+(.*)$", re.DOTALL | re.UNICODE)
def __init__(self, subject, body, placeholders=None): def __init__(self, subject, body, placeholders=None):
prefix = os.path.basename(sys.argv[0]) + "_" prefix = f"{os.path.basename(sys.argv[0])}_"
tmpfile = tempfile.NamedTemporaryFile(prefix=prefix, suffix=".txt", delete=False) tmpfile = tempfile.NamedTemporaryFile(prefix=prefix, suffix=".txt", delete=False)
tmpfile.write((f"Summary (one line):\n{subject}\n\nDescription:\n{body}").encode("utf-8")) tmpfile.write((f"Summary (one line):\n{subject}\n\nDescription:\n{body}").encode("utf-8"))
tmpfile.close() tmpfile.close()

View File

@ -183,7 +183,7 @@ Content-Type: text/plain; charset=UTF-8
backup = tempfile.NamedTemporaryFile( backup = tempfile.NamedTemporaryFile(
mode="w", mode="w",
delete=False, delete=False,
prefix="requestsync-" + re.sub(r"[^a-zA-Z0-9_-]", "", bugtitle.replace(" ", "_")), prefix=f"requestsync-{re.sub('[^a-zA-Z0-9_-]', '', bugtitle.replace(' ', '_'))}",
) )
with backup: with backup:
backup.write(mail) backup.write(mail)

View File

@ -1,18 +1,19 @@
# Copyright (C) 2024 Canonical Ltd. # Copyright (C) 2024 Canonical Ltd.
# Author: Chris Peterson <chris.peterson@canonical.com> # Author: Chris Peterson <chris.peterson@canonical.com>
# Author: Andy P. Whitcroft
# Author: Christian Ehrhardt
# #
# This program is free software: you can redistribute it and/or modify # This program is free software: you can redistribute it and/or modify it
# it under the terms of the GNU Affero General Public License as # under the terms of the GNU General Public License version 3, as published
# published by the Free Software Foundation, either version 3 of the # by the Free Software Foundation.
# License, or (at your option) any later version.
# #
# This program is distributed in the hope that it will be useful, # This program is distributed in the hope that it will be useful, but
# but WITHOUT ANY WARRANTY; without even the implied warranty of # WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
# GNU Affero General Public License for more details. # See the GNU General Public License for more details.
# #
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU General Public License along
# along with this program. If not, see <http://www.gnu.org/licenses/>. # with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime import datetime
import json import json
@ -25,10 +26,7 @@ URL_QUEUED = "http://autopkgtest.ubuntu.com/queues.json"
def _get_jobs(url: str) -> dict: def _get_jobs(url: str) -> dict:
request = urllib.request.Request( request = urllib.request.Request(url, headers={"Cache-Control": "max-age-0"})
url,
headers={"Cache-Control": "max-age-0"},
)
with urllib.request.urlopen(request) as response: with urllib.request.urlopen(request) as response:
data = response.read() data = response.read()
jobs = json.loads(data.decode("utf-8")) jobs = json.loads(data.decode("utf-8"))
@ -51,7 +49,10 @@ def get_running():
env = jobinfo[0].get("env", "-") env = jobinfo[0].get("env", "-")
time = str(datetime.timedelta(seconds=jobinfo[1])) time = str(datetime.timedelta(seconds=jobinfo[1]))
try: try:
line = f"R {time:6} {pkg:30} {'-':10} {series:8} {arch:8} {ppas:31} {triggers} {env}\n" line = (
f"R {time:6} {pkg:30} {'-':10} {series:8} {arch:8}"
f" {ppas:31} {triggers} {env}\n"
)
running.append((jobinfo[1], line)) running.append((jobinfo[1], line))
except BrokenPipeError: except BrokenPipeError:
sys.exit(1) sys.exit(1)
@ -85,7 +86,10 @@ def get_queued():
n = n + 1 n = n + 1
try: try:
output += f"Q{n:04d} {'-:--':>6} {pkg:30} {origin:10} {series:8} {arch:8} {ppas:31} {triggers}\n" output += (
f"Q{n:04d} {'-:--':>6} {pkg:30} {origin:10} {series:8} {arch:8}"
f" {ppas:31} {triggers}\n"
)
except BrokenPipeError: except BrokenPipeError:
sys.exit(1) sys.exit(1)
return output return output

View File

@ -255,7 +255,7 @@ class SourcePackage:
def _changes_file(self): def _changes_file(self):
"""Returns the file name of the .changes file.""" """Returns the file name of the .changes file."""
return os.path.join( return os.path.join(
self._workdir, f"{self._package}_{ strip_epoch(self._version)}_source.changes" self._workdir, f"{self._package}_{strip_epoch(self._version)}_source.changes"
) )
def check_target(self, upload, launchpad): def check_target(self, upload, launchpad):

View File

@ -39,7 +39,7 @@ def is_command_available(command, check_sbin=False):
"Is command in $PATH?" "Is command in $PATH?"
path = os.environ.get("PATH", "/usr/bin:/bin").split(":") path = os.environ.get("PATH", "/usr/bin:/bin").split(":")
if check_sbin: if check_sbin:
path += [directory[:-3] + "sbin" for directory in path if directory.endswith("/bin")] path += [f"{directory[:-3]}sbin" for directory in path if directory.endswith("/bin")]
return any(os.access(os.path.join(directory, command), os.X_OK) for directory in path) return any(os.access(os.path.join(directory, command), os.X_OK) for directory in path)
@ -303,7 +303,7 @@ def _download_and_change_into(task, dsc_file, patch, branch):
extract_source(dsc_file, Logger.isEnabledFor(logging.DEBUG)) extract_source(dsc_file, Logger.isEnabledFor(logging.DEBUG))
# change directory # change directory
directory = task.package + "-" + task.get_version().upstream_version directory = f"{task.package}-{task.get_version().upstream_version}"
Logger.debug("cd %s", directory) Logger.debug("cd %s", directory)
os.chdir(directory) os.chdir(directory)

View File

@ -0,0 +1,33 @@
# Copyright (C) 2024 Canonical Ltd.
# Author: Chris Peterson <chris.peterson@canonical.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
import unittest
# Binary Tests
class BinaryTests(unittest.TestCase):
# The requestsync binary has the option of using the launchpad api
# to log in but requires python3-keyring in addition to
# python3-launchpadlib. Testing the integrated login functionality
# automatically isn't very feasbile, but we can at least write a smoke
# test to make sure the required packages are installed.
# See LP: #2049217
def test_keyring_installed(self):
"""Smoke test for required lp api dependencies"""
try:
import keyring # noqa: F401
except ModuleNotFoundError:
raise ModuleNotFoundError("package python3-keyring is not installed")

View File

@ -1,18 +1,17 @@
# Copyright (C) 2024 Canonical Ltd. # Copyright (C) 2024 Canonical Ltd.
# Author: Chris Peterson <chris.peterson@canonical.com> # Author: Chris Peterson <chris.peterson@canonical.com>
# #
# This program is free software: you can redistribute it and/or modify # Permission to use, copy, modify, and/or distribute this software for any
# it under the terms of the GNU Affero General Public License as # purpose with or without fee is hereby granted, provided that the above
# published by the Free Software Foundation, either version 3 of the # copyright notice and this permission notice appear in all copies.
# License, or (at your option) any later version.
# #
# This program is distributed in the hope that it will be useful, # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# but WITHOUT ANY WARRANTY; without even the implied warranty of # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# GNU Affero General Public License for more details. # INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# # LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# You should have received a copy of the GNU Affero General Public License # OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# along with this program. If not, see <http://www.gnu.org/licenses/>. # PERFORMANCE OF THIS SOFTWARE.
""" Tests for running_autopkgtests """ Tests for running_autopkgtests
Tests using cached data from autopkgtest servers. Tests using cached data from autopkgtest servers.
@ -33,8 +32,17 @@ from ubuntutools.running_autopkgtests import (
) )
# Cached binary response data from autopkgtest server # Cached binary response data from autopkgtest server
RUN_DATA = b'{"pyatem": { "submit-time_2024-01-19 19:37:36;triggers_[\'python3-defaults/3.12.1-0ubuntu1\'];": {"noble": {"arm64": [{"triggers": ["python3-defaults/3.12.1-0ubuntu1"], "submit-time": "2024-01-19 19:37:36"}, 380, "<omitted log>"]}}}}' RUN_DATA = (
QUEUED_DATA = b'{"ubuntu": {"noble": {"arm64": ["libobject-accessor-perl {\\"requester\\": \\"someone\\", \\"submit-time\\": \\"2024-01-18 01:08:55\\", \\"triggers\\": [\\"perl/5.38.2-3\\", \\"liblocale-gettext-perl/1.07-6build1\\"]}"]}}}' b'{"pyatem": {'
b" \"submit-time_2024-01-19 19:37:36;triggers_['python3-defaults/3.12.1-0ubuntu1'];\":"
b' {"noble": {"arm64": [{"triggers": ["python3-defaults/3.12.1-0ubuntu1"],'
b' "submit-time": "2024-01-19 19:37:36"}, 380, "<omitted log>"]}}}}'
)
QUEUED_DATA = (
b'{"ubuntu": {"noble": {"arm64": ["libobject-accessor-perl {\\"requester\\": \\"someone\\",'
b' \\"submit-time\\": \\"2024-01-18 01:08:55\\",'
b' \\"triggers\\": [\\"perl/5.38.2-3\\", \\"liblocale-gettext-perl/1.07-6build1\\"]}"]}}}'
)
# Expected result(s) of parsing the above JSON data # Expected result(s) of parsing the above JSON data
RUNNING_JOB = { RUNNING_JOB = {
@ -58,7 +66,9 @@ QUEUED_JOB = {
"ubuntu": { "ubuntu": {
"noble": { "noble": {
"arm64": [ "arm64": [
'libobject-accessor-perl {"requester": "someone", "submit-time": "2024-01-18 01:08:55", "triggers": ["perl/5.38.2-3", "liblocale-gettext-perl/1.07-6build1"]}', 'libobject-accessor-perl {"requester": "someone",'
' "submit-time": "2024-01-18 01:08:55",'
' "triggers": ["perl/5.38.2-3", "liblocale-gettext-perl/1.07-6build1"]}'
] ]
} }
} }
@ -69,9 +79,18 @@ PRIVATE_JOB = {"ppa": {"noble": {"arm64": ["private job"]}}}
# Expected textual output of the program based on the above data # Expected textual output of the program based on the above data
RUNNING_OUTPUT = "R 0:06:20 pyatem - noble arm64 - python3-defaults/3.12.1-0ubuntu1 -\n" RUNNING_OUTPUT = (
QUEUED_OUTPUT = "Q0001 -:-- libobject-accessor-perl ubuntu noble arm64 - perl/5.38.2-3,liblocale-gettext-perl/1.07-6build1\n" "R 0:06:20 pyatem - noble arm64"
PRIVATE_OUTPUT = "Q0001 -:-- private job ppa noble arm64 private job private job\n" " - python3-defaults/3.12.1-0ubuntu1 -\n"
)
QUEUED_OUTPUT = (
"Q0001 -:-- libobject-accessor-perl ubuntu noble arm64"
" - perl/5.38.2-3,liblocale-gettext-perl/1.07-6build1\n"
)
PRIVATE_OUTPUT = (
"Q0001 -:-- private job ppa noble arm64"
" private job private job\n"
)
class RunningAutopkgtestTestCase(unittest.TestCase): class RunningAutopkgtestTestCase(unittest.TestCase):

View File

@ -72,17 +72,17 @@ class Control:
def set_maintainer(self, maintainer): def set_maintainer(self, maintainer):
"""Sets the value of the Maintainer field.""" """Sets the value of the Maintainer field."""
pattern = re.compile("^Maintainer: ?.*$", re.MULTILINE) pattern = re.compile("^Maintainer: ?.*$", re.MULTILINE)
self._content = pattern.sub("Maintainer: " + maintainer, self._content) self._content = pattern.sub(f"Maintainer: {maintainer}", self._content)
def set_original_maintainer(self, original_maintainer): def set_original_maintainer(self, original_maintainer):
"""Sets the value of the XSBC-Original-Maintainer field.""" """Sets the value of the XSBC-Original-Maintainer field."""
original_maintainer = "XSBC-Original-Maintainer: " + original_maintainer original_maintainer = f"XSBC-Original-Maintainer: {original_maintainer}"
if self.get_original_maintainer(): if self.get_original_maintainer():
pattern = re.compile("^(?:[XSBC]*-)?Original-Maintainer:.*$", re.MULTILINE) pattern = re.compile("^(?:[XSBC]*-)?Original-Maintainer:.*$", re.MULTILINE)
self._content = pattern.sub(original_maintainer, self._content) self._content = pattern.sub(original_maintainer, self._content)
else: else:
pattern = re.compile("^(Maintainer:.*)$", re.MULTILINE) pattern = re.compile("^(Maintainer:.*)$", re.MULTILINE)
self._content = pattern.sub(r"\1\n" + original_maintainer, self._content) self._content = pattern.sub(f"\\1\\n{original_maintainer}", self._content)
def remove_original_maintainer(self): def remove_original_maintainer(self):
"""Strip out out the XSBC-Original-Maintainer line""" """Strip out out the XSBC-Original-Maintainer line"""

View File

@ -15,47 +15,44 @@
"""Portions of archive related code that is re-used by various tools.""" """Portions of archive related code that is re-used by various tools."""
from datetime import datetime
import os import os
import re import re
import urllib.request import urllib.request
from datetime import datetime
import dateutil.parser import dateutil.parser
from dateutil.tz import tzutc from dateutil.tz import tzutc
def get_cache_dir():
    """Return the ubuntu-archive-tools cache directory, creating it if needed.

    Honors $XDG_CACHE_HOME, falling back to ~/.cache per the XDG spec.
    """
    default_base = os.path.expanduser(os.path.join("~", ".cache"))
    base_dir = os.environ.get("XDG_CACHE_HOME", default_base)
    cache_path = os.path.join(base_dir, "ubuntu-archive-tools")
    os.makedirs(cache_path, exist_ok=True)
    return cache_path
def get_url(url, force_cached): def get_url(url, force_cached):
''' Return file to the URL, possibly caching it """Return file to the URL, possibly caching it"""
'''
cache_file = None cache_file = None
# ignore bileto urls wrt caching, they're usually too small to matter # ignore bileto urls wrt caching, they're usually too small to matter
# and we don't do proper cache expiry # and we don't do proper cache expiry
m = re.search('ubuntu-archive-team.ubuntu.com/proposed-migration/' m = re.search("ubuntu-archive-team.ubuntu.com/proposed-migration/([^/]*)/([^/]*)", url)
'([^/]*)/([^/]*)',
url)
if m: if m:
cache_dir = get_cache_dir() cache_dir = get_cache_dir()
cache_file = os.path.join(cache_dir, '%s_%s' % (m.group(1), m.group(2))) cache_file = os.path.join(cache_dir, f"{m.group(1)}_{m.group(2)}")
else: else:
# test logs can be cached, too # test logs can be cached, too
m = re.search( m = re.search(
'https://autopkgtest.ubuntu.com/results/autopkgtest-[^/]*/([^/]*)/([^/]*)' "https://autopkgtest.ubuntu.com/results/autopkgtest-[^/]*/([^/]*)/([^/]*)"
'/[a-z0-9]*/([^/]*)/([_a-f0-9]*)@/log.gz', "/[a-z0-9]*/([^/]*)/([_a-f0-9]*)@/log.gz",
url) url,
)
if m: if m:
cache_dir = get_cache_dir() cache_dir = get_cache_dir()
cache_file = os.path.join( cache_file = os.path.join(
cache_dir, '%s_%s_%s_%s.gz' % ( cache_dir, f"{m.group(1)}_{m.group(2)}_{m.group(3)}_{m.group(4)}.gz"
m.group(1), m.group(2), m.group(3), m.group(4))) )
if cache_file: if cache_file:
try: try:
@ -65,18 +62,18 @@ def get_url(url, force_cached):
prev_timestamp = datetime.fromtimestamp(prev_mtime, tz=tzutc()) prev_timestamp = datetime.fromtimestamp(prev_mtime, tz=tzutc())
new_timestamp = datetime.now(tz=tzutc()).timestamp() new_timestamp = datetime.now(tz=tzutc()).timestamp()
if force_cached: if force_cached:
return open(cache_file, 'rb') return open(cache_file, "rb")
f = urllib.request.urlopen(url) f = urllib.request.urlopen(url)
if cache_file: if cache_file:
remote_ts = dateutil.parser.parse(f.headers['last-modified']) remote_ts = dateutil.parser.parse(f.headers["last-modified"])
if remote_ts > prev_timestamp: if remote_ts > prev_timestamp:
with open('%s.new' % cache_file, 'wb') as new_cache: with open(f"{cache_file}.new", "wb") as new_cache:
for line in f: for line in f:
new_cache.write(line) new_cache.write(line)
os.rename('%s.new' % cache_file, cache_file) os.rename(f"{cache_file}.new", cache_file)
os.utime(cache_file, times=(new_timestamp, new_timestamp)) os.utime(cache_file, times=(new_timestamp, new_timestamp))
f.close() f.close()
f = open(cache_file, 'rb') f = open(cache_file, "rb")
return f return f