mirror of
https://git.launchpad.net/ubuntu-dev-tools
synced 2025-03-13 08:01:09 +00:00
Compare commits
109 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
466e2784de | ||
|
ba3f0511f9 | ||
|
2e550ceff2 | ||
|
6c8a5d74bd | ||
|
3d11516599 | ||
|
5a20308ab1 | ||
|
b551877651 | ||
|
4a4c4e0a27 | ||
|
865c1c97bc | ||
|
d09718e976 | ||
|
bff7baecc9 | ||
|
45fbbb5bd1 | ||
|
ca217c035e | ||
|
b5e117788b | ||
|
ddba2d1e98 | ||
|
02d65a5804 | ||
|
bda85fa6a8 | ||
|
86a83bf74d | ||
|
162e758671 | ||
|
049425adb7 | ||
|
f6ca6cad92 | ||
|
3dc17934d6 | ||
|
10a176567a | ||
|
86b366c6c5 | ||
|
50b580b30e | ||
|
6ba0641f63 | ||
|
1e815db9d2 | ||
|
e2f43318bd | ||
|
cdd81232d9 | ||
|
65044d84d9 | ||
|
19e40b49c2 | ||
|
55eb521461 | ||
|
983bb3b70e | ||
|
85f2e46f7d | ||
|
649c3db767 | ||
|
e7ba650414 | ||
|
3bc802a209 | ||
|
92c80d7bb7 | ||
|
d7362d9ed8 | ||
|
c7a855ff20 | ||
|
017941ad70 | ||
|
69914f861e | ||
|
454f1e30c8 | ||
|
55bc403a95 | ||
|
c9339aeae4 | ||
|
c205ee0381 | ||
|
7577e10f13 | ||
|
e328dc05c2 | ||
|
9a94c9dea1 | ||
|
47ab7b608b | ||
|
56044d8eac | ||
|
c523b4cfc4 | ||
|
3df40f6392 | ||
|
6ebffe3f4a | ||
|
f01234e8a5 | ||
|
43891eda88 | ||
|
132866e2ba | ||
|
a0fcac7777 | ||
|
490895075d | ||
|
5186e76d8d | ||
|
bf46f7fbc1 | ||
|
881602c4b9 | ||
|
c869d07f75 | ||
|
59041af613 | ||
|
0ec53180f2 | ||
|
c92fa6502f | ||
|
07d3158ade | ||
|
d5faa9b133 | ||
|
9e710a3d66 | ||
|
010af53d7c | ||
|
0bef4d7352 | ||
|
688202a7cf | ||
|
691c1381db | ||
|
f01502bda2 | ||
|
42f8e5c0d2 | ||
|
bb8a9f7394 | ||
|
a058c716b9 | ||
|
e64fe7e212 | ||
|
f07d3df40c | ||
|
f73f2c1df1 | ||
|
268d082226 | ||
|
6bc59d789e | ||
|
9a4cc312f4 | ||
|
ffc787b454 | ||
|
bce1ef88c5 | ||
|
a9eb902b83 | ||
|
cb7464cf61 | ||
|
19f1df1054 | ||
|
7f64dde12c | ||
|
c2539c6787 | ||
|
fd885ec239 | ||
|
abbc56e185 | ||
|
a2176110f0 | ||
|
a5185e4612 | ||
|
e90ceaf26b | ||
|
9aab0135a2 | ||
|
23539f28b1 | ||
|
4a09d23db6 | ||
|
534cd254f4 | ||
|
29c3fa98bc | ||
|
7c9c7f2890 | ||
|
739279da3f | ||
|
7c11832ee0 | ||
|
f5512846d6 | ||
|
9e0dff4461 | ||
|
7129e6e27a | ||
|
79d30a9bfc | ||
|
2c6a8b5451 | ||
|
ad014685ea |
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
__pycache__
|
||||||
|
*.egg-info
|
44
check-mir
44
check-mir
@ -47,10 +47,41 @@ def check_support(apt_cache, pkgname, alt=False):
|
|||||||
else:
|
else:
|
||||||
prefix = " * " + pkgname
|
prefix = " * " + pkgname
|
||||||
|
|
||||||
try:
|
prov_packages = apt_cache.get_providing_packages(pkgname)
|
||||||
|
if pkgname in apt_cache:
|
||||||
pkg = apt_cache[pkgname]
|
pkg = apt_cache[pkgname]
|
||||||
except KeyError:
|
|
||||||
print(prefix, "does not exist (pure virtual?)", file=sys.stderr)
|
# If this is a virtual package, iterate through the binary packages that
|
||||||
|
# provide this, and ensure they are all in Main. Source packages in and of
|
||||||
|
# themselves cannot provide virtual packages, only binary packages can.
|
||||||
|
elif len(prov_packages) > 0:
|
||||||
|
supported, unsupported = [], []
|
||||||
|
for pkg in prov_packages:
|
||||||
|
candidate = pkg.candidate
|
||||||
|
if candidate:
|
||||||
|
section = candidate.section
|
||||||
|
if section.startswith("universe") or section.startswith("multiverse"):
|
||||||
|
unsupported.append(pkg.name)
|
||||||
|
else:
|
||||||
|
supported.append(pkg.name)
|
||||||
|
|
||||||
|
if len(supported) > 0:
|
||||||
|
msg = "is a virtual package, which is provided by the following "
|
||||||
|
msg += "candidates in Main: " + " ".join(supported)
|
||||||
|
print(prefix, msg)
|
||||||
|
elif len(unsupported) > 0:
|
||||||
|
msg = "is a virtual package, but is only provided by the "
|
||||||
|
msg += "following non-Main candidates: " + " ".join(unsupported)
|
||||||
|
print(prefix, msg, file=sys.stderr)
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
msg = "is a virtual package that exists but is not provided by "
|
||||||
|
msg += "package currently in the archive. Proceed with caution."
|
||||||
|
print(prefix, msg, file=sys.stderr)
|
||||||
|
return False
|
||||||
|
|
||||||
|
else:
|
||||||
|
print(prefix, "does not exist", file=sys.stderr)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
section = pkg.candidate.section
|
section = pkg.candidate.section
|
||||||
@ -93,6 +124,13 @@ def check_build_dependencies(apt_cache, control):
|
|||||||
continue
|
continue
|
||||||
for or_group in apt.apt_pkg.parse_src_depends(control.section[field]):
|
for or_group in apt.apt_pkg.parse_src_depends(control.section[field]):
|
||||||
pkgname = or_group[0][0]
|
pkgname = or_group[0][0]
|
||||||
|
|
||||||
|
# debhelper-compat is expected to be a build dependency of every
|
||||||
|
# package, so it is a red herring to display it in this report.
|
||||||
|
# (src:debhelper is in Ubuntu Main anyway)
|
||||||
|
if pkgname == "debhelper-compat":
|
||||||
|
continue
|
||||||
|
|
||||||
if not check_support(apt_cache, pkgname):
|
if not check_support(apt_cache, pkgname):
|
||||||
# check non-preferred alternatives
|
# check non-preferred alternatives
|
||||||
for altpkg in or_group[1:]:
|
for altpkg in or_group[1:]:
|
||||||
|
1
debian/.gitignore
vendored
Normal file
1
debian/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
files
|
147
debian/changelog
vendored
147
debian/changelog
vendored
@ -1,3 +1,150 @@
|
|||||||
|
ubuntu-dev-tools (0.206) unstable; urgency=medium
|
||||||
|
|
||||||
|
[ Dan Bungert ]
|
||||||
|
* mk-sbuild: enable pkgmaintainermangler
|
||||||
|
|
||||||
|
[ Shengjing Zhu ]
|
||||||
|
* import-bug-from-debian: package option is overridden and not used
|
||||||
|
|
||||||
|
[ Fernando Bravo Hernández ]
|
||||||
|
* Parsing arch parameter to getBinaryPackage() (LP: #2081861)
|
||||||
|
|
||||||
|
[ Simon Quigley ]
|
||||||
|
* Read ~/.devscripts in a more robust way, to ideally pick up multi-line
|
||||||
|
variables (Closes: #725418).
|
||||||
|
* mk-sbuild: default to using UTC for schroots (LP: #2097159).
|
||||||
|
* syncpackage: s/syncblacklist/syncblocklist/g
|
||||||
|
* syncpackage: Cache the sync blocklist in-memory, so it's not fetched
|
||||||
|
multiple times when syncing more than one package.
|
||||||
|
* syncpackage: Catch exceptions cleanly, simply skipping to the next
|
||||||
|
package (erring on the side of caution) if there is an error doing the
|
||||||
|
download (LP: #1943286).
|
||||||
|
|
||||||
|
-- Simon Quigley <tsimonq2@debian.org> Tue, 04 Mar 2025 13:43:15 -0600
|
||||||
|
|
||||||
|
ubuntu-dev-tools (0.205) unstable; urgency=medium
|
||||||
|
|
||||||
|
* [syncpackage] When syncing multiple packages, if one of the packages is in
|
||||||
|
the sync blocklist, do not exit, simply continue.
|
||||||
|
* [syncpackage] Do not use exit(1) on an error or exception unless it
|
||||||
|
applies to all packages, instead return None so we can continue to the
|
||||||
|
next package.
|
||||||
|
* [syncpackage] Add support for -y or --yes, noted that it should be used
|
||||||
|
with care.
|
||||||
|
* Update Standards-Version to 4.7.2, no changes needed.
|
||||||
|
|
||||||
|
-- Simon Quigley <tsimonq2@debian.org> Sat, 01 Mar 2025 11:29:54 -0600
|
||||||
|
|
||||||
|
ubuntu-dev-tools (0.204) unstable; urgency=medium
|
||||||
|
|
||||||
|
[ Simon Quigley ]
|
||||||
|
* Update Standards-Version to 4.7.1, no changes needed.
|
||||||
|
* Add several Lintian overrides related to .pyc files.
|
||||||
|
* Add my name to the copyright file.
|
||||||
|
* Rename bitesize to lp-bitesize (Closes: #1076224).
|
||||||
|
* Add a manpage for running-autopkgtests.
|
||||||
|
* Add a large warning at the top of mk-sbuild encouraging the use of the
|
||||||
|
unshare backend. This is to provide ample warning to users.
|
||||||
|
* Remove mail line from default ~/.sbuildrc, to resolve the undeclared
|
||||||
|
dependency on sendmail (Closes: #1074632).
|
||||||
|
|
||||||
|
[ Julien Plissonneau Duquène ]
|
||||||
|
* Fix reverse-depends -b crash on packages that b-d on themselves
|
||||||
|
(Closes: #1087760).
|
||||||
|
|
||||||
|
-- Simon Quigley <tsimonq2@debian.org> Mon, 24 Feb 2025 19:54:39 -0600
|
||||||
|
|
||||||
|
ubuntu-dev-tools (0.203) unstable; urgency=medium
|
||||||
|
|
||||||
|
[ Steve Langasek ]
|
||||||
|
* ubuntu-build: handle TOCTOU issue with the "can be retried" value on
|
||||||
|
builds.
|
||||||
|
* Recommend sbuild over pbuilder. sbuild is the tool recommended by
|
||||||
|
Ubuntu developers whose behavior most closely approximates Launchpad
|
||||||
|
builds.
|
||||||
|
|
||||||
|
[ Florent 'Skia' Jacquet ]
|
||||||
|
* import-bug-from-debian: handle multipart message (Closes: #969510)
|
||||||
|
|
||||||
|
[ Benjamin Drung ]
|
||||||
|
* import-bug-from-debian: add type hints
|
||||||
|
* Bump Standards-Version to 4.7.0
|
||||||
|
* Bump year and add missing files to copyright
|
||||||
|
* setup.py: add pm-helper
|
||||||
|
* Format code with black and isort
|
||||||
|
* Address several issues pointed out by Pylint
|
||||||
|
* Depend on python3-yaml for pm-helper
|
||||||
|
|
||||||
|
-- Benjamin Drung <bdrung@debian.org> Sat, 02 Nov 2024 18:19:24 +0100
|
||||||
|
|
||||||
|
ubuntu-dev-tools (0.202) unstable; urgency=medium
|
||||||
|
|
||||||
|
[ Steve Langasek ]
|
||||||
|
* ubuntu-build: support --batch with no package names to retry all
|
||||||
|
* ubuntu-build: in batch mode, print a count of packages retried
|
||||||
|
* ubuntu-build: make the --arch option top-level.
|
||||||
|
This gets rid of the fugly --arch2 option
|
||||||
|
* ubuntu-build: support retrying builds in other states that failed-to-build
|
||||||
|
* ubuntu-build: Handling of proposed vs release pocket default for ppas
|
||||||
|
* ubuntu-build: update manpage
|
||||||
|
|
||||||
|
[ Chris Peterson ]
|
||||||
|
* Replace Depends on python3-launchpadlib with Depends on
|
||||||
|
python3-launchpadlib-desktop (LP: #2049217)
|
||||||
|
|
||||||
|
-- Simon Quigley <tsimonq2@ubuntu.com> Fri, 12 Apr 2024 23:33:14 -0500
|
||||||
|
|
||||||
|
ubuntu-dev-tools (0.201) unstable; urgency=medium
|
||||||
|
|
||||||
|
* running-autopkgtests: fix packaging to make the script available
|
||||||
|
(LP: #2055466)
|
||||||
|
|
||||||
|
-- Chris Peterson <chris.peterson@canonical.com> Thu, 29 Feb 2024 11:09:14 -0800
|
||||||
|
|
||||||
|
ubuntu-dev-tools (0.200) unstable; urgency=medium
|
||||||
|
|
||||||
|
[ Gianfranco Costamagna ]
|
||||||
|
* Team upload
|
||||||
|
|
||||||
|
[ Chris Peterson ]
|
||||||
|
* Add support to see currently running autopkgtests (running-autopkgtests)
|
||||||
|
* running-autopkgtests: use f-strings
|
||||||
|
|
||||||
|
[ Athos Ribeiro ]
|
||||||
|
* syncpackage: log LP authentication errors before halting.
|
||||||
|
|
||||||
|
[ Ying-Chun Liu (PaulLiu) ]
|
||||||
|
* Drop qemu-debootstrap
|
||||||
|
qemu-debootstrap is deprecated for a while. In newer qemu release
|
||||||
|
the command is totally removed. We can use debootstrap directly.
|
||||||
|
Signed-off-by: Ying-Chun Liu (PaulLiu) <paulliu@debian.org>
|
||||||
|
|
||||||
|
[ Logan Rosen ]
|
||||||
|
* Don't rely on debootstrap for validating Ubuntu distro
|
||||||
|
|
||||||
|
-- Gianfranco Costamagna <locutusofborg@debian.org> Thu, 15 Feb 2024 17:53:48 +0100
|
||||||
|
|
||||||
|
ubuntu-dev-tools (0.199) unstable; urgency=medium
|
||||||
|
|
||||||
|
[ Simon Quigley ]
|
||||||
|
* Add my name to Uploaders.
|
||||||
|
|
||||||
|
[ Steve Langasek ]
|
||||||
|
* Introduce a pm-helper tool.
|
||||||
|
|
||||||
|
-- Simon Quigley <tsimonq2@debian.org> Mon, 29 Jan 2024 10:03:22 -0600
|
||||||
|
|
||||||
|
ubuntu-dev-tools (0.198) unstable; urgency=medium
|
||||||
|
|
||||||
|
* In check-mir, ignore debhelper-compat when checking the build
|
||||||
|
dependencies. This is expected to be a build dependency of all packages,
|
||||||
|
so warning about it in any way is surely a red herring.
|
||||||
|
* Add proper support for virtual packages in check-mir, basing the
|
||||||
|
determination solely off of binary packages. This is not expected to be a
|
||||||
|
typical case.
|
||||||
|
|
||||||
|
-- Simon Quigley <tsimonq2@debian.org> Wed, 10 Jan 2024 20:04:02 -0600
|
||||||
|
|
||||||
ubuntu-dev-tools (0.197) unstable; urgency=medium
|
ubuntu-dev-tools (0.197) unstable; urgency=medium
|
||||||
|
|
||||||
* Update the manpage for syncpackage to reflect the ability to sync
|
* Update the manpage for syncpackage to reflect the ability to sync
|
||||||
|
17
debian/control
vendored
17
debian/control
vendored
@ -6,6 +6,7 @@ Uploaders:
|
|||||||
Benjamin Drung <bdrung@debian.org>,
|
Benjamin Drung <bdrung@debian.org>,
|
||||||
Stefano Rivera <stefanor@debian.org>,
|
Stefano Rivera <stefanor@debian.org>,
|
||||||
Mattia Rizzolo <mattia@debian.org>,
|
Mattia Rizzolo <mattia@debian.org>,
|
||||||
|
Simon Quigley <tsimonq2@debian.org>,
|
||||||
Build-Depends:
|
Build-Depends:
|
||||||
black <!nocheck>,
|
black <!nocheck>,
|
||||||
dctrl-tools,
|
dctrl-tools,
|
||||||
@ -20,15 +21,17 @@ Build-Depends:
|
|||||||
pylint <!nocheck>,
|
pylint <!nocheck>,
|
||||||
python3-all,
|
python3-all,
|
||||||
python3-apt,
|
python3-apt,
|
||||||
|
python3-dateutil,
|
||||||
python3-debian,
|
python3-debian,
|
||||||
python3-debianbts,
|
python3-debianbts,
|
||||||
python3-distro-info,
|
python3-distro-info,
|
||||||
python3-httplib2,
|
python3-httplib2,
|
||||||
python3-launchpadlib,
|
python3-launchpadlib-desktop,
|
||||||
python3-pytest,
|
python3-pytest,
|
||||||
python3-requests <!nocheck>,
|
python3-requests <!nocheck>,
|
||||||
python3-setuptools,
|
python3-setuptools,
|
||||||
Standards-Version: 4.6.2
|
python3-yaml <!nocheck>,
|
||||||
|
Standards-Version: 4.7.2
|
||||||
Rules-Requires-Root: no
|
Rules-Requires-Root: no
|
||||||
Vcs-Git: https://git.launchpad.net/ubuntu-dev-tools
|
Vcs-Git: https://git.launchpad.net/ubuntu-dev-tools
|
||||||
Vcs-Browser: https://git.launchpad.net/ubuntu-dev-tools
|
Vcs-Browser: https://git.launchpad.net/ubuntu-dev-tools
|
||||||
@ -51,9 +54,10 @@ Depends:
|
|||||||
python3-debianbts,
|
python3-debianbts,
|
||||||
python3-distro-info,
|
python3-distro-info,
|
||||||
python3-httplib2,
|
python3-httplib2,
|
||||||
python3-launchpadlib,
|
python3-launchpadlib-desktop,
|
||||||
python3-lazr.restfulclient,
|
python3-lazr.restfulclient,
|
||||||
python3-ubuntutools (= ${binary:Version}),
|
python3-ubuntutools (= ${binary:Version}),
|
||||||
|
python3-yaml,
|
||||||
sensible-utils,
|
sensible-utils,
|
||||||
sudo,
|
sudo,
|
||||||
tzdata,
|
tzdata,
|
||||||
@ -68,7 +72,7 @@ Recommends:
|
|||||||
genisoimage,
|
genisoimage,
|
||||||
lintian,
|
lintian,
|
||||||
patch,
|
patch,
|
||||||
pbuilder | cowbuilder | sbuild,
|
sbuild | pbuilder | cowbuilder,
|
||||||
python3-dns,
|
python3-dns,
|
||||||
quilt,
|
quilt,
|
||||||
reportbug (>= 3.39ubuntu1),
|
reportbug (>= 3.39ubuntu1),
|
||||||
@ -114,6 +118,8 @@ Description: useful tools for Ubuntu developers
|
|||||||
- requestsync - files a sync request with Debian changelog and rationale.
|
- requestsync - files a sync request with Debian changelog and rationale.
|
||||||
- reverse-depends - find the reverse dependencies (or build dependencies) of
|
- reverse-depends - find the reverse dependencies (or build dependencies) of
|
||||||
a package.
|
a package.
|
||||||
|
- running-autopkgtests - lists the currently running and/or queued
|
||||||
|
autopkgtests on the Ubuntu autopkgtest infrastructure
|
||||||
- seeded-in-ubuntu - query if a package is safe to upload during a freeze.
|
- seeded-in-ubuntu - query if a package is safe to upload during a freeze.
|
||||||
- setup-packaging-environment - assistant to get an Ubuntu installation
|
- setup-packaging-environment - assistant to get an Ubuntu installation
|
||||||
ready for packaging work.
|
ready for packaging work.
|
||||||
@ -132,10 +138,11 @@ Package: python3-ubuntutools
|
|||||||
Architecture: all
|
Architecture: all
|
||||||
Section: python
|
Section: python
|
||||||
Depends:
|
Depends:
|
||||||
|
python3-dateutil,
|
||||||
python3-debian,
|
python3-debian,
|
||||||
python3-distro-info,
|
python3-distro-info,
|
||||||
python3-httplib2,
|
python3-httplib2,
|
||||||
python3-launchpadlib,
|
python3-launchpadlib-desktop,
|
||||||
python3-lazr.restfulclient,
|
python3-lazr.restfulclient,
|
||||||
python3-requests,
|
python3-requests,
|
||||||
sensible-utils,
|
sensible-utils,
|
||||||
|
20
debian/copyright
vendored
20
debian/copyright
vendored
@ -11,6 +11,7 @@ Files: backportpackage
|
|||||||
doc/check-symbols.1
|
doc/check-symbols.1
|
||||||
doc/requestsync.1
|
doc/requestsync.1
|
||||||
doc/ubuntu-iso.1
|
doc/ubuntu-iso.1
|
||||||
|
doc/running-autopkgtests.1
|
||||||
GPL-2
|
GPL-2
|
||||||
README.updates
|
README.updates
|
||||||
requestsync
|
requestsync
|
||||||
@ -19,12 +20,13 @@ Files: backportpackage
|
|||||||
ubuntu-iso
|
ubuntu-iso
|
||||||
ubuntutools/requestsync/*.py
|
ubuntutools/requestsync/*.py
|
||||||
Copyright: 2007, Albert Damen <albrt@gmx.net>
|
Copyright: 2007, Albert Damen <albrt@gmx.net>
|
||||||
2010-2022, Benjamin Drung <bdrung@ubuntu.com>
|
2010-2024, Benjamin Drung <bdrung@ubuntu.com>
|
||||||
2007-2023, Canonical Ltd.
|
2007-2023, Canonical Ltd.
|
||||||
2006-2007, Daniel Holbach <daniel.holbach@ubuntu.com>
|
2006-2007, Daniel Holbach <daniel.holbach@ubuntu.com>
|
||||||
2010, Evan Broder <evan@ebroder.net>
|
2010, Evan Broder <evan@ebroder.net>
|
||||||
2006-2007, Luke Yelavich <themuso@ubuntu.com>
|
2006-2007, Luke Yelavich <themuso@ubuntu.com>
|
||||||
2009-2010, Michael Bienia <geser@ubuntu.com>
|
2009-2010, Michael Bienia <geser@ubuntu.com>
|
||||||
|
2024-2025, Simon Quigley <tsimonq2@debian.org>
|
||||||
2010-2011, Stefano Rivera <stefanor@ubuntu.com>
|
2010-2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||||
2008, Stephan Hermann <sh@sourcecode.de>
|
2008, Stephan Hermann <sh@sourcecode.de>
|
||||||
2007, Steve Kowalik <stevenk@ubuntu.com>
|
2007, Steve Kowalik <stevenk@ubuntu.com>
|
||||||
@ -72,23 +74,28 @@ License: GPL-2+
|
|||||||
On Debian systems, the complete text of the GNU General Public License
|
On Debian systems, the complete text of the GNU General Public License
|
||||||
version 2 can be found in the /usr/share/common-licenses/GPL-2 file.
|
version 2 can be found in the /usr/share/common-licenses/GPL-2 file.
|
||||||
|
|
||||||
Files: doc/bitesize.1
|
Files: doc/lp-bitesize.1
|
||||||
doc/check-mir.1
|
doc/check-mir.1
|
||||||
doc/grab-merge.1
|
doc/grab-merge.1
|
||||||
doc/merge-changelog.1
|
doc/merge-changelog.1
|
||||||
|
doc/pm-helper.1
|
||||||
doc/setup-packaging-environment.1
|
doc/setup-packaging-environment.1
|
||||||
doc/syncpackage.1
|
doc/syncpackage.1
|
||||||
bitesize
|
lp-bitesize
|
||||||
check-mir
|
check-mir
|
||||||
GPL-3
|
GPL-3
|
||||||
grab-merge
|
grab-merge
|
||||||
merge-changelog
|
merge-changelog
|
||||||
|
pm-helper
|
||||||
pyproject.toml
|
pyproject.toml
|
||||||
run-linters
|
run-linters
|
||||||
|
running-autopkgtests
|
||||||
setup-packaging-environment
|
setup-packaging-environment
|
||||||
syncpackage
|
syncpackage
|
||||||
Copyright: 2010, Benjamin Drung <bdrung@ubuntu.com>
|
ubuntutools/running_autopkgtests.py
|
||||||
2007-2023, Canonical Ltd.
|
ubuntutools/utils.py
|
||||||
|
Copyright: 2010-2024, Benjamin Drung <bdrung@ubuntu.com>
|
||||||
|
2007-2024, Canonical Ltd.
|
||||||
2008, Jonathan Patrick Davies <jpds@ubuntu.com>
|
2008, Jonathan Patrick Davies <jpds@ubuntu.com>
|
||||||
2008-2010, Martin Pitt <martin.pitt@canonical.com>
|
2008-2010, Martin Pitt <martin.pitt@canonical.com>
|
||||||
2009, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
|
2009, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
|
||||||
@ -177,11 +184,12 @@ Files: doc/pull-debian-debdiff.1
|
|||||||
ubuntutools/version.py
|
ubuntutools/version.py
|
||||||
update-maintainer
|
update-maintainer
|
||||||
.pylintrc
|
.pylintrc
|
||||||
Copyright: 2009-2023, Benjamin Drung <bdrung@ubuntu.com>
|
Copyright: 2009-2024, Benjamin Drung <bdrung@ubuntu.com>
|
||||||
2010, Evan Broder <evan@ebroder.net>
|
2010, Evan Broder <evan@ebroder.net>
|
||||||
2008, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
|
2008, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
|
||||||
2010-2011, Stefano Rivera <stefanor@ubuntu.com>
|
2010-2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||||
2017-2021, Dan Streetman <ddstreet@canonical.com>
|
2017-2021, Dan Streetman <ddstreet@canonical.com>
|
||||||
|
2024, Canonical Ltd.
|
||||||
License: ISC
|
License: ISC
|
||||||
Permission to use, copy, modify, and/or distribute this software for any
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
purpose with or without fee is hereby granted, provided that the above
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
3
debian/source/lintian-overrides
vendored
Normal file
3
debian/source/lintian-overrides
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
# pyc files are machine-generated; they're expected to have long lines and have unstated copyright
|
||||||
|
source: file-without-copyright-information *.pyc [debian/copyright]
|
||||||
|
source: very-long-line-length-in-source-file * > 512 [*.pyc:*]
|
@ -1,21 +1,21 @@
|
|||||||
.TH bitesize "1" "May 9 2010" "ubuntu-dev-tools"
|
.TH lp-bitesize "1" "May 9 2010" "ubuntu-dev-tools"
|
||||||
.SH NAME
|
.SH NAME
|
||||||
bitesize \- Add \fBbitesize\fR tag to bugs and add a comment.
|
lp-bitesize \- Add \fBbitesize\fR tag to bugs and add a comment.
|
||||||
|
|
||||||
.SH SYNOPSIS
|
.SH SYNOPSIS
|
||||||
.B bitesize \fR<\fIbug number\fR>
|
.B lp-bitesize \fR<\fIbug number\fR>
|
||||||
.br
|
.br
|
||||||
.B bitesize \-\-help
|
.B lp-bitesize \-\-help
|
||||||
|
|
||||||
.SH DESCRIPTION
|
.SH DESCRIPTION
|
||||||
\fBbitesize\fR adds a bitesize tag to the bug, if it's not there yet. It
|
\fBlp-bitesize\fR adds a bitesize tag to the bug, if it's not there yet. It
|
||||||
also adds a comment to the bug indicating that you are willing to help with
|
also adds a comment to the bug indicating that you are willing to help with
|
||||||
fixing it.
|
fixing it.
|
||||||
It checks for permission to operate on a given bug first,
|
It checks for permission to operate on a given bug first,
|
||||||
then perform required tasks on Launchpad.
|
then perform required tasks on Launchpad.
|
||||||
|
|
||||||
.SH OPTIONS
|
.SH OPTIONS
|
||||||
Listed below are the command line options for \fBbitesize\fR:
|
Listed below are the command line options for \fBlp-bitesize\fR:
|
||||||
.TP
|
.TP
|
||||||
.BR \-h ", " \-\-help
|
.BR \-h ", " \-\-help
|
||||||
Display a help message and exit.
|
Display a help message and exit.
|
||||||
@ -48,7 +48,7 @@ The default value for \fB--lpinstance\fR.
|
|||||||
.BR ubuntu\-dev\-tools (5)
|
.BR ubuntu\-dev\-tools (5)
|
||||||
|
|
||||||
.SH AUTHORS
|
.SH AUTHORS
|
||||||
\fBbitesize\fR and this manual page were written by Daniel Holbach
|
\fBlp-bitesize\fR and this manual page were written by Daniel Holbach
|
||||||
<daniel.holbach@canonical.com>.
|
<daniel.holbach@canonical.com>.
|
||||||
.PP
|
.PP
|
||||||
Both are released under the terms of the GNU General Public License, version 3.
|
Both are released under the terms of the GNU General Public License, version 3.
|
@ -20,7 +20,7 @@ like for example \fBpbuilder\-feisty\fP, \fBpbuilder\-sid\fP, \fBpbuilder\-gutsy
|
|||||||
.PP
|
.PP
|
||||||
The same applies to \fBcowbuilder\-dist\fP, which uses cowbuilder. The main
|
The same applies to \fBcowbuilder\-dist\fP, which uses cowbuilder. The main
|
||||||
difference between both is that pbuilder compresses the created chroot as a
|
difference between both is that pbuilder compresses the created chroot as a
|
||||||
a tarball, thus using less disc space but needing to uncompress (and possibly
|
tarball, thus using less disc space but needing to uncompress (and possibly
|
||||||
compress) its contents again on each run, and cowbuilder doesn't do this.
|
compress) its contents again on each run, and cowbuilder doesn't do this.
|
||||||
|
|
||||||
.SH USAGE
|
.SH USAGE
|
||||||
|
44
doc/pm-helper.1
Normal file
44
doc/pm-helper.1
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
.\" Copyright (C) 2023, Canonical Ltd.
|
||||||
|
.\"
|
||||||
|
.\" This program is free software; you can redistribute it and/or
|
||||||
|
.\" modify it under the terms of the GNU General Public License, version 3.
|
||||||
|
.\"
|
||||||
|
.\" This program is distributed in the hope that it will be useful,
|
||||||
|
.\" but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
.\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
.\" General Public License for more details.
|
||||||
|
.\"
|
||||||
|
.\" You should have received a copy of the GNU General Public License
|
||||||
|
.\" along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
.TH pm\-helper 1 "June 2023" ubuntu\-dev\-tools
|
||||||
|
|
||||||
|
.SH NAME
|
||||||
|
pm\-helper \- helper to guide a developer through proposed\-migration work
|
||||||
|
|
||||||
|
.SH SYNOPSIS
|
||||||
|
.B pm\-helper \fR[\fIoptions\fR] [\fIpackage\fR]
|
||||||
|
|
||||||
|
.SH DESCRIPTION
|
||||||
|
Claim a package from proposed\-migration to work on and get additional
|
||||||
|
information (such as the state of the package in Debian) that may be helpful
|
||||||
|
in unblocking it.
|
||||||
|
.PP
|
||||||
|
This tool is incomplete and under development.
|
||||||
|
|
||||||
|
.SH OPTIONS
|
||||||
|
.TP
|
||||||
|
.B \-l \fIINSTANCE\fR, \fB\-\-launchpad\fR=\fIINSTANCE\fR
|
||||||
|
Use the specified instance of Launchpad (e.g. "staging"), instead of
|
||||||
|
the default of "production".
|
||||||
|
.TP
|
||||||
|
.B \-v\fR, \fB--verbose\fR
|
||||||
|
be more verbose
|
||||||
|
.TP
|
||||||
|
\fB\-h\fR, \fB\-\-help\fR
|
||||||
|
Display a help message and exit
|
||||||
|
|
||||||
|
.SH AUTHORS
|
||||||
|
\fBpm\-helper\fR and this manpage were written by Steve Langasek
|
||||||
|
<steve.langasek@ubuntu.com>.
|
||||||
|
.PP
|
||||||
|
Both are released under the GPLv3 license.
|
15
doc/running-autopkgtests.1
Normal file
15
doc/running-autopkgtests.1
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
.TH running\-autopkgtests "1" "18 January 2024" "ubuntu-dev-tools"
|
||||||
|
.SH NAME
|
||||||
|
running\-autopkgtests \- dumps a list of currently running autopkgtests
|
||||||
|
|
||||||
|
.SH SYNOPSIS
|
||||||
|
.B running\-autopkgtests
|
||||||
|
|
||||||
|
.SH DESCRIPTION
|
||||||
|
Dumps a list of currently running and queued tests in Autopkgtest.
|
||||||
|
Pass --running to only see running tests, or --queued to only see
|
||||||
|
queued tests. Passing both will print both, which is the default behavior.
|
||||||
|
|
||||||
|
.SH AUTHOR
|
||||||
|
.B running\-autopkgtests
|
||||||
|
was written by Chris Peterson <chris.peterson@canonical.com>.
|
@ -58,7 +58,7 @@ Display more progress information.
|
|||||||
\fB\-F\fR, \fB\-\-fakesync\fR
|
\fB\-F\fR, \fB\-\-fakesync\fR
|
||||||
Perform a fakesync, to work around a tarball mismatch between Debian and
|
Perform a fakesync, to work around a tarball mismatch between Debian and
|
||||||
Ubuntu.
|
Ubuntu.
|
||||||
This option ignores blacklisting, and performs a local sync.
|
This option ignores blocklisting, and performs a local sync.
|
||||||
It implies \fB\-\-no\-lp\fR, and will leave a signed \fB.changes\fR file
|
It implies \fB\-\-no\-lp\fR, and will leave a signed \fB.changes\fR file
|
||||||
for you to upload.
|
for you to upload.
|
||||||
.TP
|
.TP
|
||||||
|
@ -1,9 +1,14 @@
|
|||||||
.TH UBUNTU-BUILD "1" "June 2010" "ubuntu-dev-tools"
|
.TH UBUNTU-BUILD "1" "Mar 2024" "ubuntu-dev-tools"
|
||||||
.SH NAME
|
.SH NAME
|
||||||
ubuntu-build \- command-line interface to Launchpad build operations
|
ubuntu-build \- command-line interface to Launchpad build operations
|
||||||
|
|
||||||
.SH SYNOPSIS
|
.SH SYNOPSIS
|
||||||
.B ubuntu-build <srcpackage> <release> <operation>
|
.nf
|
||||||
|
\fBubuntu-build\fR <srcpackage> <release> <operation>
|
||||||
|
\fBubuntu-build\fR --batch [--retry] [--rescore \fIPRIORITY\fR] [--arch \fIARCH\fR [...]]
|
||||||
|
[--series \fISERIES\fR] [--state \fIBUILD-STATE\fR]
|
||||||
|
[-A \fIARCHIVE\fR] [pkg]...
|
||||||
|
.fi
|
||||||
|
|
||||||
.SH DESCRIPTION
|
.SH DESCRIPTION
|
||||||
\fBubuntu-build\fR provides a command line interface to the Launchpad build
|
\fBubuntu-build\fR provides a command line interface to the Launchpad build
|
||||||
@ -58,14 +63,16 @@ Retry builds (give\-back).
|
|||||||
\fB\-\-rescore\fR=\fIPRIORITY\fR
|
\fB\-\-rescore\fR=\fIPRIORITY\fR
|
||||||
Rescore builds to <priority>.
|
Rescore builds to <priority>.
|
||||||
.IP
|
.IP
|
||||||
\fB\-\-arch2\fR=\fIARCHITECTURE\fR
|
\fB\-\-arch\fR=\fIARCHITECTURE\fR
|
||||||
Affect only 'architecture' (can be used several
|
Affect only 'architecture' (can be used several
|
||||||
times). Valid architectures are:
|
times). Valid architectures are:
|
||||||
arm64, amd64, i386, powerpc, ppc64el, riscv64, s390x.
|
arm64, amd64, i386, powerpc, ppc64el, riscv64, s390x.
|
||||||
|
.IP
|
||||||
|
\fB\-A=\fIARCHIVE\fR
|
||||||
|
Act on the named archive (ppa) instead of on the main Ubuntu archive.
|
||||||
|
|
||||||
.SH AUTHORS
|
.SH AUTHORS
|
||||||
\fBubuntu-build\fR was written by Martin Pitt <martin.pitt@canonical.com>, and
|
\fBubuntu-build\fR was written by Martin Pitt <martin.pitt@canonical.com>, and
|
||||||
this manual page was written by Jonathan Patrick Davies <jpds@ubuntu.com>.
|
this manual page was written by Jonathan Patrick Davies <jpds@ubuntu.com>.
|
||||||
.PP
|
.PP
|
||||||
Both are released under the terms of the GNU General Public License, version 3
|
Both are released under the terms of the GNU General Public License, version 3.
|
||||||
or (at your option) any later version.
|
|
||||||
|
@ -29,6 +29,8 @@ import logging
|
|||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import webbrowser
|
import webbrowser
|
||||||
|
from collections.abc import Iterable
|
||||||
|
from email.message import EmailMessage
|
||||||
|
|
||||||
import debianbts
|
import debianbts
|
||||||
from launchpadlib.launchpad import Launchpad
|
from launchpadlib.launchpad import Launchpad
|
||||||
@ -37,11 +39,10 @@ from ubuntutools import getLogger
|
|||||||
from ubuntutools.config import UDTConfig
|
from ubuntutools.config import UDTConfig
|
||||||
|
|
||||||
Logger = getLogger()
|
Logger = getLogger()
|
||||||
|
ATTACHMENT_MAX_SIZE = 2000
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def parse_args() -> argparse.Namespace:
|
||||||
bug_re = re.compile(r"bug=(\d+)")
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser()
|
parser = argparse.ArgumentParser()
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-b",
|
"-b",
|
||||||
@ -71,28 +72,15 @@ def main():
|
|||||||
"--no-conf", action="store_true", help="Don't read config files or environment variables."
|
"--no-conf", action="store_true", help="Don't read config files or environment variables."
|
||||||
)
|
)
|
||||||
parser.add_argument("bugs", nargs="+", help="Bug number(s) or URL(s)")
|
parser.add_argument("bugs", nargs="+", help="Bug number(s) or URL(s)")
|
||||||
options = parser.parse_args()
|
return parser.parse_args()
|
||||||
|
|
||||||
config = UDTConfig(options.no_conf)
|
|
||||||
if options.lpinstance is None:
|
|
||||||
options.lpinstance = config.get_value("LPINSTANCE")
|
|
||||||
|
|
||||||
if options.dry_run:
|
def get_bug_numbers(bug_list: Iterable[str]) -> list[int]:
|
||||||
launchpad = Launchpad.login_anonymously("ubuntu-dev-tools")
|
bug_re = re.compile(r"bug=(\d+)")
|
||||||
options.verbose = True
|
|
||||||
else:
|
|
||||||
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
|
|
||||||
|
|
||||||
if options.verbose:
|
|
||||||
Logger.setLevel(logging.DEBUG)
|
|
||||||
|
|
||||||
debian = launchpad.distributions["debian"]
|
|
||||||
ubuntu = launchpad.distributions["ubuntu"]
|
|
||||||
lp_debbugs = launchpad.bug_trackers.getByName(name="debbugs")
|
|
||||||
|
|
||||||
bug_nums = []
|
bug_nums = []
|
||||||
|
|
||||||
for bug_num in options.bugs:
|
for bug_num in bug_list:
|
||||||
if bug_num.startswith("http"):
|
if bug_num.startswith("http"):
|
||||||
# bug URL
|
# bug URL
|
||||||
match = bug_re.search(bug_num)
|
match = bug_re.search(bug_num)
|
||||||
@ -101,24 +89,81 @@ def main():
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
bug_num = match.groups()[0]
|
bug_num = match.groups()[0]
|
||||||
bug_num = bug_num.lstrip("#")
|
bug_num = bug_num.lstrip("#")
|
||||||
bug_num = int(bug_num)
|
bug_nums.append(int(bug_num))
|
||||||
bug_nums.append(bug_num)
|
|
||||||
|
|
||||||
bugs = debianbts.get_status(bug_nums)
|
return bug_nums
|
||||||
|
|
||||||
if not bugs:
|
|
||||||
Logger.error("Cannot find any of the listed bugs")
|
def walk_multipart_message(message: EmailMessage) -> tuple[str, list[tuple[int, EmailMessage]]]:
|
||||||
sys.exit(1)
|
summary = ""
|
||||||
|
attachments = []
|
||||||
|
i = 1
|
||||||
|
for part in message.walk():
|
||||||
|
content_type = part.get_content_type()
|
||||||
|
|
||||||
|
if content_type.startswith("multipart/"):
|
||||||
|
# we're already iterating on multipart items
|
||||||
|
# let's just skip the multipart extra metadata
|
||||||
|
continue
|
||||||
|
if content_type == "application/pgp-signature":
|
||||||
|
# we're not interested in importing pgp signatures
|
||||||
|
continue
|
||||||
|
|
||||||
|
if part.is_attachment():
|
||||||
|
attachments.append((i, part))
|
||||||
|
elif content_type.startswith("image/"):
|
||||||
|
# images here are not attachment, they are inline, but Launchpad can't handle that,
|
||||||
|
# so let's add them as attachments
|
||||||
|
summary += f"Message part #{i}\n"
|
||||||
|
summary += f"[inline image '{part.get_filename()}']\n\n"
|
||||||
|
attachments.append((i, part))
|
||||||
|
elif content_type.startswith("text/html"):
|
||||||
|
summary += f"Message part #{i}\n"
|
||||||
|
summary += "[inline html]\n\n"
|
||||||
|
attachments.append((i, part))
|
||||||
|
elif content_type == "text/plain":
|
||||||
|
summary += f"Message part #{i}\n"
|
||||||
|
summary += part.get_content() + "\n"
|
||||||
|
else:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"""Unknown message part
|
||||||
|
Your Debian bug is too weird to be imported in Launchpad, sorry.
|
||||||
|
You can fix that by patching this script in ubuntu-dev-tools.
|
||||||
|
Faulty message part:
|
||||||
|
{part}"""
|
||||||
|
)
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
return summary, attachments
|
||||||
|
|
||||||
|
|
||||||
|
def process_bugs(
|
||||||
|
bugs: Iterable[debianbts.Bugreport],
|
||||||
|
launchpad: Launchpad,
|
||||||
|
package: str,
|
||||||
|
dry_run: bool = True,
|
||||||
|
browserless: bool = False,
|
||||||
|
) -> bool:
|
||||||
|
debian = launchpad.distributions["debian"]
|
||||||
|
ubuntu = launchpad.distributions["ubuntu"]
|
||||||
|
lp_debbugs = launchpad.bug_trackers.getByName(name="debbugs")
|
||||||
|
|
||||||
err = False
|
err = False
|
||||||
for bug in bugs:
|
for bug in bugs:
|
||||||
ubupackage = package = bug.source
|
ubupackage = bug.source
|
||||||
if options.package:
|
if package:
|
||||||
ubupackage = options.package
|
ubupackage = package
|
||||||
bug_num = bug.bug_num
|
bug_num = bug.bug_num
|
||||||
subject = bug.subject
|
subject = bug.subject
|
||||||
log = debianbts.get_bug_log(bug_num)
|
log = debianbts.get_bug_log(bug_num)
|
||||||
summary = log[0]["message"].get_payload()
|
message = log[0]["message"]
|
||||||
|
assert isinstance(message, EmailMessage)
|
||||||
|
attachments: list[tuple[int, EmailMessage]] = []
|
||||||
|
if message.is_multipart():
|
||||||
|
summary, attachments = walk_multipart_message(message)
|
||||||
|
else:
|
||||||
|
summary = str(message.get_payload())
|
||||||
|
|
||||||
target = ubuntu.getSourcePackage(name=ubupackage)
|
target = ubuntu.getSourcePackage(name=ubupackage)
|
||||||
if target is None:
|
if target is None:
|
||||||
Logger.error(
|
Logger.error(
|
||||||
@ -137,24 +182,73 @@ def main():
|
|||||||
Logger.debug("Subject: %s", subject)
|
Logger.debug("Subject: %s", subject)
|
||||||
Logger.debug("Description: ")
|
Logger.debug("Description: ")
|
||||||
Logger.debug(description)
|
Logger.debug(description)
|
||||||
|
for i, attachment in attachments:
|
||||||
|
Logger.debug("Attachment #%s (%s)", i, attachment.get_filename() or "inline")
|
||||||
|
Logger.debug("Content:")
|
||||||
|
if attachment.get_content_type() == "text/plain":
|
||||||
|
content = attachment.get_content()
|
||||||
|
if len(content) > ATTACHMENT_MAX_SIZE:
|
||||||
|
content = (
|
||||||
|
content[:ATTACHMENT_MAX_SIZE]
|
||||||
|
+ f" [attachment cropped after {ATTACHMENT_MAX_SIZE} characters...]"
|
||||||
|
)
|
||||||
|
Logger.debug(content)
|
||||||
|
else:
|
||||||
|
Logger.debug("[data]")
|
||||||
|
|
||||||
if options.dry_run:
|
if dry_run:
|
||||||
Logger.info("Dry-Run: not creating Ubuntu bug.")
|
Logger.info("Dry-Run: not creating Ubuntu bug.")
|
||||||
continue
|
continue
|
||||||
|
|
||||||
u_bug = launchpad.bugs.createBug(target=target, title=subject, description=description)
|
u_bug = launchpad.bugs.createBug(target=target, title=subject, description=description)
|
||||||
|
for i, attachment in attachments:
|
||||||
|
name = f"#{i}-{attachment.get_filename() or "inline"}"
|
||||||
|
content = attachment.get_content()
|
||||||
|
if isinstance(content, str):
|
||||||
|
# Launchpad only wants bytes
|
||||||
|
content = content.encode()
|
||||||
|
u_bug.addAttachment(
|
||||||
|
filename=name,
|
||||||
|
data=content,
|
||||||
|
comment=f"Imported from Debian bug http://bugs.debian.org/{bug_num}",
|
||||||
|
)
|
||||||
|
d_sp = debian.getSourcePackage(name=package)
|
||||||
|
if d_sp is None and package:
|
||||||
d_sp = debian.getSourcePackage(name=package)
|
d_sp = debian.getSourcePackage(name=package)
|
||||||
if d_sp is None and options.package:
|
|
||||||
d_sp = debian.getSourcePackage(name=options.package)
|
|
||||||
d_task = u_bug.addTask(target=d_sp)
|
d_task = u_bug.addTask(target=d_sp)
|
||||||
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
|
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
|
||||||
d_task.bug_watch = d_watch
|
d_task.bug_watch = d_watch
|
||||||
d_task.lp_save()
|
d_task.lp_save()
|
||||||
Logger.info("Opened %s", u_bug.web_link)
|
Logger.info("Opened %s", u_bug.web_link)
|
||||||
if not options.browserless:
|
if not browserless:
|
||||||
webbrowser.open(u_bug.web_link)
|
webbrowser.open(u_bug.web_link)
|
||||||
|
|
||||||
if err:
|
return err
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
options = parse_args()
|
||||||
|
|
||||||
|
config = UDTConfig(options.no_conf)
|
||||||
|
if options.lpinstance is None:
|
||||||
|
options.lpinstance = config.get_value("LPINSTANCE")
|
||||||
|
|
||||||
|
if options.dry_run:
|
||||||
|
launchpad = Launchpad.login_anonymously("ubuntu-dev-tools")
|
||||||
|
options.verbose = True
|
||||||
|
else:
|
||||||
|
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
|
||||||
|
|
||||||
|
if options.verbose:
|
||||||
|
Logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
bugs = debianbts.get_status(get_bug_numbers(options.bugs))
|
||||||
|
|
||||||
|
if not bugs:
|
||||||
|
Logger.error("Cannot find any of the listed bugs")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if process_bugs(bugs, launchpad, options.package, options.dry_run, options.browserless):
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
53
mk-sbuild
53
mk-sbuild
@ -155,6 +155,7 @@ proxy="_unset_"
|
|||||||
DEBOOTSTRAP_NO_CHECK_GPG=0
|
DEBOOTSTRAP_NO_CHECK_GPG=0
|
||||||
EATMYDATA=1
|
EATMYDATA=1
|
||||||
CCACHE=0
|
CCACHE=0
|
||||||
|
USE_PKGBINARYMANGLER=0
|
||||||
|
|
||||||
while :; do
|
while :; do
|
||||||
case "$1" in
|
case "$1" in
|
||||||
@ -303,10 +304,26 @@ if [ ! -w /var/lib/sbuild ]; then
|
|||||||
# Prepare a usable default .sbuildrc
|
# Prepare a usable default .sbuildrc
|
||||||
if [ ! -e ~/.sbuildrc ]; then
|
if [ ! -e ~/.sbuildrc ]; then
|
||||||
cat > ~/.sbuildrc <<EOM
|
cat > ~/.sbuildrc <<EOM
|
||||||
# *** VERIFY AND UPDATE \$mailto and \$maintainer_name BELOW ***
|
# *** THIS COMMAND IS DEPRECATED ***
|
||||||
|
#
|
||||||
|
# In sbuild 0.87.0 and later, the unshare backend is available. This is
|
||||||
|
# expected to become the default in a future release.
|
||||||
|
#
|
||||||
|
# This is the new preferred way of building Debian packages, making the manual
|
||||||
|
# creation of schroots no longer necessary. To retain the default behavior,
|
||||||
|
# you may remove this comment block and continue.
|
||||||
|
#
|
||||||
|
# To test the unshare backend while retaining the default settings, run sbuild
|
||||||
|
# with --chroot-mode=unshare like this:
|
||||||
|
# $ sbuild --chroot-mode=unshare --dist=unstable hello
|
||||||
|
#
|
||||||
|
# To switch to the unshare backend by default (recommended), uncomment the
|
||||||
|
# following lines and delete the rest of the file (with the exception of the
|
||||||
|
# last two lines):
|
||||||
|
#\$chroot_mode = 'unshare';
|
||||||
|
#\$unshare_mmdebstrap_keep_tarball = 1;
|
||||||
|
|
||||||
# Mail address where logs are sent to (mandatory, no default!)
|
# *** VERIFY AND UPDATE \$mailto and \$maintainer_name BELOW ***
|
||||||
\$mailto = '$USER';
|
|
||||||
|
|
||||||
# Name to use as override in .changes files for the Maintainer: field
|
# Name to use as override in .changes files for the Maintainer: field
|
||||||
#\$maintainer_name='$USER <$USER@localhost>';
|
#\$maintainer_name='$USER <$USER@localhost>';
|
||||||
@ -651,6 +668,7 @@ ubuntu)
|
|||||||
if ubuntu_dist_ge "$RELEASE" "edgy"; then
|
if ubuntu_dist_ge "$RELEASE" "edgy"; then
|
||||||
# Add pkgbinarymangler (edgy and later)
|
# Add pkgbinarymangler (edgy and later)
|
||||||
BUILD_PKGS="$BUILD_PKGS pkgbinarymangler"
|
BUILD_PKGS="$BUILD_PKGS pkgbinarymangler"
|
||||||
|
USE_PKGBINARYMANGLER=1
|
||||||
# Disable recommends for a smaller chroot (gutsy and later only)
|
# Disable recommends for a smaller chroot (gutsy and later only)
|
||||||
if ubuntu_dist_ge "$RELEASE" "gutsy"; then
|
if ubuntu_dist_ge "$RELEASE" "gutsy"; then
|
||||||
BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
|
BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
|
||||||
@ -754,8 +772,8 @@ if [ "$CHROOT_ARCH" != "$HOST_ARCH" ] ; then
|
|||||||
;;
|
;;
|
||||||
# Sometimes we do
|
# Sometimes we do
|
||||||
*)
|
*)
|
||||||
DEBOOTSTRAP_COMMAND=qemu-debootstrap
|
DEBOOTSTRAP_COMMAND=debootstrap
|
||||||
if ! which "$DEBOOTSTRAP_COMMAND"; then
|
if ! which "qemu-x86_64-static"; then
|
||||||
sudo apt-get install qemu-user-static
|
sudo apt-get install qemu-user-static
|
||||||
fi
|
fi
|
||||||
;;
|
;;
|
||||||
@ -910,8 +928,8 @@ if [ -n "$TEMP_PREFERENCES" ]; then
|
|||||||
sudo mv "$TEMP_PREFERENCES" $MNT/etc/apt/preferences.d/proposed.pref
|
sudo mv "$TEMP_PREFERENCES" $MNT/etc/apt/preferences.d/proposed.pref
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Copy the timezone (comment this out if you want to leave the chroot at UTC)
|
# Copy the timezone (uncomment this if you want to use your local time zone)
|
||||||
sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/
|
#sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/
|
||||||
# Create a schroot entry for this chroot
|
# Create a schroot entry for this chroot
|
||||||
TEMP_SCHROOTCONF=`mktemp -t schrootconf-XXXXXX`
|
TEMP_SCHROOTCONF=`mktemp -t schrootconf-XXXXXX`
|
||||||
TEMPLATE_SCHROOTCONF=~/.mk-sbuild.schroot.conf
|
TEMPLATE_SCHROOTCONF=~/.mk-sbuild.schroot.conf
|
||||||
@ -1030,6 +1048,25 @@ EOF
|
|||||||
EOM
|
EOM
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [ "$USE_PKGBINARYMANGLER" = 1 ]; then
|
||||||
|
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
|
||||||
|
mkdir -p /etc/pkgbinarymangler/
|
||||||
|
cat > /etc/pkgbinarymangler/maintainermangler.conf <<EOF
|
||||||
|
# pkgmaintainermangler configuration file
|
||||||
|
|
||||||
|
# pkgmaintainermangler will do nothing unless enable is set to "true"
|
||||||
|
enable: true
|
||||||
|
|
||||||
|
# Configure what happens if /CurrentlyBuilding is present, but invalid
|
||||||
|
# (i. e. it does not contain a Package: field). If "ignore" (default),
|
||||||
|
# the file is ignored (i. e. the Maintainer field is mangled) and a
|
||||||
|
# warning is printed. If "fail" (or any other value), pkgmaintainermangler
|
||||||
|
# exits with an error, which causes a package build to fail.
|
||||||
|
invalid_currentlybuilding: ignore
|
||||||
|
EOF
|
||||||
|
EOM
|
||||||
|
fi
|
||||||
|
|
||||||
if [ -n "$TARGET_ARCH" ]; then
|
if [ -n "$TARGET_ARCH" ]; then
|
||||||
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
|
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
|
||||||
# Configure target architecture
|
# Configure target architecture
|
||||||
@ -1048,7 +1085,7 @@ apt-get update || true
|
|||||||
echo set debconf/frontend Noninteractive | debconf-communicate
|
echo set debconf/frontend Noninteractive | debconf-communicate
|
||||||
echo set debconf/priority critical | debconf-communicate
|
echo set debconf/priority critical | debconf-communicate
|
||||||
# Install basic build tool set, trying to match buildd
|
# Install basic build tool set, trying to match buildd
|
||||||
apt-get -y --force-yes install $BUILD_PKGS
|
apt-get -y --force-yes -o Dpkg::Options::="--force-confold" install $BUILD_PKGS
|
||||||
# Set up expected /dev entries
|
# Set up expected /dev entries
|
||||||
if [ ! -r /dev/stdin ]; then ln -s /proc/self/fd/0 /dev/stdin; fi
|
if [ ! -r /dev/stdin ]; then ln -s /proc/self/fd/0 /dev/stdin; fi
|
||||||
if [ ! -r /dev/stdout ]; then ln -s /proc/self/fd/1 /dev/stdout; fi
|
if [ ! -r /dev/stdout ]; then ln -s /proc/self/fd/1 /dev/stdout; fi
|
||||||
|
@ -95,7 +95,11 @@ class PbuilderDist:
|
|||||||
# Builder
|
# Builder
|
||||||
self.builder = builder
|
self.builder = builder
|
||||||
|
|
||||||
self._debian_distros = DebianDistroInfo().all + ["stable", "testing", "unstable"]
|
# Distro info
|
||||||
|
self.debian_distro_info = DebianDistroInfo()
|
||||||
|
self.ubuntu_distro_info = UbuntuDistroInfo()
|
||||||
|
|
||||||
|
self._debian_distros = self.debian_distro_info.all + ["stable", "testing", "unstable"]
|
||||||
|
|
||||||
# Ensure that the used builder is installed
|
# Ensure that the used builder is installed
|
||||||
paths = set(os.environ["PATH"].split(":"))
|
paths = set(os.environ["PATH"].split(":"))
|
||||||
@ -151,8 +155,9 @@ class PbuilderDist:
|
|||||||
if not os.path.isfile(os.path.join("/usr/share/debootstrap/scripts/", distro)):
|
if not os.path.isfile(os.path.join("/usr/share/debootstrap/scripts/", distro)):
|
||||||
if os.path.isdir("/usr/share/debootstrap/scripts/"):
|
if os.path.isdir("/usr/share/debootstrap/scripts/"):
|
||||||
# Debian experimental doesn't have a debootstrap file but
|
# Debian experimental doesn't have a debootstrap file but
|
||||||
# should work nevertheless.
|
# should work nevertheless. Ubuntu releases automatically use
|
||||||
if distro not in self._debian_distros:
|
# the gutsy script as of debootstrap 1.0.128+nmu2ubuntu1.1.
|
||||||
|
if distro not in (self._debian_distros + self.ubuntu_distro_info.all):
|
||||||
question = (
|
question = (
|
||||||
f'Warning: Unknown distribution "{distro}". ' "Do you want to continue"
|
f'Warning: Unknown distribution "{distro}". ' "Do you want to continue"
|
||||||
)
|
)
|
||||||
@ -288,23 +293,24 @@ class PbuilderDist:
|
|||||||
othermirrors.append(repo)
|
othermirrors.append(repo)
|
||||||
|
|
||||||
if self.target_distro in self._debian_distros:
|
if self.target_distro in self._debian_distros:
|
||||||
debian_info = DebianDistroInfo()
|
|
||||||
try:
|
try:
|
||||||
codename = debian_info.codename(self.target_distro, default=self.target_distro)
|
codename = self.debian_distro_info.codename(
|
||||||
|
self.target_distro, default=self.target_distro
|
||||||
|
)
|
||||||
except DistroDataOutdated as error:
|
except DistroDataOutdated as error:
|
||||||
Logger.warning(error)
|
Logger.warning(error)
|
||||||
if codename in (debian_info.devel(), "experimental"):
|
if codename in (self.debian_distro_info.devel(), "experimental"):
|
||||||
self.enable_security = False
|
self.enable_security = False
|
||||||
self.enable_updates = False
|
self.enable_updates = False
|
||||||
self.enable_proposed = False
|
self.enable_proposed = False
|
||||||
elif codename in (debian_info.testing(), "testing"):
|
elif codename in (self.debian_distro_info.testing(), "testing"):
|
||||||
self.enable_updates = False
|
self.enable_updates = False
|
||||||
|
|
||||||
if self.enable_security:
|
if self.enable_security:
|
||||||
pocket = "-security"
|
pocket = "-security"
|
||||||
with suppress(ValueError):
|
with suppress(ValueError):
|
||||||
# before bullseye (version 11) security suite is /updates
|
# before bullseye (version 11) security suite is /updates
|
||||||
if float(debian_info.version(codename)) < 11.0:
|
if float(self.debian_distro_info.version(codename)) < 11.0:
|
||||||
pocket = "/updates"
|
pocket = "/updates"
|
||||||
othermirrors.append(
|
othermirrors.append(
|
||||||
f"deb {config.get_value('DEBSEC_MIRROR')}"
|
f"deb {config.get_value('DEBSEC_MIRROR')}"
|
||||||
@ -322,7 +328,7 @@ class PbuilderDist:
|
|||||||
aptcache = os.path.join(self.base, "aptcache", "debian")
|
aptcache = os.path.join(self.base, "aptcache", "debian")
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
dev_release = self.target_distro == UbuntuDistroInfo().devel()
|
dev_release = self.target_distro == self.ubuntu_distro_info.devel()
|
||||||
except DistroDataOutdated as error:
|
except DistroDataOutdated as error:
|
||||||
Logger.warning(error)
|
Logger.warning(error)
|
||||||
dev_release = True
|
dev_release = True
|
||||||
@ -492,7 +498,7 @@ def main():
|
|||||||
("powerpc", "ppc64"),
|
("powerpc", "ppc64"),
|
||||||
("ppc64", "powerpc"),
|
("ppc64", "powerpc"),
|
||||||
]:
|
]:
|
||||||
args += ["--debootstrap", "qemu-debootstrap"]
|
args += ["--debootstrap", "debootstrap"]
|
||||||
|
|
||||||
if "mainonly" in sys.argv or "--main-only" in sys.argv:
|
if "mainonly" in sys.argv or "--main-only" in sys.argv:
|
||||||
app.extra_components = False
|
app.extra_components = False
|
||||||
|
142
pm-helper
Executable file
142
pm-helper
Executable file
@ -0,0 +1,142 @@
|
|||||||
|
#!/usr/bin/python3
|
||||||
|
# Find the next thing to work on for proposed-migration
|
||||||
|
# Copyright (C) 2023 Canonical Ltd.
|
||||||
|
# Author: Steve Langasek <steve.langasek@ubuntu.com>
|
||||||
|
|
||||||
|
# This program is free software; you can redistribute it and/or
|
||||||
|
# modify it under the terms of the GNU General Public License, version 3.
|
||||||
|
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
# General Public License for more details.
|
||||||
|
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
import lzma
|
||||||
|
import sys
|
||||||
|
import webbrowser
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
from launchpadlib.launchpad import Launchpad
|
||||||
|
|
||||||
|
from ubuntutools.utils import get_url
|
||||||
|
|
||||||
|
# proposed-migration is only concerned with the devel series; unlike other
|
||||||
|
# tools, don't make this configurable
|
||||||
|
excuses_url = "https://ubuntu-archive-team.ubuntu.com/proposed-migration/update_excuses.yaml.xz"
|
||||||
|
|
||||||
|
|
||||||
|
def get_proposed_version(excuses, package):
|
||||||
|
for k in excuses["sources"]:
|
||||||
|
if k["source"] == package:
|
||||||
|
return k.get("new-version")
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def claim_excuses_bug(launchpad, bug, package):
|
||||||
|
print(f"LP: #{bug.id}: {bug.title}")
|
||||||
|
ubuntu = launchpad.distributions["ubuntu"]
|
||||||
|
series = ubuntu.current_series.fullseriesname
|
||||||
|
|
||||||
|
for task in bug.bug_tasks:
|
||||||
|
# targeting to a series doesn't make the default task disappear,
|
||||||
|
# it just makes it useless
|
||||||
|
if task.bug_target_name == f"{package} ({series})":
|
||||||
|
our_task = task
|
||||||
|
break
|
||||||
|
if task.bug_target_name == f"{package} (Ubuntu)":
|
||||||
|
our_task = task
|
||||||
|
|
||||||
|
if our_task.assignee == launchpad.me:
|
||||||
|
print("Bug already assigned to you.")
|
||||||
|
return True
|
||||||
|
if our_task.assignee:
|
||||||
|
print(f"Currently assigned to {our_task.assignee.name}")
|
||||||
|
|
||||||
|
print("""Do you want to claim this bug? [yN] """, end="")
|
||||||
|
sys.stdout.flush()
|
||||||
|
response = sys.stdin.readline()
|
||||||
|
if response.strip().lower().startswith("y"):
|
||||||
|
our_task.assignee = launchpad.me
|
||||||
|
our_task.lp_save()
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def create_excuses_bug(launchpad, package, version):
|
||||||
|
print("Will open a new bug")
|
||||||
|
bug = launchpad.bugs.createBug(
|
||||||
|
title=f"proposed-migration for {package} {version}",
|
||||||
|
tags=("update-excuse"),
|
||||||
|
target=f"https://api.launchpad.net/devel/ubuntu/+source/{package}",
|
||||||
|
description=f"{package} {version} is stuck in -proposed.",
|
||||||
|
)
|
||||||
|
|
||||||
|
task = bug.bug_tasks[0]
|
||||||
|
task.assignee = launchpad.me
|
||||||
|
task.lp_save()
|
||||||
|
|
||||||
|
print(f"Opening {bug.web_link} in browser")
|
||||||
|
webbrowser.open(bug.web_link)
|
||||||
|
return bug
|
||||||
|
|
||||||
|
|
||||||
|
def has_excuses_bugs(launchpad, package):
|
||||||
|
ubuntu = launchpad.distributions["ubuntu"]
|
||||||
|
pkg = ubuntu.getSourcePackage(name=package)
|
||||||
|
if not pkg:
|
||||||
|
raise ValueError(f"No such source package: {package}")
|
||||||
|
|
||||||
|
tasks = pkg.searchTasks(tags=["update-excuse"], order_by=["id"])
|
||||||
|
|
||||||
|
bugs = [task.bug for task in tasks]
|
||||||
|
if not bugs:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if len(bugs) == 1:
|
||||||
|
print(f"There is 1 open update-excuse bug against {package}")
|
||||||
|
else:
|
||||||
|
print(f"There are {len(bugs)} open update-excuse bugs against {package}")
|
||||||
|
|
||||||
|
for bug in bugs:
|
||||||
|
if claim_excuses_bug(launchpad, bug, package):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = ArgumentParser()
|
||||||
|
parser.add_argument("-l", "--launchpad", dest="launchpad_instance", default="production")
|
||||||
|
parser.add_argument(
|
||||||
|
"-v", "--verbose", default=False, action="store_true", help="be more verbose"
|
||||||
|
)
|
||||||
|
parser.add_argument("package", nargs="?", help="act on this package only")
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
args.launchpad = Launchpad.login_with("pm-helper", args.launchpad_instance, version="devel")
|
||||||
|
|
||||||
|
f = get_url(excuses_url, False)
|
||||||
|
with lzma.open(f) as lzma_f:
|
||||||
|
excuses = yaml.load(lzma_f, Loader=yaml.CSafeLoader)
|
||||||
|
|
||||||
|
if args.package:
|
||||||
|
try:
|
||||||
|
if not has_excuses_bugs(args.launchpad, args.package):
|
||||||
|
proposed_version = get_proposed_version(excuses, args.package)
|
||||||
|
if not proposed_version:
|
||||||
|
print(f"Package {args.package} not found in -proposed.")
|
||||||
|
sys.exit(1)
|
||||||
|
create_excuses_bug(args.launchpad, args.package, proposed_version)
|
||||||
|
except ValueError as e:
|
||||||
|
sys.stderr.write(f"{e}\n")
|
||||||
|
else:
|
||||||
|
pass # for now
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(main())
|
@ -1,5 +1,6 @@
|
|||||||
python-debian
|
python-debian
|
||||||
python-debianbts
|
python-debianbts
|
||||||
|
dateutil
|
||||||
distro-info
|
distro-info
|
||||||
httplib2
|
httplib2
|
||||||
launchpadlib
|
launchpadlib
|
||||||
|
@ -183,7 +183,7 @@ def display_verbose(package, values):
|
|||||||
Logger.info("No reverse dependencies found")
|
Logger.info("No reverse dependencies found")
|
||||||
return
|
return
|
||||||
|
|
||||||
def log_package(values, package, arch, dependency, offset=0):
|
def log_package(values, package, arch, dependency, visited, offset=0):
|
||||||
line = f"{' ' * offset}* {package}"
|
line = f"{' ' * offset}* {package}"
|
||||||
if all_archs and set(arch) != all_archs:
|
if all_archs and set(arch) != all_archs:
|
||||||
line += f" [{' '.join(sorted(arch))}]"
|
line += f" [{' '.join(sorted(arch))}]"
|
||||||
@ -192,6 +192,9 @@ def display_verbose(package, values):
|
|||||||
line += " " * (30 - len(line))
|
line += " " * (30 - len(line))
|
||||||
line += f" (for {dependency})"
|
line += f" (for {dependency})"
|
||||||
Logger.info(line)
|
Logger.info(line)
|
||||||
|
if package in visited:
|
||||||
|
return
|
||||||
|
visited = visited.copy().add(package)
|
||||||
data = values.get(package)
|
data = values.get(package)
|
||||||
if data:
|
if data:
|
||||||
offset = offset + 1
|
offset = offset + 1
|
||||||
@ -202,6 +205,7 @@ def display_verbose(package, values):
|
|||||||
rdep["Package"],
|
rdep["Package"],
|
||||||
rdep.get("Architectures", all_archs),
|
rdep.get("Architectures", all_archs),
|
||||||
rdep.get("Dependency"),
|
rdep.get("Dependency"),
|
||||||
|
visited,
|
||||||
offset,
|
offset,
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -223,6 +227,7 @@ def display_verbose(package, values):
|
|||||||
rdep["Package"],
|
rdep["Package"],
|
||||||
rdep.get("Architectures", all_archs),
|
rdep.get("Architectures", all_archs),
|
||||||
rdep.get("Dependency"),
|
rdep.get("Dependency"),
|
||||||
|
{package},
|
||||||
)
|
)
|
||||||
Logger.info("")
|
Logger.info("")
|
||||||
|
|
||||||
|
81
running-autopkgtests
Executable file
81
running-autopkgtests
Executable file
@ -0,0 +1,81 @@
|
|||||||
|
#!/usr/bin/python3
|
||||||
|
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
|
||||||
|
|
||||||
|
# Authors:
|
||||||
|
# Andy P. Whitcroft
|
||||||
|
# Christian Ehrhardt
|
||||||
|
# Chris Peterson <chris.peterson@canonical.com>
|
||||||
|
#
|
||||||
|
# Copyright (C) 2024 Canonical Ltd.
|
||||||
|
# This program is free software: you can redistribute it and/or modify it
|
||||||
|
# under the terms of the GNU General Public License version 3, as published
|
||||||
|
# by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful, but
|
||||||
|
# WITHOUT ANY WARRANTY; without even the implied warranties of
|
||||||
|
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
|
||||||
|
# See the GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
"""Dumps a list of currently running tests in Autopkgtest"""
|
||||||
|
|
||||||
|
__example__ = """
|
||||||
|
Display first listed test running on amd64 hardware:
|
||||||
|
$ running-autopkgtests | grep amd64 | head -n1
|
||||||
|
R 0:01:40 systemd-upstream - focal amd64\
|
||||||
|
upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb',\
|
||||||
|
'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true',\
|
||||||
|
'UPSTREAM_PULL_REQUEST=23153',\
|
||||||
|
'GITHUB_STATUSES_URL=https://api.github.com/repos/\
|
||||||
|
systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from argparse import ArgumentParser, RawDescriptionHelpFormatter
|
||||||
|
|
||||||
|
from ubuntutools.running_autopkgtests import get_queued, get_running
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args():
|
||||||
|
description = (
|
||||||
|
"Dumps a list of currently running and queued tests in Autopkgtest. "
|
||||||
|
"Pass --running to only see running tests, or --queued to only see "
|
||||||
|
"queued tests. Passing both will print both, which is the default behavior. "
|
||||||
|
)
|
||||||
|
|
||||||
|
parser = ArgumentParser(
|
||||||
|
prog="running-autopkgtests",
|
||||||
|
description=description,
|
||||||
|
epilog=f"example: {__example__}",
|
||||||
|
formatter_class=RawDescriptionHelpFormatter,
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-r", "--running", action="store_true", help="Print runnning autopkgtests (default: true)"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-q", "--queued", action="store_true", help="Print queued autopkgtests (default: true)"
|
||||||
|
)
|
||||||
|
|
||||||
|
options = parser.parse_args()
|
||||||
|
|
||||||
|
# If neither flag was specified, default to both not neither
|
||||||
|
if not options.running and not options.queued:
|
||||||
|
options.running = True
|
||||||
|
options.queued = True
|
||||||
|
|
||||||
|
return options
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> int:
|
||||||
|
args = parse_args()
|
||||||
|
if args.running:
|
||||||
|
print(get_running())
|
||||||
|
if args.queued:
|
||||||
|
print(get_queued())
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(main())
|
4
setup.py
4
setup.py
@ -32,17 +32,18 @@ def make_pep440_compliant(version: str) -> str:
|
|||||||
|
|
||||||
scripts = [
|
scripts = [
|
||||||
"backportpackage",
|
"backportpackage",
|
||||||
"bitesize",
|
|
||||||
"check-mir",
|
"check-mir",
|
||||||
"check-symbols",
|
"check-symbols",
|
||||||
"dch-repeat",
|
"dch-repeat",
|
||||||
"grab-merge",
|
"grab-merge",
|
||||||
"grep-merges",
|
"grep-merges",
|
||||||
"import-bug-from-debian",
|
"import-bug-from-debian",
|
||||||
|
"lp-bitesize",
|
||||||
"merge-changelog",
|
"merge-changelog",
|
||||||
"mk-sbuild",
|
"mk-sbuild",
|
||||||
"pbuilder-dist",
|
"pbuilder-dist",
|
||||||
"pbuilder-dist-simple",
|
"pbuilder-dist-simple",
|
||||||
|
"pm-helper",
|
||||||
"pull-pkg",
|
"pull-pkg",
|
||||||
"pull-debian-debdiff",
|
"pull-debian-debdiff",
|
||||||
"pull-debian-source",
|
"pull-debian-source",
|
||||||
@ -64,6 +65,7 @@ scripts = [
|
|||||||
"requestbackport",
|
"requestbackport",
|
||||||
"requestsync",
|
"requestsync",
|
||||||
"reverse-depends",
|
"reverse-depends",
|
||||||
|
"running-autopkgtests",
|
||||||
"seeded-in-ubuntu",
|
"seeded-in-ubuntu",
|
||||||
"setup-packaging-environment",
|
"setup-packaging-environment",
|
||||||
"sponsor-patch",
|
"sponsor-patch",
|
||||||
|
122
syncpackage
122
syncpackage
@ -49,6 +49,7 @@ from ubuntutools.requestsync.mail import get_debian_srcpkg as requestsync_mail_g
|
|||||||
from ubuntutools.version import Version
|
from ubuntutools.version import Version
|
||||||
|
|
||||||
Logger = getLogger()
|
Logger = getLogger()
|
||||||
|
cached_sync_blocklist = None
|
||||||
|
|
||||||
|
|
||||||
def remove_signature(dscname):
|
def remove_signature(dscname):
|
||||||
@ -143,7 +144,7 @@ def sync_dsc(
|
|||||||
if ubuntu_ver.is_modified_in_ubuntu():
|
if ubuntu_ver.is_modified_in_ubuntu():
|
||||||
if not force:
|
if not force:
|
||||||
Logger.error("--force is required to discard Ubuntu changes.")
|
Logger.error("--force is required to discard Ubuntu changes.")
|
||||||
sys.exit(1)
|
return None
|
||||||
|
|
||||||
Logger.warning(
|
Logger.warning(
|
||||||
"Overwriting modified Ubuntu version %s, setting current version to %s",
|
"Overwriting modified Ubuntu version %s, setting current version to %s",
|
||||||
@ -157,7 +158,7 @@ def sync_dsc(
|
|||||||
src_pkg.pull()
|
src_pkg.pull()
|
||||||
except DownloadError as e:
|
except DownloadError as e:
|
||||||
Logger.error("Failed to download: %s", str(e))
|
Logger.error("Failed to download: %s", str(e))
|
||||||
sys.exit(1)
|
return None
|
||||||
src_pkg.unpack()
|
src_pkg.unpack()
|
||||||
|
|
||||||
needs_fakesync = not (need_orig or ubu_pkg.verify_orig())
|
needs_fakesync = not (need_orig or ubu_pkg.verify_orig())
|
||||||
@ -166,13 +167,13 @@ def sync_dsc(
|
|||||||
Logger.warning("Performing a fakesync")
|
Logger.warning("Performing a fakesync")
|
||||||
elif not needs_fakesync and fakesync:
|
elif not needs_fakesync and fakesync:
|
||||||
Logger.error("Fakesync not required, aborting.")
|
Logger.error("Fakesync not required, aborting.")
|
||||||
sys.exit(1)
|
return None
|
||||||
elif needs_fakesync and not fakesync:
|
elif needs_fakesync and not fakesync:
|
||||||
Logger.error(
|
Logger.error(
|
||||||
"The checksums of the Debian and Ubuntu packages "
|
"The checksums of the Debian and Ubuntu packages "
|
||||||
"mismatch. A fake sync using --fakesync is required."
|
"mismatch. A fake sync using --fakesync is required."
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
return None
|
||||||
|
|
||||||
if fakesync:
|
if fakesync:
|
||||||
# Download Ubuntu files (override Debian source tarballs)
|
# Download Ubuntu files (override Debian source tarballs)
|
||||||
@ -180,7 +181,7 @@ def sync_dsc(
|
|||||||
ubu_pkg.pull()
|
ubu_pkg.pull()
|
||||||
except DownloadError as e:
|
except DownloadError as e:
|
||||||
Logger.error("Failed to download: %s", str(e))
|
Logger.error("Failed to download: %s", str(e))
|
||||||
sys.exit(1)
|
return None
|
||||||
|
|
||||||
# change into package directory
|
# change into package directory
|
||||||
directory = src_pkg.source + "-" + new_ver.upstream_version
|
directory = src_pkg.source + "-" + new_ver.upstream_version
|
||||||
@ -265,7 +266,7 @@ def sync_dsc(
|
|||||||
returncode = subprocess.call(cmd)
|
returncode = subprocess.call(cmd)
|
||||||
if returncode != 0:
|
if returncode != 0:
|
||||||
Logger.error("Source-only build with debuild failed. Please check build log above.")
|
Logger.error("Source-only build with debuild failed. Please check build log above.")
|
||||||
sys.exit(1)
|
return None
|
||||||
|
|
||||||
|
|
||||||
def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
|
def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
|
||||||
@ -295,7 +296,7 @@ def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
|
|||||||
udtexceptions.SeriesNotFoundException,
|
udtexceptions.SeriesNotFoundException,
|
||||||
) as e:
|
) as e:
|
||||||
Logger.error(str(e))
|
Logger.error(str(e))
|
||||||
sys.exit(1)
|
return None
|
||||||
if version is None:
|
if version is None:
|
||||||
version = Version(debian_srcpkg.getVersion())
|
version = Version(debian_srcpkg.getVersion())
|
||||||
try:
|
try:
|
||||||
@ -306,7 +307,7 @@ def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
|
|||||||
ubuntu_version = Version("~")
|
ubuntu_version = Version("~")
|
||||||
except udtexceptions.SeriesNotFoundException as e:
|
except udtexceptions.SeriesNotFoundException as e:
|
||||||
Logger.error(str(e))
|
Logger.error(str(e))
|
||||||
sys.exit(1)
|
return None
|
||||||
if ubuntu_version >= version:
|
if ubuntu_version >= version:
|
||||||
# The LP importer is maybe out of date
|
# The LP importer is maybe out of date
|
||||||
debian_srcpkg = requestsync_mail_get_debian_srcpkg(package, dist)
|
debian_srcpkg = requestsync_mail_get_debian_srcpkg(package, dist)
|
||||||
@ -320,7 +321,7 @@ def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
|
|||||||
ubuntu_version,
|
ubuntu_version,
|
||||||
ubuntu_release,
|
ubuntu_release,
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
return None
|
||||||
if component is None:
|
if component is None:
|
||||||
component = debian_srcpkg.getComponent()
|
component = debian_srcpkg.getComponent()
|
||||||
|
|
||||||
@ -329,7 +330,7 @@ def fetch_source_pkg(package, dist, version, component, ubuntu_release, mirror):
|
|||||||
return DebianSourcePackage(package, version.full_version, component, mirrors=mirrors)
|
return DebianSourcePackage(package, version.full_version, component, mirrors=mirrors)
|
||||||
|
|
||||||
|
|
||||||
def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
|
def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False, yes=False):
|
||||||
"""Copy a source package from Debian to Ubuntu using the Launchpad API."""
|
"""Copy a source package from Debian to Ubuntu using the Launchpad API."""
|
||||||
ubuntu = Distribution("ubuntu")
|
ubuntu = Distribution("ubuntu")
|
||||||
debian_archive = Distribution("debian").getArchive()
|
debian_archive = Distribution("debian").getArchive()
|
||||||
@ -352,7 +353,7 @@ def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
|
|||||||
"Debian version %s has not been picked up by LP yet. Please try again later.",
|
"Debian version %s has not been picked up by LP yet. Please try again later.",
|
||||||
src_pkg.version,
|
src_pkg.version,
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ubuntu_spph = get_ubuntu_srcpkg(src_pkg.source, ubuntu_series, ubuntu_pocket)
|
ubuntu_spph = get_ubuntu_srcpkg(src_pkg.source, ubuntu_series, ubuntu_pocket)
|
||||||
@ -373,7 +374,7 @@ def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
|
|||||||
base_version = ubuntu_version.get_related_debian_version()
|
base_version = ubuntu_version.get_related_debian_version()
|
||||||
if not force and ubuntu_version.is_modified_in_ubuntu():
|
if not force and ubuntu_version.is_modified_in_ubuntu():
|
||||||
Logger.error("--force is required to discard Ubuntu changes.")
|
Logger.error("--force is required to discard Ubuntu changes.")
|
||||||
sys.exit(1)
|
return None
|
||||||
|
|
||||||
# Check whether a fakesync would be required.
|
# Check whether a fakesync would be required.
|
||||||
if not src_pkg.dsc.compare_dsc(ubuntu_pkg.dsc):
|
if not src_pkg.dsc.compare_dsc(ubuntu_pkg.dsc):
|
||||||
@ -381,7 +382,7 @@ def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
|
|||||||
"The checksums of the Debian and Ubuntu packages "
|
"The checksums of the Debian and Ubuntu packages "
|
||||||
"mismatch. A fake sync using --fakesync is required."
|
"mismatch. A fake sync using --fakesync is required."
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
return None
|
||||||
except udtexceptions.PackageNotFoundException:
|
except udtexceptions.PackageNotFoundException:
|
||||||
base_version = Version("~")
|
base_version = Version("~")
|
||||||
Logger.info(
|
Logger.info(
|
||||||
@ -402,6 +403,7 @@ def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
|
|||||||
|
|
||||||
if sponsoree:
|
if sponsoree:
|
||||||
Logger.info("Sponsoring this sync for %s (%s)", sponsoree.display_name, sponsoree.name)
|
Logger.info("Sponsoring this sync for %s (%s)", sponsoree.display_name, sponsoree.name)
|
||||||
|
if not yes:
|
||||||
answer = YesNoQuestion().ask("Sync this package", "no")
|
answer = YesNoQuestion().ask("Sync this package", "no")
|
||||||
if answer != "yes":
|
if answer != "yes":
|
||||||
return
|
return
|
||||||
@ -419,26 +421,29 @@ def copy(src_pkg, release, bugs, sponsoree=None, simulate=False, force=False):
|
|||||||
except HTTPError as error:
|
except HTTPError as error:
|
||||||
Logger.error("HTTP Error %s: %s", error.response.status, error.response.reason)
|
Logger.error("HTTP Error %s: %s", error.response.status, error.response.reason)
|
||||||
Logger.error(error.content)
|
Logger.error(error.content)
|
||||||
sys.exit(1)
|
return None
|
||||||
|
|
||||||
Logger.info("Request succeeded; you should get an e-mail once it is processed.")
|
Logger.info("Request succeeded; you should get an e-mail once it is processed.")
|
||||||
bugs = sorted(set(bugs))
|
bugs = sorted(set(bugs))
|
||||||
if bugs:
|
if bugs:
|
||||||
Logger.info("Launchpad bugs to be closed: %s", ", ".join(str(bug) for bug in bugs))
|
Logger.info("Launchpad bugs to be closed: %s", ", ".join(str(bug) for bug in bugs))
|
||||||
Logger.info("Please wait for the sync to be successful before closing bugs.")
|
Logger.info("Please wait for the sync to be successful before closing bugs.")
|
||||||
|
if yes:
|
||||||
|
close_bugs(bugs, src_pkg.source, src_pkg.version.full_version, changes, sponsoree)
|
||||||
|
else:
|
||||||
answer = YesNoQuestion().ask("Close bugs", "yes")
|
answer = YesNoQuestion().ask("Close bugs", "yes")
|
||||||
if answer == "yes":
|
if answer == "yes":
|
||||||
close_bugs(bugs, src_pkg.source, src_pkg.version.full_version, changes, sponsoree)
|
close_bugs(bugs, src_pkg.source, src_pkg.version.full_version, changes, sponsoree)
|
||||||
|
|
||||||
|
|
||||||
def is_blacklisted(query):
|
def is_blocklisted(query):
|
||||||
"""Determine if package "query" is in the sync blacklist
|
"""Determine if package "query" is in the sync blocklist
|
||||||
Returns tuple of (blacklisted, comments)
|
Returns tuple of (blocklisted, comments)
|
||||||
blacklisted is one of False, 'CURRENT', 'ALWAYS'
|
blocklisted is one of False, 'CURRENT', 'ALWAYS'
|
||||||
"""
|
"""
|
||||||
series = Launchpad.distributions["ubuntu"].current_series
|
series = Launchpad.distributions["ubuntu"].current_series
|
||||||
lp_comments = series.getDifferenceComments(source_package_name=query)
|
lp_comments = series.getDifferenceComments(source_package_name=query)
|
||||||
blacklisted = False
|
blocklisted = False
|
||||||
comments = [
|
comments = [
|
||||||
f"{c.body_text}\n -- {c.comment_author.name}"
|
f"{c.body_text}\n -- {c.comment_author.name}"
|
||||||
f" {c.comment_date.strftime('%a, %d %b %Y %H:%M:%S +0000')}"
|
f" {c.comment_date.strftime('%a, %d %b %Y %H:%M:%S +0000')}"
|
||||||
@ -446,32 +451,38 @@ def is_blacklisted(query):
|
|||||||
]
|
]
|
||||||
|
|
||||||
for diff in series.getDifferencesTo(source_package_name_filter=query):
|
for diff in series.getDifferencesTo(source_package_name_filter=query):
|
||||||
if diff.status == "Blacklisted current version" and blacklisted != "ALWAYS":
|
if diff.status == "Blacklisted current version" and blocklisted != "ALWAYS":
|
||||||
blacklisted = "CURRENT"
|
blocklisted = "CURRENT"
|
||||||
if diff.status == "Blacklisted always":
|
if diff.status == "Blacklisted always":
|
||||||
blacklisted = "ALWAYS"
|
blocklisted = "ALWAYS"
|
||||||
|
|
||||||
# Old blacklist:
|
global cached_sync_blocklist
|
||||||
url = "https://ubuntu-archive-team.ubuntu.com/sync-blacklist.txt"
|
if not cached_sync_blocklist:
|
||||||
|
url = "https://ubuntu-archive-team.ubuntu.com/sync-blocklist.txt"
|
||||||
|
try:
|
||||||
with urllib.request.urlopen(url) as f:
|
with urllib.request.urlopen(url) as f:
|
||||||
|
cached_sync_blocklist = f.read().decode("utf-8")
|
||||||
|
except:
|
||||||
|
print("WARNING: unable to download the sync blocklist. Erring on the side of caution.")
|
||||||
|
return ("ALWAYS", "INTERNAL ERROR: Unable to fetch sync blocklist")
|
||||||
|
|
||||||
applicable_lines = []
|
applicable_lines = []
|
||||||
for line in f:
|
for line in cached_sync_blocklist.splitlines():
|
||||||
line = line.decode("utf-8")
|
|
||||||
if not line.strip():
|
if not line.strip():
|
||||||
applicable_lines = []
|
applicable_lines = []
|
||||||
continue
|
continue
|
||||||
applicable_lines.append(line)
|
applicable_lines.append(line)
|
||||||
try:
|
try:
|
||||||
line = line[: line.index("#")]
|
line = line[:line.index("#")]
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
source = line.strip()
|
source = line.strip()
|
||||||
if source and fnmatch.fnmatch(query, source):
|
if source and fnmatch.fnmatch(query, source):
|
||||||
comments += ["From sync-blacklist.txt:"] + applicable_lines
|
comments += ["From sync-blocklist.txt:"] + applicable_lines
|
||||||
blacklisted = "ALWAYS"
|
blocklisted = "ALWAYS"
|
||||||
break
|
break
|
||||||
|
|
||||||
return (blacklisted, comments)
|
return (blocklisted, comments)
|
||||||
|
|
||||||
|
|
||||||
def close_bugs(bugs, package, version, changes, sponsoree):
|
def close_bugs(bugs, package, version, changes, sponsoree):
|
||||||
@ -508,6 +519,12 @@ def parse():
|
|||||||
epilog = f"See {os.path.basename(sys.argv[0])}(1) for more info."
|
epilog = f"See {os.path.basename(sys.argv[0])}(1) for more info."
|
||||||
parser = argparse.ArgumentParser(usage=usage, epilog=epilog)
|
parser = argparse.ArgumentParser(usage=usage, epilog=epilog)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"-y",
|
||||||
|
"--yes",
|
||||||
|
action="store_true",
|
||||||
|
help="Automatically sync without prompting. Use with caution and care."
|
||||||
|
)
|
||||||
parser.add_argument("-d", "--distribution", help="Debian distribution to sync from.")
|
parser.add_argument("-d", "--distribution", help="Debian distribution to sync from.")
|
||||||
parser.add_argument("-r", "--release", help="Specify target Ubuntu release.")
|
parser.add_argument("-r", "--release", help="Specify target Ubuntu release.")
|
||||||
parser.add_argument("-V", "--debian-version", help="Specify the version to sync from.")
|
parser.add_argument("-V", "--debian-version", help="Specify the version to sync from.")
|
||||||
@ -652,14 +669,15 @@ def main():
|
|||||||
if args.lpinstance is None:
|
if args.lpinstance is None:
|
||||||
args.lpinstance = config.get_value("LPINSTANCE")
|
args.lpinstance = config.get_value("LPINSTANCE")
|
||||||
|
|
||||||
try:
|
|
||||||
# devel for copyPackage and changelogUrl
|
# devel for copyPackage and changelogUrl
|
||||||
kwargs = {"service": args.lpinstance, "api_version": "devel"}
|
kwargs = {"service": args.lpinstance, "api_version": "devel"}
|
||||||
|
try:
|
||||||
if args.lp and not args.simulate:
|
if args.lp and not args.simulate:
|
||||||
Launchpad.login(**kwargs)
|
Launchpad.login(**kwargs)
|
||||||
else:
|
else:
|
||||||
Launchpad.login_anonymously(**kwargs)
|
Launchpad.login_anonymously(**kwargs)
|
||||||
except IOError:
|
except IOError as e:
|
||||||
|
Logger.error("Could not authenticate to LP: %s", str(e))
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
if args.release is None:
|
if args.release is None:
|
||||||
@ -711,36 +729,38 @@ def main():
|
|||||||
args.release,
|
args.release,
|
||||||
args.debian_mirror,
|
args.debian_mirror,
|
||||||
)
|
)
|
||||||
|
if not src_pkg:
|
||||||
|
continue
|
||||||
|
|
||||||
blacklisted, comments = is_blacklisted(src_pkg.source)
|
blocklisted, comments = is_blocklisted(src_pkg.source)
|
||||||
blacklist_fail = False
|
blocklist_fail = False
|
||||||
if blacklisted:
|
if blocklisted:
|
||||||
messages = []
|
messages = []
|
||||||
|
|
||||||
if blacklisted == "CURRENT":
|
if blocklisted == "CURRENT":
|
||||||
Logger.debug(
|
Logger.debug(
|
||||||
"Source package %s is temporarily blacklisted "
|
"Source package %s is temporarily blocklisted "
|
||||||
"(blacklisted_current). "
|
"(blocklisted_current). "
|
||||||
"Ubuntu ignores these for now. "
|
"Ubuntu ignores these for now. "
|
||||||
"See also LP: #841372",
|
"See also LP: #841372",
|
||||||
src_pkg.source,
|
src_pkg.source,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
if args.fakesync:
|
if args.fakesync:
|
||||||
messages += ["Doing a fakesync, overriding blacklist."]
|
messages += ["Doing a fakesync, overriding blocklist."]
|
||||||
else:
|
else:
|
||||||
blacklist_fail = True
|
blocklist_fail = True
|
||||||
messages += [
|
messages += [
|
||||||
"If this package needs a fakesync, use --fakesync",
|
"If this package needs a fakesync, use --fakesync",
|
||||||
"If you think this package shouldn't be "
|
"If you think this package shouldn't be "
|
||||||
"blacklisted, please file a bug explaining your "
|
"blocklisted, please file a bug explaining your "
|
||||||
"reasoning and subscribe ~ubuntu-archive.",
|
"reasoning and subscribe ~ubuntu-archive.",
|
||||||
]
|
]
|
||||||
|
|
||||||
if blacklist_fail:
|
if blocklist_fail:
|
||||||
Logger.error("Source package %s is blacklisted.", src_pkg.source)
|
Logger.error("Source package %s is blocklisted.", src_pkg.source)
|
||||||
elif blacklisted == "ALWAYS":
|
elif blocklisted == "ALWAYS":
|
||||||
Logger.info("Source package %s is blacklisted.", src_pkg.source)
|
Logger.info("Source package %s is blocklisted.", src_pkg.source)
|
||||||
if messages:
|
if messages:
|
||||||
for message in messages:
|
for message in messages:
|
||||||
for line in textwrap.wrap(message):
|
for line in textwrap.wrap(message):
|
||||||
@ -752,14 +772,15 @@ def main():
|
|||||||
for line in textwrap.wrap(comment):
|
for line in textwrap.wrap(comment):
|
||||||
Logger.info(" %s", line)
|
Logger.info(" %s", line)
|
||||||
|
|
||||||
if blacklist_fail:
|
if blocklist_fail:
|
||||||
sys.exit(1)
|
continue
|
||||||
|
|
||||||
if args.lp:
|
if args.lp:
|
||||||
copy(src_pkg, args.release, args.bugs, sponsoree, args.simulate, args.force)
|
if not copy(src_pkg, args.release, args.bugs, sponsoree, args.simulate, args.force, args.yes):
|
||||||
|
continue
|
||||||
else:
|
else:
|
||||||
os.environ["DEB_VENDOR"] = "Ubuntu"
|
os.environ["DEB_VENDOR"] = "Ubuntu"
|
||||||
sync_dsc(
|
if not sync_dsc(
|
||||||
src_pkg,
|
src_pkg,
|
||||||
args.distribution,
|
args.distribution,
|
||||||
args.release,
|
args.release,
|
||||||
@ -771,7 +792,8 @@ def main():
|
|||||||
args.simulate,
|
args.simulate,
|
||||||
args.force,
|
args.force,
|
||||||
args.fakesync,
|
args.fakesync,
|
||||||
)
|
):
|
||||||
|
continue
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
307
ubuntu-build
307
ubuntu-build
@ -2,16 +2,16 @@
|
|||||||
#
|
#
|
||||||
# ubuntu-build - command line interface for Launchpad buildd operations.
|
# ubuntu-build - command line interface for Launchpad buildd operations.
|
||||||
#
|
#
|
||||||
# Copyright (C) 2007 Canonical Ltd.
|
# Copyright (C) 2007-2024 Canonical Ltd.
|
||||||
# Authors:
|
# Authors:
|
||||||
# - Martin Pitt <martin.pitt@canonical.com>
|
# - Martin Pitt <martin.pitt@canonical.com>
|
||||||
# - Jonathan Davies <jpds@ubuntu.com>
|
# - Jonathan Davies <jpds@ubuntu.com>
|
||||||
# - Michael Bienia <geser@ubuntu.com>
|
# - Michael Bienia <geser@ubuntu.com>
|
||||||
|
# - Steve Langasek <steve.langasek@canonical.com>
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU General Public License as published by
|
# it under the terms of the GNU General Public License as published by
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
# the Free Software Foundation, version 3 of the License.
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
#
|
||||||
# This program is distributed in the hope that it will be useful,
|
# This program is distributed in the hope that it will be useful,
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
@ -28,20 +28,65 @@
|
|||||||
import argparse
|
import argparse
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from launchpadlib.credentials import TokenAuthorizationException
|
import lazr.restfulclient.errors
|
||||||
|
from launchpadlib.launchpad import Launchpad
|
||||||
|
|
||||||
from ubuntutools import getLogger
|
from ubuntutools import getLogger
|
||||||
from ubuntutools.lp.lpapicache import Distribution, Launchpad, PersonTeam
|
from ubuntutools.lp.udtexceptions import PocketDoesNotExistError
|
||||||
from ubuntutools.lp.udtexceptions import (
|
|
||||||
PackageNotFoundException,
|
|
||||||
PocketDoesNotExistError,
|
|
||||||
SeriesNotFoundException,
|
|
||||||
)
|
|
||||||
from ubuntutools.misc import split_release_pocket
|
from ubuntutools.misc import split_release_pocket
|
||||||
|
|
||||||
Logger = getLogger()
|
Logger = getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def get_build_states(pkg, archs):
|
||||||
|
res = []
|
||||||
|
|
||||||
|
for build in pkg.getBuilds():
|
||||||
|
if build.arch_tag in archs:
|
||||||
|
res.append(f" {build.arch_tag}: {build.buildstate}")
|
||||||
|
msg = "\n".join(res)
|
||||||
|
return f"Build state(s) for '{pkg.source_package_name}':\n{msg}"
|
||||||
|
|
||||||
|
|
||||||
|
def rescore_builds(pkg, archs, score):
|
||||||
|
res = []
|
||||||
|
|
||||||
|
for build in pkg.getBuilds():
|
||||||
|
arch = build.arch_tag
|
||||||
|
if arch in archs:
|
||||||
|
if not build.can_be_rescored:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
build.rescore(score=score)
|
||||||
|
res.append(f" {arch}: done")
|
||||||
|
except lazr.restfulclient.errors.Unauthorized:
|
||||||
|
Logger.error(
|
||||||
|
"You don't have the permissions to rescore builds."
|
||||||
|
" Ignoring your rescore request."
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
except lazr.restfulclient.errors.BadRequest:
|
||||||
|
Logger.info("Cannot rescore build of %s on %s.", build.source_package_name, arch)
|
||||||
|
res.append(f" {arch}: failed")
|
||||||
|
|
||||||
|
msg = "\n".join(res)
|
||||||
|
return f"Rescoring builds of '{pkg.source_package_name}' to {score}:\n{msg}"
|
||||||
|
|
||||||
|
|
||||||
|
def retry_builds(pkg, archs):
|
||||||
|
res = []
|
||||||
|
for build in pkg.getBuilds():
|
||||||
|
arch = build.arch_tag
|
||||||
|
if arch in archs:
|
||||||
|
try:
|
||||||
|
build.retry()
|
||||||
|
res.append(f" {arch}: done")
|
||||||
|
except lazr.restfulclient.errors.BadRequest:
|
||||||
|
res.append(f" {arch}: failed")
|
||||||
|
msg = "\n".join(res)
|
||||||
|
return f"Retrying builds of '{pkg.source_package_name}':\n{msg}"
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
# Usage.
|
# Usage.
|
||||||
usage = "%(prog)s <srcpackage> <release> <operation>\n\n"
|
usage = "%(prog)s <srcpackage> <release> <operation>\n\n"
|
||||||
@ -50,27 +95,13 @@ def main():
|
|||||||
|
|
||||||
# Valid architectures.
|
# Valid architectures.
|
||||||
valid_archs = set(
|
valid_archs = set(
|
||||||
[
|
["armhf", "arm64", "amd64", "i386", "powerpc", "ppc64el", "riscv64", "s390x"]
|
||||||
"armhf",
|
|
||||||
"arm64",
|
|
||||||
"amd64",
|
|
||||||
"i386",
|
|
||||||
"powerpc",
|
|
||||||
"ppc64el",
|
|
||||||
"riscv64",
|
|
||||||
"s390x",
|
|
||||||
]
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Prepare our option parser.
|
# Prepare our option parser.
|
||||||
parser = argparse.ArgumentParser(usage=usage)
|
parser = argparse.ArgumentParser(usage=usage)
|
||||||
|
|
||||||
# Retry options
|
parser.add_argument(
|
||||||
retry_rescore_options = parser.add_argument_group(
|
|
||||||
"Retry and rescore options",
|
|
||||||
"These options may only be used with the 'retry' and 'rescore' operations.",
|
|
||||||
)
|
|
||||||
retry_rescore_options.add_argument(
|
|
||||||
"-a",
|
"-a",
|
||||||
"--arch",
|
"--arch",
|
||||||
action="append",
|
action="append",
|
||||||
@ -79,6 +110,8 @@ def main():
|
|||||||
f"include: {', '.join(valid_archs)}.",
|
f"include: {', '.join(valid_archs)}.",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
parser.add_argument("-A", "--archive", help="operate on ARCHIVE", default="ubuntu")
|
||||||
|
|
||||||
# Batch processing options
|
# Batch processing options
|
||||||
batch_options = parser.add_argument_group(
|
batch_options = parser.add_argument_group(
|
||||||
"Batch processing",
|
"Batch processing",
|
||||||
@ -106,25 +139,34 @@ def main():
|
|||||||
help="Rescore builds to <priority>.",
|
help="Rescore builds to <priority>.",
|
||||||
)
|
)
|
||||||
batch_options.add_argument(
|
batch_options.add_argument(
|
||||||
"--arch2",
|
"--state",
|
||||||
action="append",
|
action="store",
|
||||||
dest="architecture",
|
dest="state",
|
||||||
help=f"Affect only 'architecture' (can be used several times)."
|
help="Act on builds that are in the specified state",
|
||||||
f" Valid architectures are: {', '.join(valid_archs)}.",
|
|
||||||
)
|
)
|
||||||
parser.add_argument("packages", metavar="package", nargs="+", help=argparse.SUPPRESS)
|
|
||||||
|
parser.add_argument("packages", metavar="package", nargs="*", help=argparse.SUPPRESS)
|
||||||
|
|
||||||
# Parse our options.
|
# Parse our options.
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
try:
|
launchpad = Launchpad.login_with("ubuntu-dev-tools", "production", version="devel")
|
||||||
# Will fail here if we have no credentials, bail out
|
ubuntu = launchpad.distributions["ubuntu"]
|
||||||
Launchpad.login()
|
|
||||||
except TokenAuthorizationException:
|
|
||||||
sys.exit(1)
|
|
||||||
me = PersonTeam.me
|
|
||||||
|
|
||||||
if not args.batch:
|
if args.batch:
|
||||||
|
release = args.series
|
||||||
|
if not release:
|
||||||
|
# ppas don't have a proposed pocket so just use the release pocket;
|
||||||
|
# but for the main archive we default to -proposed
|
||||||
|
release = ubuntu.getDevelopmentSeries()[0].name
|
||||||
|
if args.archive == "ubuntu":
|
||||||
|
release = f"{release}-proposed"
|
||||||
|
try:
|
||||||
|
(release, pocket) = split_release_pocket(release)
|
||||||
|
except PocketDoesNotExistError as error:
|
||||||
|
Logger.error(error)
|
||||||
|
sys.exit(1)
|
||||||
|
else:
|
||||||
# Check we have the correct number of arguments.
|
# Check we have the correct number of arguments.
|
||||||
if len(args.packages) < 3:
|
if len(args.packages) < 3:
|
||||||
parser.error("Incorrect number of arguments.")
|
parser.error("Incorrect number of arguments.")
|
||||||
@ -137,6 +179,14 @@ def main():
|
|||||||
parser.print_help()
|
parser.print_help()
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
archive = launchpad.archives.getByReference(reference=args.archive)
|
||||||
|
try:
|
||||||
|
distroseries = ubuntu.getSeries(name_or_version=release)
|
||||||
|
except lazr.restfulclient.errors.NotFound as error:
|
||||||
|
Logger.error(error)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if not args.batch:
|
||||||
# Check our operation.
|
# Check our operation.
|
||||||
if operation not in ("rescore", "retry", "status"):
|
if operation not in ("rescore", "retry", "status"):
|
||||||
Logger.error("Invalid operation: %s.", operation)
|
Logger.error("Invalid operation: %s.", operation)
|
||||||
@ -160,36 +210,37 @@ def main():
|
|||||||
Logger.error(error)
|
Logger.error(error)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
ubuntu_archive = Distribution("ubuntu").getArchive()
|
|
||||||
# Get list of published sources for package in question.
|
# Get list of published sources for package in question.
|
||||||
try:
|
try:
|
||||||
sources = ubuntu_archive.getSourcePackage(package, release, pocket)
|
sources = archive.getPublishedSources(
|
||||||
distroseries = Distribution("ubuntu").getSeries(release)
|
distro_series=distroseries,
|
||||||
except (SeriesNotFoundException, PackageNotFoundException) as error:
|
exact_match=True,
|
||||||
Logger.error(error)
|
pocket=pocket,
|
||||||
|
source_name=package,
|
||||||
|
status="Published",
|
||||||
|
)[0]
|
||||||
|
except IndexError:
|
||||||
|
Logger.error("No publication found for package %s", package)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
# Get list of builds for that package.
|
# Get list of builds for that package.
|
||||||
builds = sources.getBuilds()
|
builds = sources.getBuilds()
|
||||||
|
|
||||||
# Find out the version and component in given release.
|
# Find out the version and component in given release.
|
||||||
version = sources.getVersion()
|
version = sources.source_package_version
|
||||||
component = sources.getComponent()
|
component = sources.component_name
|
||||||
|
|
||||||
# Operations that are remaining may only be done by Ubuntu developers
|
# Operations that are remaining may only be done by Ubuntu developers
|
||||||
# (retry) or buildd admins (rescore). Check if the proper permissions
|
# (retry) or buildd admins (rescore). Check if the proper permissions
|
||||||
# are in place.
|
# are in place.
|
||||||
if operation == "rescore":
|
|
||||||
necessary_privs = me.isLpTeamMember("launchpad-buildd-admins")
|
|
||||||
if operation == "retry":
|
if operation == "retry":
|
||||||
necessary_privs = me.canUploadPackage(
|
necessary_privs = archive.checkUpload(
|
||||||
ubuntu_archive,
|
component=sources.getComponent(),
|
||||||
distroseries,
|
distroseries=distroseries,
|
||||||
sources.getPackageName(),
|
person=launchpad.me,
|
||||||
sources.getComponent(),
|
|
||||||
pocket=pocket,
|
pocket=pocket,
|
||||||
|
sourcepackagename=sources.getPackageName(),
|
||||||
)
|
)
|
||||||
|
if not necessary_privs:
|
||||||
if operation in ("rescore", "retry") and not necessary_privs:
|
|
||||||
Logger.error(
|
Logger.error(
|
||||||
"You cannot perform the %s operation on a %s package as you"
|
"You cannot perform the %s operation on a %s package as you"
|
||||||
" do not have the permissions to do this action.",
|
" do not have the permissions to do this action.",
|
||||||
@ -223,7 +274,14 @@ def main():
|
|||||||
# FIXME: make priority an option
|
# FIXME: make priority an option
|
||||||
priority = 5000
|
priority = 5000
|
||||||
Logger.info("Rescoring build %s to %d...", build.arch_tag, priority)
|
Logger.info("Rescoring build %s to %d...", build.arch_tag, priority)
|
||||||
|
try:
|
||||||
build.rescore(score=priority)
|
build.rescore(score=priority)
|
||||||
|
except lazr.restfulclient.errors.Unauthorized:
|
||||||
|
Logger.error(
|
||||||
|
"You don't have the permissions to rescore builds."
|
||||||
|
" Ignoring your rescore request."
|
||||||
|
)
|
||||||
|
break
|
||||||
else:
|
else:
|
||||||
Logger.info("Cannot rescore build on %s.", build.arch_tag)
|
Logger.info("Cannot rescore build on %s.", build.arch_tag)
|
||||||
if operation == "retry":
|
if operation == "retry":
|
||||||
@ -252,61 +310,136 @@ def main():
|
|||||||
# filter out duplicate and invalid architectures
|
# filter out duplicate and invalid architectures
|
||||||
archs.intersection_update(valid_archs)
|
archs.intersection_update(valid_archs)
|
||||||
|
|
||||||
release = args.series
|
if not args.packages:
|
||||||
if not release:
|
retry_count = 0
|
||||||
release = Distribution("ubuntu").getDevelopmentSeries().name + "-proposed"
|
can_rescore = True
|
||||||
try:
|
|
||||||
(release, pocket) = split_release_pocket(release)
|
|
||||||
except PocketDoesNotExistError as error:
|
|
||||||
Logger.error(error)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
ubuntu_archive = Distribution("ubuntu").getArchive()
|
if not args.state:
|
||||||
try:
|
if args.retry:
|
||||||
distroseries = Distribution("ubuntu").getSeries(release)
|
args.state = "Failed to build"
|
||||||
except SeriesNotFoundException as error:
|
elif args.priority:
|
||||||
Logger.error(error)
|
args.state = "Needs building"
|
||||||
sys.exit(1)
|
# there is no equivalent to series.getBuildRecords() for a ppa.
|
||||||
|
# however, we don't want to have to traverse all build records for
|
||||||
# Check permisions (part 1): Rescoring can only be done by buildd admins
|
# all series when working on the main archive, so we use
|
||||||
can_rescore = args.priority and me.isLpTeamMember("launchpad-buildd-admins")
|
# series.getBuildRecords() for ubuntu and handle ppas separately
|
||||||
if args.priority and not can_rescore:
|
series = ubuntu.getSeries(name_or_version=release)
|
||||||
Logger.error(
|
if args.archive == "ubuntu":
|
||||||
"You don't have the permissions to rescore builds. Ignoring your rescore request."
|
builds = series.getBuildRecords(build_state=args.state, pocket=pocket)
|
||||||
|
else:
|
||||||
|
builds = []
|
||||||
|
for build in archive.getBuildRecords(build_state=args.state, pocket=pocket):
|
||||||
|
if not build.current_source_publication:
|
||||||
|
continue
|
||||||
|
if build.current_source_publication.distro_series == series:
|
||||||
|
builds.append(build)
|
||||||
|
for build in builds:
|
||||||
|
if build.arch_tag not in archs:
|
||||||
|
continue
|
||||||
|
if not build.current_source_publication:
|
||||||
|
continue
|
||||||
|
# fixme: refactor
|
||||||
|
# Check permissions (part 2): check upload permissions for the
|
||||||
|
# source package
|
||||||
|
can_retry = args.retry and archive.checkUpload(
|
||||||
|
component=build.current_source_publication.component_name,
|
||||||
|
distroseries=series,
|
||||||
|
person=launchpad.me,
|
||||||
|
pocket=pocket,
|
||||||
|
sourcepackagename=build.source_package_name,
|
||||||
)
|
)
|
||||||
|
if args.retry and not can_retry:
|
||||||
|
Logger.error(
|
||||||
|
"You don't have the permissions to retry the build of '%s', skipping.",
|
||||||
|
build.source_package_name,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
Logger.info(
|
||||||
|
"The source version for '%s' in '%s' (%s) is: %s",
|
||||||
|
build.source_package_name,
|
||||||
|
release,
|
||||||
|
pocket,
|
||||||
|
build.source_package_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
if args.retry and build.can_be_retried:
|
||||||
|
Logger.info(
|
||||||
|
"Retrying build of %s on %s...", build.source_package_name, build.arch_tag
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
build.retry()
|
||||||
|
retry_count += 1
|
||||||
|
except lazr.restfulclient.errors.BadRequest:
|
||||||
|
Logger.info(
|
||||||
|
"Failed to retry build of %s on %s",
|
||||||
|
build.source_package_name,
|
||||||
|
build.arch_tag,
|
||||||
|
)
|
||||||
|
|
||||||
|
if args.priority and can_rescore:
|
||||||
|
if build.can_be_rescored:
|
||||||
|
try:
|
||||||
|
build.rescore(score=args.priority)
|
||||||
|
except lazr.restfulclient.errors.Unauthorized:
|
||||||
|
Logger.error(
|
||||||
|
"You don't have the permissions to rescore builds."
|
||||||
|
" Ignoring your rescore request."
|
||||||
|
)
|
||||||
|
can_rescore = False
|
||||||
|
except lazr.restfulclient.errors.BadRequest:
|
||||||
|
Logger.info(
|
||||||
|
"Cannot rescore build of %s on %s.",
|
||||||
|
build.source_package_name,
|
||||||
|
build.arch_tag,
|
||||||
|
)
|
||||||
|
|
||||||
|
Logger.info("")
|
||||||
|
if args.retry:
|
||||||
|
Logger.info("%d package builds retried", retry_count)
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
for pkg in args.packages:
|
for pkg in args.packages:
|
||||||
try:
|
try:
|
||||||
pkg = ubuntu_archive.getSourcePackage(pkg, release, pocket)
|
pkg = archive.getPublishedSources(
|
||||||
except PackageNotFoundException as error:
|
distro_series=distroseries,
|
||||||
Logger.error(error)
|
exact_match=True,
|
||||||
|
pocket=pocket,
|
||||||
|
source_name=pkg,
|
||||||
|
status="Published",
|
||||||
|
)[0]
|
||||||
|
except IndexError:
|
||||||
|
Logger.error("No publication found for package %s", pkg)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Check permissions (part 2): check upload permissions for the source
|
# Check permissions (part 2): check upload permissions for the source
|
||||||
# package
|
# package
|
||||||
can_retry = args.retry and me.canUploadPackage(
|
can_retry = args.retry and archive.checkUpload(
|
||||||
ubuntu_archive, distroseries, pkg.getPackageName(), pkg.getComponent()
|
component=pkg.component_name,
|
||||||
|
distroseries=distroseries,
|
||||||
|
person=launchpad.me,
|
||||||
|
pocket=pocket,
|
||||||
|
sourcepackagename=pkg.source_package_name,
|
||||||
)
|
)
|
||||||
if args.retry and not can_retry:
|
if args.retry and not can_retry:
|
||||||
Logger.error(
|
Logger.error(
|
||||||
"You don't have the permissions to retry the "
|
"You don't have the permissions to retry the "
|
||||||
"build of '%s'. Ignoring your request.",
|
"build of '%s'. Ignoring your request.",
|
||||||
pkg.getPackageName(),
|
pkg.source_package_name,
|
||||||
)
|
)
|
||||||
|
|
||||||
Logger.info(
|
Logger.info(
|
||||||
"The source version for '%s' in '%s' (%s) is: %s",
|
"The source version for '%s' in '%s' (%s) is: %s",
|
||||||
pkg.getPackageName(),
|
pkg.source_package_name,
|
||||||
release,
|
release,
|
||||||
pocket,
|
pocket,
|
||||||
pkg.getVersion(),
|
pkg.source_package_version,
|
||||||
)
|
)
|
||||||
|
|
||||||
Logger.info(pkg.getBuildStates(archs))
|
Logger.info(get_build_states(pkg, archs))
|
||||||
if can_retry:
|
if can_retry:
|
||||||
Logger.info(pkg.retryBuilds(archs))
|
Logger.info(retry_builds(pkg, archs))
|
||||||
if args.priority and can_rescore:
|
if args.priority:
|
||||||
Logger.info(pkg.rescoreBuilds(archs, args.priority))
|
Logger.info(rescore_builds(pkg, archs, args.priority))
|
||||||
|
|
||||||
Logger.info("")
|
Logger.info("")
|
||||||
|
|
||||||
|
@ -165,6 +165,7 @@ class SourcePackage(ABC):
|
|||||||
series = kwargs.get("series")
|
series = kwargs.get("series")
|
||||||
pocket = kwargs.get("pocket")
|
pocket = kwargs.get("pocket")
|
||||||
status = kwargs.get("status")
|
status = kwargs.get("status")
|
||||||
|
arch = kwargs.get("arch")
|
||||||
verify_signature = kwargs.get("verify_signature", False)
|
verify_signature = kwargs.get("verify_signature", False)
|
||||||
try_binary = kwargs.get("try_binary", True)
|
try_binary = kwargs.get("try_binary", True)
|
||||||
|
|
||||||
@ -184,6 +185,7 @@ class SourcePackage(ABC):
|
|||||||
self._series = series
|
self._series = series
|
||||||
self._pocket = pocket
|
self._pocket = pocket
|
||||||
self._status = status
|
self._status = status
|
||||||
|
self._arch = arch
|
||||||
# dscfile can be either a path or an URL. misc.py's download() will
|
# dscfile can be either a path or an URL. misc.py's download() will
|
||||||
# later fiture it out
|
# later fiture it out
|
||||||
self._dsc_source = dscfile
|
self._dsc_source = dscfile
|
||||||
@ -252,6 +254,7 @@ class SourcePackage(ABC):
|
|||||||
)
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
params["archtag"] = self._arch
|
||||||
bpph = archive.getBinaryPackage(self.source, **params)
|
bpph = archive.getBinaryPackage(self.source, **params)
|
||||||
except PackageNotFoundException as bpnfe:
|
except PackageNotFoundException as bpnfe:
|
||||||
# log binary lookup failure, in case it provides hints
|
# log binary lookup failure, in case it provides hints
|
||||||
@ -543,7 +546,7 @@ class SourcePackage(ABC):
|
|||||||
Return the debdiff filename.
|
Return the debdiff filename.
|
||||||
"""
|
"""
|
||||||
cmd = ["debdiff", self.dsc_name, newpkg.dsc_name]
|
cmd = ["debdiff", self.dsc_name, newpkg.dsc_name]
|
||||||
difffn = newpkg.dsc_name[:-3] + "debdiff"
|
difffn = f"{newpkg.dsc_name[:-3]}debdiff"
|
||||||
Logger.debug("%s > %s", " ".join(cmd), difffn)
|
Logger.debug("%s > %s", " ".join(cmd), difffn)
|
||||||
with open(difffn, "w", encoding="utf-8") as f:
|
with open(difffn, "w", encoding="utf-8") as f:
|
||||||
if subprocess.call(cmd, stdout=f, cwd=str(self.workdir)) > 2:
|
if subprocess.call(cmd, stdout=f, cwd=str(self.workdir)) > 2:
|
||||||
@ -1342,7 +1345,7 @@ class SnapshotSPPH:
|
|||||||
self.getComponent(),
|
self.getComponent(),
|
||||||
subdir,
|
subdir,
|
||||||
name,
|
name,
|
||||||
name + "_" + pkgversion,
|
f"{name}_{pkgversion}",
|
||||||
"changelog.txt",
|
"changelog.txt",
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
|
@ -71,8 +71,8 @@ class Pbuilder(Builder):
|
|||||||
cmd = [
|
cmd = [
|
||||||
"sudo",
|
"sudo",
|
||||||
"-E",
|
"-E",
|
||||||
"ARCH=" + self.architecture,
|
f"ARCH={self.architecture}",
|
||||||
"DIST=" + dist,
|
f"DIST={dist}",
|
||||||
self.name,
|
self.name,
|
||||||
"--build",
|
"--build",
|
||||||
"--architecture",
|
"--architecture",
|
||||||
@ -91,8 +91,8 @@ class Pbuilder(Builder):
|
|||||||
cmd = [
|
cmd = [
|
||||||
"sudo",
|
"sudo",
|
||||||
"-E",
|
"-E",
|
||||||
"ARCH=" + self.architecture,
|
f"ARCH={self.architecture}",
|
||||||
"DIST=" + dist,
|
f"DIST={dist}",
|
||||||
self.name,
|
self.name,
|
||||||
"--update",
|
"--update",
|
||||||
"--architecture",
|
"--architecture",
|
||||||
@ -140,7 +140,7 @@ class Sbuild(Builder):
|
|||||||
workdir = os.getcwd()
|
workdir = os.getcwd()
|
||||||
Logger.debug("cd %s", result_directory)
|
Logger.debug("cd %s", result_directory)
|
||||||
os.chdir(result_directory)
|
os.chdir(result_directory)
|
||||||
cmd = ["sbuild", "--arch-all", "--dist=" + dist, "--arch=" + self.architecture, dsc_file]
|
cmd = ["sbuild", "--arch-all", f"--dist={dist}", f"--arch={self.architecture}", dsc_file]
|
||||||
Logger.debug(" ".join(cmd))
|
Logger.debug(" ".join(cmd))
|
||||||
returncode = subprocess.call(cmd)
|
returncode = subprocess.call(cmd)
|
||||||
Logger.debug("cd %s", workdir)
|
Logger.debug("cd %s", workdir)
|
||||||
|
@ -68,21 +68,19 @@ class UDTConfig:
|
|||||||
config = {}
|
config = {}
|
||||||
for filename in ("/etc/devscripts.conf", "~/.devscripts"):
|
for filename in ("/etc/devscripts.conf", "~/.devscripts"):
|
||||||
try:
|
try:
|
||||||
f = open(os.path.expanduser(filename), "r", encoding="utf-8")
|
with open(os.path.expanduser(filename), "r", encoding="utf-8") as f:
|
||||||
|
content = f.read()
|
||||||
except IOError:
|
except IOError:
|
||||||
continue
|
continue
|
||||||
for line in f:
|
try:
|
||||||
parsed = shlex.split(line, comments=True)
|
tokens = shlex.split(content, comments=True)
|
||||||
if len(parsed) > 1:
|
except ValueError as e:
|
||||||
Logger.warning(
|
Logger.error("Error parsing %s: %s", filename, e)
|
||||||
"Cannot parse variable assignment in %s: %s",
|
continue
|
||||||
getattr(f, "name", "<config>"),
|
for token in tokens:
|
||||||
line,
|
if "=" in token:
|
||||||
)
|
key, value = token.split("=", 1)
|
||||||
if len(parsed) >= 1 and "=" in parsed[0]:
|
|
||||||
key, value = parsed[0].split("=", 1)
|
|
||||||
config[key] = value
|
config[key] = value
|
||||||
f.close()
|
|
||||||
return config
|
return config
|
||||||
|
|
||||||
def get_value(self, key, default=None, boolean=False, compat_keys=()):
|
def get_value(self, key, default=None, boolean=False, compat_keys=()):
|
||||||
@ -99,9 +97,9 @@ class UDTConfig:
|
|||||||
if default is None and key in self.defaults:
|
if default is None and key in self.defaults:
|
||||||
default = self.defaults[key]
|
default = self.defaults[key]
|
||||||
|
|
||||||
keys = [self.prefix + "_" + key]
|
keys = [f"{self.prefix}_{key}"]
|
||||||
if key in self.defaults:
|
if key in self.defaults:
|
||||||
keys.append("UBUNTUTOOLS_" + key)
|
keys.append(f"UBUNTUTOOLS_{key}")
|
||||||
keys += compat_keys
|
keys += compat_keys
|
||||||
|
|
||||||
for k in keys:
|
for k in keys:
|
||||||
@ -114,9 +112,9 @@ class UDTConfig:
|
|||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
if k in compat_keys:
|
if k in compat_keys:
|
||||||
replacements = self.prefix + "_" + key
|
replacements = f"{self.prefix}_{key}"
|
||||||
if key in self.defaults:
|
if key in self.defaults:
|
||||||
replacements += "or UBUNTUTOOLS_" + key
|
replacements += f"or UBUNTUTOOLS_{key}"
|
||||||
Logger.warning(
|
Logger.warning(
|
||||||
"Using deprecated configuration variable %s. You should use %s.",
|
"Using deprecated configuration variable %s. You should use %s.",
|
||||||
k,
|
k,
|
||||||
@ -180,7 +178,7 @@ def ubu_email(name=None, email=None, export=True):
|
|||||||
mailname = socket.getfqdn()
|
mailname = socket.getfqdn()
|
||||||
if os.path.isfile("/etc/mailname"):
|
if os.path.isfile("/etc/mailname"):
|
||||||
mailname = open("/etc/mailname", "r", encoding="utf-8").read().strip()
|
mailname = open("/etc/mailname", "r", encoding="utf-8").read().strip()
|
||||||
email = pwd.getpwuid(os.getuid()).pw_name + "@" + mailname
|
email = f"{pwd.getpwuid(os.getuid()).pw_name}@{mailname}"
|
||||||
|
|
||||||
if export:
|
if export:
|
||||||
os.environ["DEBFULLNAME"] = name
|
os.environ["DEBFULLNAME"] = name
|
||||||
|
@ -883,7 +883,7 @@ class SourcePackagePublishingHistory(BaseWrapper):
|
|||||||
"""
|
"""
|
||||||
release = self.getSeriesName()
|
release = self.getSeriesName()
|
||||||
if self.pocket != "Release":
|
if self.pocket != "Release":
|
||||||
release += "-" + self.pocket.lower()
|
release += f"-{self.pocket.lower()}"
|
||||||
return release
|
return release
|
||||||
|
|
||||||
def getArchive(self):
|
def getArchive(self):
|
||||||
@ -1097,51 +1097,6 @@ class SourcePackagePublishingHistory(BaseWrapper):
|
|||||||
for build in builds:
|
for build in builds:
|
||||||
self._builds[build.arch_tag] = Build(build)
|
self._builds[build.arch_tag] = Build(build)
|
||||||
|
|
||||||
def getBuildStates(self, archs):
|
|
||||||
res = []
|
|
||||||
|
|
||||||
if not self._builds:
|
|
||||||
self._fetch_builds()
|
|
||||||
|
|
||||||
for arch in archs:
|
|
||||||
build = self._builds.get(arch)
|
|
||||||
if build:
|
|
||||||
res.append(f" {build}")
|
|
||||||
msg = "\n".join(res)
|
|
||||||
return f"Build state(s) for '{self.getPackageName()}':\n{msg}"
|
|
||||||
|
|
||||||
def rescoreBuilds(self, archs, score):
|
|
||||||
res = []
|
|
||||||
|
|
||||||
if not self._builds:
|
|
||||||
self._fetch_builds()
|
|
||||||
|
|
||||||
for arch in archs:
|
|
||||||
build = self._builds.get(arch)
|
|
||||||
if build:
|
|
||||||
if build.rescore(score):
|
|
||||||
res.append(f" {arch}: done")
|
|
||||||
else:
|
|
||||||
res.append(f" {arch}: failed")
|
|
||||||
msg = "\n".join(res)
|
|
||||||
return f"Rescoring builds of '{self.getPackageName()}' to {score}:\n{msg}"
|
|
||||||
|
|
||||||
def retryBuilds(self, archs):
|
|
||||||
res = []
|
|
||||||
|
|
||||||
if not self._builds:
|
|
||||||
self._fetch_builds()
|
|
||||||
|
|
||||||
for arch in archs:
|
|
||||||
build = self._builds.get(arch)
|
|
||||||
if build:
|
|
||||||
if build.retry():
|
|
||||||
res.append(f" {arch}: done")
|
|
||||||
else:
|
|
||||||
res.append(f" {arch}: failed")
|
|
||||||
msg = "\n".join(res)
|
|
||||||
return f"Retrying builds of '{self.getPackageName()}':\n{msg}"
|
|
||||||
|
|
||||||
|
|
||||||
class BinaryPackagePublishingHistory(BaseWrapper):
|
class BinaryPackagePublishingHistory(BaseWrapper):
|
||||||
"""
|
"""
|
||||||
|
@ -385,7 +385,7 @@ class _StderrProgressBar:
|
|||||||
pctstr = f"{pct:>3}%"
|
pctstr = f"{pct:>3}%"
|
||||||
barlen = self.width * pct // 100
|
barlen = self.width * pct // 100
|
||||||
barstr = "=" * barlen
|
barstr = "=" * barlen
|
||||||
barstr = barstr[:-1] + ">"
|
barstr = f"{barstr[:-1]}>"
|
||||||
barstr = barstr.ljust(self.width)
|
barstr = barstr.ljust(self.width)
|
||||||
fullstr = f"\r[{barstr}]{pctstr}"
|
fullstr = f"\r[{barstr}]{pctstr}"
|
||||||
sys.stderr.write(fullstr)
|
sys.stderr.write(fullstr)
|
||||||
|
@ -340,6 +340,7 @@ class PullPkg:
|
|||||||
|
|
||||||
params = {}
|
params = {}
|
||||||
params["package"] = options["package"]
|
params["package"] = options["package"]
|
||||||
|
params["arch"] = options["arch"]
|
||||||
|
|
||||||
if options["release"]:
|
if options["release"]:
|
||||||
(release, version, pocket) = self.parse_release_and_version(
|
(release, version, pocket) = self.parse_release_and_version(
|
||||||
@ -453,7 +454,7 @@ class PullPkg:
|
|||||||
if key.startswith("vcs-"):
|
if key.startswith("vcs-"):
|
||||||
if key == "vcs-browser":
|
if key == "vcs-browser":
|
||||||
continue
|
continue
|
||||||
elif key == "vcs-git":
|
if key == "vcs-git":
|
||||||
vcs = "Git"
|
vcs = "Git"
|
||||||
elif key == "vcs-bzr":
|
elif key == "vcs-bzr":
|
||||||
vcs = "Bazaar"
|
vcs = "Bazaar"
|
||||||
@ -462,9 +463,13 @@ class PullPkg:
|
|||||||
|
|
||||||
uri = srcpkg.dsc[original_key]
|
uri = srcpkg.dsc[original_key]
|
||||||
|
|
||||||
Logger.warning("\nNOTICE: '%s' packaging is maintained in "
|
Logger.warning(
|
||||||
"the '%s' version control system at:\n"
|
"\nNOTICE: '%s' packaging is maintained in "
|
||||||
" %s\n" % (package, vcs, uri))
|
"the '%s' version control system at:\n %s\n",
|
||||||
|
package,
|
||||||
|
vcs,
|
||||||
|
uri,
|
||||||
|
)
|
||||||
|
|
||||||
if vcs == "Bazaar":
|
if vcs == "Bazaar":
|
||||||
vcscmd = " $ bzr branch " + uri
|
vcscmd = " $ bzr branch " + uri
|
||||||
@ -472,9 +477,11 @@ class PullPkg:
|
|||||||
vcscmd = " $ git clone " + uri
|
vcscmd = " $ git clone " + uri
|
||||||
|
|
||||||
if vcscmd:
|
if vcscmd:
|
||||||
Logger.info(f"Please use:\n{vcscmd}\n"
|
Logger.info(
|
||||||
"to retrieve the latest (possibly unreleased) "
|
"Please use:\n%s\n"
|
||||||
"updates to the package.\n")
|
"to retrieve the latest (possibly unreleased) updates to the package.\n",
|
||||||
|
vcscmd,
|
||||||
|
)
|
||||||
|
|
||||||
if pull == PULL_LIST:
|
if pull == PULL_LIST:
|
||||||
Logger.info("Source files:")
|
Logger.info("Source files:")
|
||||||
|
@ -31,9 +31,9 @@ class Question:
|
|||||||
|
|
||||||
def get_options(self):
|
def get_options(self):
|
||||||
if len(self.options) == 2:
|
if len(self.options) == 2:
|
||||||
options = self.options[0] + " or " + self.options[1]
|
options = f"{self.options[0]} or {self.options[1]}"
|
||||||
else:
|
else:
|
||||||
options = ", ".join(self.options[:-1]) + ", or " + self.options[-1]
|
options = f"{', '.join(self.options[:-1])}, or {self.options[-1]}"
|
||||||
return options
|
return options
|
||||||
|
|
||||||
def ask(self, question, default=None):
|
def ask(self, question, default=None):
|
||||||
@ -67,7 +67,7 @@ class Question:
|
|||||||
if selected == option[0]:
|
if selected == option[0]:
|
||||||
selected = option
|
selected = option
|
||||||
if selected not in self.options:
|
if selected not in self.options:
|
||||||
print("Please answer the question with " + self.get_options() + ".")
|
print(f"Please answer the question with {self.get_options()}.")
|
||||||
return selected
|
return selected
|
||||||
|
|
||||||
|
|
||||||
@ -170,7 +170,7 @@ class EditBugReport(EditFile):
|
|||||||
split_re = re.compile(r"^Summary.*?:\s+(.*?)\s+Description:\s+(.*)$", re.DOTALL | re.UNICODE)
|
split_re = re.compile(r"^Summary.*?:\s+(.*?)\s+Description:\s+(.*)$", re.DOTALL | re.UNICODE)
|
||||||
|
|
||||||
def __init__(self, subject, body, placeholders=None):
|
def __init__(self, subject, body, placeholders=None):
|
||||||
prefix = os.path.basename(sys.argv[0]) + "_"
|
prefix = f"{os.path.basename(sys.argv[0])}_"
|
||||||
tmpfile = tempfile.NamedTemporaryFile(prefix=prefix, suffix=".txt", delete=False)
|
tmpfile = tempfile.NamedTemporaryFile(prefix=prefix, suffix=".txt", delete=False)
|
||||||
tmpfile.write((f"Summary (one line):\n{subject}\n\nDescription:\n{body}").encode("utf-8"))
|
tmpfile.write((f"Summary (one line):\n{subject}\n\nDescription:\n{body}").encode("utf-8"))
|
||||||
tmpfile.close()
|
tmpfile.close()
|
||||||
|
@ -183,7 +183,7 @@ Content-Type: text/plain; charset=UTF-8
|
|||||||
backup = tempfile.NamedTemporaryFile(
|
backup = tempfile.NamedTemporaryFile(
|
||||||
mode="w",
|
mode="w",
|
||||||
delete=False,
|
delete=False,
|
||||||
prefix="requestsync-" + re.sub(r"[^a-zA-Z0-9_-]", "", bugtitle.replace(" ", "_")),
|
prefix=f"requestsync-{re.sub('[^a-zA-Z0-9_-]', '', bugtitle.replace(' ', '_'))}",
|
||||||
)
|
)
|
||||||
with backup:
|
with backup:
|
||||||
backup.write(mail)
|
backup.write(mail)
|
||||||
|
95
ubuntutools/running_autopkgtests.py
Normal file
95
ubuntutools/running_autopkgtests.py
Normal file
@ -0,0 +1,95 @@
|
|||||||
|
# Copyright (C) 2024 Canonical Ltd.
|
||||||
|
# Author: Chris Peterson <chris.peterson@canonical.com>
|
||||||
|
# Author: Andy P. Whitcroft
|
||||||
|
# Author: Christian Ehrhardt
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify it
|
||||||
|
# under the terms of the GNU General Public License version 3, as published
|
||||||
|
# by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful, but
|
||||||
|
# WITHOUT ANY WARRANTY; without even the implied warranties of
|
||||||
|
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
|
||||||
|
# See the GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
import urllib
|
||||||
|
import urllib.request
|
||||||
|
|
||||||
|
URL_RUNNING = "http://autopkgtest.ubuntu.com/static/running.json"
|
||||||
|
URL_QUEUED = "http://autopkgtest.ubuntu.com/queues.json"
|
||||||
|
|
||||||
|
|
||||||
|
def _get_jobs(url: str) -> dict:
|
||||||
|
request = urllib.request.Request(url, headers={"Cache-Control": "max-age-0"})
|
||||||
|
with urllib.request.urlopen(request) as response:
|
||||||
|
data = response.read()
|
||||||
|
jobs = json.loads(data.decode("utf-8"))
|
||||||
|
|
||||||
|
return jobs
|
||||||
|
|
||||||
|
|
||||||
|
def get_running():
|
||||||
|
jobs = _get_jobs(URL_RUNNING)
|
||||||
|
|
||||||
|
running = []
|
||||||
|
for pkg in jobs:
|
||||||
|
for handle in jobs[pkg]:
|
||||||
|
for series in jobs[pkg][handle]:
|
||||||
|
for arch in jobs[pkg][handle][series]:
|
||||||
|
jobinfo = jobs[pkg][handle][series][arch]
|
||||||
|
triggers = ",".join(jobinfo[0].get("triggers", "-"))
|
||||||
|
ppas = ",".join(jobinfo[0].get("ppas", "-"))
|
||||||
|
time = jobinfo[1]
|
||||||
|
env = jobinfo[0].get("env", "-")
|
||||||
|
time = str(datetime.timedelta(seconds=jobinfo[1]))
|
||||||
|
try:
|
||||||
|
line = (
|
||||||
|
f"R {time:6} {pkg:30} {'-':10} {series:8} {arch:8}"
|
||||||
|
f" {ppas:31} {triggers} {env}\n"
|
||||||
|
)
|
||||||
|
running.append((jobinfo[1], line))
|
||||||
|
except BrokenPipeError:
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
output = ""
|
||||||
|
for time, row in sorted(running, reverse=True):
|
||||||
|
output += f"{row}"
|
||||||
|
|
||||||
|
return output
|
||||||
|
|
||||||
|
|
||||||
|
def get_queued():
|
||||||
|
queues = _get_jobs(URL_QUEUED)
|
||||||
|
output = ""
|
||||||
|
for origin in queues:
|
||||||
|
for series in queues[origin]:
|
||||||
|
for arch in queues[origin][series]:
|
||||||
|
n = 0
|
||||||
|
for key in queues[origin][series][arch]:
|
||||||
|
if key == "private job":
|
||||||
|
pkg = triggers = ppas = "private job"
|
||||||
|
else:
|
||||||
|
(pkg, json_data) = key.split(maxsplit=1)
|
||||||
|
try:
|
||||||
|
jobinfo = json.loads(json_data)
|
||||||
|
triggers = ",".join(jobinfo.get("triggers", "-"))
|
||||||
|
ppas = ",".join(jobinfo.get("ppas", "-"))
|
||||||
|
except json.decoder.JSONDecodeError:
|
||||||
|
pkg = triggers = ppas = "failed to parse"
|
||||||
|
continue
|
||||||
|
|
||||||
|
n = n + 1
|
||||||
|
try:
|
||||||
|
output += (
|
||||||
|
f"Q{n:04d} {'-:--':>6} {pkg:30} {origin:10} {series:8} {arch:8}"
|
||||||
|
f" {ppas:31} {triggers}\n"
|
||||||
|
)
|
||||||
|
except BrokenPipeError:
|
||||||
|
sys.exit(1)
|
||||||
|
return output
|
@ -255,7 +255,7 @@ class SourcePackage:
|
|||||||
def _changes_file(self):
|
def _changes_file(self):
|
||||||
"""Returns the file name of the .changes file."""
|
"""Returns the file name of the .changes file."""
|
||||||
return os.path.join(
|
return os.path.join(
|
||||||
self._workdir, f"{self._package}_{ strip_epoch(self._version)}_source.changes"
|
self._workdir, f"{self._package}_{strip_epoch(self._version)}_source.changes"
|
||||||
)
|
)
|
||||||
|
|
||||||
def check_target(self, upload, launchpad):
|
def check_target(self, upload, launchpad):
|
||||||
|
@ -39,7 +39,7 @@ def is_command_available(command, check_sbin=False):
|
|||||||
"Is command in $PATH?"
|
"Is command in $PATH?"
|
||||||
path = os.environ.get("PATH", "/usr/bin:/bin").split(":")
|
path = os.environ.get("PATH", "/usr/bin:/bin").split(":")
|
||||||
if check_sbin:
|
if check_sbin:
|
||||||
path += [directory[:-3] + "sbin" for directory in path if directory.endswith("/bin")]
|
path += [f"{directory[:-3]}sbin" for directory in path if directory.endswith("/bin")]
|
||||||
return any(os.access(os.path.join(directory, command), os.X_OK) for directory in path)
|
return any(os.access(os.path.join(directory, command), os.X_OK) for directory in path)
|
||||||
|
|
||||||
|
|
||||||
@ -303,7 +303,7 @@ def _download_and_change_into(task, dsc_file, patch, branch):
|
|||||||
extract_source(dsc_file, Logger.isEnabledFor(logging.DEBUG))
|
extract_source(dsc_file, Logger.isEnabledFor(logging.DEBUG))
|
||||||
|
|
||||||
# change directory
|
# change directory
|
||||||
directory = task.package + "-" + task.get_version().upstream_version
|
directory = f"{task.package}-{task.get_version().upstream_version}"
|
||||||
Logger.debug("cd %s", directory)
|
Logger.debug("cd %s", directory)
|
||||||
os.chdir(directory)
|
os.chdir(directory)
|
||||||
|
|
||||||
|
33
ubuntutools/test/test_requestsync.py
Normal file
33
ubuntutools/test/test_requestsync.py
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
# Copyright (C) 2024 Canonical Ltd.
|
||||||
|
# Author: Chris Peterson <chris.peterson@canonical.com>
|
||||||
|
#
|
||||||
|
# Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
# purpose with or without fee is hereby granted, provided that the above
|
||||||
|
# copyright notice and this permission notice appear in all copies.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||||
|
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||||
|
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||||
|
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||||
|
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||||
|
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||||
|
# PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
|
||||||
|
# Binary Tests
|
||||||
|
class BinaryTests(unittest.TestCase):
|
||||||
|
|
||||||
|
# The requestsync binary has the option of using the launchpad api
|
||||||
|
# to log in but requires python3-keyring in addition to
|
||||||
|
# python3-launchpadlib. Testing the integrated login functionality
|
||||||
|
# automatically isn't very feasbile, but we can at least write a smoke
|
||||||
|
# test to make sure the required packages are installed.
|
||||||
|
# See LP: #2049217
|
||||||
|
def test_keyring_installed(self):
|
||||||
|
"""Smoke test for required lp api dependencies"""
|
||||||
|
try:
|
||||||
|
import keyring # noqa: F401
|
||||||
|
except ModuleNotFoundError:
|
||||||
|
raise ModuleNotFoundError("package python3-keyring is not installed")
|
128
ubuntutools/test/test_running_autopkgtests.py
Normal file
128
ubuntutools/test/test_running_autopkgtests.py
Normal file
@ -0,0 +1,128 @@
|
|||||||
|
# Copyright (C) 2024 Canonical Ltd.
|
||||||
|
# Author: Chris Peterson <chris.peterson@canonical.com>
|
||||||
|
#
|
||||||
|
# Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
# purpose with or without fee is hereby granted, provided that the above
|
||||||
|
# copyright notice and this permission notice appear in all copies.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||||
|
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||||
|
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||||
|
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||||
|
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||||
|
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||||
|
# PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
""" Tests for running_autopkgtests
|
||||||
|
Tests using cached data from autopkgtest servers.
|
||||||
|
|
||||||
|
These tests only ensure code changes don't change parsing behavior
|
||||||
|
of the response data. If the response format changes, then the cached
|
||||||
|
responses will need to change as well.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from ubuntutools.running_autopkgtests import (
|
||||||
|
URL_QUEUED,
|
||||||
|
URL_RUNNING,
|
||||||
|
_get_jobs,
|
||||||
|
get_queued,
|
||||||
|
get_running,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Cached binary response data from autopkgtest server
|
||||||
|
RUN_DATA = (
|
||||||
|
b'{"pyatem": {'
|
||||||
|
b" \"submit-time_2024-01-19 19:37:36;triggers_['python3-defaults/3.12.1-0ubuntu1'];\":"
|
||||||
|
b' {"noble": {"arm64": [{"triggers": ["python3-defaults/3.12.1-0ubuntu1"],'
|
||||||
|
b' "submit-time": "2024-01-19 19:37:36"}, 380, "<omitted log>"]}}}}'
|
||||||
|
)
|
||||||
|
QUEUED_DATA = (
|
||||||
|
b'{"ubuntu": {"noble": {"arm64": ["libobject-accessor-perl {\\"requester\\": \\"someone\\",'
|
||||||
|
b' \\"submit-time\\": \\"2024-01-18 01:08:55\\",'
|
||||||
|
b' \\"triggers\\": [\\"perl/5.38.2-3\\", \\"liblocale-gettext-perl/1.07-6build1\\"]}"]}}}'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Expected result(s) of parsing the above JSON data
|
||||||
|
RUNNING_JOB = {
|
||||||
|
"pyatem": {
|
||||||
|
"submit-time_2024-01-19 19:37:36;triggers_['python3-defaults/3.12.1-0ubuntu1'];": {
|
||||||
|
"noble": {
|
||||||
|
"arm64": [
|
||||||
|
{
|
||||||
|
"triggers": ["python3-defaults/3.12.1-0ubuntu1"],
|
||||||
|
"submit-time": "2024-01-19 19:37:36",
|
||||||
|
},
|
||||||
|
380,
|
||||||
|
"<omitted log>",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
QUEUED_JOB = {
|
||||||
|
"ubuntu": {
|
||||||
|
"noble": {
|
||||||
|
"arm64": [
|
||||||
|
'libobject-accessor-perl {"requester": "someone",'
|
||||||
|
' "submit-time": "2024-01-18 01:08:55",'
|
||||||
|
' "triggers": ["perl/5.38.2-3", "liblocale-gettext-perl/1.07-6build1"]}'
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
PRIVATE_JOB = {"ppa": {"noble": {"arm64": ["private job"]}}}
|
||||||
|
|
||||||
|
|
||||||
|
# Expected textual output of the program based on the above data
|
||||||
|
RUNNING_OUTPUT = (
|
||||||
|
"R 0:06:20 pyatem - noble arm64"
|
||||||
|
" - python3-defaults/3.12.1-0ubuntu1 -\n"
|
||||||
|
)
|
||||||
|
QUEUED_OUTPUT = (
|
||||||
|
"Q0001 -:-- libobject-accessor-perl ubuntu noble arm64"
|
||||||
|
" - perl/5.38.2-3,liblocale-gettext-perl/1.07-6build1\n"
|
||||||
|
)
|
||||||
|
PRIVATE_OUTPUT = (
|
||||||
|
"Q0001 -:-- private job ppa noble arm64"
|
||||||
|
" private job private job\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class RunningAutopkgtestTestCase(unittest.TestCase):
    """Assert helper functions parse data correctly"""

    # Show full diffs on assertEqual failures — the expected dicts are large.
    maxDiff = None

    def test_get_running_jobs(self):
        """Test: Correctly parse autopkgtest json data for running tests"""
        with patch("urllib.request.urlopen") as urlopen_mock:
            # urlopen is used as a context manager; read() yields the raw JSON.
            response = urlopen_mock.return_value.__enter__.return_value
            response.read.return_value = RUN_DATA
            self.assertEqual(RUNNING_JOB, _get_jobs(URL_RUNNING))

    def test_get_queued_jobs(self):
        """Test: Correctly parse autopkgtest json data for queued tests"""
        with patch("urllib.request.urlopen") as urlopen_mock:
            response = urlopen_mock.return_value.__enter__.return_value
            response.read.return_value = QUEUED_DATA
            self.assertEqual(QUEUED_JOB, _get_jobs(URL_QUEUED))

    def test_get_running_output(self):
        """Test: Correctly print running tests"""
        with patch("ubuntutools.running_autopkgtests._get_jobs", return_value=RUNNING_JOB):
            self.assertEqual(get_running(), RUNNING_OUTPUT)

    def test_get_queued_output(self):
        """Test: Correctly print queued tests"""
        with patch("ubuntutools.running_autopkgtests._get_jobs", return_value=QUEUED_JOB):
            self.assertEqual(get_queued(), QUEUED_OUTPUT)

    def test_private_queued_job(self):
        """Test: Correctly print queued private job"""
        with patch("ubuntutools.running_autopkgtests._get_jobs", return_value=PRIVATE_JOB):
            self.assertEqual(get_queued(), PRIVATE_OUTPUT)
|
@ -72,17 +72,17 @@ class Control:
|
|||||||
def set_maintainer(self, maintainer):
    """Sets the value of the Maintainer field."""
    # Replace the whole existing Maintainer line (the optional space copes
    # with a bare "Maintainer:" with no value).
    maintainer_line = re.compile("^Maintainer: ?.*$", re.MULTILINE)
    self._content = maintainer_line.sub(f"Maintainer: {maintainer}", self._content)
|
||||||
|
|
||||||
def set_original_maintainer(self, original_maintainer):
    """Sets the value of the XSBC-Original-Maintainer field."""
    field = f"XSBC-Original-Maintainer: {original_maintainer}"
    if self.get_original_maintainer():
        # An Original-Maintainer line (with any X/S/B/C prefix combination)
        # already exists: overwrite it in place.
        existing = re.compile("^(?:[XSBC]*-)?Original-Maintainer:.*$", re.MULTILINE)
        self._content = existing.sub(field, self._content)
    else:
        # No such field yet: insert it directly below the Maintainer line.
        anchor = re.compile("^(Maintainer:.*)$", re.MULTILINE)
        self._content = anchor.sub(f"\\1\\n{field}", self._content)
|
||||||
|
|
||||||
def remove_original_maintainer(self):
|
def remove_original_maintainer(self):
|
||||||
"""Strip out out the XSBC-Original-Maintainer line"""
|
"""Strip out out the XSBC-Original-Maintainer line"""
|
||||||
|
79
ubuntutools/utils.py
Normal file
79
ubuntutools/utils.py
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
# Copyright (C) 2019-2023 Canonical Ltd.
|
||||||
|
# Author: Brian Murray <brian.murray@canonical.com> et al.
|
||||||
|
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation; version 3 of the License.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
"""Portions of archive related code that is re-used by various tools."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import urllib.request
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import dateutil.parser
|
||||||
|
from dateutil.tz import tzutc
|
||||||
|
|
||||||
|
|
||||||
|
def get_cache_dir():
    """Return the ubuntu-archive-tools cache directory, creating it on demand.

    Honours $XDG_CACHE_HOME and falls back to ~/.cache when it is unset.
    """
    fallback = os.path.expanduser(os.path.join("~", ".cache"))
    base_dir = os.environ.get("XDG_CACHE_HOME", fallback)
    tools_cache = os.path.join(base_dir, "ubuntu-archive-tools")
    os.makedirs(tools_cache, exist_ok=True)
    return tools_cache
|
||||||
|
|
||||||
|
|
||||||
|
def get_url(url, force_cached):
    """Return file to the URL, possibly caching it.

    Returns an open binary file object: either a local cache file (refreshed
    from the network when the remote copy is newer), or the live network
    response when the URL does not match one of the cacheable patterns below.

    :param url: URL to fetch.
    :param force_cached: if True and the URL is cacheable, serve the existing
        cache file without contacting the network.
        NOTE(review): when the cache file does not exist yet, the open() call
        below raises FileNotFoundError — confirm callers only pass True after
        a previous uncached fetch.
    """
    cache_file = None

    # ignore bileto urls wrt caching, they're usually too small to matter
    # and we don't do proper cache expiry
    # Cache key for proposed-migration artifacts: the two trailing path
    # components joined with "_".
    m = re.search("ubuntu-archive-team.ubuntu.com/proposed-migration/([^/]*)/([^/]*)", url)
    if m:
        cache_dir = get_cache_dir()
        cache_file = os.path.join(cache_dir, f"{m.group(1)}_{m.group(2)}")
    else:
        # test logs can be cached, too
        m = re.search(
            "https://autopkgtest.ubuntu.com/results/autopkgtest-[^/]*/([^/]*)/([^/]*)"
            "/[a-z0-9]*/([^/]*)/([_a-f0-9]*)@/log.gz",
            url,
        )
        if m:
            cache_dir = get_cache_dir()
            cache_file = os.path.join(
                cache_dir, f"{m.group(1)}_{m.group(2)}_{m.group(3)}_{m.group(4)}.gz"
            )

    if cache_file:
        try:
            # mtime of the cached copy doubles as its freshness timestamp
            # (see the os.utime call below).
            prev_mtime = os.stat(cache_file).st_mtime
        except FileNotFoundError:
            # No cached copy yet: epoch 0 guarantees the remote looks newer.
            prev_mtime = 0
        prev_timestamp = datetime.fromtimestamp(prev_mtime, tz=tzutc())
        new_timestamp = datetime.now(tz=tzutc()).timestamp()
        if force_cached:
            return open(cache_file, "rb")

    f = urllib.request.urlopen(url)

    if cache_file:
        # Compare the server's Last-Modified header against the cached copy's
        # mtime; refresh the cache only when the remote copy is newer.
        remote_ts = dateutil.parser.parse(f.headers["last-modified"])
        if remote_ts > prev_timestamp:
            # Write to a temporary ".new" file and rename so a concurrent
            # reader never sees a partially-written cache file.
            with open(f"{cache_file}.new", "wb") as new_cache:
                for line in f:
                    new_cache.write(line)
            os.rename(f"{cache_file}.new", cache_file)
            os.utime(cache_file, times=(new_timestamp, new_timestamp))
        # Cacheable URLs are always served from the cache file, even when it
        # was already up to date; the network stream is closed either way.
        f.close()
        f = open(cache_file, "rb")
    return f
|
Loading…
x
Reference in New Issue
Block a user