Mirror of https://git.launchpad.net/ubuntu-dev-tools, synced 2025-03-13 16:11:15 +00:00

Compare commits: "main" ... "0.190"
No commits in common. "main" and "0.190" have entirely different histories.
In the hunks below, "-" lines are the "main" side and "+" lines are the "0.190" side, as the hunk headers indicate.
.gitignore (vendored, 18 changed lines)
@@ -1,2 +1,16 @@
-__pycache__
-*.egg-info
+.coverage
+.tox
+/ubuntu_dev_tools.egg-info/
+__pycache__/
+*.pyc
+/build/
+/.pybuild/
+/test-data/
+/debian/python-ubuntutools/
+/debian/python3-ubuntutools/
+/debian/ubuntu-dev-tools/
+/debian/debhelper-build-stamp
+/debian/files
+/debian/*.debhelper
+/debian/*.debhelper.log
+/debian/*.substvars
backportpackage (533 changed lines)
@@ -18,8 +18,8 @@
 #
 # ##################################################################

-import argparse
 import glob
+import optparse
 import os
 import shutil
 import subprocess
@@ -27,223 +27,197 @@ import sys
 import tempfile
 from urllib.parse import quote

-try:
-    import lsb_release
-except ImportError:
-    lsb_release = None
-from distro_info import DebianDistroInfo, UbuntuDistroInfo
+import lsb_release
 from httplib2 import Http, HttpLib2Error
+from distro_info import DebianDistroInfo, UbuntuDistroInfo

-from ubuntutools import getLogger
-from ubuntutools.archive import DebianSourcePackage, DownloadError, UbuntuSourcePackage
-from ubuntutools.builder import get_builder
+from ubuntutools.archive import (DebianSourcePackage,
+                                 UbuntuSourcePackage, DownloadError)
 from ubuntutools.config import UDTConfig, ubu_email
-from ubuntutools.lp.lpapicache import (
-    Distribution,
-    Launchpad,
-    PackageNotFoundException,
-    SeriesNotFoundException,
-)
-from ubuntutools.misc import codename_to_distribution, system_distribution, vendor_to_distroinfo
+from ubuntutools.builder import get_builder
+from ubuntutools.lp.lpapicache import (Launchpad, Distribution,
+                                       SeriesNotFoundException,
+                                       PackageNotFoundException)
+from ubuntutools.misc import (system_distribution, vendor_to_distroinfo,
+                              codename_to_distribution)
 from ubuntutools.question import YesNoQuestion

+from ubuntutools import getLogger
 Logger = getLogger()


-def error(msg, *args):
-    Logger.error(msg, *args)
+def error(msg):
+    Logger.error(msg)
     sys.exit(1)


 def check_call(cmd, *args, **kwargs):
-    Logger.debug(" ".join(cmd))
+    Logger.debug(' '.join(cmd))
     ret = subprocess.call(cmd, *args, **kwargs)
     if ret != 0:
-        error("%s returned %d.", cmd[0], ret)
+        error('%s returned %d.' % (cmd[0], ret))
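A note on the recurring error()/Logger change above, which repeats throughout this compare: the "main" side passes format arguments through to the logger, which interpolates lazily only when a record is actually emitted, while the 0.190 side builds the string eagerly with %. A minimal standalone sketch of the two styles, using only the standard logging module and hypothetical values:

    import logging

    logging.basicConfig(level=logging.ERROR)
    logger = logging.getLogger("backportpackage")

    cmd, ret = ["dput", "ppa:user/ppa"], 1
    # Lazy: interpolation happens only if the ERROR record is emitted.
    logger.error("%s returned %d.", cmd[0], ret)
    # Eager: the message string is built before the logger is even consulted.
    logger.error("%s returned %d." % (cmd[0], ret))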


-def parse(argv):
-    usage = "%(prog)s [options] <source package name or .dsc URL/file>"
-    parser = argparse.ArgumentParser(usage=usage)
-    parser.add_argument(
-        "-d",
-        "--destination",
-        metavar="DEST",
-        dest="dest_releases",
-        default=[],
-        action="append",
-        help="Backport to DEST release (default: current release)",
-    )
-    parser.add_argument(
-        "-s",
-        "--source",
-        metavar="SOURCE",
-        dest="source_release",
-        help="Backport from SOURCE release (default: devel release)",
-    )
-    parser.add_argument(
-        "-S",
-        "--suffix",
-        metavar="SUFFIX",
-        help="Suffix to append to version number (default: ~ppa1 when uploading to a PPA)",
-    )
-    parser.add_argument(
-        "-e",
-        "--message",
-        metavar="MESSAGE",
-        default="No-change",
-        help='Changelog message to use instead of "No-change" '
-        "(default: No-change backport to DEST.)",
-    )
-    parser.add_argument(
-        "-b",
-        "--build",
-        default=False,
-        action="store_true",
-        help="Build the package before uploading (default: %(default)s)",
-    )
-    parser.add_argument(
-        "-B",
-        "--builder",
-        metavar="BUILDER",
-        help="Specify the package builder (default: pbuilder)",
-    )
-    parser.add_argument(
-        "-U",
-        "--update",
-        default=False,
-        action="store_true",
-        help="Update the build environment before attempting to build",
-    )
-    parser.add_argument("-u", "--upload", metavar="UPLOAD", help="Specify an upload destination")
-    parser.add_argument(
-        "-k", "--key", dest="keyid", help="Specify the key ID to be used for signing."
-    )
-    parser.add_argument(
-        "--dont-sign", dest="keyid", action="store_false", help="Do not sign the upload."
-    )
-    parser.add_argument(
-        "-y",
-        "--yes",
-        dest="prompt",
-        default=True,
-        action="store_false",
-        help="Do not prompt before uploading to a PPA",
-    )
-    parser.add_argument(
-        "-v", "--version", metavar="VERSION", help="Package version to backport (or verify)"
-    )
-    parser.add_argument(
-        "-w",
-        "--workdir",
-        metavar="WORKDIR",
-        help="Specify a working directory (default: temporary dir)",
-    )
-    parser.add_argument(
-        "-r",
-        "--release-pocket",
-        default=False,
-        action="store_true",
-        help="Target the release pocket in the .changes file. "
-        "Necessary (and default) for uploads to PPAs",
-    )
-    parser.add_argument(
-        "-c", "--close", metavar="BUG", help="Bug to close in the changelog entry."
-    )
-    parser.add_argument(
-        "-m", "--mirror", metavar="URL", help="Preferred mirror (default: Launchpad)"
-    )
-    parser.add_argument(
-        "-l",
-        "--lpinstance",
-        metavar="INSTANCE",
-        help="Launchpad instance to connect to (default: production)",
-    )
-    parser.add_argument(
-        "--no-conf",
-        default=False,
-        action="store_true",
-        help="Don't read config files or environment variables",
-    )
-    parser.add_argument("package_or_dsc", help=argparse.SUPPRESS)
+def parse(args):
+    usage = 'Usage: %prog [options] <source package name or .dsc URL/file>'
+    parser = optparse.OptionParser(usage)
+    parser.add_option('-d', '--destination',
+                      metavar='DEST',
+                      dest='dest_releases',
+                      default=[],
+                      action='append',
+                      help='Backport to DEST release '
+                           '(default: current release)')
+    parser.add_option('-s', '--source',
+                      metavar='SOURCE',
+                      dest='source_release',
+                      help='Backport from SOURCE release '
+                           '(default: devel release)')
+    parser.add_option('-S', '--suffix',
+                      metavar='SUFFIX',
+                      help='Suffix to append to version number '
+                           '(default: ~ppa1 when uploading to a PPA)')
+    parser.add_option('-e', '--message',
+                      metavar='MESSAGE',
+                      default="No-change",
+                      help='Changelog message to use instead of "No-change" '
+                           '(default: No-change backport to DEST.)')
+    parser.add_option('-b', '--build',
+                      default=False,
+                      action='store_true',
+                      help='Build the package before uploading '
+                           '(default: %default)')
+    parser.add_option('-B', '--builder',
+                      metavar='BUILDER',
+                      help='Specify the package builder (default: pbuilder)')
+    parser.add_option('-U', '--update',
+                      default=False,
+                      action='store_true',
+                      help='Update the build environment before '
+                           'attempting to build')
+    parser.add_option('-u', '--upload',
+                      metavar='UPLOAD',
+                      help='Specify an upload destination')
+    parser.add_option("-k", "--key",
+                      dest='keyid',
+                      help="Specify the key ID to be used for signing.")
+    parser.add_option('--dont-sign',
+                      dest='keyid', action='store_false',
+                      help='Do not sign the upload.')
+    parser.add_option('-y', '--yes',
+                      dest='prompt',
+                      default=True,
+                      action='store_false',
+                      help='Do not prompt before uploading to a PPA')
+    parser.add_option('-v', '--version',
+                      metavar='VERSION',
+                      help='Package version to backport (or verify)')
+    parser.add_option('-w', '--workdir',
+                      metavar='WORKDIR',
+                      help='Specify a working directory '
+                           '(default: temporary dir)')
+    parser.add_option('-r', '--release-pocket',
+                      default=False,
+                      action='store_true',
+                      help='Target the release pocket in the .changes file. '
+                           'Necessary (and default) for uploads to PPAs')
+    parser.add_option('-c', '--close',
+                      metavar='BUG',
+                      help='Bug to close in the changelog entry.')
+    parser.add_option('-m', '--mirror',
+                      metavar='URL',
+                      help='Preferred mirror (default: Launchpad)')
+    parser.add_option('-l', '--lpinstance',
+                      metavar='INSTANCE',
+                      help='Launchpad instance to connect to '
+                           '(default: production)')
+    parser.add_option('--no-conf',
+                      default=False,
+                      action='store_true',
+                      help="Don't read config files or environment variables")

-    args = parser.parse_args(argv)
-    config = UDTConfig(args.no_conf)
-    if args.builder is None:
-        args.builder = config.get_value("BUILDER")
-    if not args.update:
-        args.update = config.get_value("UPDATE_BUILDER", boolean=True)
-    if args.workdir is None:
-        args.workdir = config.get_value("WORKDIR")
-    if args.lpinstance is None:
-        args.lpinstance = config.get_value("LPINSTANCE")
-    if args.upload is None:
-        args.upload = config.get_value("UPLOAD")
-    if args.keyid is None:
-        args.keyid = config.get_value("KEYID")
-    if not args.upload and not args.workdir:
-        parser.error("Please specify either a working dir or an upload target!")
-    if args.upload and args.upload.startswith("ppa:"):
-        args.release_pocket = True
+    opts, args = parser.parse_args(args)
+    if len(args) != 1:
+        parser.error('You must specify a single source package or a .dsc '
+                     'URL/path.')
+    config = UDTConfig(opts.no_conf)
+    if opts.builder is None:
+        opts.builder = config.get_value('BUILDER')
+    if not opts.update:
+        opts.update = config.get_value('UPDATE_BUILDER', boolean=True)
+    if opts.workdir is None:
+        opts.workdir = config.get_value('WORKDIR')
+    if opts.lpinstance is None:
+        opts.lpinstance = config.get_value('LPINSTANCE')
+    if opts.upload is None:
+        opts.upload = config.get_value('UPLOAD')
+    if opts.keyid is None:
+        opts.keyid = config.get_value('KEYID')
+    if not opts.upload and not opts.workdir:
+        parser.error('Please specify either a working dir or an upload target!')
+    if opts.upload and opts.upload.startswith('ppa:'):
+        opts.release_pocket = True

-    return args, config
+    return opts, args, config


-def find_release_package(mirror, workdir, package, version, source_release, config):
+def find_release_package(mirror, workdir, package, version, source_release,
+                         config):
     srcpkg = None

     if source_release:
         distribution = codename_to_distribution(source_release)
         if not distribution:
-            error("Unknown release codename %s", source_release)
+            error('Unknown release codename %s' % source_release)
         info = vendor_to_distroinfo(distribution)()
         source_release = info.codename(source_release, default=source_release)
     else:
         distribution = system_distribution()
     mirrors = [mirror] if mirror else []

-    mirrors.append(config.get_value(f"{distribution.upper()}_MIRROR"))
+    mirrors.append(config.get_value('%s_MIRROR' % distribution.upper()))

     if not version:
         archive = Distribution(distribution.lower()).getArchive()
         try:
             spph = archive.getSourcePackage(package, source_release)
         except (SeriesNotFoundException, PackageNotFoundException) as e:
-            error("%s", str(e))
+            error(str(e))
         version = spph.getVersion()

-    if distribution == "Debian":
-        srcpkg = DebianSourcePackage(package, version, workdir=workdir, mirrors=mirrors)
-    elif distribution == "Ubuntu":
-        srcpkg = UbuntuSourcePackage(package, version, workdir=workdir, mirrors=mirrors)
+    if distribution == 'Debian':
+        srcpkg = DebianSourcePackage(package,
+                                     version,
+                                     workdir=workdir,
+                                     mirrors=mirrors)
+    elif distribution == 'Ubuntu':
+        srcpkg = UbuntuSourcePackage(package,
+                                     version,
+                                     workdir=workdir,
+                                     mirrors=mirrors)

     return srcpkg


 def find_package(mirror, workdir, package, version, source_release, config):
     "Returns the SourcePackage"
-    if package.endswith(".dsc"):
+    if package.endswith('.dsc'):
         # Here we are using UbuntuSourcePackage just because we don't have any
         # "general" class that is safely instantiable (as SourcePackage is an
         # abstract class). None of the distribution-specific details within
         # UbuntuSourcePackage is relevant for this use case.
-        return UbuntuSourcePackage(
-            version=version, dscfile=package, workdir=workdir, mirrors=(mirror,)
-        )
+        return UbuntuSourcePackage(version=version, dscfile=package,
+                                   workdir=workdir, mirrors=(mirror,))

     if not source_release and not version:
         info = vendor_to_distroinfo(system_distribution())
         source_release = info().devel()

-    srcpkg = find_release_package(mirror, workdir, package, version, source_release, config)
+    srcpkg = find_release_package(mirror, workdir, package, version,
+                                  source_release, config)
     if version and srcpkg.version != version:
-        error(
-            "Requested backport of version %s but version of %s in %s is %s",
-            version,
-            package,
-            source_release,
-            srcpkg.version,
-        )
+        error('Requested backport of version %s but version of %s in %s is %s'
+              % (version, package, source_release, srcpkg.version))

     return srcpkg

@@ -251,27 +225,30 @@ def find_package(mirror, workdir, package, version, source_release, config):
 def get_backport_version(version, suffix, upload, release):
     distribution = codename_to_distribution(release)
     if not distribution:
-        error("Unknown release codename %s", release)
-    if distribution == "Debian":
+        error('Unknown release codename %s' % release)
+    if distribution == 'Debian':
         debian_distro_info = DebianDistroInfo()
         debian_codenames = debian_distro_info.supported()
         if release in debian_codenames:
             release_version = debian_distro_info.version(release)
             if not release_version:
-                error("Can't find the release version for %s", release)
-            backport_version = f"{version}~bpo{release_version}+1"
+                error(f"Can't find the release version for {release}")
+            backport_version = "{}~bpo{}+1".format(
+                version, release_version
+            )
         else:
-            error("%s is not a supported release (%s)", release, debian_codenames)
-    elif distribution == "Ubuntu":
-        series = Distribution(distribution.lower()).getSeries(name_or_version=release)
+            error(f"{release} is not a supported release ({debian_codenames})")
+    elif distribution == 'Ubuntu':
+        series = Distribution(distribution.lower()).\
+            getSeries(name_or_version=release)

-        backport_version = f"{version}~bpo{series.version}.1"
+        backport_version = version + ('~bpo%s.1' % (series.version))
     else:
-        error("Unknown distribution «%s» for release «%s»", distribution, release)
+        error('Unknown distribution «%s» for release «%s»' % (distribution, release))
     if suffix is not None:
         backport_version += suffix
-    elif upload and upload.startswith("ppa:"):
-        backport_version += "~ppa1"
+    elif upload and upload.startswith('ppa:'):
+        backport_version += '~ppa1'
     return backport_version

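To make the version mangling concrete: for an Ubuntu target, both sides of the hunk append ~bpo<series-version>.1 and then an optional suffix. A small illustrative sketch with hypothetical values (jammy's series version would be 22.04):

    def ubuntu_backport_version(version, series_version, suffix=None, upload=None):
        # Mirrors the Ubuntu branch of get_backport_version() above.
        backport_version = f"{version}~bpo{series_version}.1"
        if suffix is not None:
            backport_version += suffix
        elif upload and upload.startswith("ppa:"):
            backport_version += "~ppa1"
        return backport_version

    # A hypothetical 2.0-1 backported to jammy (22.04) for a PPA upload:
    print(ubuntu_backport_version("2.0-1", "22.04", upload="ppa:user/ppa"))
    # -> 2.0-1~bpo22.04.1~ppa1

do_backport() further down then hands this version to dch --newversion and targets <release>-backports unless --release-pocket is given.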
@@ -279,25 +256,26 @@ def get_old_version(source, release):
     try:
         distribution = codename_to_distribution(release)
         archive = Distribution(distribution.lower()).getArchive()
-        pkg = archive.getSourcePackage(
-            source, release, ("Release", "Security", "Updates", "Proposed", "Backports")
-        )
+        pkg = archive.getSourcePackage(source,
+                                       release,
+                                       ('Release', 'Security', 'Updates',
+                                        'Proposed', 'Backports'))
         return pkg.getVersion()
     except (SeriesNotFoundException, PackageNotFoundException):
         pass
     return None


 def get_backport_dist(release, release_pocket):
     if release_pocket:
         return release
-    return f"{release}-backports"
+    else:
+        return '%s-backports' % release


 def do_build(workdir, dsc, release, builder, update):
     builder = get_builder(builder)
     if not builder:
-        return None
+        return

     if update:
         if 0 != builder.update(release):
@@ -305,41 +283,41 @@ def do_build(workdir, dsc, release, builder, update):

     # builder.build is going to chdir to buildresult:
     workdir = os.path.realpath(workdir)
-    return builder.build(os.path.join(workdir, dsc), release, os.path.join(workdir, "buildresult"))
+    return builder.build(os.path.join(workdir, dsc),
+                         release,
+                         os.path.join(workdir, "buildresult"))


 def do_upload(workdir, package, bp_version, changes, upload, prompt):
-    print(f"Please check {package} {bp_version} in file://{workdir} carefully!")
-    if prompt or upload == "ubuntu":
-        question = f"Do you want to upload the package to {upload}"
+    print('Please check %s %s in file://%s carefully!' % (package, bp_version, workdir))
+    if prompt or upload == 'ubuntu':
+        question = 'Do you want to upload the package to %s' % upload
         answer = YesNoQuestion().ask(question, "yes")
         if answer == "no":
             return

-    check_call(["dput", upload, changes], cwd=workdir)
+    check_call(['dput', upload, changes], cwd=workdir)


 def orig_needed(upload, workdir, pkg):
-    """Avoid a -sa if possible"""
-    if not upload or not upload.startswith("ppa:"):
+    '''Avoid a -sa if possible'''
+    if not upload or not upload.startswith('ppa:'):
         return True
-    ppa = upload.split(":", 1)[1]
-    user, ppa = ppa.split("/", 1)
+    ppa = upload.split(':', 1)[1]
+    user, ppa = ppa.split('/', 1)

     version = pkg.version.upstream_version

-    http = Http()
-    for filename in glob.glob(os.path.join(workdir, f"{pkg.source}_{version}.orig*")):
-        url = (
-            f"https://launchpad.net/~{quote(user)}/+archive/{quote(ppa)}/+sourcefiles"
-            f"/{quote(pkg.source)}/{quote(pkg.version.full_version)}"
-            f"/{quote(os.path.basename(filename))}"
-        )
+    h = Http()
+    for filename in glob.glob(os.path.join(workdir, '%s_%s.orig*' % (pkg.source, version))):
+        url = ('https://launchpad.net/~%s/+archive/%s/+sourcefiles/%s/%s/%s'
+               % (quote(user), quote(ppa), quote(pkg.source),
+                  quote(pkg.version.full_version),
+                  quote(os.path.basename(filename))))
         try:
-            headers = http.request(url, "HEAD")[0]
-            if headers.status != 200 or not headers["content-location"].startswith(
-                "https://launchpadlibrarian.net"
-            ):
+            headers, body = h.request(url, 'HEAD')
+            if (headers.status != 200 or
+                    not headers['content-location'].startswith('https://launchpadlibrarian.net')):
                 return True
         except HttpLib2Error as e:
             Logger.debug(e)
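Both sides of orig_needed() use the same probe: an HTTP HEAD request against Launchpad's +sourcefiles URL, treating a 200 served from launchpadlibrarian.net as proof that the orig tarball is already published, so the source upload can use -sd instead of -sa. A minimal standalone sketch of that probe, with a hypothetical URL:

    from httplib2 import Http, HttpLib2Error

    # Hypothetical source file location on Launchpad:
    url = ("https://launchpad.net/~user/+archive/ppa/+sourcefiles"
           "/hello/2.10-1/hello_2.10.orig.tar.gz")
    try:
        headers, _body = Http().request(url, "HEAD")
        published = (headers.status == 200
                     and headers.get("content-location", "")
                         .startswith("https://launchpadlibrarian.net"))
    except HttpLib2Error:
        published = False  # on network trouble, err on the side of including the orig
    print("orig already published:", published)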
@@ -347,79 +325,61 @@ def orig_needed(upload, workdir, pkg):
     return False


-def do_backport(
-    workdir,
-    pkg,
-    suffix,
-    message,
-    close,
-    release,
-    release_pocket,
-    build,
-    builder,
-    update,
-    upload,
-    keyid,
-    prompt,
-):
-    dirname = f"{pkg.source}-{release}"
+def do_backport(workdir, pkg, suffix, message, close, release, release_pocket,
+                build, builder, update, upload, keyid, prompt):
+    dirname = '%s-%s' % (pkg.source, release)
     srcdir = os.path.join(workdir, dirname)

     if os.path.exists(srcdir):
-        question = f"Working directory {srcdir} already exists. Delete it?"
-        if YesNoQuestion().ask(question, "no") == "no":
+        question = 'Working directory %s already exists. Delete it?' % srcdir
+        if YesNoQuestion().ask(question, 'no') == 'no':
             sys.exit(1)
         shutil.rmtree(srcdir)

     pkg.unpack(dirname)

-    bp_version = get_backport_version(pkg.version.full_version, suffix, upload, release)
+    bp_version = get_backport_version(pkg.version.full_version, suffix,
+                                      upload, release)
     old_version = get_old_version(pkg.source, release)
     bp_dist = get_backport_dist(release, release_pocket)

-    changelog = f"{message} backport to {release}."
+    changelog = '%s backport to %s.' % (message, release,)
     if close:
-        changelog += f" (LP: #{close})"
-    check_call(
-        [
-            "dch",
-            "--force-bad-version",
-            "--force-distribution",
-            "--preserve",
-            "--newversion",
-            bp_version,
-            "--distribution",
-            bp_dist,
-            changelog,
-        ],
-        cwd=srcdir,
-    )
+        changelog += ' (LP: #%s)' % (close,)
+    check_call(['dch',
+                '--force-bad-version',
+                '--force-distribution',
+                '--preserve',
+                '--newversion', bp_version,
+                '--distribution', bp_dist,
+                changelog],
+               cwd=srcdir)

-    cmd = ["debuild", "--no-lintian", "-S", "-nc", "-uc", "-us"]
+    cmd = ['debuild', '--no-lintian', '-S', '-nc', '-uc', '-us']
     if orig_needed(upload, workdir, pkg):
-        cmd.append("-sa")
+        cmd.append('-sa')
     else:
-        cmd.append("-sd")
+        cmd.append('-sd')
     if old_version:
-        cmd.append(f"-v{old_version}")
+        cmd.append('-v%s' % old_version)
     env = os.environ.copy()
     # An ubuntu.com e-mail address would make dpkg-buildpackage fail if there
     # wasn't an Ubuntu maintainer for an ubuntu-versioned package. LP: #1007042
-    env.pop("DEBEMAIL", None)
+    env.pop('DEBEMAIL', None)
     check_call(cmd, cwd=srcdir, env=env)

-    fn_base = pkg.source + "_" + bp_version.split(":", 1)[-1]
-    changes = fn_base + "_source.changes"
+    fn_base = pkg.source + '_' + bp_version.split(':', 1)[-1]
+    changes = fn_base + '_source.changes'

     if build:
-        if 0 != do_build(workdir, fn_base + ".dsc", release, builder, update):
+        if 0 != do_build(workdir, fn_base + '.dsc', release, builder, update):
             sys.exit(1)

     # None: sign with the default signature. False: don't sign
     if keyid is not False:
-        cmd = ["debsign"]
+        cmd = ['debsign']
         if keyid:
-            cmd.append("-k" + keyid)
+            cmd.append('-k' + keyid)
         cmd.append(changes)
         check_call(cmd, cwd=workdir)
     if upload:
@@ -428,68 +388,63 @@ def do_backport(
         shutil.rmtree(srcdir)


-def main(argv):
+def main(args):
     ubu_email()

-    args, config = parse(argv[1:])
+    opts, (package_or_dsc,), config = parse(args[1:])

-    Launchpad.login_anonymously(service=args.lpinstance)
-
-    if not args.dest_releases:
-        if lsb_release:
-            distinfo = lsb_release.get_distro_information()
-            try:
-                current_distro = distinfo["ID"]
-            except KeyError:
-                error("No destination release specified and unable to guess yours.")
-        else:
-            err, current_distro = subprocess.getstatusoutput("lsb_release --id --short")
-            if err:
-                error("Could not run lsb_release to retrieve distribution")
+    Launchpad.login_anonymously(service=opts.lpinstance)
+
+    if not opts.dest_releases:
+        distinfo = lsb_release.get_distro_information()
+        try:
+            current_distro = distinfo['ID']
+        except KeyError:
+            error('No destination release specified and unable to guess yours.')
         if current_distro == "Ubuntu":
-            args.dest_releases = [UbuntuDistroInfo().lts()]
-        elif current_distro == "Debian":
-            args.dest_releases = [DebianDistroInfo().stable()]
+            opts.dest_releases = [UbuntuDistroInfo().lts()]
+        if current_distro == "Debian":
+            opts.dest_releases = [DebianDistroInfo().stable()]
         else:
-            error("Unknown distribution %s, can't guess target release", current_distro)
+            error(f"Unknown distribution {current_distro}, can't guess target release")

-    if args.workdir:
-        workdir = os.path.expanduser(args.workdir)
+    if opts.workdir:
+        workdir = os.path.expanduser(opts.workdir)
     else:
-        workdir = tempfile.mkdtemp(prefix="backportpackage-")
+        workdir = tempfile.mkdtemp(prefix='backportpackage-')

     if not os.path.exists(workdir):
         os.makedirs(workdir)

     try:
-        pkg = find_package(
-            args.mirror, workdir, args.package_or_dsc, args.version, args.source_release, config
-        )
+        pkg = find_package(opts.mirror,
+                           workdir,
+                           package_or_dsc,
+                           opts.version,
+                           opts.source_release,
+                           config)
         pkg.pull()

-        for release in args.dest_releases:
-            do_backport(
-                workdir,
-                pkg,
-                args.suffix,
-                args.message,
-                args.close,
-                release,
-                args.release_pocket,
-                args.build,
-                args.builder,
-                args.update,
-                args.upload,
-                args.keyid,
-                args.prompt,
-            )
+        for release in opts.dest_releases:
+            do_backport(workdir,
+                        pkg,
+                        opts.suffix,
+                        opts.message,
+                        opts.close,
+                        release,
+                        opts.release_pocket,
+                        opts.build,
+                        opts.builder,
+                        opts.update,
+                        opts.upload,
+                        opts.keyid,
+                        opts.prompt)
     except DownloadError as e:
-        error("%s", str(e))
+        error(str(e))
     finally:
-        if not args.workdir:
+        if not opts.workdir:
             shutil.rmtree(workdir)


-if __name__ == "__main__":
+if __name__ == '__main__':
     sys.exit(main(sys.argv))
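The lsb_release handling removed from main() above is the standard optional-import fallback, added on the "main" side for lsb-release-minimal, which ships no Python module (see the 0.191 changelog entry further down). Reduced to its core, the pattern is a sketch like this:

    import subprocess

    try:
        import lsb_release
    except ImportError:
        lsb_release = None  # module missing: fall back to the CLI

    if lsb_release:
        distro_id = lsb_release.get_distro_information().get("ID")
    else:
        err, distro_id = subprocess.getstatusoutput("lsb_release --id --short")
        if err:
            distro_id = None
    print(distro_id)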
bash_completion/pbuilder-dist
@@ -36,7 +36,7 @@ _pbuilder-dist()
 for distro in $(ubuntu-distro-info --all; debian-distro-info --all) stable testing unstable; do
     for builder in pbuilder cowbuilder; do
         echo "$builder-$distro"
-        for arch in i386 amd64 armhf; do
+        for arch in i386 amd64 armel armhf; do
             echo "$builder-$distro-$arch"
         done
     done
bitesize
@@ -21,20 +21,20 @@
 # Authors:
 #  Daniel Holbach <daniel.holbach@canonical.com>

-import argparse
 import sys
+from optparse import OptionParser

-from launchpadlib.errors import HTTPError
 from launchpadlib.launchpad import Launchpad
+from launchpadlib.errors import HTTPError

-from ubuntutools import getLogger
 from ubuntutools.config import UDTConfig

+from ubuntutools import getLogger
 Logger = getLogger()


-def error_out(msg, *args):
-    Logger.error(msg, *args)
+def error_out(msg):
+    Logger.error(msg)
     sys.exit(1)


@@ -42,64 +42,54 @@ def save_entry(entry):
     try:
         entry.lp_save()
     except HTTPError as error:
-        error_out("%s", error.content)
+        error_out(error.content)


 def tag_bug(bug):
-    bug.tags = bug.tags + ["bitesize"]  # LP: #254901 workaround
+    bug.tags = bug.tags + ['bitesize']  # LP: #254901 workaround
     save_entry(bug)


 def main():
-    parser = argparse.ArgumentParser(usage="%(prog)s [options] <bug number>")
-    parser.add_argument(
-        "-l",
-        "--lpinstance",
-        metavar="INSTANCE",
-        help="Launchpad instance to connect to (default: production)",
-        dest="lpinstance",
-        default=None,
-    )
-    parser.add_argument(
-        "--no-conf",
-        help="Don't read config files or environment variables.",
-        dest="no_conf",
-        default=False,
-        action="store_true",
-    )
-    parser.add_argument("bug_number", help=argparse.SUPPRESS)
-    args = parser.parse_args()
-    config = UDTConfig(args.no_conf)
-    if args.lpinstance is None:
-        args.lpinstance = config.get_value("LPINSTANCE")
+    usage = "Usage: %prog <bug number>"
+    opt_parser = OptionParser(usage)
+    opt_parser.add_option("-l", "--lpinstance", metavar="INSTANCE",
+                          help="Launchpad instance to connect to "
+                               "(default: production)",
+                          dest="lpinstance", default=None)
+    opt_parser.add_option("--no-conf",
+                          help="Don't read config files or "
+                               "environment variables.",
+                          dest="no_conf", default=False, action="store_true")
+    (options, args) = opt_parser.parse_args()
+    config = UDTConfig(options.no_conf)
+    if options.lpinstance is None:
+        options.lpinstance = config.get_value("LPINSTANCE")
+    if len(args) < 1:
+        opt_parser.error("Need at least one bug number.")

-    launchpad = Launchpad.login_with("ubuntu-dev-tools", args.lpinstance)
+    launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
     if launchpad is None:
         error_out("Couldn't authenticate to Launchpad.")

     # check that the new main bug isn't a duplicate
     try:
-        bug = launchpad.bugs[args.bug_number]
+        bug = launchpad.bugs[args[0]]
     except HTTPError as error:
         if error.response.status == 401:
-            error_out(
-                "Don't have enough permissions to access bug %s. %s",
-                args.bug_number,
-                error.content,
-            )
+            error_out("Don't have enough permissions to access bug %s. %s" %
+                      (args[0], error.content))
         else:
             raise
-    if "bitesize" in bug.tags:
+    if 'bitesize' in bug.tags:
         error_out("Bug is already marked as 'bitesize'.")
-    bug.newMessage(
-        content="I'm marking this bug as 'bitesize' as it looks "
-        "like an issue that is easy to fix and suitable "
-        "for newcomers in Ubuntu development. If you need "
-        "any help with fixing it, talk to me about it."
-    )
+    bug.newMessage(content="I'm marking this bug as 'bitesize' as it looks "
+                           "like an issue that is easy to fix and suitable "
+                           "for newcomers in Ubuntu development. If you need "
+                           "any help with fixing it, talk to me about it.")
     bug.subscribe(person=launchpad.me)
     tag_bug(launchpad.bugs[bug.id])  # fresh bug object, LP: #336866 workaround


-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
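The bitesize rewrite above is the standard optparse-to-argparse migration seen throughout this compare: argparse declares the positional argument itself, so the manual len(args) check on the optparse side disappears. A minimal sketch of the argparse half, with hypothetical values:

    import argparse

    parser = argparse.ArgumentParser(usage="%(prog)s [options] <bug number>")
    parser.add_argument("-l", "--lpinstance", metavar="INSTANCE", default=None)
    parser.add_argument("bug_number")  # positional: argparse rejects a missing value itself

    args = parser.parse_args(["--lpinstance", "production", "123456"])
    print(args.lpinstance, args.bug_number)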
check-mir (135 changed lines)
@@ -21,116 +21,69 @@
 # this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

-# pylint: disable=invalid-name
-# pylint: enable=invalid-name
-
-"""Check if any of a package's build or binary dependencies are in universe or multiverse.
-
-Run this inside an unpacked source package
-"""
-
-import argparse
-import os.path
 import sys
+import optparse
+import os.path

 import apt


 def check_support(apt_cache, pkgname, alt=False):
-    """Check if pkgname is in main or restricted.
+    '''Check if pkgname is in main or restricted.

     This prints messages if a package is not in main/restricted, or only
     partially (i. e. source in main, but binary in universe).
-    """
+    '''
     if alt:
-        prefix = " ... alternative " + pkgname
+        prefix = ' ... alternative ' + pkgname
     else:
-        prefix = " * " + pkgname
+        prefix = ' * ' + pkgname

-    prov_packages = apt_cache.get_providing_packages(pkgname)
-    if pkgname in apt_cache:
+    try:
         pkg = apt_cache[pkgname]
-
-    # If this is a virtual package, iterate through the binary packages that
-    # provide this, and ensure they are all in Main. Source packages in and of
-    # themselves cannot provide virtual packages, only binary packages can.
-    elif len(prov_packages) > 0:
-        supported, unsupported = [], []
-        for pkg in prov_packages:
-            candidate = pkg.candidate
-            if candidate:
-                section = candidate.section
-                if section.startswith("universe") or section.startswith("multiverse"):
-                    unsupported.append(pkg.name)
-                else:
-                    supported.append(pkg.name)
-
-        if len(supported) > 0:
-            msg = "is a virtual package, which is provided by the following "
-            msg += "candidates in Main: " + " ".join(supported)
-            print(prefix, msg)
-        elif len(unsupported) > 0:
-            msg = "is a virtual package, but is only provided by the "
-            msg += "following non-Main candidates: " + " ".join(unsupported)
-            print(prefix, msg, file=sys.stderr)
-            return False
-        else:
-            msg = "is a virtual package that exists but is not provided by "
-            msg += "package currently in the archive. Proceed with caution."
-            print(prefix, msg, file=sys.stderr)
-            return False
-
-    else:
-        print(prefix, "does not exist", file=sys.stderr)
+    except KeyError:
+        print(prefix, 'does not exist (pure virtual?)', file=sys.stderr)
         return False

     section = pkg.candidate.section
-    if section.startswith("universe") or section.startswith("multiverse"):
+    if section.startswith('universe') or section.startswith('multiverse'):
         # check if the source package is in main and thus will only need binary
         # promotion
         source_records = apt.apt_pkg.SourceRecords()
         if not source_records.lookup(pkg.candidate.source_name):
-            print("ERROR: Cannot lookup source package for", pkg.name, file=sys.stderr)
-            print(prefix, "package is in", section.split("/")[0])
+            print('ERROR: Cannot lookup source package for', pkg.name,
+                  file=sys.stderr)
+            print(prefix, 'package is in', section.split('/')[0])
             return False
         src = apt.apt_pkg.TagSection(source_records.record)
-        if src["Section"].startswith("universe") or src["Section"].startswith("multiverse"):
-            print(prefix, "binary and source package is in", section.split("/")[0])
+        if (src['Section'].startswith('universe') or
+                src['Section'].startswith('multiverse')):
+            print(prefix, 'binary and source package is in',
+                  section.split('/')[0])
             return False
-
-        print(
-            prefix,
-            "is in",
-            section.split("/")[0] + ", but its source",
-            pkg.candidate.source_name,
-            "is already in main; file an ubuntu-archive bug for "
-            "promoting the current preferred alternative",
-        )
-        return True
+        else:
+            print(prefix, 'is in', section.split('/')[0] + ', but its source',
+                  pkg.candidate.source_name,
+                  'is already in main; file an ubuntu-archive bug for '
+                  'promoting the current preferred alternative')
+            return True

     if alt:
-        print(prefix, "is already in main; consider preferring it")
+        print(prefix, 'is already in main; consider preferring it')

     return True


 def check_build_dependencies(apt_cache, control):
-    print("Checking support status of build dependencies...")
+    print('Checking support status of build dependencies...')

     any_unsupported = False

-    for field in ("Build-Depends", "Build-Depends-Indep"):
+    for field in ('Build-Depends', 'Build-Depends-Indep'):
         if field not in control.section:
             continue
         for or_group in apt.apt_pkg.parse_src_depends(control.section[field]):
             pkgname = or_group[0][0]
-
-            # debhelper-compat is expected to be a build dependency of every
-            # package, so it is a red herring to display it in this report.
-            # (src:debhelper is in Ubuntu Main anyway)
-            if pkgname == "debhelper-compat":
-                continue
-
             if not check_support(apt_cache, pkgname):
                 # check non-preferred alternatives
                 for altpkg in or_group[1:]:
@@ -145,19 +98,20 @@ def check_build_dependencies(apt_cache, control):
 def check_binary_dependencies(apt_cache, control):
     any_unsupported = False

-    print("\nChecking support status of binary dependencies...")
+    print('\nChecking support status of binary dependencies...')
     while True:
         try:
             next(control)
         except StopIteration:
             break

-        for field in ("Depends", "Pre-Depends", "Recommends"):
+        for field in ('Depends', 'Pre-Depends', 'Recommends'):
             if field not in control.section:
                 continue
-            for or_group in apt.apt_pkg.parse_src_depends(control.section[field]):
+            for or_group in apt.apt_pkg.parse_src_depends(
+                    control.section[field]):
                 pkgname = or_group[0][0]
-                if pkgname.startswith("$"):
+                if pkgname.startswith('$'):
                     continue
                 if not check_support(apt_cache, pkgname):
                     # check non-preferred alternatives
@@ -171,33 +125,32 @@ def check_binary_dependencies(apt_cache, control):


 def main():
-    parser = argparse.ArgumentParser(description=__doc__)
+    description = "Check if any of a package's build or binary " + \
+                  "dependencies are in universe or multiverse. " + \
+                  "Run this inside an unpacked source package"
+    parser = optparse.OptionParser(description=description)
     parser.parse_args()
     apt_cache = apt.Cache()

-    if not os.path.exists("debian/control"):
-        print(
-            "debian/control not found. You need to run this tool in a source package directory",
-            file=sys.stderr,
-        )
+    if not os.path.exists('debian/control'):
+        print('debian/control not found. You need to run this tool in a '
+              'source package directory', file=sys.stderr)
         sys.exit(1)

     # get build dependencies from debian/control
-    control = apt.apt_pkg.TagFile(open("debian/control", encoding="utf-8"))
+    control = apt.apt_pkg.TagFile(open('debian/control'))
     next(control)

     unsupported_build_deps = check_build_dependencies(apt_cache, control)
     unsupported_binary_deps = check_binary_dependencies(apt_cache, control)

     if unsupported_build_deps or unsupported_binary_deps:
-        print(
-            "\nPlease check https://wiki.ubuntu.com/MainInclusionProcess if "
-            "this source package needs to get into in main/restricted, or "
-            "reconsider if the package really needs above dependencies."
-        )
+        print('\nPlease check https://wiki.ubuntu.com/MainInclusionProcess if '
+              'this source package needs to get into in main/restricted, or '
+              'reconsider if the package really needs above dependencies.')
     else:
-        print("All dependencies are supported in main or restricted.")
+        print('All dependencies are supported in main or restricted.')


-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
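The component test at the heart of both check-mir versions is the same: a binary package's archive component is the prefix of its apt section, and sections without a prefix are in main. A minimal sketch of that lookup with python3-apt (the package name is only an example; run on an Ubuntu system):

    import apt

    cache = apt.Cache()
    pkg = cache["bzr"]  # example package
    section = pkg.candidate.section
    # Sections look like "universe/devel" outside main, plain "devel" in main.
    component = section.split("/")[0] if "/" in section else "main"
    print(pkg.name, "->", component)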
debian/.gitignore (vendored, 1 changed line)
@@ -1 +0,0 @@
-files
debian/changelog (vendored, 264 changed lines)
@@ -1,267 +1,3 @@
-ubuntu-dev-tools (0.206) unstable; urgency=medium
-
-  [ Dan Bungert ]
-  * mk-sbuild: enable pkgmaintainermangler
-
-  [ Shengjing Zhu ]
-  * import-bug-from-debian: package option is overridden and not used
-
-  [ Fernando Bravo Hernández ]
-  * Parsing arch parameter to getBinaryPackage() (LP: #2081861)
-
-  [ Simon Quigley ]
-  * Read ~/.devscripts in a more robust way, to ideally pick up multi-line
-    variables (Closes: #725418).
-  * mk-sbuild: default to using UTC for schroots (LP: #2097159).
-  * syncpackage: s/syncblacklist/syncblocklist/g
-  * syncpackage: Cache the sync blocklist in-memory, so it's not fetched
-    multiple times when syncing more than one package.
-  * syncpackage: Catch exceptions cleanly, simply skipping to the next
-    package (erring on the side of caution) if there is an error doing the
-    download (LP: #1943286).
-
- -- Simon Quigley <tsimonq2@debian.org>  Tue, 04 Mar 2025 13:43:15 -0600
-
-ubuntu-dev-tools (0.205) unstable; urgency=medium
-
-  * [syncpackage] When syncing multiple packages, if one of the packages is in
-    the sync blocklist, do not exit, simply continue.
-  * [syncpackage] Do not use exit(1) on an error or exception unless it
-    applies to all packages, instead return None so we can continue to the
-    next package.
-  * [syncpackage] Add support for -y or --yes, noted that it should be used
-    with care.
-  * Update Standards-Version to 4.7.2, no changes needed.
-
- -- Simon Quigley <tsimonq2@debian.org>  Sat, 01 Mar 2025 11:29:54 -0600
-
-ubuntu-dev-tools (0.204) unstable; urgency=medium
-
-  [ Simon Quigley ]
-  * Update Standards-Version to 4.7.1, no changes needed.
-  * Add several Lintian overrides related to .pyc files.
-  * Add my name to the copyright file.
-  * Rename bitesize to lp-bitesize (Closes: #1076224).
-  * Add a manpage for running-autopkgtests.
-  * Add a large warning at the top of mk-sbuild encouraging the use of the
-    unshare backend. This is to provide ample warning to users.
-  * Remove mail line from default ~/.sbuildrc, to resolve the undeclared
-    dependency on sendmail (Closes: #1074632).
-
-  [ Julien Plissonneau Duquène ]
-  * Fix reverse-depends -b crash on packages that b-d on themselves
-    (Closes: #1087760).
-
- -- Simon Quigley <tsimonq2@debian.org>  Mon, 24 Feb 2025 19:54:39 -0600
-
-ubuntu-dev-tools (0.203) unstable; urgency=medium
-
-  [ Steve Langasek ]
-  * ubuntu-build: handle TOCTOU issue with the "can be retried" value on
-    builds.
-  * Recommend sbuild over pbuilder. sbuild is the tool recommended by
-    Ubuntu developers whose behavior most closely approximates Launchpad
-    builds.
-
-  [ Florent 'Skia' Jacquet ]
-  * import-bug-from-debian: handle multipart message (Closes: #969510)
-
-  [ Benjamin Drung ]
-  * import-bug-from-debian: add type hints
-  * Bump Standards-Version to 4.7.0
-  * Bump year and add missing files to copyright
-  * setup.py: add pm-helper
-  * Format code with black and isort
-  * Address several issues pointed out by Pylint
-  * Depend on python3-yaml for pm-helper
-
- -- Benjamin Drung <bdrung@debian.org>  Sat, 02 Nov 2024 18:19:24 +0100
-
-ubuntu-dev-tools (0.202) unstable; urgency=medium
-
-  [ Steve Langasek ]
-  * ubuntu-build: support --batch with no package names to retry all
-  * ubuntu-build: in batch mode, print a count of packages retried
-  * ubuntu-build: make the --arch option top-level.
-    This gets rid of the fugly --arch2 option
-  * ubuntu-build: support retrying builds in other states that failed-to-build
-  * ubuntu-build: Handling of proposed vs release pocket default for ppas
-  * ubuntu-build: update manpage
-
-  [ Chris Peterson ]
-  * Replace Depends on python3-launchpadlib with Depends on
-    python3-launchpadlib-desktop (LP: #2049217)
-
- -- Simon Quigley <tsimonq2@ubuntu.com>  Fri, 12 Apr 2024 23:33:14 -0500
-
-ubuntu-dev-tools (0.201) unstable; urgency=medium
-
-  * running-autopkgtests: fix packaging to make the script available
-    (LP: #2055466)
-
- -- Chris Peterson <chris.peterson@canonical.com>  Thu, 29 Feb 2024 11:09:14 -0800
-
-ubuntu-dev-tools (0.200) unstable; urgency=medium
-
-  [ Gianfranco Costamagna ]
-  * Team upload
-
-  [ Chris Peterson ]
-  * Add support to see currently running autopkgtests (running-autopkgtests)
-  * running-autopkgtests: use f-strings
-
-  [ Athos Ribeiro ]
-  * syncpackage: log LP authentication errors before halting.
-
-  [ Ying-Chun Liu (PaulLiu) ]
-  * Drop qemu-debootstrap
-    qemu-debootstrap is deprecated for a while. In newer qemu release
-    the command is totally removed. We can use debootstrap directly.
-    Signed-off-by: Ying-Chun Liu (PaulLiu) <paulliu@debian.org>
-
-  [ Logan Rosen ]
-  * Don't rely on debootstrap for validating Ubuntu distro
-
- -- Gianfranco Costamagna <locutusofborg@debian.org>  Thu, 15 Feb 2024 17:53:48 +0100
-
-ubuntu-dev-tools (0.199) unstable; urgency=medium
-
-  [ Simon Quigley ]
-  * Add my name to Uploaders.
-
-  [ Steve Langasek ]
-  * Introduce a pm-helper tool.
-
- -- Simon Quigley <tsimonq2@debian.org>  Mon, 29 Jan 2024 10:03:22 -0600
-
-ubuntu-dev-tools (0.198) unstable; urgency=medium
-
-  * In check-mir, ignore debhelper-compat when checking the build
-    dependencies. This is expected to be a build dependency of all packages,
-    so warning about it in any way is surely a red herring.
-  * Add proper support for virtual packages in check-mir, basing the
-    determination solely off of binary packages. This is not expected to be a
-    typical case.
-
- -- Simon Quigley <tsimonq2@debian.org>  Wed, 10 Jan 2024 20:04:02 -0600
-
-ubuntu-dev-tools (0.197) unstable; urgency=medium
-
-  * Update the manpage for syncpackage to reflect the ability to sync
-    multiple packages at once.
-  * When using pull-*-source to grab a package which already has a defined
-    Vcs- field, display the exact same warning message `apt source` does.
-
- -- Simon Quigley <tsimonq2@debian.org>  Tue, 03 Oct 2023 14:01:25 -0500
-
-ubuntu-dev-tools (0.196) unstable; urgency=medium
-
-  * Allow the user to sync multiple packages at one time (LP: #1756748).
-
- -- Simon Quigley <tsimonq2@debian.org>  Fri, 04 Aug 2023 14:37:59 -0500
-
-ubuntu-dev-tools (0.195) unstable; urgency=medium
-
-  * Add support for the non-free-firmware components in all tools already
-    referencing non-free.
-
- -- Simon Quigley <tsimonq2@debian.org>  Wed, 26 Jul 2023 13:03:31 -0500
-
-ubuntu-dev-tools (0.194) unstable; urgency=medium
-
-  [ Gianfranco Costamagna ]
-  * ubuntu-build: For some reasons, now you need to be authenticated before
-    trying to use the "PersonTeam" class features.
-    Do it at the begin instead of replicating the same code inside the
-    tool itself.
-
-  [ Steve Langasek ]
-  * Remove references to deprecated
-    http://people.canonical.com/~ubuntu-archive.
-  * Remove references to architectures not supported in any active
-    Ubuntu release.
-  * Remove references to ftpmaster.internal. When this name is resolvable
-    but firewalled, syncpackage hangs; and these are tools for developers,
-    not for running in an automated context in the DCs where
-    ftpmaster.internal is reachable.
-  * Excise all references to cdbs (including in test cases)
-  * Set apt preferences for the -proposed pocket in mk-sbuild so that
-    it works as expected for lunar and forward.
-
-  [ Robie Basak ]
-  * ubuntutools/misc: swap iter_content for raw stream with "Accept-Encoding:
-    identity" to fix .diff.gz downloads (LP: #2025748).
-
-  [ Vladimir Petko ]
-  * Fix a typo introduced in the last upload that made mk-sbuild fail
-    unconditionally. LP: #2017177.
-
- -- Gianfranco Costamagna <locutusofborg@debian.org>  Sat, 08 Jul 2023 08:42:05 +0200
-
-ubuntu-dev-tools (0.193) unstable; urgency=medium
-
-  * Don't run linters at build time, or in autopkgtests. (Closes: #1031436).
-
- -- Stefano Rivera <stefanor@debian.org>  Sat, 25 Feb 2023 13:19:56 -0400
-
-ubuntu-dev-tools (0.192) unstable; urgency=medium
-
-  [ Benjamin Drung ]
-  * sponsor-patch:
-    + Ignore exit code 1 of debdiff call.
-    + Use --skip-patches instead of --no-preparation with dpkg-source -x.
-  * Demote bzr/brz from Recommends to Suggests, as nowadays git is the way.
-    Closes: #940531
-  * Use PEP440 compliant version in setup.py (LP: #1991606)
-  * Fix issues found by flake8 on the Python scripts
-  * Check Python scripts with flake8 again
-  * Format Python code with black and run black during package build
-  * Sort Python imports with isort and run isort during package build
-  * Replace deprecated optparse with argparse
-  * requestbackport: Remove useless loop from locate_package
-  * reverse-depends: Restore field titles format
-  * test: Fix deprecated return value for test case
-  * Fix all errors and warnings found by pylint and implement most refactorings
-    and conventions. Run pylint during package build again.
-  * Bump Standards-Version to 4.6.2
-  * Drop unneeded X-Python3-Version from d/control
-
-  [ Masahiro Yamada ]
-  * mk-sbuild:
-    + Handle the new location of the Debian bullseye security archive.
-      Closes: #1001832; LP: #1955116
-
-  [ Mattia Rizzolo ]
-  * requestbackport:
-    + Apply patch from Krytarik Raido and Unit 193 to update the template and
-      workflow after the new Ubuntu Backport process has been established.
-      LP: #1959115
-
- -- Benjamin Drung <bdrung@debian.org>  Wed, 01 Feb 2023 12:45:15 +0100
-
-ubuntu-dev-tools (0.191) unstable; urgency=medium
-
-  [ Dan Streetman ]
-  * lpapicache:
-    + Make sure that login() actually logins and doesn't use cached credentials.
-  * ubuntu-build:
-    + Fix crash caused by a change in lpapicache that changed the default
-      operation mode from authenticated to anonymous. LP: #1984113
-
-  [ Stefano Rivera ]
-  * backportpackage:
-    + Add support for lsb-release-minimal, which doesn't have a Python module.
-      Thanks to Gioele Barabucci for the patch. Closes: #1020901; LP: #1991828
-
-  [ Mattia Rizzolo ]
-  * ubuntutools/archive.py:
-    + Fix operation of SourcePackage._source_urls() (as used, for example, in
-      SourcePackage.pull() called by backportpackage) to also work when the
-      class is instantiated with a URL as .dsc. Fixes regression from v0.184.
-      Thanks to Unit 193 for the initial patch.
-
- -- Mattia Rizzolo <mattia@debian.org>  Tue, 11 Oct 2022 13:56:03 +0200
-
 ubuntu-dev-tools (0.190) unstable; urgency=medium

   [ Dimitri John Ledkov ]
debian/control (vendored, 25 changed lines)
@@ -6,9 +6,7 @@ Uploaders:
  Benjamin Drung <bdrung@debian.org>,
  Stefano Rivera <stefanor@debian.org>,
  Mattia Rizzolo <mattia@debian.org>,
- Simon Quigley <tsimonq2@debian.org>,
 Build-Depends:
- black <!nocheck>,
  dctrl-tools,
  debhelper-compat (= 13),
  devscripts (>= 2.11.0~),
@@ -16,26 +14,23 @@ Build-Depends:
  dh-python,
  distro-info (>= 0.2~),
  flake8,
- isort <!nocheck>,
  lsb-release,
- pylint <!nocheck>,
  python3-all,
  python3-apt,
  python3-dateutil,
  python3-debian,
  python3-debianbts,
  python3-distro-info,
  python3-httplib2,
- python3-launchpadlib-desktop,
+ python3-launchpadlib,
  python3-pytest,
  python3-requests <!nocheck>,
  python3-setuptools,
- python3-yaml <!nocheck>,
-Standards-Version: 4.7.2
+Standards-Version: 4.6.1
 Rules-Requires-Root: no
 Vcs-Git: https://git.launchpad.net/ubuntu-dev-tools
 Vcs-Browser: https://git.launchpad.net/ubuntu-dev-tools
 Homepage: https://launchpad.net/ubuntu-dev-tools
+X-Python3-Version: >= 3.6

 Package: ubuntu-dev-tools
 Architecture: all
@@ -54,10 +49,9 @@ Depends:
  python3-debianbts,
  python3-distro-info,
  python3-httplib2,
- python3-launchpadlib-desktop,
+ python3-launchpadlib,
  python3-lazr.restfulclient,
  python3-ubuntutools (= ${binary:Version}),
- python3-yaml,
  sensible-utils,
  sudo,
  tzdata,
@@ -65,6 +59,8 @@ Depends:
  ${perl:Depends},
 Recommends:
  arch-test,
+ bzr | brz,
+ bzr-builddeb | brz-debian,
  ca-certificates,
  debian-archive-keyring,
  debian-keyring,
@@ -72,14 +68,12 @@ Recommends:
  genisoimage,
  lintian,
  patch,
- sbuild | pbuilder | cowbuilder,
+ pbuilder | cowbuilder | sbuild,
  python3-dns,
  quilt,
  reportbug (>= 3.39ubuntu1),
  ubuntu-keyring | ubuntu-archive-keyring,
 Suggests:
- bzr | brz,
- bzr-builddeb | brz-debian,
  qemu-user-static,
 Description: useful tools for Ubuntu developers
  This is a collection of useful tools that Ubuntu developers use to make their
@@ -118,8 +112,6 @@ Description: useful tools for Ubuntu developers
   - requestsync - files a sync request with Debian changelog and rationale.
   - reverse-depends - find the reverse dependencies (or build dependencies) of
     a package.
-  - running-autopkgtests - lists the currently running and/or queued
-    autopkgtests on the Ubuntu autopkgtest infrastructure
   - seeded-in-ubuntu - query if a package is safe to upload during a freeze.
   - setup-packaging-environment - assistant to get an Ubuntu installation
     ready for packaging work.
@@ -138,11 +130,10 @@ Package: python3-ubuntutools
 Architecture: all
 Section: python
 Depends:
  python3-dateutil,
  python3-debian,
  python3-distro-info,
  python3-httplib2,
- python3-launchpadlib-desktop,
+ python3-launchpadlib,
  python3-lazr.restfulclient,
  python3-requests,
  sensible-utils,
debian/copyright (vendored, 25 changed lines)
@@ -11,7 +11,6 @@ Files: backportpackage
        doc/check-symbols.1
        doc/requestsync.1
        doc/ubuntu-iso.1
-       doc/running-autopkgtests.1
        GPL-2
        README.updates
        requestsync
@@ -20,13 +19,12 @@ Files: backportpackage
        ubuntu-iso
        ubuntutools/requestsync/*.py
 Copyright: 2007, Albert Damen <albrt@gmx.net>
-           2010-2024, Benjamin Drung <bdrung@ubuntu.com>
-           2007-2023, Canonical Ltd.
+           2010-2022, Benjamin Drung <bdrung@ubuntu.com>
+           2007-2010, Canonical Ltd.
            2006-2007, Daniel Holbach <daniel.holbach@ubuntu.com>
            2010, Evan Broder <evan@ebroder.net>
            2006-2007, Luke Yelavich <themuso@ubuntu.com>
            2009-2010, Michael Bienia <geser@ubuntu.com>
-           2024-2025, Simon Quigley <tsimonq2@debian.org>
            2010-2011, Stefano Rivera <stefanor@ubuntu.com>
            2008, Stephan Hermann <sh@sourcecode.de>
            2007, Steve Kowalik <stevenk@ubuntu.com>
@@ -74,28 +72,21 @@ License: GPL-2+
 On Debian systems, the complete text of the GNU General Public License
 version 2 can be found in the /usr/share/common-licenses/GPL-2 file.

-Files: doc/lp-bitesize.1
+Files: doc/bitesize.1
        doc/check-mir.1
        doc/grab-merge.1
        doc/merge-changelog.1
-       doc/pm-helper.1
        doc/setup-packaging-environment.1
        doc/syncpackage.1
-       lp-bitesize
+       bitesize
        check-mir
        GPL-3
        grab-merge
        merge-changelog
-       pm-helper
-       pyproject.toml
-       run-linters
-       running-autopkgtests
        setup-packaging-environment
        syncpackage
-       ubuntutools/running_autopkgtests.py
        ubuntutools/utils.py
-Copyright: 2010-2024, Benjamin Drung <bdrung@ubuntu.com>
-           2007-2024, Canonical Ltd.
+Copyright: 2010, Benjamin Drung <bdrung@ubuntu.com>
+           2007-2011, Canonical Ltd.
            2008, Jonathan Patrick Davies <jpds@ubuntu.com>
            2008-2010, Martin Pitt <martin.pitt@canonical.com>
            2009, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
@@ -183,13 +174,11 @@ Files: doc/pull-debian-debdiff.1
        ubuntutools/update_maintainer.py
        ubuntutools/version.py
        update-maintainer
-       .pylintrc
-Copyright: 2009-2024, Benjamin Drung <bdrung@ubuntu.com>
+Copyright: 2009-2011, Benjamin Drung <bdrung@ubuntu.com>
            2010, Evan Broder <evan@ebroder.net>
            2008, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
            2010-2011, Stefano Rivera <stefanor@ubuntu.com>
            2017-2021, Dan Streetman <ddstreet@canonical.com>
-           2024, Canonical Ltd.
 License: ISC
  Permission to use, copy, modify, and/or distribute this software for any
  purpose with or without fee is hereby granted, provided that the above
1
debian/rules
vendored
@ -7,6 +7,7 @@ override_dh_auto_clean:

override_dh_auto_test:
ifeq (,$(filter nocheck,$(DEB_BUILD_OPTIONS)))
flake8 -v --max-line-length=99
python3 -m pytest -v ubuntutools
endif
3
debian/source/lintian-overrides
vendored
@ -1,3 +0,0 @@
# pyc files are machine-generated; they're expected to have long lines and have unstated copyright
source: file-without-copyright-information *.pyc [debian/copyright]
source: very-long-line-length-in-source-file * > 512 [*.pyc:*]
5
debian/tests/control
vendored
@ -1,3 +1,8 @@
Test-Command: flake8 -v --max-line-length=99
Depends:
flake8,
Restrictions: allow-stderr

Test-Command: python3 -m pytest -v ubuntutools
Depends:
dh-make,
doc/bitesize.1
@ -1,21 +1,21 @@
.TH lp-bitesize "1" "May 9 2010" "ubuntu-dev-tools"
.TH bitesize "1" "May 9 2010" "ubuntu-dev-tools"
.SH NAME
lp-bitesize \- Add \fBbitesize\fR tag to bugs and add a comment.
bitesize \- Add \fBbitesize\fR tag to bugs and add a comment.

.SH SYNOPSIS
.B lp-bitesize \fR<\fIbug number\fR>
.B bitesize \fR<\fIbug number\fR>
.br
.B lp-bitesize \-\-help
.B bitesize \-\-help

.SH DESCRIPTION
\fBlp-bitesize\fR adds a bitesize tag to the bug, if it's not there yet. It
\fBbitesize\fR adds a bitesize tag to the bug, if it's not there yet. It
also adds a comment to the bug indicating that you are willing to help with
fixing it.
It checks for permission to operate on a given bug first,
then performs the required tasks on Launchpad.

.SH OPTIONS
Listed below are the command line options for \fBlp-bitesize\fR:
Listed below are the command line options for \fBbitesize\fR:
.TP
.BR \-h ", " \-\-help
Display a help message and exit.
@ -48,7 +48,7 @@ The default value for \fB--lpinstance\fR.
.BR ubuntu\-dev\-tools (5)

.SH AUTHORS
\fBlp-bitesize\fR and this manual page were written by Daniel Holbach
\fBbitesize\fR and this manual page were written by Daniel Holbach
<daniel.holbach@canonical.com>.
.PP
Both are released under the terms of the GNU General Public License, version 3.
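The man page above describes the tagging workflow in prose; at its core it is a handful of launchpadlib calls. A minimal sketch of that behaviour, not the tool's actual code (the bug number is a hypothetical example):

    # Hedged sketch of the behaviour described above; bug number is hypothetical.
    from launchpadlib.launchpad import Launchpad

    lp = Launchpad.login_with("lp-bitesize-sketch", "production")
    bug = lp.bugs[123456]  # hypothetical bug number
    if "bitesize" not in bug.tags:
        bug.tags = bug.tags + ["bitesize"]  # reassign: tags is a webservice property
        bug.lp_save()
    bug.newMessage(content="I suggest this bug as a good entry point and can help with fixing it.")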
doc/pbuilder-dist.1
@ -20,7 +20,7 @@ like for example \fBpbuilder\-feisty\fP, \fBpbuilder\-sid\fP, \fBpbuilder\-gutsy
.PP
The same applies to \fBcowbuilder\-dist\fP, which uses cowbuilder. The main
difference between the two is that pbuilder compresses the created chroot as a
tarball, thus using less disc space but needing to uncompress (and possibly
a tarball, thus using less disc space but needing to uncompress (and possibly
compress) its contents again on each run, and cowbuilder doesn't do this.

.SH USAGE
@ -38,7 +38,7 @@ This optional parameter will attempt to construct a chroot in a foreign
architecture.
For some architecture pairs (e.g. i386 on an amd64 install), the chroot
will be created natively.
For others (e.g. arm64 on an amd64 install), qemu\-user\-static will be
For others (e.g. armel on an i386 install), qemu\-user\-static will be
used.
Note that some combinations (e.g. amd64 on an i386 install) require
special separate kernel handling, and may break in unexpected ways.
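In practice the workflow this page describes is one "create" per release followed by any number of "build" runs. A small driver sketch, where the release codename and .dsc filename are hypothetical examples:

    # Sketch only: drives pbuilder-dist as documented above; "jammy" and the
    # .dsc filename are hypothetical examples.
    import subprocess

    subprocess.run(["pbuilder-dist", "jammy", "create"], check=True)  # one-time chroot creation
    subprocess.run(["pbuilder-dist", "jammy", "build", "foo_1.0-1.dsc"], check=True)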
doc/pm-helper.1
@ -1,44 +0,0 @@
.\" Copyright (C) 2023, Canonical Ltd.
.\"
.\" This program is free software; you can redistribute it and/or
.\" modify it under the terms of the GNU General Public License, version 3.
.\"
.\" This program is distributed in the hope that it will be useful,
.\" but WITHOUT ANY WARRANTY; without even the implied warranty of
.\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
.\" General Public License for more details.
.\"
.\" You should have received a copy of the GNU General Public License
.\" along with this program. If not, see <http://www.gnu.org/licenses/>.
.TH pm\-helper 1 "June 2023" ubuntu\-dev\-tools

.SH NAME
pm\-helper \- helper to guide a developer through proposed\-migration work

.SH SYNOPSIS
.B pm\-helper \fR[\fIoptions\fR] [\fIpackage\fR]

.SH DESCRIPTION
Claim a package from proposed\-migration to work on and get additional
information (such as the state of the package in Debian) that may be helpful
in unblocking it.
.PP
This tool is incomplete and under development.

.SH OPTIONS
.TP
.B \-l \fIINSTANCE\fR, \fB\-\-launchpad\fR=\fIINSTANCE\fR
Use the specified instance of Launchpad (e.g. "staging"), instead of
the default of "production".
.TP
.B \-v\fR, \fB--verbose\fR
be more verbose
.TP
\fB\-h\fR, \fB\-\-help\fR
Display a help message and exit

.SH AUTHORS
\fBpm\-helper\fR and this manpage were written by Steve Langasek
<steve.langasek@ubuntu.com>.
.PP
Both are released under the GPLv3 license.
doc/running-autopkgtests.1
@ -1,15 +0,0 @@
.TH running\-autopkgtests "1" "18 January 2024" "ubuntu-dev-tools"
.SH NAME
running\-autopkgtests \- dumps a list of currently running autopkgtests

.SH SYNOPSIS
.B running\-autopkgtests

.SH DESCRIPTION
Dumps a list of currently running and queued tests in Autopkgtest.
Pass --running to only see running tests, or --queued to only see
queued tests. Passing both will print both, which is the default behavior.

.SH AUTHOR
.B running\-autopkgtests
was written by Chris Peterson <chris.peterson@canonical.com>.
doc/setup-packaging-environment.1
@ -11,7 +11,7 @@ contributors to get their Ubuntu installation ready for packaging work. It
ensures that all four components from Ubuntu's official repositories are enabled
along with their corresponding source repositories. It also installs a minimal
set of packages needed for Ubuntu packaging work (ubuntu-dev-tools, devscripts,
debhelper, patchutils, pbuilder, and build-essential). Finally, it assists
debhelper, cdbs, patchutils, pbuilder, and build-essential). Finally, it assists
in defining the DEBEMAIL and DEBFULLNAME environment variables.

.SH AUTHORS
doc/syncpackage.1
@ -4,11 +4,11 @@ syncpackage \- copy source packages from Debian to Ubuntu
.\"
.SH SYNOPSIS
.B syncpackage
[\fIoptions\fR] \fI<.dsc URL/path or package name(s)>\fR
[\fIoptions\fR] \fI<.dsc URL/path or package name>\fR
.\"
.SH DESCRIPTION
\fBsyncpackage\fR causes one or more source package(s) to be copied from Debian
to Ubuntu.
\fBsyncpackage\fR causes a source package to be copied from Debian to
Ubuntu.
.PP
\fBsyncpackage\fR allows you to upload files with the same checksums as the
Debian ones, as the common script used by Ubuntu archive administrators does,
@ -58,7 +58,7 @@ Display more progress information.
\fB\-F\fR, \fB\-\-fakesync\fR
Perform a fakesync, to work around a tarball mismatch between Debian and
Ubuntu.
This option ignores blocklisting, and performs a local sync.
This option ignores blacklisting, and performs a local sync.
It implies \fB\-\-no\-lp\fR, and will leave a signed \fB.changes\fR file
for you to upload.
.TP
doc/ubuntu-build.1
@ -1,14 +1,9 @@
.TH UBUNTU-BUILD "1" "Mar 2024" "ubuntu-dev-tools"
.TH UBUNTU-BUILD "1" "June 2010" "ubuntu-dev-tools"
.SH NAME
ubuntu-build \- command-line interface to Launchpad build operations

.SH SYNOPSIS
.nf
\fBubuntu-build\fR <srcpackage> <release> <operation>
\fBubuntu-build\fR --batch [--retry] [--rescore \fIPRIORITY\fR] [--arch \fIARCH\fR [...]]
[--series \fISERIES\fR] [--state \fIBUILD-STATE\fR]
[-A \fIARCHIVE\fR] [pkg]...
.fi
.B ubuntu-build <srcpackage> <release> <operation>

.SH DESCRIPTION
\fBubuntu-build\fR provides a command line interface to the Launchpad build
@ -43,7 +38,8 @@ operations.
\fB\-a\fR ARCHITECTURE, \fB\-\-arch\fR=\fIARCHITECTURE\fR
Rebuild or rescore a specific architecture. Valid
architectures are:
armhf, arm64, amd64, i386, powerpc, ppc64el, riscv64, s390x.
armel, armhf, arm64, amd64, hppa, i386, ia64,
lpia, powerpc, ppc64el, riscv64, s390x, sparc.
.TP
Batch processing:
.IP
@ -63,16 +59,15 @@ Retry builds (give\-back).
\fB\-\-rescore\fR=\fIPRIORITY\fR
Rescore builds to <priority>.
.IP
\fB\-\-arch\fR=\fIARCHITECTURE\fR
\fB\-\-arch2\fR=\fIARCHITECTURE\fR
Affect only 'architecture' (can be used several
times). Valid architectures are:
arm64, amd64, i386, powerpc, ppc64el, riscv64, s390x.
.IP
\fB\-A=\fIARCHIVE\fR
Act on the named archive (ppa) instead of on the main Ubuntu archive.
armel, armhf, arm64, amd64, hppa, i386, ia64,
lpia, powerpc, ppc64el, riscv64, s390x, sparc.

.SH AUTHORS
\fBubuntu-build\fR was written by Martin Pitt <martin.pitt@canonical.com>, and
this manual page was written by Jonathan Patrick Davies <jpds@ubuntu.com>.
.PP
Both are released under the terms of the GNU General Public License, version 3.
Both are released under the terms of the GNU General Public License, version 3
or (at your option) any later version.
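The batch mode documented above (retry and rescore by package, series, state, and architecture) maps onto Launchpad's build-record API. A hedged sketch of the same retry operation via launchpadlib, where "hello" is a hypothetical package and the build-state string follows Launchpad's published vocabulary:

    # Hedged sketch of a batch give-back via launchpadlib; "hello" is hypothetical.
    from launchpadlib.launchpad import Launchpad

    lp = Launchpad.login_with("ubuntu-build-sketch", "production")
    archive = lp.distributions["ubuntu"].main_archive
    for build in archive.getBuildRecords(source_name="hello", build_state="Failed to build"):
        if build.can_be_retried:
            build.retry()  # the "give-back" the man page describes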
enforced-editing-wrapper
@ -22,10 +22,7 @@
# UDT_EDIT_WRAPPER_TEMPLATE_RE: An extra boilerplate-detecting regex.
# UDT_EDIT_WRAPPER_FILE_DESCRIPTION: The type of file being edited.

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
import optparse
import os
import re

@ -33,30 +30,33 @@ from ubuntutools.question import EditFile


def main():
parser = argparse.ArgumentParser(usage="%(prog)s [options] filename")
parser.add_argument("filename", help=argparse.SUPPRESS)
args = parser.parse_args()
if not os.path.isfile(args.filename):
parser.error(f"File {args.filename} does not exist")
parser = optparse.OptionParser('%prog [options] filename')
options, args = parser.parse_args()

if "UDT_EDIT_WRAPPER_EDITOR" in os.environ:
os.environ["EDITOR"] = os.environ["UDT_EDIT_WRAPPER_EDITOR"]
else:
del os.environ["EDITOR"]
if len(args) != 1:
parser.error('A filename must be specified')
body = args[0]
if not os.path.isfile(body):
parser.error('File %s does not exist' % body)

if "UDT_EDIT_WRAPPER_VISUAL" in os.environ:
os.environ["VISUAL"] = os.environ["UDT_EDIT_WRAPPER_VISUAL"]
if 'UDT_EDIT_WRAPPER_EDITOR' in os.environ:
os.environ['EDITOR'] = os.environ['UDT_EDIT_WRAPPER_EDITOR']
else:
del os.environ["VISUAL"]
del os.environ['EDITOR']

if 'UDT_EDIT_WRAPPER_VISUAL' in os.environ:
os.environ['VISUAL'] = os.environ['UDT_EDIT_WRAPPER_VISUAL']
else:
del os.environ['VISUAL']

placeholders = []
if "UDT_EDIT_WRAPPER_TEMPLATE_RE" in os.environ:
placeholders.append(re.compile(os.environ["UDT_EDIT_WRAPPER_TEMPLATE_RE"]))
if 'UDT_EDIT_WRAPPER_TEMPLATE_RE' in os.environ:
placeholders.append(re.compile(
os.environ['UDT_EDIT_WRAPPER_TEMPLATE_RE']))

description = os.environ.get("UDT_EDIT_WRAPPER_FILE_DESCRIPTION", "file")
description = os.environ.get('UDT_EDIT_WRAPPER_FILE_DESCRIPTION', 'file')

EditFile(args.filename, description, placeholders).edit()
EditFile(body, description, placeholders).edit()


if __name__ == "__main__":
if __name__ == '__main__':
main()
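The environment variables named in the header comments are the wrapper's whole interface. A sketch of how a calling program might drive it (the wrapper is assumed to be on PATH; the file name and regex are hypothetical examples):

    # Sketch of invoking the wrapper through its documented environment
    # variables; the file name and template regex are hypothetical.
    import os
    import subprocess

    env = dict(os.environ)
    env["UDT_EDIT_WRAPPER_EDITOR"] = "nano"
    env["UDT_EDIT_WRAPPER_TEMPLATE_RE"] = r"^>>> ENTER_\w+_HERE <<<$"
    env["UDT_EDIT_WRAPPER_FILE_DESCRIPTION"] = "sync request"
    subprocess.run(["enforced-editing-wrapper", "report.txt"], env=env, check=True)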
75
grep-merges
@ -19,70 +19,63 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
import json
import optparse
import sys
import json

from httplib2 import Http, HttpLib2Error

import ubuntutools.misc
from ubuntutools import getLogger

from ubuntutools import getLogger
Logger = getLogger()


def main():
parser = argparse.ArgumentParser(
usage="%(prog)s [options] [string]",
description="List pending merges from Debian matching string",
)
parser.add_argument("string", nargs="?", help=argparse.SUPPRESS)
args = parser.parse_args()
parser = optparse.OptionParser(
usage='%prog [options] [string]',
description='List pending merges from Debian matching string')
args = parser.parse_args()[1]

if len(args) > 1:
parser.error('Too many arguments')
elif len(args) == 1:
match = args[0]
else:
match = None

ubuntutools.misc.require_utf8()

for component in (
"main",
"main-manual",
"restricted",
"restricted-manual",
"universe",
"universe-manual",
"multiverse",
"multiverse-manual",
):
url = f"https://merges.ubuntu.com/{component}.json"
for component in ('main', 'main-manual',
'restricted', 'restricted-manual',
'universe', 'universe-manual',
'multiverse', 'multiverse-manual'):

url = 'https://merges.ubuntu.com/%s.json' % component
try:
headers, page = Http().request(url)
except HttpLib2Error as e:
Logger.exception(e)
sys.exit(1)
if headers.status != 200:
Logger.error("%s: %s %s", url, headers.status, headers.reason)
Logger.error("%s: %s %s" % (url, headers.status,
headers.reason))
sys.exit(1)

for merge in json.loads(page):
package = merge["source_package"]
author, uploader = "", ""
if merge.get("user"):
author = merge["user"]
if merge.get("uploader"):
uploader = f"({merge['uploader']})"
teams = merge.get("teams", [])
package = merge['source_package']
author, uploader = '', ''
if merge.get('user'):
author = merge['user']
if merge.get('uploader'):
uploader = '(%s)' % merge['uploader']
teams = merge.get('teams', [])

pretty_uploader = f"{author} {uploader}"
if (
args.string is None
or args.string in package
or args.string in author
or args.string in uploader
or args.string in teams
):
Logger.info("%s\t%s", package, pretty_uploader)
pretty_uploader = '{} {}'.format(author, uploader)
if (match is None or match in package or match in author
or match in uploader or match in teams):
Logger.info('%s\t%s' % (package, pretty_uploader))


if __name__ == "__main__":
if __name__ == '__main__':
main()
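The diff above leaves the data source unchanged: each component's pending merges are published as JSON at merges.ubuntu.com, with fields such as source_package, user, and uploader. A self-contained sketch of the same fetch-and-filter loop for a single component, using urllib instead of httplib2 to stay dependency-free (the filter string is a hypothetical example):

    # Standalone sketch of the fetch-and-filter loop above, one component only.
    import json
    from urllib.request import urlopen

    match = "openssl"  # hypothetical filter string
    with urlopen("https://merges.ubuntu.com/universe.json") as response:
        merges = json.load(response)
    for merge in merges:
        package = merge["source_package"]
        author = merge.get("user") or ""
        if match in package or match in author:
            print(f"{package}\t{author}")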
import-bug-from-debian
@ -21,213 +21,40 @@
#
# ##################################################################

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
import debianbts
import logging
import re
import sys
import webbrowser
from collections.abc import Iterable
from email.message import EmailMessage

import debianbts
from launchpadlib.launchpad import Launchpad

from ubuntutools import getLogger
from ubuntutools.config import UDTConfig

from ubuntutools import getLogger
Logger = getLogger()
ATTACHMENT_MAX_SIZE = 2000


def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
"-b",
"--browserless",
action="store_true",
help="Don't open the bug in the browser at the end",
)
parser.add_argument(
"-l",
"--lpinstance",
metavar="INSTANCE",
help="LP instance to connect to (default: production)",
)
parser.add_argument(
"-v", "--verbose", action="store_true", help="Print info about the bug being imported"
)
parser.add_argument(
"-n",
"--dry-run",
action="store_true",
help="Don't actually open a bug (also sets verbose)",
)
parser.add_argument(
"-p", "--package", help="Launchpad package to file bug against (default: Same as Debian)"
)
parser.add_argument(
"--no-conf", action="store_true", help="Don't read config files or environment variables."
)
parser.add_argument("bugs", nargs="+", help="Bug number(s) or URL(s)")
return parser.parse_args()


def get_bug_numbers(bug_list: Iterable[str]) -> list[int]:
def main():
bug_re = re.compile(r"bug=(\d+)")

bug_nums = []

for bug_num in bug_list:
if bug_num.startswith("http"):
# bug URL
match = bug_re.search(bug_num)
if match is None:
Logger.error("Can't determine bug number from %s", bug_num)
sys.exit(1)
bug_num = match.groups()[0]
bug_num = bug_num.lstrip("#")
bug_nums.append(int(bug_num))

return bug_nums


def walk_multipart_message(message: EmailMessage) -> tuple[str, list[tuple[int, EmailMessage]]]:
summary = ""
attachments = []
i = 1
for part in message.walk():
content_type = part.get_content_type()

if content_type.startswith("multipart/"):
# we're already iterating on multipart items
# let's just skip the multipart extra metadata
continue
if content_type == "application/pgp-signature":
# we're not interested in importing pgp signatures
continue

if part.is_attachment():
attachments.append((i, part))
elif content_type.startswith("image/"):
# images here are not attachment, they are inline, but Launchpad can't handle that,
# so let's add them as attachments
summary += f"Message part #{i}\n"
summary += f"[inline image '{part.get_filename()}']\n\n"
attachments.append((i, part))
elif content_type.startswith("text/html"):
summary += f"Message part #{i}\n"
summary += "[inline html]\n\n"
attachments.append((i, part))
elif content_type == "text/plain":
summary += f"Message part #{i}\n"
summary += part.get_content() + "\n"
else:
raise RuntimeError(
f"""Unknown message part
Your Debian bug is too weird to be imported in Launchpad, sorry.
You can fix that by patching this script in ubuntu-dev-tools.
Faulty message part:
{part}"""
)
i += 1

return summary, attachments


def process_bugs(
bugs: Iterable[debianbts.Bugreport],
launchpad: Launchpad,
package: str,
dry_run: bool = True,
browserless: bool = False,
) -> bool:
debian = launchpad.distributions["debian"]
ubuntu = launchpad.distributions["ubuntu"]
lp_debbugs = launchpad.bug_trackers.getByName(name="debbugs")

err = False
for bug in bugs:
ubupackage = bug.source
if package:
ubupackage = package
bug_num = bug.bug_num
subject = bug.subject
log = debianbts.get_bug_log(bug_num)
message = log[0]["message"]
assert isinstance(message, EmailMessage)
attachments: list[tuple[int, EmailMessage]] = []
if message.is_multipart():
summary, attachments = walk_multipart_message(message)
else:
summary = str(message.get_payload())

target = ubuntu.getSourcePackage(name=ubupackage)
if target is None:
Logger.error(
"Source package '%s' is not in Ubuntu. Please specify "
"the destination source package with --package",
ubupackage,
)
err = True
continue

description = f"Imported from Debian bug http://bugs.debian.org/{bug_num}:\n\n{summary}"
# LP limits descriptions to 50K chars
description = (description[:49994] + " [...]") if len(description) > 50000 else description

Logger.debug("Target: %s", target)
Logger.debug("Subject: %s", subject)
Logger.debug("Description: ")
Logger.debug(description)
for i, attachment in attachments:
Logger.debug("Attachment #%s (%s)", i, attachment.get_filename() or "inline")
Logger.debug("Content:")
if attachment.get_content_type() == "text/plain":
content = attachment.get_content()
if len(content) > ATTACHMENT_MAX_SIZE:
content = (
content[:ATTACHMENT_MAX_SIZE]
+ f" [attachment cropped after {ATTACHMENT_MAX_SIZE} characters...]"
)
Logger.debug(content)
else:
Logger.debug("[data]")

if dry_run:
Logger.info("Dry-Run: not creating Ubuntu bug.")
continue

u_bug = launchpad.bugs.createBug(target=target, title=subject, description=description)
for i, attachment in attachments:
name = f"#{i}-{attachment.get_filename() or 'inline'}"
content = attachment.get_content()
if isinstance(content, str):
# Launchpad only wants bytes
content = content.encode()
u_bug.addAttachment(
filename=name,
data=content,
comment=f"Imported from Debian bug http://bugs.debian.org/{bug_num}",
)
d_sp = debian.getSourcePackage(name=package)
if d_sp is None and package:
d_sp = debian.getSourcePackage(name=package)
d_task = u_bug.addTask(target=d_sp)
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
d_task.bug_watch = d_watch
d_task.lp_save()
Logger.info("Opened %s", u_bug.web_link)
if not browserless:
webbrowser.open(u_bug.web_link)

return err


def main() -> None:
options = parse_args()
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--browserless", action="store_true",
help="Don't open the bug in the browser at the end")
parser.add_argument("-l", "--lpinstance", metavar="INSTANCE",
help="LP instance to connect to (default: production)")
parser.add_argument("-v", "--verbose", action="store_true",
help="Print info about the bug being imported")
parser.add_argument("-n", "--dry-run", action="store_true",
help="Don't actually open a bug (also sets verbose)")
parser.add_argument("-p", "--package",
help="Launchpad package to file bug against "
"(default: Same as Debian)")
parser.add_argument("--no-conf", action="store_true",
help="Don't read config files or environment variables.")
parser.add_argument("bugs", nargs="+", help="Bug number(s) or URL(s)")
options = parser.parse_args()

config = UDTConfig(options.no_conf)
if options.lpinstance is None:
@ -242,15 +69,77 @@ def main() -> None:
if options.verbose:
Logger.setLevel(logging.DEBUG)

bugs = debianbts.get_status(get_bug_numbers(options.bugs))
debian = launchpad.distributions['debian']
ubuntu = launchpad.distributions['ubuntu']
lp_debbugs = launchpad.bug_trackers.getByName(name='debbugs')

bug_nums = []

for bug_num in options.bugs:
if bug_num.startswith("http"):
# bug URL
match = bug_re.search(bug_num)
if match is None:
Logger.error("Can't determine bug number from %s", bug_num)
sys.exit(1)
bug_num = match.groups()[0]
bug_num = bug_num.lstrip("#")
bug_num = int(bug_num)
bug_nums.append(bug_num)

bugs = debianbts.get_status(*bug_nums)

if not bugs:
Logger.error("Cannot find any of the listed bugs")
sys.exit(1)

if process_bugs(bugs, launchpad, options.package, options.dry_run, options.browserless):
err = False
for bug in bugs:
ubupackage = package = bug.source
if options.package:
ubupackage = options.package
bug_num = bug.bug_num
subject = bug.subject
log = debianbts.get_bug_log(bug_num)
summary = log[0]['message'].get_payload()
target = ubuntu.getSourcePackage(name=ubupackage)
if target is None:
Logger.error("Source package '%s' is not in Ubuntu. Please specify "
"the destination source package with --package",
ubupackage)
err = True
continue

description = ('Imported from Debian bug http://bugs.debian.org/%d:\n\n%s' %
(bug_num, summary))
# LP limits descriptions to 50K chars
description = (description[:49994] + ' [...]') if len(description) > 50000 else description

Logger.debug('Target: %s' % target)
Logger.debug('Subject: %s' % subject)
Logger.debug('Description: ')
Logger.debug(description)

if options.dry_run:
Logger.info('Dry-Run: not creating Ubuntu bug.')
continue

u_bug = launchpad.bugs.createBug(target=target, title=subject,
description=description)
d_sp = debian.getSourcePackage(name=package)
if d_sp is None and options.package:
d_sp = debian.getSourcePackage(name=options.package)
d_task = u_bug.addTask(target=d_sp)
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
d_task.bug_watch = d_watch
d_task.lp_save()
Logger.info("Opened %s", u_bug.web_link)
if not options.browserless:
webbrowser.open(u_bug.web_link)

if err:
sys.exit(1)


if __name__ == "__main__":
if __name__ == '__main__':
main()
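All three accepted input forms ("123456", "#123456", or a bugs.debian.org URL) funnel through the same bug=(\d+) regex shown in the diff above. A tiny self-contained sketch of that normalisation step:

    # Self-contained sketch of the bug-number normalisation shown above.
    import re

    bug_re = re.compile(r"bug=(\d+)")

    def to_bug_number(arg: str) -> int:
        if arg.startswith("http"):
            match = bug_re.search(arg)
            if match is None:
                raise ValueError(f"Can't determine bug number from {arg}")
            arg = match.groups()[0]
        return int(arg.lstrip("#"))

    print(to_bug_number("https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=123456"))  # 123456
    print(to_bug_number("#123456"))  # 123456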
merge-changelog
@ -18,31 +18,24 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import sys

from debian.changelog import Changelog

from ubuntutools import getLogger

Logger = getLogger()


def usage(exit_code=1):
Logger.info(
"""Usage: merge-changelog <left changelog> <right changelog>
Logger.info('''Usage: merge-changelog <left changelog> <right changelog>

merge-changelog takes two changelogs that once shared a common source,
merges them back together, and prints the merged result to stdout. This
is useful if you need to manually merge an Ubuntu package with a new
Debian release of the package.
"""
)
''')
sys.exit(exit_code)


########################################################################
# Changelog Management
########################################################################
@ -51,9 +44,9 @@ Debian release of the package.
def merge_changelog(left_changelog, right_changelog):
"""Merge a changelog file."""

with open(left_changelog, encoding="utf-8") as f:
with open(left_changelog) as f:
left_cl = Changelog(f)
with open(right_changelog, encoding="utf-8") as f:
with open(right_changelog) as f:
right_cl = Changelog(f)

left_versions = set(left_cl.versions)
@ -62,9 +55,9 @@ def merge_changelog(left_changelog, right_changelog):
right_blocks = iter(right_cl)

clist = sorted(left_versions | right_versions, reverse=True)
remaining = len(clist)
ci = len(clist)
for version in clist:
remaining -= 1
ci -= 1
if version in left_versions:
block = next(left_blocks)
if version in right_versions:
@ -74,11 +67,11 @@ def merge_changelog(left_changelog, right_changelog):

assert block.version == version

Logger.info("%s%s", str(block).strip(), "\n" if remaining else "")
Logger.info(str(block).strip() + ('\n' if ci else ''))


def main():
if len(sys.argv) > 1 and sys.argv[1] in ("-h", "--help"):
if len(sys.argv) > 1 and sys.argv[1] in ('-h', '--help'):
usage(0)
if len(sys.argv) != 3:
usage(1)
@ -90,5 +83,5 @@ def main():
sys.exit(0)


if __name__ == "__main__":
if __name__ == '__main__':
main()
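The merge strategy visible in these hunks is simple: take the union of the two version sets, sort it in descending order, and emit each block from whichever changelog carries that version, preferring the left one when both do. A condensed sketch of the same idea (the file names are hypothetical; requires python3-debian):

    # Condensed sketch of the union-and-sort merge above; file names are
    # hypothetical examples.
    from debian.changelog import Changelog

    with open("debian.changelog", encoding="utf-8") as f:
        left = Changelog(f)
    with open("ubuntu.changelog", encoding="utf-8") as f:
        right = Changelog(f)

    left_blocks = {block.version: block for block in left}
    right_blocks = {block.version: block for block in right}
    for version in sorted(set(left_blocks) | set(right_blocks), reverse=True):
        # Prefer the left block when both changelogs have the version.
        print(str(left_blocks.get(version, right_blocks.get(version))).strip() + "\n")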
131
mk-sbuild
@ -155,7 +155,6 @@ proxy="_unset_"
DEBOOTSTRAP_NO_CHECK_GPG=0
EATMYDATA=1
CCACHE=0
USE_PKGBINARYMANGLER=0

while :; do
case "$1" in
@ -167,7 +166,7 @@ while :; do
--arch)
CHROOT_ARCH="$2"
case $2 in
armhf|i386)
armel|armhf|i386|lpia)
if [ -z "$personality" ]; then
personality="linux32"
fi
@ -304,27 +303,11 @@ if [ ! -w /var/lib/sbuild ]; then
# Prepare a usable default .sbuildrc
if [ ! -e ~/.sbuildrc ]; then
cat > ~/.sbuildrc <<EOM
# *** THIS COMMAND IS DEPRECATED ***
#
# In sbuild 0.87.0 and later, the unshare backend is available. This is
# expected to become the default in a future release.
#
# This is the new preferred way of building Debian packages, making the manual
# creation of schroots no longer necessary. To retain the default behavior,
# you may remove this comment block and continue.
#
# To test the unshare backend while retaining the default settings, run sbuild
# with --chroot-mode=unshare like this:
# $ sbuild --chroot-mode=unshare --dist=unstable hello
#
# To switch to the unshare backend by default (recommended), uncomment the
# following lines and delete the rest of the file (with the exception of the
# last two lines):
#\$chroot_mode = 'unshare';
#\$unshare_mmdebstrap_keep_tarball = 1;

# *** VERIFY AND UPDATE \$mailto and \$maintainer_name BELOW ***

# Mail address where logs are sent to (mandatory, no default!)
\$mailto = '$USER';

# Name to use as override in .changes files for the Maintainer: field
#\$maintainer_name='$USER <$USER@localhost>';

@ -414,41 +397,29 @@ fi
# By default DEBOOTSTRAP_SCRIPT must match RELEASE
DEBOOTSTRAP_SCRIPT="$RELEASE"

dist_ge() {
local releases="$($3-distro-info --all)"
local left=999
local right=0
local seq=1

for i in $releases; do
if [ $1 = $i ]; then
local left=$seq
break
fi
seq=$((seq+1))
done

seq=1
for i in $releases; do
if [ $2 = $i ]; then
local right=$seq
break
fi
seq=$((seq+1))
done

[ $left -ge $right ] && return 0 || return 1
}

ubuntu_dist_ge () {
dist_ge $1 $2 ubuntu
}

debian_dist_ge () {
dist_ge $1 $2 debian
}

if [ "$DISTRO" = "ubuntu" ]; then
ubuntu_dist_ge() {
local releases="$(ubuntu-distro-info --all)"
local left=999
local right=0
local seq=1
for i in $releases; do
if [ $1 = $i ]; then
local left=$seq
break
fi
seq=$((seq+1))
done
seq=1
for i in $releases; do
if [ $2 = $i ]; then
local right=$seq
break
fi
seq=$((seq+1))
done
[ $left -ge $right ] && return 0 || return 1
}
# On Ubuntu, set DEBOOTSTRAP_SCRIPT to gutsy to allow building new RELEASES without new debootstrap
DEBOOTSTRAP_SCRIPT=gutsy
fi
@ -668,7 +639,6 @@ ubuntu)
if ubuntu_dist_ge "$RELEASE" "edgy"; then
# Add pkgbinarymangler (edgy and later)
BUILD_PKGS="$BUILD_PKGS pkgbinarymangler"
USE_PKGBINARYMANGLER=1
# Disable recommends for a smaller chroot (gutsy and later only)
if ubuntu_dist_ge "$RELEASE" "gutsy"; then
BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
@ -685,7 +655,7 @@ debian)
DEBOOTSTRAP_MIRROR="http://deb.debian.org/debian"
fi
if [ -z "$COMPONENTS" ]; then
COMPONENTS="main non-free non-free-firmware contrib"
COMPONENTS="main non-free contrib"
fi
if [ -z "$SOURCES_PROPOSED_SUITE" ]; then
SOURCES_PROPOSED_SUITE="RELEASE-proposed-updates"
@ -693,11 +663,7 @@ debian)
# Debian only performs security updates
SKIP_UPDATES=1
if [ -z "$SOURCES_SECURITY_SUITE" ]; then
if debian_dist_ge "$RELEASE" "bullseye"; then
SOURCES_SECURITY_SUITE="RELEASE-security"
else
SOURCES_SECURITY_SUITE="RELEASE/updates"
fi
SOURCES_SECURITY_SUITE="RELEASE/updates"
fi
if [ -z "$SOURCES_SECURITY_URL" ]; then
SOURCES_SECURITY_URL="http://security.debian.org/"
@ -768,12 +734,12 @@ DEBOOTSTRAP_COMMAND=debootstrap
if [ "$CHROOT_ARCH" != "$HOST_ARCH" ] ; then
case "$CHROOT_ARCH-$HOST_ARCH" in
# Sometimes we don't need qemu
amd64-i386|arm64-armhf|armhf-arm64|i386-amd64|powerpc-ppc64|ppc64-powerpc)
amd64-i386|amd64-lpia|armel-armhf|armhf-armel|arm64-armel|arm64-armhf|armel-arm64|armhf-arm64|i386-amd64|i386-lpia|lpia-i386|powerpc-ppc64|ppc64-powerpc|sparc-sparc64|sparc64-sparc)
;;
# Sometimes we do
*)
DEBOOTSTRAP_COMMAND=debootstrap
if ! which "qemu-x86_64-static"; then
DEBOOTSTRAP_COMMAND=qemu-debootstrap
if ! which "$DEBOOTSTRAP_COMMAND"; then
sudo apt-get install qemu-user-static
fi
;;
@ -892,13 +858,6 @@ EOM
fi
fi
if [ -z "$SKIP_PROPOSED" ]; then
TEMP_PREFERENCES=`mktemp -t preferences-XXXXXX`
cat >> "$TEMP_PREFERENCES" <<EOM
# override for NotAutomatic: yes
Package: *
Pin: release a=*-proposed
Pin-Priority: 500
EOM
cat >> "$TEMP_SOURCES" <<EOM
deb ${MIRROR_ARCHS}${DEBOOTSTRAP_MIRROR} $SOURCES_PROPOSED_SUITE ${COMPONENTS}
deb-src ${DEBOOTSTRAP_MIRROR} $SOURCES_PROPOSED_SUITE ${COMPONENTS}
@ -924,12 +883,9 @@ fi
cat "$TEMP_SOURCES" | sed -e "s|RELEASE|$RELEASE|g" | \
sudo bash -c "cat > $MNT/etc/apt/sources.list"
rm -f "$TEMP_SOURCES"
if [ -n "$TEMP_PREFERENCES" ]; then
sudo mv "$TEMP_PREFERENCES" $MNT/etc/apt/preferences.d/proposed.pref
fi

# Copy the timezone (uncomment this if you want to use your local time zone)
#sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/
# Copy the timezone (comment this out if you want to leave the chroot at UTC)
sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/
# Create a schroot entry for this chroot
TEMP_SCHROOTCONF=`mktemp -t schrootconf-XXXXXX`
TEMPLATE_SCHROOTCONF=~/.mk-sbuild.schroot.conf
@ -1048,25 +1004,6 @@ EOF
EOM
fi

if [ "$USE_PKGBINARYMANGLER" = 1 ]; then
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
mkdir -p /etc/pkgbinarymangler/
cat > /etc/pkgbinarymangler/maintainermangler.conf <<EOF
# pkgmaintainermangler configuration file

# pkgmaintainermangler will do nothing unless enable is set to "true"
enable: true

# Configure what happens if /CurrentlyBuilding is present, but invalid
# (i. e. it does not contain a Package: field). If "ignore" (default),
# the file is ignored (i. e. the Maintainer field is mangled) and a
# warning is printed. If "fail" (or any other value), pkgmaintainermangler
# exits with an error, which causes a package build to fail.
invalid_currentlybuilding: ignore
EOF
EOM
fi

if [ -n "$TARGET_ARCH" ]; then
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
# Configure target architecture
@ -1085,7 +1022,7 @@ apt-get update || true
echo set debconf/frontend Noninteractive | debconf-communicate
echo set debconf/priority critical | debconf-communicate
# Install basic build tool set, trying to match buildd
apt-get -y --force-yes -o Dpkg::Options::="--force-confold" install $BUILD_PKGS
apt-get -y --force-yes install $BUILD_PKGS
# Set up expected /dev entries
if [ ! -r /dev/stdin ];  then ln -s /proc/self/fd/0 /dev/stdin;  fi
if [ ! -r /dev/stdout ]; then ln -s /proc/self/fd/1 /dev/stdout; fi
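The dist_ge() helper introduced above ranks two release codenames by their position in the distro-info list, so "left is at least as new as right" becomes an index comparison. The same check is available from Python via python3-distro-info, which this package already depends on; a minimal sketch:

    # Minimal Python equivalent of the dist_ge() ordering check above,
    # using python3-distro-info; codenames are hypothetical examples.
    from distro_info import UbuntuDistroInfo

    def ubuntu_dist_ge(left: str, right: str) -> bool:
        releases = UbuntuDistroInfo().all  # codenames, oldest first
        # Raises ValueError for unknown codenames instead of defaulting.
        return releases.index(left) >= releases.index(right)

    print(ubuntu_dist_ge("jammy", "focal"))  # True: jammy is newer than focal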
408
pbuilder-dist
@ -29,29 +29,26 @@
|
||||
# configurations. For example, a symlink called pbuilder-hardy will assume
|
||||
# that the target distribution is always meant to be Ubuntu Hardy.
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from contextlib import suppress
|
||||
import subprocess
|
||||
import shutil
|
||||
|
||||
import debian.deb822
|
||||
from distro_info import DebianDistroInfo, DistroDataOutdated, UbuntuDistroInfo
|
||||
from contextlib import suppress
|
||||
from distro_info import DebianDistroInfo, UbuntuDistroInfo, DistroDataOutdated
|
||||
|
||||
import ubuntutools.misc
|
||||
import ubuntutools.version
|
||||
from ubuntutools import getLogger
|
||||
from ubuntutools.config import UDTConfig
|
||||
from ubuntutools.question import YesNoQuestion
|
||||
|
||||
from ubuntutools import getLogger
|
||||
Logger = getLogger()
|
||||
|
||||
|
||||
class PbuilderDist:
|
||||
class PbuilderDist(object):
|
||||
def __init__(self, builder):
|
||||
# Base directory where pbuilder will put all the files it creates.
|
||||
self.base = None
|
||||
@ -90,36 +87,32 @@ class PbuilderDist:
|
||||
self.chroot_string = None
|
||||
|
||||
# Authentication method
|
||||
self.auth = "sudo"
|
||||
self.auth = 'sudo'
|
||||
|
||||
# Builder
|
||||
self.builder = builder
|
||||
|
||||
# Distro info
|
||||
self.debian_distro_info = DebianDistroInfo()
|
||||
self.ubuntu_distro_info = UbuntuDistroInfo()
|
||||
|
||||
self._debian_distros = self.debian_distro_info.all + ["stable", "testing", "unstable"]
|
||||
self._debian_distros = DebianDistroInfo().all + \
|
||||
['stable', 'testing', 'unstable']
|
||||
|
||||
# Ensure that the used builder is installed
|
||||
paths = set(os.environ["PATH"].split(":"))
|
||||
paths |= set(("/sbin", "/usr/sbin", "/usr/local/sbin"))
|
||||
paths = set(os.environ['PATH'].split(':'))
|
||||
paths |= set(('/sbin', '/usr/sbin', '/usr/local/sbin'))
|
||||
if not any(os.path.exists(os.path.join(p, builder)) for p in paths):
|
||||
Logger.error('Could not find "%s".', builder)
|
||||
sys.exit(1)
|
||||
|
||||
##############################################################
|
||||
|
||||
self.base = os.path.expanduser(os.environ.get("PBUILDFOLDER", "~/pbuilder/"))
|
||||
self.base = os.path.expanduser(os.environ.get('PBUILDFOLDER',
|
||||
'~/pbuilder/'))
|
||||
|
||||
if "SUDO_USER" in os.environ:
|
||||
Logger.warning(
|
||||
"Running under sudo. "
|
||||
"This is probably not what you want. "
|
||||
"pbuilder-dist will use sudo itself, "
|
||||
"when necessary."
|
||||
)
|
||||
if os.stat(os.environ["HOME"]).st_uid != os.getuid():
|
||||
if 'SUDO_USER' in os.environ:
|
||||
Logger.warning('Running under sudo. '
|
||||
'This is probably not what you want. '
|
||||
'pbuilder-dist will use sudo itself, '
|
||||
'when necessary.')
|
||||
if os.stat(os.environ['HOME']).st_uid != os.getuid():
|
||||
Logger.error("You don't own $HOME")
|
||||
sys.exit(1)
|
||||
|
||||
@ -130,8 +123,8 @@ class PbuilderDist:
|
||||
Logger.error('Cannot create base directory "%s"', self.base)
|
||||
sys.exit(1)
|
||||
|
||||
if "PBUILDAUTH" in os.environ:
|
||||
self.auth = os.environ["PBUILDAUTH"]
|
||||
if 'PBUILDAUTH' in os.environ:
|
||||
self.auth = os.environ['PBUILDAUTH']
|
||||
|
||||
self.system_architecture = ubuntutools.misc.host_architecture()
|
||||
self.system_distro = ubuntutools.misc.system_distribution()
|
||||
@ -141,7 +134,7 @@ class PbuilderDist:
|
||||
self.target_distro = self.system_distro
|
||||
|
||||
def set_target_distro(self, distro):
|
||||
"""PbuilderDist.set_target_distro(distro) -> None
|
||||
""" PbuilderDist.set_target_distro(distro) -> None
|
||||
|
||||
Check if the given target distribution name is correct, if it
|
||||
isn't know to the system ask the user for confirmation before
|
||||
@ -152,17 +145,16 @@ class PbuilderDist:
|
||||
Logger.error('"%s" is an invalid distribution codename.', distro)
|
||||
sys.exit(1)
|
||||
|
||||
if not os.path.isfile(os.path.join("/usr/share/debootstrap/scripts/", distro)):
|
||||
if os.path.isdir("/usr/share/debootstrap/scripts/"):
|
||||
if not os.path.isfile(os.path.join('/usr/share/debootstrap/scripts/',
|
||||
distro)):
|
||||
if os.path.isdir('/usr/share/debootstrap/scripts/'):
|
||||
# Debian experimental doesn't have a debootstrap file but
|
||||
# should work nevertheless. Ubuntu releases automatically use
|
||||
# the gutsy script as of debootstrap 1.0.128+nmu2ubuntu1.1.
|
||||
if distro not in (self._debian_distros + self.ubuntu_distro_info.all):
|
||||
question = (
|
||||
f'Warning: Unknown distribution "{distro}". ' "Do you want to continue"
|
||||
)
|
||||
answer = YesNoQuestion().ask(question, "no")
|
||||
if answer == "no":
|
||||
# should work nevertheless.
|
||||
if distro not in self._debian_distros:
|
||||
question = ('Warning: Unknown distribution "%s". '
|
||||
'Do you want to continue' % distro)
|
||||
answer = YesNoQuestion().ask(question, 'no')
|
||||
if answer == 'no':
|
||||
sys.exit(0)
|
||||
else:
|
||||
Logger.error('Please install package "debootstrap".')
|
||||
@ -171,34 +163,33 @@ class PbuilderDist:
|
||||
self.target_distro = distro
|
||||
|
||||
def set_operation(self, operation):
|
||||
"""PbuilderDist.set_operation -> None
|
||||
""" PbuilderDist.set_operation -> None
|
||||
|
||||
Check if the given string is a valid pbuilder operation and
|
||||
depending on this either save it into the appropiate variable
|
||||
or finalize pbuilder-dist's execution.
|
||||
"""
|
||||
arguments = ("create", "update", "build", "clean", "login", "execute")
|
||||
arguments = ('create', 'update', 'build', 'clean', 'login', 'execute')
|
||||
|
||||
if operation not in arguments:
|
||||
if operation.endswith(".dsc"):
|
||||
if operation.endswith('.dsc'):
|
||||
if os.path.isfile(operation):
|
||||
self.operation = "build"
|
||||
self.operation = 'build'
|
||||
return [operation]
|
||||
Logger.error('Could not find file "%s".', operation)
|
||||
else:
|
||||
Logger.error('Could not find file "%s".', operation)
|
||||
sys.exit(1)
|
||||
else:
|
||||
Logger.error('"%s" is not a recognized argument.\n'
|
||||
'Please use one of these: %s.',
|
||||
operation, ', '.join(arguments))
|
||||
sys.exit(1)
|
||||
|
||||
Logger.error(
|
||||
'"%s" is not a recognized argument.\nPlease use one of these: %s.',
|
||||
operation,
|
||||
", ".join(arguments),
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
self.operation = operation
|
||||
return []
|
||||
else:
|
||||
self.operation = operation
|
||||
return []
|
||||
|
||||
def get_command(self, remaining_arguments=None):
|
||||
"""PbuilderDist.get_command -> string
|
||||
""" PbuilderDist.get_command -> string
|
||||
|
||||
Generate the pbuilder command which matches the given configuration
|
||||
and return it as a string.
|
||||
@ -209,34 +200,30 @@ class PbuilderDist:
|
||||
if self.build_architecture == self.system_architecture:
|
||||
self.chroot_string = self.target_distro
|
||||
else:
|
||||
self.chroot_string = self.target_distro + "-" + self.build_architecture
|
||||
self.chroot_string = (self.target_distro + '-'
|
||||
+ self.build_architecture)
|
||||
|
||||
prefix = os.path.join(self.base, self.chroot_string)
|
||||
if "--buildresult" not in remaining_arguments:
|
||||
result = os.path.normpath(f"{prefix}_result/")
|
||||
if '--buildresult' not in remaining_arguments:
|
||||
result = os.path.normpath('%s_result/' % prefix)
|
||||
else:
|
||||
location_of_arg = remaining_arguments.index("--buildresult")
|
||||
result = os.path.normpath(remaining_arguments[location_of_arg + 1])
|
||||
remaining_arguments.pop(location_of_arg + 1)
|
||||
location_of_arg = remaining_arguments.index('--buildresult')
|
||||
result = os.path.normpath(remaining_arguments[location_of_arg+1])
|
||||
remaining_arguments.pop(location_of_arg+1)
|
||||
remaining_arguments.pop(location_of_arg)
|
||||
|
||||
if not self.logfile and self.operation != "login":
|
||||
if self.operation == "build":
|
||||
dsc_files = [a for a in remaining_arguments if a.strip().endswith(".dsc")]
|
||||
if not self.logfile and self.operation != 'login':
|
||||
if self.operation == 'build':
|
||||
dsc_files = [a for a in remaining_arguments
|
||||
if a.strip().endswith('.dsc')]
|
||||
assert len(dsc_files) == 1
|
||||
dsc = debian.deb822.Dsc(open(dsc_files[0], encoding="utf-8"))
|
||||
version = ubuntutools.version.Version(dsc["Version"])
|
||||
name = (
|
||||
dsc["Source"]
|
||||
+ "_"
|
||||
+ version.strip_epoch()
|
||||
+ "_"
|
||||
+ self.build_architecture
|
||||
+ ".build"
|
||||
)
|
||||
dsc = debian.deb822.Dsc(open(dsc_files[0]))
|
||||
version = ubuntutools.version.Version(dsc['Version'])
|
||||
name = (dsc['Source'] + '_' + version.strip_epoch() + '_' +
|
||||
self.build_architecture + '.build')
|
||||
self.logfile = os.path.join(result, name)
|
||||
else:
|
||||
self.logfile = os.path.join(result, "last_operation.log")
|
||||
self.logfile = os.path.join(result, 'last_operation.log')
|
||||
|
||||
if not os.path.isdir(result):
|
||||
try:
|
||||
@ -246,89 +233,90 @@ class PbuilderDist:
|
||||
sys.exit(1)
|
||||
|
||||
arguments = [
|
||||
f"--{self.operation}",
|
||||
"--distribution",
|
||||
self.target_distro,
|
||||
"--buildresult",
|
||||
result,
|
||||
'--%s' % self.operation,
|
||||
'--distribution', self.target_distro,
|
||||
'--buildresult', result,
|
||||
]
|
||||
|
||||
if self.operation == "update":
|
||||
arguments += ["--override-config"]
|
||||
if self.operation == 'update':
|
||||
arguments += ['--override-config']
|
||||
|
||||
if self.builder == "pbuilder":
|
||||
arguments += ["--basetgz", prefix + "-base.tgz"]
|
||||
elif self.builder == "cowbuilder":
|
||||
arguments += ["--basepath", prefix + "-base.cow"]
|
||||
if self.builder == 'pbuilder':
|
||||
arguments += ['--basetgz', prefix + '-base.tgz']
|
||||
elif self.builder == 'cowbuilder':
|
||||
arguments += ['--basepath', prefix + '-base.cow']
|
||||
else:
|
||||
Logger.error('Unrecognized builder "%s".', self.builder)
|
||||
sys.exit(1)
|
||||
|
||||
if self.logfile:
|
||||
arguments += ["--logfile", self.logfile]
|
||||
arguments += ['--logfile', self.logfile]
|
||||
|
||||
if os.path.exists("/var/cache/archive/"):
|
||||
arguments += ["--bindmounts", "/var/cache/archive/"]
|
||||
if os.path.exists('/var/cache/archive/'):
|
||||
arguments += ['--bindmounts', '/var/cache/archive/']
|
||||
|
||||
config = UDTConfig()
|
||||
if self.target_distro in self._debian_distros:
|
||||
mirror = os.environ.get("MIRRORSITE", config.get_value("DEBIAN_MIRROR"))
|
||||
components = "main"
|
||||
mirror = os.environ.get('MIRRORSITE',
|
||||
config.get_value('DEBIAN_MIRROR'))
|
||||
components = 'main'
|
||||
if self.extra_components:
|
||||
components += " contrib non-free non-free-firmware"
|
||||
components += ' contrib non-free'
|
||||
else:
|
||||
mirror = os.environ.get("MIRRORSITE", config.get_value("UBUNTU_MIRROR"))
|
||||
if self.build_architecture not in ("amd64", "i386"):
|
||||
mirror = os.environ.get("MIRRORSITE", config.get_value("UBUNTU_PORTS_MIRROR"))
|
||||
components = "main restricted"
|
||||
mirror = os.environ.get('MIRRORSITE',
|
||||
config.get_value('UBUNTU_MIRROR'))
|
||||
if self.build_architecture not in ('amd64', 'i386'):
|
||||
mirror = os.environ.get(
|
||||
'MIRRORSITE', config.get_value('UBUNTU_PORTS_MIRROR'))
|
||||
components = 'main restricted'
|
||||
if self.extra_components:
|
||||
components += " universe multiverse"
|
||||
components += ' universe multiverse'
|
||||
|
||||
arguments += ["--mirror", mirror]
|
||||
arguments += ['--mirror', mirror]
|
||||
|
||||
othermirrors = []
|
||||
localrepo = f"/var/cache/archive/{self.target_distro}"
|
||||
localrepo = '/var/cache/archive/' + self.target_distro
|
||||
if os.path.exists(localrepo):
|
||||
repo = f"deb file:///var/cache/archive/ {self.target_distro}/"
|
||||
repo = 'deb file:///var/cache/archive/ %s/' % self.target_distro
|
||||
othermirrors.append(repo)
|
||||
|
||||
if self.target_distro in self._debian_distros:
|
||||
debian_info = DebianDistroInfo()
|
||||
try:
|
||||
codename = self.debian_distro_info.codename(
|
||||
self.target_distro, default=self.target_distro
|
||||
)
|
||||
codename = debian_info.codename(self.target_distro,
|
||||
default=self.target_distro)
|
||||
except DistroDataOutdated as error:
|
||||
Logger.warning(error)
|
||||
if codename in (self.debian_distro_info.devel(), "experimental"):
|
||||
if codename in (debian_info.devel(), 'experimental'):
|
||||
self.enable_security = False
|
||||
self.enable_updates = False
|
||||
self.enable_proposed = False
|
||||
elif codename in (self.debian_distro_info.testing(), "testing"):
|
||||
elif codename in (debian_info.testing(), 'testing'):
|
||||
self.enable_updates = False
|
||||
|
||||
if self.enable_security:
|
||||
pocket = "-security"
|
||||
pocket = '-security'
|
||||
with suppress(ValueError):
|
||||
# before bullseye (version 11) security suite is /updates
|
||||
if float(self.debian_distro_info.version(codename)) < 11.0:
|
||||
pocket = "/updates"
|
||||
othermirrors.append(
|
||||
f"deb {config.get_value('DEBSEC_MIRROR')}"
|
||||
f" {self.target_distro}{pocket} {components}"
|
||||
)
|
||||
if float(debian_info.version(codename)) < 11.0:
|
||||
pocket = '/updates'
|
||||
othermirrors.append('deb %s %s%s %s'
|
||||
% (config.get_value('DEBSEC_MIRROR'),
|
||||
self.target_distro, pocket, components))
|
||||
if self.enable_updates:
|
||||
othermirrors.append(f"deb {mirror} {self.target_distro}-updates {components}")
|
||||
othermirrors.append('deb %s %s-updates %s'
|
||||
% (mirror, self.target_distro, components))
|
||||
if self.enable_proposed:
|
||||
othermirrors.append(
|
||||
f"deb {mirror} {self.target_distro}-proposed-updates {components}"
|
||||
)
|
||||
othermirrors.append('deb %s %s-proposed-updates %s'
|
||||
% (mirror, self.target_distro, components))
|
||||
if self.enable_backports:
|
||||
othermirrors.append(f"deb {mirror} {self.target_distro}-backports {components}")
|
||||
                othermirrors.append('deb %s %s-backports %s'
                                    % (mirror, self.target_distro, components))

            aptcache = os.path.join(self.base, "aptcache", "debian")
            aptcache = os.path.join(self.base, 'aptcache', 'debian')
        else:
            try:
                dev_release = self.target_distro == self.ubuntu_distro_info.devel()
                dev_release = self.target_distro == UbuntuDistroInfo().devel()
            except DistroDataOutdated as error:
                Logger.warning(error)
                dev_release = True
@@ -338,45 +326,46 @@ class PbuilderDist:
                self.enable_updates = False

            if self.enable_security:
                othermirrors.append(f"deb {mirror} {self.target_distro}-security {components}")
                othermirrors.append('deb %s %s-security %s'
                                    % (mirror, self.target_distro, components))
            if self.enable_updates:
                othermirrors.append(f"deb {mirror} {self.target_distro}-updates {components}")
                othermirrors.append('deb %s %s-updates %s'
                                    % (mirror, self.target_distro, components))
            if self.enable_proposed:
                othermirrors.append(f"deb {mirror} {self.target_distro}-proposed {components}")
                othermirrors.append('deb %s %s-proposed %s'
                                    % (mirror, self.target_distro, components))

            aptcache = os.path.join(self.base, "aptcache", "ubuntu")
            aptcache = os.path.join(self.base, 'aptcache', 'ubuntu')

        if "OTHERMIRROR" in os.environ:
            othermirrors += os.environ["OTHERMIRROR"].split("|")
        if 'OTHERMIRROR' in os.environ:
            othermirrors += os.environ['OTHERMIRROR'].split('|')

        if othermirrors:
            arguments += ["--othermirror", "|".join(othermirrors)]
            arguments += ['--othermirror', '|'.join(othermirrors)]

        # Work around LP:#599695
        if (
            ubuntutools.misc.system_distribution() == "Debian"
            and self.target_distro not in self._debian_distros
        ):
            if not os.path.exists("/usr/share/keyrings/ubuntu-archive-keyring.gpg"):
                Logger.error("ubuntu-keyring not installed")
        if (ubuntutools.misc.system_distribution() == 'Debian'
                and self.target_distro not in self._debian_distros):
            if not os.path.exists(
                    '/usr/share/keyrings/ubuntu-archive-keyring.gpg'):
                Logger.error('ubuntu-keyring not installed')
                sys.exit(1)
            arguments += [
                "--debootstrapopts",
                "--keyring=/usr/share/keyrings/ubuntu-archive-keyring.gpg",
                '--debootstrapopts',
                '--keyring=/usr/share/keyrings/ubuntu-archive-keyring.gpg',
            ]
        elif (
            ubuntutools.misc.system_distribution() == "Ubuntu"
            and self.target_distro in self._debian_distros
        ):
            if not os.path.exists("/usr/share/keyrings/debian-archive-keyring.gpg"):
                Logger.error("debian-archive-keyring not installed")
        elif (ubuntutools.misc.system_distribution() == 'Ubuntu'
                and self.target_distro in self._debian_distros):
            if not os.path.exists(
                    '/usr/share/keyrings/debian-archive-keyring.gpg'):
                Logger.error('debian-archive-keyring not installed')
                sys.exit(1)
            arguments += [
                "--debootstrapopts",
                "--keyring=/usr/share/keyrings/debian-archive-keyring.gpg",
                '--debootstrapopts',
                '--keyring=/usr/share/keyrings/debian-archive-keyring.gpg',
            ]

        arguments += ["--aptcache", aptcache, "--components", components]
        arguments += ['--aptcache', aptcache, '--components', components]

        if not os.path.isdir(aptcache):
            try:
@@ -386,11 +375,13 @@ class PbuilderDist:
                sys.exit(1)

        if self.build_architecture != self.system_architecture:
            arguments += ["--debootstrapopts", "--arch=" + self.build_architecture]
            arguments += ['--debootstrapopts',
                          '--arch=' + self.build_architecture]

        apt_conf_dir = os.path.join(self.base, f"etc/{self.target_distro}/apt.conf")
        apt_conf_dir = os.path.join(self.base,
                                    'etc/%s/apt.conf' % self.target_distro)
        if os.path.exists(apt_conf_dir):
            arguments += ["--aptconfdir", apt_conf_dir]
            arguments += ['--aptconfdir', apt_conf_dir]

        # Append remaining arguments
        if remaining_arguments:
@@ -401,28 +392,28 @@ class PbuilderDist:
        # With both common variable name schemes (BTS: #659060).
        return [
            self.auth,
            "HOME=" + os.path.expanduser("~"),
            "ARCHITECTURE=" + self.build_architecture,
            "DISTRIBUTION=" + self.target_distro,
            "ARCH=" + self.build_architecture,
            "DIST=" + self.target_distro,
            "DEB_BUILD_OPTIONS=" + os.environ.get("DEB_BUILD_OPTIONS", ""),
            'HOME=' + os.path.expanduser('~'),
            'ARCHITECTURE=' + self.build_architecture,
            'DISTRIBUTION=' + self.target_distro,
            'ARCH=' + self.build_architecture,
            'DIST=' + self.target_distro,
            'DEB_BUILD_OPTIONS=' + os.environ.get('DEB_BUILD_OPTIONS', ''),
            self.builder,
        ] + arguments

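Both branches export the build settings under the two historical naming schemes (`ARCH`/`DIST` and `ARCHITECTURE`/`DISTRIBUTION`, per BTS: #659060), so hooks written against either scheme keep working. A minimal sketch of a hypothetical hook consuming them; the fallback chain is illustrative, not part of the tool:

```python
import os

# Hypothetical pbuilder hook: accept either variable name scheme,
# since pbuilder-dist exports both (BTS: #659060).
dist = os.environ.get("DIST") or os.environ.get("DISTRIBUTION", "unknown")
arch = os.environ.get("ARCH") or os.environ.get("ARCHITECTURE", "unknown")
print(f"hook running for {dist}/{arch}")
```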
def show_help(exit_code=0):
    """help() -> None
    """ help() -> None

    Print a help message for pbuilder-dist, and exit with the given code.
    """
    Logger.info("See man pbuilder-dist for more information.")
    Logger.info('See man pbuilder-dist for more information.')

    sys.exit(exit_code)


def main():
    """main() -> None
    """ main() -> None

    This is pbuilder-dist's main function. It creates a PbuilderDist
    object, modifies all necessary settings taking data from the
@@ -430,25 +421,27 @@ def main():
    the script and runs pbuilder itself or exits with an error message.
    """
    script_name = os.path.basename(sys.argv[0])
    parts = script_name.split("-")
    parts = script_name.split('-')

    # Copy arguments into another list for safe manipulation
    args = sys.argv[1:]

    if "-" in script_name and parts[0] not in ("pbuilder", "cowbuilder") or len(parts) > 3:
        Logger.error('"%s" is not a valid name for a "pbuilder-dist" executable.', script_name)
    if ('-' in script_name and parts[0] not in ('pbuilder', 'cowbuilder')
            or len(parts) > 3):
        Logger.error('"%s" is not a valid name for a "pbuilder-dist" '
                     'executable.', script_name)
        sys.exit(1)

    if len(args) < 1:
        Logger.error("Insufficient number of arguments.")
        Logger.error('Insufficient number of arguments.')
        show_help(1)

    if args[0] in ("-h", "--help", "help"):
    if args[0] in ('-h', '--help', 'help'):
        show_help(0)

    app = PbuilderDist(parts[0])

    if len(parts) > 1 and parts[1] != "dist" and "." not in parts[1]:
    if len(parts) > 1 and parts[1] != 'dist' and '.' not in parts[1]:
        app.set_target_distro(parts[1])
    else:
        app.set_target_distro(args.pop(0))
@@ -456,31 +449,24 @@ def main():
    if len(parts) > 2:
        requested_arch = parts[2]
    elif len(args) > 0:
        if shutil.which("arch-test") is not None:
            arch_test = subprocess.run(
                ["arch-test", args[0]], check=False, stdout=subprocess.DEVNULL
            )
            if arch_test.returncode == 0:
        if shutil.which('arch-test') is not None:
            if subprocess.run(
                    ['arch-test', args[0]],
                    stdout=subprocess.DEVNULL).returncode == 0:
                requested_arch = args.pop(0)
            elif os.path.isdir("/usr/lib/arch-test") and args[0] in os.listdir(
                "/usr/lib/arch-test/"
            ):
                Logger.error(
                    'Architecture "%s" is not supported on your '
                    "currently running kernel. Consider installing "
                    "the qemu-user-static package to enable the use of "
                    "foreign architectures.",
                    args[0],
                )
            elif (os.path.isdir('/usr/lib/arch-test')
                  and args[0] in os.listdir('/usr/lib/arch-test/')):
                Logger.error('Architecture "%s" is not supported on your '
                             'currently running kernel. Consider installing '
                             'the qemu-user-static package to enable the use of '
                             'foreign architectures.', args[0])
                sys.exit(1)
            else:
                requested_arch = None
        else:
            Logger.error(
                'Cannot determine if "%s" is a valid architecture. '
                "Please install the arch-test package and retry.",
                args[0],
            )
            Logger.error('Cannot determine if "%s" is a valid architecture. '
                         'Please install the arch-test package and retry.',
                         args[0])
            sys.exit(1)
    else:
        requested_arch = None
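For reference, the `arch-test` probe in both versions above boils down to an exit-code check. A minimal self-contained sketch, assuming the arch-test package is installed:

```python
import shutil
import subprocess

def arch_is_runnable(arch):
    """Return True if the running kernel can execute binaries for `arch`."""
    if shutil.which("arch-test") is None:
        raise RuntimeError("arch-test is not installed")
    result = subprocess.run(["arch-test", arch],
                            stdout=subprocess.DEVNULL, check=False)
    return result.returncode == 0
```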
@@ -488,64 +474,62 @@ def main():
    if requested_arch:
        app.build_architecture = requested_arch
        # For some foreign architectures we need to use qemu
        if requested_arch != app.system_architecture and (
            app.system_architecture,
            requested_arch,
        ) not in [
            ("amd64", "i386"),
            ("arm64", "arm"),
            ("arm64", "armhf"),
            ("powerpc", "ppc64"),
            ("ppc64", "powerpc"),
        ]:
            args += ["--debootstrap", "debootstrap"]
        if (requested_arch != app.system_architecture
                and (app.system_architecture, requested_arch) not in [
                    ('amd64', 'i386'), ('amd64', 'lpia'), ('arm', 'armel'),
                    ('armel', 'arm'), ('armel', 'armhf'), ('armhf', 'armel'),
                    ('arm64', 'arm'), ('arm64', 'armhf'), ('arm64', 'armel'),
                    ('i386', 'lpia'), ('lpia', 'i386'), ('powerpc', 'ppc64'),
                    ('ppc64', 'powerpc'), ('sparc', 'sparc64'),
                    ('sparc64', 'sparc')]):
            args += ['--debootstrap', 'qemu-debootstrap']

    if "mainonly" in sys.argv or "--main-only" in sys.argv:
    if 'mainonly' in sys.argv or '--main-only' in sys.argv:
        app.extra_components = False
        if "mainonly" in sys.argv:
            args.remove("mainonly")
        if 'mainonly' in sys.argv:
            args.remove('mainonly')
        else:
            args.remove("--main-only")
            args.remove('--main-only')

    if "--release-only" in sys.argv:
        args.remove("--release-only")
    if '--release-only' in sys.argv:
        args.remove('--release-only')
        app.enable_security = False
        app.enable_updates = False
        app.enable_proposed = False
    elif "--security-only" in sys.argv:
        args.remove("--security-only")
    elif '--security-only' in sys.argv:
        args.remove('--security-only')
        app.enable_updates = False
        app.enable_proposed = False
    elif "--updates-only" in sys.argv:
        args.remove("--updates-only")
    elif '--updates-only' in sys.argv:
        args.remove('--updates-only')
        app.enable_proposed = False
    elif "--backports" in sys.argv:
        args.remove("--backports")
    elif '--backports' in sys.argv:
        args.remove('--backports')
        app.enable_backports = True

    if len(args) < 1:
        Logger.error("Insufficient number of arguments.")
        Logger.error('Insufficient number of arguments.')
        show_help(1)

    # Parse the operation
    args = app.set_operation(args.pop(0)) + args

    if app.operation == "build":
        if len([a for a in args if a.strip().endswith(".dsc")]) != 1:
            msg = "You have to specify one .dsc file if you want to build."
    if app.operation == 'build':
        if len([a for a in args if a.strip().endswith('.dsc')]) != 1:
            msg = 'You have to specify one .dsc file if you want to build.'
            Logger.error(msg)
            sys.exit(1)

    # Execute the pbuilder command
    if "--debug-echo" not in args:
    if '--debug-echo' not in args:
        sys.exit(subprocess.call(app.get_command(args)))
    else:
        Logger.info(app.get_command([arg for arg in args if arg != "--debug-echo"]))
        Logger.info(app.get_command([arg for arg in args if arg != '--debug-echo']))


if __name__ == "__main__":
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        Logger.error("Manually aborted.")
        Logger.error('Manually aborted.')
        sys.exit(1)
142 pm-helper
@@ -1,142 +0,0 @@
#!/usr/bin/python3
# Find the next thing to work on for proposed-migration
# Copyright (C) 2023 Canonical Ltd.
# Author: Steve Langasek <steve.langasek@ubuntu.com>

# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License, version 3.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import lzma
import sys
import webbrowser
from argparse import ArgumentParser

import yaml
from launchpadlib.launchpad import Launchpad

from ubuntutools.utils import get_url

# proposed-migration is only concerned with the devel series; unlike other
# tools, don't make this configurable
excuses_url = "https://ubuntu-archive-team.ubuntu.com/proposed-migration/update_excuses.yaml.xz"


def get_proposed_version(excuses, package):
    for k in excuses["sources"]:
        if k["source"] == package:
            return k.get("new-version")
    return None


def claim_excuses_bug(launchpad, bug, package):
    print(f"LP: #{bug.id}: {bug.title}")
    ubuntu = launchpad.distributions["ubuntu"]
    series = ubuntu.current_series.fullseriesname

    for task in bug.bug_tasks:
        # targeting to a series doesn't make the default task disappear,
        # it just makes it useless
        if task.bug_target_name == f"{package} ({series})":
            our_task = task
            break
        if task.bug_target_name == f"{package} (Ubuntu)":
            our_task = task

    if our_task.assignee == launchpad.me:
        print("Bug already assigned to you.")
        return True
    if our_task.assignee:
        print(f"Currently assigned to {our_task.assignee.name}")

    print("""Do you want to claim this bug? [yN] """, end="")
    sys.stdout.flush()
    response = sys.stdin.readline()
    if response.strip().lower().startswith("y"):
        our_task.assignee = launchpad.me
        our_task.lp_save()
        return True

    return False


def create_excuses_bug(launchpad, package, version):
    print("Will open a new bug")
    bug = launchpad.bugs.createBug(
        title=f"proposed-migration for {package} {version}",
        tags=("update-excuse"),
        target=f"https://api.launchpad.net/devel/ubuntu/+source/{package}",
        description=f"{package} {version} is stuck in -proposed.",
    )

    task = bug.bug_tasks[0]
    task.assignee = launchpad.me
    task.lp_save()

    print(f"Opening {bug.web_link} in browser")
    webbrowser.open(bug.web_link)
    return bug


def has_excuses_bugs(launchpad, package):
    ubuntu = launchpad.distributions["ubuntu"]
    pkg = ubuntu.getSourcePackage(name=package)
    if not pkg:
        raise ValueError(f"No such source package: {package}")

    tasks = pkg.searchTasks(tags=["update-excuse"], order_by=["id"])

    bugs = [task.bug for task in tasks]
    if not bugs:
        return False

    if len(bugs) == 1:
        print(f"There is 1 open update-excuse bug against {package}")
    else:
        print(f"There are {len(bugs)} open update-excuse bugs against {package}")

    for bug in bugs:
        if claim_excuses_bug(launchpad, bug, package):
            return True

    return True


def main():
    parser = ArgumentParser()
    parser.add_argument("-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_argument(
        "-v", "--verbose", default=False, action="store_true", help="be more verbose"
    )
    parser.add_argument("package", nargs="?", help="act on this package only")
    args = parser.parse_args()

    args.launchpad = Launchpad.login_with("pm-helper", args.launchpad_instance, version="devel")

    f = get_url(excuses_url, False)
    with lzma.open(f) as lzma_f:
        excuses = yaml.load(lzma_f, Loader=yaml.CSafeLoader)

    if args.package:
        try:
            if not has_excuses_bugs(args.launchpad, args.package):
                proposed_version = get_proposed_version(excuses, args.package)
                if not proposed_version:
                    print(f"Package {args.package} not found in -proposed.")
                    sys.exit(1)
                create_excuses_bug(args.launchpad, args.package, proposed_version)
        except ValueError as e:
            sys.stderr.write(f"{e}\n")
    else:
        pass  # for now


if __name__ == "__main__":
    sys.exit(main())
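`get_proposed_version()` above assumes the `update_excuses.yaml` layout: a top-level `sources` list whose entries carry `source` and `new-version` keys. A minimal sketch exercising it with hypothetical data (not a real excuses file):

```python
# Hypothetical miniature of the excuses structure the function expects.
excuses = {
    "sources": [
        {"source": "hello", "new-version": "2.10-3"},  # invented entry
    ]
}

def get_proposed_version(excuses, package):
    for k in excuses["sources"]:
        if k["source"] == package:
            return k.get("new-version")
    return None

assert get_proposed_version(excuses, "hello") == "2.10-3"
assert get_proposed_version(excuses, "missing") is None
```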
pull-debian-ddebs
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="debian", pull="ddebs")
if __name__ == '__main__':
    PullPkg.main(distro='debian', pull='ddebs')
pull-debian-debdiff
@@ -17,32 +17,29 @@
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
import optparse
import sys

import debian.changelog

from ubuntutools import getLogger
from ubuntutools.archive import DebianSourcePackage, DownloadError
from ubuntutools.config import UDTConfig
from ubuntutools.version import Version

from ubuntutools import getLogger
Logger = getLogger()


def previous_version(package, version, distance):
    "Given an (extracted) package, determine the version distance versions ago"
    upver = Version(version).upstream_version
    filename = f"{package}-{upver}/debian/changelog"
    changelog_file = open(filename, "r", encoding="utf-8")
    filename = '%s-%s/debian/changelog' % (package, upver)
    changelog_file = open(filename, 'r')
    changelog = debian.changelog.Changelog(changelog_file.read())
    changelog_file.close()
    seen = 0
    for entry in changelog:
        if entry.distributions == "UNRELEASED":
        if entry.distributions == 'UNRELEASED':
            continue
        if seen == distance:
            return entry.version.full_version
@@ -51,78 +48,69 @@ def previous_version(package, version, distance):


def main():
    parser = argparse.ArgumentParser(usage="%(prog)s [options] <package> <version> [distance]")
    parser.add_argument(
        "-f",
        "--fetch",
        dest="fetch_only",
        default=False,
        action="store_true",
        help="Only fetch the source packages, don't diff.",
    )
    parser.add_argument(
        "-d",
        "--debian-mirror",
        metavar="DEBIAN_MIRROR",
        dest="debian_mirror",
        help="Preferred Debian mirror (default: http://deb.debian.org/debian)",
    )
    parser.add_argument(
        "-s",
        "--debsec-mirror",
        metavar="DEBSEC_MIRROR",
        dest="debsec_mirror",
        help="Preferred Debian Security mirror (default: http://security.debian.org)",
    )
    parser.add_argument(
        "--no-conf",
        dest="no_conf",
        default=False,
        action="store_true",
        help="Don't read config files or environment variables",
    )
    parser.add_argument("package", help=argparse.SUPPRESS)
    parser.add_argument("version", help=argparse.SUPPRESS)
    parser.add_argument("distance", default=1, type=int, nargs="?", help=argparse.SUPPRESS)
    args = parser.parse_args()
    parser = optparse.OptionParser('%prog [options] <package> <version> '
                                   '[distance]')
    parser.add_option('-f', '--fetch',
                      dest='fetch_only', default=False, action='store_true',
                      help="Only fetch the source packages, don't diff.")
    parser.add_option('-d', '--debian-mirror', metavar='DEBIAN_MIRROR',
                      dest='debian_mirror',
                      help='Preferred Debian mirror '
                           '(default: http://deb.debian.org/debian)')
    parser.add_option('-s', '--debsec-mirror', metavar='DEBSEC_MIRROR',
                      dest='debsec_mirror',
                      help='Preferred Debian Security mirror '
                           '(default: http://security.debian.org)')
    parser.add_option('--no-conf',
                      dest='no_conf', default=False, action='store_true',
                      help="Don't read config files or environment variables")

    config = UDTConfig(args.no_conf)
    if args.debian_mirror is None:
        args.debian_mirror = config.get_value("DEBIAN_MIRROR")
    if args.debsec_mirror is None:
        args.debsec_mirror = config.get_value("DEBSEC_MIRROR")
    mirrors = [args.debsec_mirror, args.debian_mirror]
    opts, args = parser.parse_args()
    if len(args) < 2:
        parser.error('Must specify package and version')
    elif len(args) > 3:
        parser.error('Too many arguments')
    package = args[0]
    version = args[1]
    distance = int(args[2]) if len(args) > 2 else 1

    Logger.info("Downloading %s %s", args.package, args.version)
    config = UDTConfig(opts.no_conf)
    if opts.debian_mirror is None:
        opts.debian_mirror = config.get_value('DEBIAN_MIRROR')
    if opts.debsec_mirror is None:
        opts.debsec_mirror = config.get_value('DEBSEC_MIRROR')
    mirrors = [opts.debsec_mirror, opts.debian_mirror]

    newpkg = DebianSourcePackage(args.package, args.version, mirrors=mirrors)
    Logger.info('Downloading %s %s', package, version)

    newpkg = DebianSourcePackage(package, version, mirrors=mirrors)
    try:
        newpkg.pull()
    except DownloadError as e:
        Logger.error("Failed to download: %s", str(e))
        Logger.error('Failed to download: %s', str(e))
        sys.exit(1)
    newpkg.unpack()

    if args.fetch_only:
    if opts.fetch_only:
        sys.exit(0)

    oldversion = previous_version(args.package, args.version, args.distance)
    oldversion = previous_version(package, version, distance)
    if not oldversion:
        Logger.error("No previous version could be found")
        Logger.error('No previous version could be found')
        sys.exit(1)
    Logger.info("Downloading %s %s", args.package, oldversion)
    Logger.info('Downloading %s %s', package, oldversion)

    oldpkg = DebianSourcePackage(args.package, oldversion, mirrors=mirrors)
    oldpkg = DebianSourcePackage(package, oldversion, mirrors=mirrors)
    try:
        oldpkg.pull()
    except DownloadError as e:
        Logger.error("Failed to download: %s", str(e))
        Logger.error('Failed to download: %s', str(e))
        sys.exit(1)
    Logger.info("file://%s", oldpkg.debdiff(newpkg, diffstat=True))
    Logger.info('file://' + oldpkg.debdiff(newpkg, diffstat=True))


if __name__ == "__main__":
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        Logger.info("User abort.")
        Logger.info('User abort.')
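The changelog walk in `previous_version()` above skips `UNRELEASED` entries and counts back `distance` released versions. A self-contained sketch of the same idea with python-debian; the `seen += 1` step is inferred, since the hunk shown cuts off before it:

```python
import debian.changelog

def version_n_back(changelog_text, distance):
    """Return the version `distance` released entries back, or None.

    Sketch following the previous_version() logic above, with the
    counter increment reconstructed from context.
    """
    changelog = debian.changelog.Changelog(changelog_text)
    seen = 0
    for entry in changelog:
        if entry.distributions == "UNRELEASED":
            continue
        if seen == distance:
            return entry.version.full_version
        seen += 1
    return None
```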
pull-debian-debs
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="debian", pull="debs")
if __name__ == '__main__':
    PullPkg.main(distro='debian', pull='debs')

pull-debian-source
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="debian", pull="source")
if __name__ == '__main__':
    PullPkg.main(distro='debian', pull='source')

pull-debian-udebs
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="debian", pull="udebs")
if __name__ == '__main__':
    PullPkg.main(distro='debian', pull='udebs')

pull-lp-ddebs
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="ubuntu", pull="ddebs")
if __name__ == '__main__':
    PullPkg.main(distro='ubuntu', pull='ddebs')

pull-lp-debs
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="ubuntu", pull="debs")
if __name__ == '__main__':
    PullPkg.main(distro='ubuntu', pull='debs')

pull-lp-source
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="ubuntu", pull="source")
if __name__ == '__main__':
    PullPkg.main(distro='ubuntu', pull='source')

pull-lp-udebs
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="ubuntu", pull="udebs")
if __name__ == '__main__':
    PullPkg.main(distro='ubuntu', pull='udebs')
5 pull-pkg
@@ -23,10 +23,7 @@
#
# ##################################################################

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
if __name__ == '__main__':
    PullPkg.main()
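All of the pull-* entry points above and below are one-line wrappers around the same `PullPkg.main()` call; only the `distro` and `pull` parameters vary. Invoking it directly with values taken from the wrappers shown:

```python
from ubuntutools.pullpkg import PullPkg

# Equivalent to running pull-lp-source: pull a source package from Ubuntu.
PullPkg.main(distro="ubuntu", pull="source")
```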
pull-ppa-ddebs
@@ -6,10 +6,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="ppa", pull="ddebs")
if __name__ == '__main__':
    PullPkg.main(distro='ppa', pull='ddebs')

pull-ppa-debs
@@ -6,10 +6,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="ppa", pull="debs")
if __name__ == '__main__':
    PullPkg.main(distro='ppa', pull='debs')

pull-ppa-source
@@ -6,10 +6,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="ppa", pull="source")
if __name__ == '__main__':
    PullPkg.main(distro='ppa', pull='source')

pull-ppa-udebs
@@ -6,10 +6,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="ppa", pull="udebs")
if __name__ == '__main__':
    PullPkg.main(distro='ppa', pull='udebs')

pull-uca-ddebs
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="uca", pull="ddebs")
if __name__ == '__main__':
    PullPkg.main(distro='uca', pull='ddebs')

pull-uca-debs
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="uca", pull="debs")
if __name__ == '__main__':
    PullPkg.main(distro='uca', pull='debs')

pull-uca-source
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="uca", pull="source")
if __name__ == '__main__':
    PullPkg.main(distro='uca', pull='source')

pull-uca-udebs
@@ -5,10 +5,7 @@
#
# See pull-pkg

# pylint: disable=invalid-name
# pylint: enable=invalid-name

from ubuntutools.pullpkg import PullPkg

if __name__ == "__main__":
    PullPkg.main(distro="uca", pull="udebs")
if __name__ == '__main__':
    PullPkg.main(distro='uca', pull='udebs')
pyproject.toml
@@ -1,6 +0,0 @@
[tool.black]
line-length = 99

[tool.isort]
line_length = 99
profile = "black"
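This removed `pyproject.toml` explains the quote and line-wrapping churn throughout the compare: the main branch is formatted with black and isort at line length 99, while 0.190 predates that. A minimal sketch of checking a tree against those settings, assuming both tools are installed:

```python
import subprocess

# Verify formatting with the settings from the removed pyproject.toml.
subprocess.run(["black", "--check", "--line-length", "99", "."], check=False)
subprocess.run(["isort", "--check-only", "--profile", "black",
                "--line-length", "99", "."], check=False)
```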
295 requestbackport
@@ -14,20 +14,22 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import argparse
import sys
from collections import defaultdict
import optparse
import re
import sys

import apt
from distro_info import UbuntuDistroInfo

from ubuntutools import getLogger
from ubuntutools.config import UDTConfig
from ubuntutools.lp.lpapicache import Distribution, Launchpad
from ubuntutools.lp.lpapicache import Launchpad, Distribution
from ubuntutools.lp.udtexceptions import PackageNotFoundException
from ubuntutools.question import EditBugReport, YesNoQuestion, confirmation_prompt
from ubuntutools.rdepends import RDependsException, query_rdepends
from ubuntutools.question import (YesNoQuestion, EditBugReport,
                                  confirmation_prompt)
from ubuntutools.rdepends import query_rdepends, RDependsException

from ubuntutools import getLogger
Logger = getLogger()


@@ -38,14 +40,16 @@ class DestinationException(Exception):
def determine_destinations(source, destination):
    ubuntu_info = UbuntuDistroInfo()
    if destination is None:
        destination = ubuntu_info.lts()
        destination = ubuntu_info.stable()

    if source not in ubuntu_info.all:
        raise DestinationException(f"Source release {source} does not exist")
        raise DestinationException("Source release %s does not exist" % source)
    if destination not in ubuntu_info.all:
        raise DestinationException(f"Destination release {destination} does not exist")
        raise DestinationException("Destination release %s does not exist"
                                   % destination)
    if destination not in ubuntu_info.supported():
        raise DestinationException(f"Destination release {destination} is not supported")
        raise DestinationException("Destination release %s is not supported"
                                   % destination)

    found = False
    destinations = []
@@ -73,36 +77,41 @@ def determine_destinations(source, destination):


def disclaimer():
    print(
        "Ubuntu's backports are not for fixing bugs in stable releases, "
        "but for bringing new features to older, stable releases.\n"
        "See https://wiki.ubuntu.com/UbuntuBackports for the Ubuntu "
        "Backports policy and processes.\n"
        "See https://wiki.ubuntu.com/StableReleaseUpdates for the process "
        "for fixing bugs in stable releases."
    )
    print("Ubuntu's backports are not for fixing bugs in stable releases, "
          "but for bringing new features to older, stable releases.\n"
          "See https://wiki.ubuntu.com/UbuntuBackports for the Ubuntu "
          "Backports policy and processes.\n"
          "See https://wiki.ubuntu.com/StableReleaseUpdates for the process "
          "for fixing bugs in stable releases.")
    confirmation_prompt()


def check_existing(package):
def check_existing(package, destinations):
    """Search for possible existing bug reports"""
    distro = Distribution("ubuntu")
    srcpkg = distro.getSourcePackage(name=package.getPackageName())

    bugs = srcpkg.searchTasks(
        omit_duplicates=True,
        search_text="[BPO]",
        status=["Incomplete", "New", "Confirmed", "Triaged", "In Progress", "Fix Committed"],
    )
    # The LP bug search is indexed, not substring:
    query = re.findall(r'[a-z]+', package)
    bugs = []
    for release in destinations:
        project_name = '{}-backports'.format(release)
        try:
            project = Launchpad.projects[project_name]
        except KeyError:
            Logger.error("The backports tracking project '%s' doesn't seem to "
                         "exist. Please check the situation with the "
                         "backports team.", project_name)
            sys.exit(1)
        bugs += project.searchTasks(omit_duplicates=True,
                                    search_text=query,
                                    status=["Incomplete", "New", "Confirmed",
                                            "Triaged", "In Progress",
                                            "Fix Committed"])
    if not bugs:
        return

    Logger.info(
        "There are existing bug reports that look similar to your "
        "request. Please check before continuing:"
    )
    Logger.info("There are existing bug reports that look similar to your "
                "request. Please check before continuing:")

    for bug in sorted([bug_task.bug for bug_task in bugs], key=lambda bug: bug.id):
    for bug in sorted(set(bug_task.bug for bug_task in bugs)):
        Logger.info(" * LP: #%-7i: %s  %s", bug.id, bug.title, bug.web_link)

    confirmation_prompt()
@@ -113,9 +122,9 @@ def find_rdepends(releases, published_binaries):

    # We want to display every published binary, even if it has no rdepends
    for binpkg in published_binaries:
        intermediate[binpkg]  # pylint: disable=pointless-statement
        intermediate[binpkg]

    for arch in ("any", "source"):
    for arch in ('any', 'source'):
        for release in releases:
            for binpkg in published_binaries:
                try:
@@ -126,20 +135,20 @@ def find_rdepends(releases, published_binaries):
                for relationship, rdeps in raw_rdeps.items():
                    for rdep in rdeps:
                        # Ignore circular deps:
                        if rdep["Package"] in published_binaries:
                        if rdep['Package'] in published_binaries:
                            continue
                        # arch==any queries return Reverse-Build-Deps:
                        if arch == "any" and rdep.get("Architectures", []) == ["source"]:
                        if arch == 'any' and rdep.get('Architectures', []) == ['source']:
                            continue
                        intermediate[binpkg][rdep["Package"]].append((release, relationship))
                        intermediate[binpkg][rdep['Package']].append((release, relationship))

    output = []
    for binpkg, rdeps in intermediate.items():
        output += ["", binpkg, "-" * len(binpkg)]
        output += ['', binpkg, '-' * len(binpkg)]
        for pkg, appearences in rdeps.items():
            output += [f"* {pkg}"]
            output += ['* %s' % pkg]
            for release, relationship in appearences:
                output += [f"  [ ] {release} ({relationship})"]
                output += ['  [ ] %s (%s)' % (release, relationship)]

    found_any = sum(len(rdeps) for rdeps in intermediate.values())
    if found_any:
@@ -154,8 +163,8 @@ def find_rdepends(releases, published_binaries):
            "package currently in the release still works with the new "
            "%(package)s installed. "
            "Reverse- Recommends, Suggests, and Enhances don't need to be "
            "tested, and are listed for completeness-sake.",
        ] + output
            "tested, and are listed for completeness-sake."
            ] + output
    else:
        output = ["No reverse dependencies"]

@@ -163,164 +172,146 @@ def find_rdepends(releases, published_binaries):


def locate_package(package, distribution):
    archive = Distribution("ubuntu").getArchive()
    try:
        package_spph = archive.getSourcePackage(package, distribution)
        return package_spph
    except PackageNotFoundException as e:
    archive = Distribution('ubuntu').getArchive()
    for pass_ in ('source', 'binary'):
        try:
            package_spph = archive.getSourcePackage(package, distribution)
            return package_spph
        except PackageNotFoundException as e:
            if pass_ == 'binary':
                Logger.error(str(e))
                sys.exit(1)

        try:
            apt_pkg = apt.Cache()[package]
        except KeyError:
            Logger.error(str(e))
            sys.exit(1)
            continue
        package = apt_pkg.candidate.source_name
        Logger.info(
            "Binary package specified, considering its source package instead: %s", package
        )
    return None
        Logger.info("Binary package specified, considering its source "
                    "package instead: %s", package)


def request_backport(package_spph, source, destinations):

    published_binaries = set()
    for bpph in package_spph.getBinaries():
        published_binaries.add(bpph.getPackageName())

    if not published_binaries:
        Logger.error(
            "%s (%s) has no published binaries in %s. ",
            package_spph.getPackageName(),
            package_spph.getVersion(),
            source,
        )
        Logger.info(
            "Is it stuck in bin-NEW? It can't be backported until "
            "the binaries have been accepted."
        )
        Logger.error("%s (%s) has no published binaries in %s. ",
                     package_spph.getPackageName(), package_spph.getVersion(),
                     source)
        Logger.info("Is it stuck in bin-NEW? It can't be backported until "
                    "the binaries have been accepted.")
        sys.exit(1)

    testing = ["[Testing]", ""]
    testing = []
    testing += ["You can test-build the backport in your PPA with "
                "backportpackage:"]
    testing += ["$ backportpackage -u ppa:<lp username>/<ppa name> "
                "-s %s -d %s %s"
                % (source, dest, package_spph.getPackageName())
                for dest in destinations]
    testing += [""]
    for dest in destinations:
        testing += [f" * {dest.capitalize()}:"]
        testing += ["   [ ] Package builds without modification"]
        testing += [f"   [ ] {binary} installs cleanly and runs" for binary in published_binaries]
        testing += ['* %s:' % dest]
        testing += ["[ ] Package builds without modification"]
        testing += ["[ ] %s installs cleanly and runs" % binary
                    for binary in published_binaries]

    subst = {
        "package": package_spph.getPackageName(),
        "version": package_spph.getVersion(),
        "component": package_spph.getComponent(),
        "source": package_spph.getSeriesAndPocket(),
        "destinations": ", ".join(destinations),
        'package': package_spph.getPackageName(),
        'version': package_spph.getVersion(),
        'component': package_spph.getComponent(),
        'source': package_spph.getSeriesAndPocket(),
        'destinations': ', '.join(destinations),
    }
    subject = "[BPO] %(package)s %(version)s to %(destinations)s" % subst
    body = (
        "\n".join(
    subject = ("Please backport %(package)s %(version)s (%(component)s) "
               "from %(source)s" % subst)
    body = ('\n'.join(
            [
                "[Impact]",
                "",
                " * Justification for backporting the new version to the stable release.",
                "",
                "[Scope]",
                "",
                " * List the Ubuntu release you will backport from,"
                " and the specific package version.",
                "",
                " * List the Ubuntu release(s) you will backport to.",
                "",
                "[Other Info]",
                "",
                " * Anything else you think is useful to include",
                "",
                "Please backport %(package)s %(version)s (%(component)s) "
                "from %(source)s to %(destinations)s.",
                "",
                "Reason for the backport:",
                "========================",
                ">>> Enter your reasoning here <<<",
                "",
                "Testing:",
                "========",
                "Mark off items in the checklist [X] as you test them, "
                "but please leave the checklist so that backporters can quickly "
                "evaluate the state of testing.",
                ""
            ]
            + testing
            + [""]
            + find_rdepends(destinations, published_binaries)
            + [""]
        )
        % subst
    )
            + [""]) % subst)

    editor = EditBugReport(subject, body)
    editor.edit()
    subject, body = editor.get_report()

    Logger.info("The final report is:\nSummary: %s\nDescription:\n%s\n", subject, body)
    Logger.info('The final report is:\nSummary: %s\nDescription:\n%s\n',
                subject, body)
    if YesNoQuestion().ask("Request this backport", "yes") == "no":
        sys.exit(1)

    distro = Distribution("ubuntu")
    pkgname = package_spph.getPackageName()

    bug = Launchpad.bugs.createBug(
        title=subject, description=body, target=distro.getSourcePackage(name=pkgname)
    )

    bug.subscribe(person=Launchpad.people["ubuntu-backporters"])

    for dest in destinations:
        series = distro.getSeries(dest)
        try:
            bug.addTask(target=series.getSourcePackage(name=pkgname))
        except Exception:  # pylint: disable=broad-except
            break
    targets = [Launchpad.projects['%s-backports' % destination]
               for destination in destinations]
    bug = Launchpad.bugs.createBug(title=subject, description=body,
                                   target=targets[0])
    for target in targets[1:]:
        bug.addTask(target=target)

    Logger.info("Backport request filed as %s", bug.web_link)


def main():
    parser = argparse.ArgumentParser(usage="%(prog)s [options] package")
    parser.add_argument(
        "-d",
        "--destination",
        metavar="DEST",
        help="Backport to DEST release and necessary "
        "intermediate releases "
        "(default: current LTS release)",
    )
    parser.add_argument(
        "-s",
        "--source",
        metavar="SOURCE",
        help="Backport from SOURCE release (default: current devel release)",
    )
    parser.add_argument(
        "-l",
        "--lpinstance",
        metavar="INSTANCE",
        default=None,
        help="Launchpad instance to connect to (default: production).",
    )
    parser.add_argument(
        "--no-conf",
        action="store_true",
        dest="no_conf",
        default=False,
        help="Don't read config files or environment variables",
    )
    parser.add_argument("package", help=argparse.SUPPRESS)
    args = parser.parse_args()
    parser = optparse.OptionParser('%prog [options] package')
    parser.add_option('-d', '--destination', metavar='DEST',
                      help='Backport to DEST release and necessary '
                           'intermediate releases '
                           '(default: current stable release)')
    parser.add_option('-s', '--source', metavar='SOURCE',
                      help='Backport from SOURCE release '
                           '(default: current devel release)')
    parser.add_option('-l', '--lpinstance', metavar='INSTANCE', default=None,
                      help='Launchpad instance to connect to '
                           '(default: production).')
    parser.add_option('--no-conf', action='store_true',
                      dest='no_conf', default=False,
                      help="Don't read config files or environment variables")
    options, args = parser.parse_args()

    config = UDTConfig(args.no_conf)
    if len(args) != 1:
        parser.error("One (and only one) package must be specified")
    package = args[0]

    if args.lpinstance is None:
        args.lpinstance = config.get_value("LPINSTANCE")
    Launchpad.login(args.lpinstance)
    config = UDTConfig(options.no_conf)

    if args.source is None:
        args.source = Distribution("ubuntu").getDevelopmentSeries().name
    if options.lpinstance is None:
        options.lpinstance = config.get_value('LPINSTANCE')
    Launchpad.login(options.lpinstance)

    if options.source is None:
        options.source = Distribution('ubuntu').getDevelopmentSeries().name

    try:
        destinations = determine_destinations(args.source, args.destination)
        destinations = determine_destinations(options.source,
                                              options.destination)
    except DestinationException as e:
        Logger.error(str(e))
        sys.exit(1)

    disclaimer()

    package_spph = locate_package(args.package, args.source)
    check_existing(package, destinations)

    check_existing(package_spph)
    request_backport(package_spph, args.source, destinations)
    package_spph = locate_package(package, options.source)
    request_backport(package_spph, options.source, destinations)


if __name__ == "__main__":
if __name__ == '__main__':
    main()
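`determine_destinations()` validates the source and destination releases against distro-info data before walking the releases in between. A rough sketch of just the validation step (not the tool's exact algorithm; `UbuntuDistroInfo().all` is a codename list, oldest to newest):

```python
from distro_info import UbuntuDistroInfo

def check_destination(source, destination=None):
    """Validate release names the way determine_destinations() above does."""
    info = UbuntuDistroInfo()
    if destination is None:
        destination = info.lts()  # main's default; 0.190 used info.stable()
    if source not in info.all:
        raise ValueError(f"Source release {source} does not exist")
    if destination not in info.all:
        raise ValueError(f"Destination release {destination} does not exist")
    if destination not in info.supported():
        raise ValueError(f"Destination release {destination} is not supported")
    return destination
```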
388 requestsync
@@ -26,19 +26,19 @@
#
# ##################################################################

import argparse
import optparse
import os
import sys

from distro_info import UbuntuDistroInfo

from ubuntutools import getLogger
from ubuntutools.config import UDTConfig, ubu_email
from ubuntutools.lp import udtexceptions
from ubuntutools.misc import require_utf8
from ubuntutools.question import EditBugReport, confirmation_prompt
from ubuntutools.question import confirmation_prompt, EditBugReport
from ubuntutools.version import Version

from ubuntutools import getLogger
Logger = getLogger()

#
@@ -48,190 +48,170 @@ Logger = getLogger()

def main():
    # Our usage options.
    usage = "%(prog)s [options] <source package> [<target release> [base version]]"
    parser = argparse.ArgumentParser(usage=usage)
    usage = ('Usage: %prog [options] '
             '<source package> [<target release> [base version]]')
    parser = optparse.OptionParser(usage)

    parser.add_argument(
        "-d", dest="dist", default="unstable", help="Debian distribution to sync from."
    )
    parser.add_argument(
        "-k",
        dest="keyid",
        default=None,
        help="GnuPG key ID to use for signing report "
        "(only used when emailing the sync request).",
    )
    parser.add_argument(
        "-n",
        action="store_true",
        dest="newpkg",
        default=False,
        help="Whether package to sync is a new package in Ubuntu.",
    )
    parser.add_argument(
        "--email",
        action="store_true",
        default=False,
        help="Use a PGP-signed email for filing the sync request, rather than the LP API.",
    )
    parser.add_argument(
        "--lp",
        dest="deprecated_lp_flag",
        action="store_true",
        default=False,
        help=argparse.SUPPRESS,
    )
    parser.add_argument(
        "-l",
        "--lpinstance",
        metavar="INSTANCE",
        dest="lpinstance",
        default=None,
        help="Launchpad instance to connect to (default: production).",
    )
    parser.add_argument(
        "-s", action="store_true", dest="sponsorship", default=False, help="Force sponsorship"
    )
    parser.add_argument(
        "-C",
        action="store_true",
        dest="missing_changelog_ok",
        default=False,
        help="Allow changelog to be manually filled in when missing",
    )
    parser.add_argument(
        "-e",
        action="store_true",
        dest="ffe",
        default=False,
        help="Use this after FeatureFreeze for non-bug fix "
        "syncs, changes default subscription to the "
        "appropriate release team.",
    )
    parser.add_argument(
        "--no-conf",
        action="store_true",
        dest="no_conf",
        default=False,
        help="Don't read config files or environment variables",
    )
    parser.add_argument("source_package", help=argparse.SUPPRESS)
    parser.add_argument("release", nargs="?", help=argparse.SUPPRESS)
    parser.add_argument("base_version", nargs="?", type=Version, help=argparse.SUPPRESS)
    args = parser.parse_args()
    parser.add_option('-d', type='string',
                      dest='dist', default='unstable',
                      help='Debian distribution to sync from.')
    parser.add_option('-k', type='string',
                      dest='keyid', default=None,
                      help='GnuPG key ID to use for signing report '
                           '(only used when emailing the sync request).')
    parser.add_option('-n', action='store_true',
                      dest='newpkg', default=False,
                      help='Whether package to sync is a new package in '
                           'Ubuntu.')
    parser.add_option('--email', action='store_true', default=False,
                      help='Use a PGP-signed email for filing the sync '
                           'request, rather than the LP API.')
    parser.add_option('--lp', dest='deprecated_lp_flag',
                      action='store_true', default=False,
                      help=optparse.SUPPRESS_HELP)
    parser.add_option('-l', '--lpinstance', metavar='INSTANCE',
                      dest='lpinstance', default=None,
                      help='Launchpad instance to connect to '
                           '(default: production).')
    parser.add_option('-s', action='store_true',
                      dest='sponsorship', default=False,
                      help='Force sponsorship')
    parser.add_option('-C', action='store_true',
                      dest='missing_changelog_ok', default=False,
                      help='Allow changelog to be manually filled in '
                           'when missing')
    parser.add_option('-e', action='store_true',
                      dest='ffe', default=False,
                      help='Use this after FeatureFreeze for non-bug fix '
                           'syncs, changes default subscription to the '
                           'appropriate release team.')
    parser.add_option('--no-conf', action='store_true',
                      dest='no_conf', default=False,
                      help="Don't read config files or environment variables")

    (options, args) = parser.parse_args()

    if not len(args):
        parser.print_help()
        sys.exit(1)

    require_utf8()

    config = UDTConfig(args.no_conf)
    config = UDTConfig(options.no_conf)

    if args.deprecated_lp_flag:
    if options.deprecated_lp_flag:
        Logger.info("The --lp flag is now default, ignored.")
    if args.email:
        args.lpapi = False
    if options.email:
        options.lpapi = False
    else:
        args.lpapi = config.get_value("USE_LPAPI", default=True, boolean=True)
    if args.lpinstance is None:
        args.lpinstance = config.get_value("LPINSTANCE")
        options.lpapi = config.get_value('USE_LPAPI', default=True,
                                         boolean=True)
    if options.lpinstance is None:
        options.lpinstance = config.get_value('LPINSTANCE')

    if args.keyid is None:
        args.keyid = config.get_value("KEYID")
    if options.keyid is None:
        options.keyid = config.get_value('KEYID')

    if not args.lpapi:
        if args.lpinstance == "production":
            bug_mail_domain = "bugs.launchpad.net"
        elif args.lpinstance == "staging":
            bug_mail_domain = "bugs.staging.launchpad.net"
    if not options.lpapi:
        if options.lpinstance == 'production':
            bug_mail_domain = 'bugs.launchpad.net'
        elif options.lpinstance == 'staging':
            bug_mail_domain = 'bugs.staging.launchpad.net'
        else:
            Logger.error("Error: Unknown launchpad instance: %s", args.lpinstance)
            Logger.error('Error: Unknown launchpad instance: %s'
                         % options.lpinstance)
            sys.exit(1)

        mailserver_host = config.get_value(
            "SMTP_SERVER", default=None, compat_keys=["UBUSMTP", "DEBSMTP"]
        )
        if not args.lpapi and not mailserver_host:
        mailserver_host = config.get_value('SMTP_SERVER',
                                           default=None,
                                           compat_keys=['UBUSMTP', 'DEBSMTP'])
        if not options.lpapi and not mailserver_host:
            try:
                import DNS  # pylint: disable=import-outside-toplevel

                import DNS
                DNS.DiscoverNameServers()
                mxlist = DNS.mxlookup(bug_mail_domain)
                firstmx = mxlist[0]
                mailserver_host = firstmx[1]
            except ImportError:
                Logger.error("Please install python-dns to support Launchpad mail server lookup.")
                Logger.error('Please install python-dns to support '
                             'Launchpad mail server lookup.')
                sys.exit(1)
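The fallback above discovers Launchpad's bug mail server through an MX lookup when no SMTP server is configured. In isolation, assuming the py3dns package that provides the `DNS` module:

```python
import DNS

# Resolve the mail exchanger for Launchpad's bug mail domain.
DNS.DiscoverNameServers()
mxlist = DNS.mxlookup("bugs.launchpad.net")  # [(preference, host), ...], best first
mailserver_host = mxlist[0][1]
print(mailserver_host)
```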
        mailserver_port = config.get_value(
            "SMTP_PORT", default=25, compat_keys=["UBUSMTP_PORT", "DEBSMTP_PORT"]
        )
        mailserver_user = config.get_value("SMTP_USER", compat_keys=["UBUSMTP_USER", "DEBSMTP_USER"])
        mailserver_pass = config.get_value("SMTP_PASS", compat_keys=["UBUSMTP_PASS", "DEBSMTP_PASS"])
        mailserver_port = config.get_value('SMTP_PORT', default=25,
                                           compat_keys=['UBUSMTP_PORT',
                                                        'DEBSMTP_PORT'])
        mailserver_user = config.get_value('SMTP_USER',
                                           compat_keys=['UBUSMTP_USER',
                                                        'DEBSMTP_USER'])
        mailserver_pass = config.get_value('SMTP_PASS',
                                           compat_keys=['UBUSMTP_PASS',
                                                        'DEBSMTP_PASS'])

    # import the needed requestsync module
    # pylint: disable=import-outside-toplevel
    if args.lpapi:
    if options.lpapi:
        from ubuntutools.requestsync.lp import (check_existing_reports,
                                                get_debian_srcpkg,
                                                get_ubuntu_srcpkg,
                                                get_ubuntu_delta_changelog,
                                                need_sponsorship, post_bug)
        from ubuntutools.lp.lpapicache import Distribution, Launchpad
        from ubuntutools.requestsync.lp import (
            check_existing_reports,
            get_debian_srcpkg,
            get_ubuntu_delta_changelog,
            get_ubuntu_srcpkg,
            need_sponsorship,
            post_bug,
        )

        # See if we have LP credentials and exit if we don't -
        # cannot continue in this case

        try:
            # devel for changelogUrl()
            Launchpad.login(service=args.lpinstance, api_version="devel")
            Launchpad.login(service=options.lpinstance, api_version='devel')
        except IOError:
            sys.exit(1)
    else:
        from ubuntutools.requestsync.mail import (
            check_existing_reports,
            get_debian_srcpkg,
            get_ubuntu_delta_changelog,
            get_ubuntu_srcpkg,
            mail_bug,
            need_sponsorship,
        )

        if not any(x in os.environ for x in ("UBUMAIL", "DEBEMAIL", "EMAIL")):
            Logger.error(
                "The environment variable UBUMAIL, DEBEMAIL or EMAIL needs "
                "to be set to let this script mail the sync request."
            )
        from ubuntutools.requestsync.mail import (check_existing_reports,
                                                  get_debian_srcpkg,
                                                  get_ubuntu_srcpkg,
                                                  get_ubuntu_delta_changelog,
                                                  mail_bug, need_sponsorship)
        if not any(x in os.environ for x in ('UBUMAIL', 'DEBEMAIL', 'EMAIL')):
            Logger.error('The environment variable UBUMAIL, DEBEMAIL or EMAIL needs '
                         'to be set to let this script mail the sync request.')
            sys.exit(1)

    newsource = args.newpkg
    sponsorship = args.sponsorship
    distro = args.dist
    ffe = args.ffe
    lpapi = args.lpapi
    newsource = options.newpkg
    sponsorship = options.sponsorship
    distro = options.dist
    ffe = options.ffe
    lpapi = options.lpapi
    need_interaction = False
    srcpkg = args.source_package
    force_base_version = None
    srcpkg = args[0]

    if not args.release:
    if len(args) == 1:
        if lpapi:
            args.release = Distribution("ubuntu").getDevelopmentSeries().name
            release = Distribution('ubuntu').getDevelopmentSeries().name
        else:
            ubu_info = UbuntuDistroInfo()
            args.release = ubu_info.devel()
        Logger.warning("Target release missing - assuming %s", args.release)
            release = ubu_info.devel()
        Logger.warning('Target release missing - assuming %s' % release)
    elif len(args) == 2:
        release = args[1]
    elif len(args) == 3:
        release = args[1]
        force_base_version = Version(args[2])
    else:
        Logger.error('Too many arguments.')
        parser.print_help()
        sys.exit(1)

    # Get the current Ubuntu source package
    try:
        ubuntu_srcpkg = get_ubuntu_srcpkg(srcpkg, args.release, "Proposed")
        ubuntu_srcpkg = get_ubuntu_srcpkg(srcpkg, release, 'Proposed')
        ubuntu_version = Version(ubuntu_srcpkg.getVersion())
        ubuntu_component = ubuntu_srcpkg.getComponent()
        newsource = False  # override the -n flag
    except udtexceptions.PackageNotFoundException:
        ubuntu_srcpkg = None
        ubuntu_version = Version("~")
        ubuntu_version = Version('~')
        ubuntu_component = None  # Set after getting the Debian info
        if not newsource:
            Logger.info("'%s' doesn't exist in 'Ubuntu %s'.", srcpkg, args.release)
            Logger.info("'%s' doesn't exist in 'Ubuntu %s'." % (srcpkg, release))
            Logger.info("Do you want to sync a new package?")
            confirmation_prompt()
            newsource = True
@@ -252,16 +232,15 @@ def main():
        sys.exit(1)

    if ubuntu_component is None:
        if debian_component == "main":
            ubuntu_component = "universe"
        if debian_component == 'main':
            ubuntu_component = 'universe'
        else:
            ubuntu_component = "multiverse"
            ubuntu_component = 'multiverse'

    # Stop if Ubuntu has already the version from Debian or a newer version
    if (ubuntu_version >= debian_version) and args.lpapi:
    if (ubuntu_version >= debian_version) and options.lpapi:
        # try rmadison
        import ubuntutools.requestsync.mail  # pylint: disable=import-outside-toplevel

        import ubuntutools.requestsync.mail
        try:
            debian_srcpkg = ubuntutools.requestsync.mail.get_debian_srcpkg(srcpkg, distro)
            debian_version = Version(debian_srcpkg.getVersion())
@@ -271,80 +250,72 @@ def main():
            sys.exit(1)

    if ubuntu_version == debian_version:
        Logger.error(
            "The versions in Debian and Ubuntu are the same already (%s). Aborting.",
            ubuntu_version,
        )
        Logger.error('The versions in Debian and Ubuntu are the '
                     'same already (%s). Aborting.' % ubuntu_version)
        sys.exit(1)
    if ubuntu_version > debian_version:
        Logger.error(
            "The version in Ubuntu (%s) is newer than the version in Debian (%s). Aborting.",
            ubuntu_version,
            debian_version,
        )
        Logger.error('The version in Ubuntu (%s) is newer than '
                     'the version in Debian (%s). Aborting.'
                     % (ubuntu_version, debian_version))
        sys.exit(1)

    # -s flag not specified - check if we do need sponsorship
    if not sponsorship:
        sponsorship = need_sponsorship(srcpkg, ubuntu_component, args.release)
        sponsorship = need_sponsorship(srcpkg, ubuntu_component, release)

    if not sponsorship and not ffe:
        Logger.error(
            "Consider using syncpackage(1) for syncs that "
            "do not require feature freeze exceptions."
        )
        Logger.error('Consider using syncpackage(1) for syncs that '
                     'do not require feature freeze exceptions.')

    # Check for existing package reports
    if not newsource:
        check_existing_reports(srcpkg)

    # Generate bug report
    pkg_to_sync = (
        f"{srcpkg} {debian_version} ({ubuntu_component})"
        f" from Debian {distro} ({debian_component})"
    )
    title = f"Sync {pkg_to_sync}"
    pkg_to_sync = ('%s %s (%s) from Debian %s (%s)'
                   % (srcpkg, debian_version, ubuntu_component,
                      distro, debian_component))
    title = "Sync %s" % pkg_to_sync
    if ffe:
        title = "FFe: " + title
    report = f"Please sync {pkg_to_sync}\n\n"
    report = "Please sync %s\n\n" % pkg_to_sync

    if "ubuntu" in str(ubuntu_version):
    if 'ubuntu' in str(ubuntu_version):
        need_interaction = True

        Logger.info("Changes have been made to the package in Ubuntu.")
        Logger.info("Please edit the report and give an explanation.")
        Logger.info("Not saving the report file will abort the request.")
        report += (
            f"Explanation of the Ubuntu delta and why it can be dropped:\n"
            f"{get_ubuntu_delta_changelog(ubuntu_srcpkg)}\n>>> ENTER_EXPLANATION_HERE <<<\n\n"
        )
        Logger.info('Changes have been made to the package in Ubuntu.')
        Logger.info('Please edit the report and give an explanation.')
        Logger.info('Not saving the report file will abort the request.')
        report += ('Explanation of the Ubuntu delta and why it can be '
                   'dropped:\n%s\n>>> ENTER_EXPLANATION_HERE <<<\n\n'
                   % get_ubuntu_delta_changelog(ubuntu_srcpkg))

    if ffe:
        need_interaction = True

        Logger.info("To approve FeatureFreeze exception, you need to state")
        Logger.info("the reason why you feel it is necessary.")
        Logger.info("Not saving the report file will abort the request.")
        report += "Explanation of FeatureFreeze exception:\n>>> ENTER_EXPLANATION_HERE <<<\n\n"
        Logger.info('To approve FeatureFreeze exception, you need to state')
        Logger.info('the reason why you feel it is necessary.')
        Logger.info('Not saving the report file will abort the request.')
        report += ('Explanation of FeatureFreeze exception:\n'
                   '>>> ENTER_EXPLANATION_HERE <<<\n\n')

    if need_interaction:
        confirmation_prompt()

    base_version = args.base_version or ubuntu_version
    base_version = force_base_version or ubuntu_version

    if newsource:
        report += "All changelog entries:\n\n"
        report += 'All changelog entries:\n\n'
    else:
        report += f"Changelog entries since current {args.release} version {ubuntu_version}:\n\n"
        report += ('Changelog entries since current %s version %s:\n\n'
                   % (release, ubuntu_version))
    changelog = debian_srcpkg.getChangelog(since_version=base_version)
    if not changelog:
        if not args.missing_changelog_ok:
            Logger.error(
                "Did not retrieve any changelog entries. "
                "Do you need to specify '-C'? "
                "Was the package recently uploaded? (check "
                "http://packages.debian.org/changelogs/)"
            )
        if not options.missing_changelog_ok:
            Logger.error("Did not retrieve any changelog entries. "
                         "Do you need to specify '-C'? "
                         "Was the package recently uploaded? (check "
                         "http://packages.debian.org/changelogs/)")
            sys.exit(1)
        else:
            need_interaction = True
@@ -355,49 +326,36 @@ def main():
    editor.edit(optional=not need_interaction)
    title, report = editor.get_report()

    if "XXX FIXME" in report:
        Logger.error(
            "changelog boilerplate found in report, "
            "please manually add changelog when using '-C'"
        )
    if 'XXX FIXME' in report:
        Logger.error("changelog boilerplate found in report, "
                     "please manually add changelog when using '-C'")
            sys.exit(1)
|
||||
|
||||
# bug status and bug subscriber
|
||||
status = "confirmed"
|
||||
subscribe = "ubuntu-archive"
|
||||
status = 'confirmed'
|
||||
subscribe = 'ubuntu-archive'
|
||||
if sponsorship:
|
||||
status = "new"
|
||||
subscribe = "ubuntu-sponsors"
|
||||
status = 'new'
|
||||
subscribe = 'ubuntu-sponsors'
|
||||
if ffe:
|
||||
status = "new"
|
||||
subscribe = "ubuntu-release"
|
||||
status = 'new'
|
||||
subscribe = 'ubuntu-release'
|
||||
|
||||
srcpkg = None if newsource else srcpkg
|
||||
srcpkg = not newsource and srcpkg or None
|
||||
if lpapi:
|
||||
# Map status to the values expected by LP API
|
||||
mapping = {"new": "New", "confirmed": "Confirmed"}
|
||||
mapping = {'new': 'New', 'confirmed': 'Confirmed'}
|
||||
# Post sync request using LP API
|
||||
post_bug(srcpkg, subscribe, mapping[status], title, report)
|
||||
else:
|
||||
email_from = ubu_email(export=False)[1]
|
||||
# Mail sync request
|
||||
mail_bug(
|
||||
srcpkg,
|
||||
subscribe,
|
||||
status,
|
||||
title,
|
||||
report,
|
||||
bug_mail_domain,
|
||||
args.keyid,
|
||||
email_from,
|
||||
mailserver_host,
|
||||
mailserver_port,
|
||||
mailserver_user,
|
||||
mailserver_pass,
|
||||
)
|
||||
mail_bug(srcpkg, subscribe, status, title, report, bug_mail_domain,
|
||||
options.keyid, email_from, mailserver_host, mailserver_port,
|
||||
mailserver_user, mailserver_pass)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
|
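A note on the `srcpkg` line above: the old `A and B or C` idiom silently yields `C` whenever `B` is falsy, which the conditional expression avoids. A minimal, self-contained sketch (the values are hypothetical, not from the script):

    # "A and B or C" is not a safe ternary: a falsy B falls through to C.
    newsource = False
    srcpkg = ""  # hypothetical empty package name

    legacy = not newsource and srcpkg or None  # -> None (the "" is lost)
    modern = None if newsource else srcpkg     # -> "" (value preserved)
    print(repr(legacy), repr(modern))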
@ -1,6 +1,5 @@
python-debian
python-debianbts
dateutil
distro-info
httplib2
launchpadlib
212
reverse-depends
@ -14,18 +14,16 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
import sys

from distro_info import DistroDataOutdated

from ubuntutools import getLogger
from ubuntutools.misc import codename_to_distribution, system_distribution, vendor_to_distroinfo
from ubuntutools.rdepends import RDependsException, query_rdepends
from ubuntutools.misc import (system_distribution, vendor_to_distroinfo,
                              codename_to_distribution)
from ubuntutools.rdepends import query_rdepends, RDependsException

from ubuntutools import getLogger
Logger = getLogger()

DEFAULT_MAX_DEPTH = 10  # We want to avoid any infinite loops
@ -37,107 +35,77 @@ def main():
        default_release = system_distro_info.devel()
    except DistroDataOutdated as e:
        Logger.warning(e)
        default_release = "unstable"
        default_release = 'unstable'

    description = (
        "List reverse-dependencies of package. "
        "If the package name is prefixed with src: then the "
        "reverse-dependencies of all the binary packages that "
        "the specified source package builds will be listed."
    )
    description = ("List reverse-dependencies of package. "
                   "If the package name is prefixed with src: then the "
                   "reverse-dependencies of all the binary packages that "
                   "the specified source package builds will be listed.")

    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "-r",
        "--release",
        default=default_release,
        help="Query dependencies in RELEASE. Default: %(default)s",
    )
    parser.add_argument(
        "-R",
        "--without-recommends",
        action="store_false",
        dest="recommends",
        help="Only consider Depends relationships, not Recommends",
    )
    parser.add_argument(
        "-s", "--with-suggests", action="store_true", help="Also consider Suggests relationships"
    )
    parser.add_argument(
        "-b",
        "--build-depends",
        action="store_true",
        help="Query build dependencies (synonym for --arch=source)",
    )
    parser.add_argument(
        "-a", "--arch", default="any", help="Query dependencies in ARCH. Default: any"
    )
    parser.add_argument(
        "-c",
        "--component",
        action="append",
        help="Only consider reverse-dependencies in COMPONENT. "
        "Can be specified multiple times. Default: all",
    )
    parser.add_argument(
        "-l", "--list", action="store_true", help="Display a simple, machine-readable list"
    )
    parser.add_argument(
        "-u",
        "--service-url",
        metavar="URL",
        dest="server",
        default=None,
        help="Reverse Dependencies webservice URL. Default: UbuntuWire",
    )
    parser.add_argument(
        "-x",
        "--recursive",
        action="store_true",
        help="Find reverse dependencies recursively.",
    )
    parser.add_argument(
        "-d",
        "--recursive-depth",
        type=int,
        default=DEFAULT_MAX_DEPTH,
        help="If recursive, you can specify the depth.",
    )
    parser.add_argument("package")
    parser.add_argument('-r', '--release', default=default_release,
                        help='Query dependencies in RELEASE. '
                             'Default: %s' % default_release)
    parser.add_argument('-R', '--without-recommends', action='store_false',
                        dest='recommends',
                        help='Only consider Depends relationships, '
                             'not Recommends')
    parser.add_argument('-s', '--with-suggests', action='store_true',
                        help='Also consider Suggests relationships')
    parser.add_argument('-b', '--build-depends', action='store_true',
                        help='Query build dependencies (synonym for --arch=source)')
    parser.add_argument('-a', '--arch', default='any',
                        help='Query dependencies in ARCH. Default: any')
    parser.add_argument('-c', '--component', action='append',
                        help='Only consider reverse-dependencies in COMPONENT. '
                             'Can be specified multiple times. Default: all')
    parser.add_argument('-l', '--list', action='store_true',
                        help='Display a simple, machine-readable list')
    parser.add_argument('-u', '--service-url', metavar='URL',
                        dest='server', default=None,
                        help='Reverse Dependencies webservice URL. '
                             'Default: UbuntuWire')
    parser.add_argument('-x', '--recursive', action='store_true',
                        help='Find reverse dependencies recursively.')
    parser.add_argument('-d', '--recursive-depth', type=int,
                        default=DEFAULT_MAX_DEPTH,
                        help='If recursive, you can specify the depth.')
    parser.add_argument('package')

    options = parser.parse_args()

    opts = {}
    if options.server is not None:
        opts["server"] = options.server
        opts['server'] = options.server

    # Convert unstable/testing aliases to codenames:
    distribution = codename_to_distribution(options.release)
    if not distribution:
        parser.error(f"Unknown release codename {options.release}")
        parser.error('Unknown release codename %s' % options.release)
    distro_info = vendor_to_distroinfo(distribution)()
    try:
        options.release = distro_info.codename(options.release, default=options.release)
        options.release = distro_info.codename(options.release,
                                               default=options.release)
    except DistroDataOutdated:
        # We already logged a warning
        pass

    if options.build_depends:
        options.arch = "source"
        options.arch = 'source'

    if options.arch == "source":
    if options.arch == 'source':
        fields = [
            "Reverse-Build-Depends",
            "Reverse-Build-Depends-Indep",
            "Reverse-Build-Depends-Arch",
            "Reverse-Testsuite-Triggers",
            'Reverse-Build-Depends',
            'Reverse-Build-Depends-Indep',
            'Reverse-Build-Depends-Arch',
            'Reverse-Testsuite-Triggers',
        ]
    else:
        fields = ["Reverse-Depends"]
        fields = ['Reverse-Depends']
        if options.recommends:
            fields.append("Reverse-Recommends")
            fields.append('Reverse-Recommends')
        if options.with_suggests:
            fields.append("Reverse-Suggests")
            fields.append('Reverse-Suggests')

    def build_results(package, result, fields, component, recursive):
        try:
@ -151,9 +119,9 @@ def main():
        if fields:
            data = {k: v for k, v in data.items() if k in fields}
        if component:
            data = {
                k: [rdep for rdep in v if rdep["Component"] in component] for k, v in data.items()
            }
            data = {k: [rdep for rdep in v
                        if rdep['Component'] in component]
                    for k, v in data.items()}
            data = {k: v for k, v in data.items() if v}

        result[package] = data
@ -161,16 +129,13 @@ def main():
        if recursive > 0:
            for rdeps in result[package].values():
                for rdep in rdeps:
                    build_results(rdep["Package"], result, fields, component, recursive - 1)
                    build_results(
                        rdep['Package'], result, fields, component, recursive - 1)

    result = {}
    build_results(
        options.package,
        result,
        fields,
        options.component,
        options.recursive and options.recursive_depth or 0,
    )
        options.package, result, fields, options.component,
        options.recursive and options.recursive_depth or 0)

    if options.list:
        display_consise(result)
@ -183,59 +148,52 @@ def display_verbose(package, values):
        Logger.info("No reverse dependencies found")
        return

    def log_package(values, package, arch, dependency, visited, offset=0):
        line = f"{' ' * offset}* {package}"
    def log_field(field):
        Logger.info(field)
        Logger.info('=' * len(field))

    def log_package(values, package, arch, dependency, offset=0):
        line = ' ' * offset + '* %s' % package
        if all_archs and set(arch) != all_archs:
            line += f" [{' '.join(sorted(arch))}]"
            line += ' [%s]' % ' '.join(sorted(arch))
        if dependency:
            if len(line) < 30:
                line += " " * (30 - len(line))
            line += f" (for {dependency})"
                line += ' ' * (30 - len(line))
            line += ' (for %s)' % dependency
        Logger.info(line)
        if package in visited:
            return
        visited = visited | {package}  # set.add() returns None; build a new set instead
        data = values.get(package)
        if data:
            offset = offset + 1
            for rdeps in data.values():
                for rdep in rdeps:
                    log_package(
                        values,
                        rdep["Package"],
                        rdep.get("Architectures", all_archs),
                        rdep.get("Dependency"),
                        visited,
                        offset,
                    )
                    log_package(values,
                                rdep['Package'],
                                rdep.get('Architectures', all_archs),
                                rdep.get('Dependency'),
                                offset)

    all_archs = set()
    # This isn't accurate, but we make up for it by displaying what we found
    for data in values.values():
        for rdeps in data.values():
            for rdep in rdeps:
                if "Architectures" in rdep:
                    all_archs.update(rdep["Architectures"])
                if 'Architectures' in rdep:
                    all_archs.update(rdep['Architectures'])

    for field, rdeps in values[package].items():
        Logger.info("%s", field)
        Logger.info("%s", "=" * len(field))
        rdeps.sort(key=lambda x: x["Package"])
        Logger.info(field)
        rdeps.sort(key=lambda x: x['Package'])
        for rdep in rdeps:
            log_package(
                values,
                rdep["Package"],
                rdep.get("Architectures", all_archs),
                rdep.get("Dependency"),
                {package},
            )
            log_package(values,
                        rdep['Package'],
                        rdep.get('Architectures', all_archs),
                        rdep.get('Dependency'))
        Logger.info("")

    if all_archs:
        Logger.info(
            "Packages without architectures listed are reverse-dependencies in: %s",
            ", ".join(sorted(list(all_archs))),
        )
        Logger.info("Packages without architectures listed are "
                    "reverse-dependencies in: %s"
                    % ', '.join(sorted(list(all_archs))))


def display_consise(values):
@ -243,10 +201,10 @@ def display_consise(values):
    for data in values.values():
        for rdeps in data.values():
            for rdep in rdeps:
                result.add(rdep["Package"])
                result.add(rdep['Package'])

    Logger.info("\n".join(sorted(list(result))))
    Logger.info('\n'.join(sorted(list(result))))


if __name__ == "__main__":
if __name__ == '__main__':
    main()
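The `visited` parameter threaded through `log_package` above exists to stop cyclic reverse-dependency chains from recursing forever. A stripped-down sketch of the same pattern, with a toy graph standing in for the rdepends data:

    # Toy version of the cycle guard: stop when a node repeats on the path.
    def walk(graph, node, visited, depth=0):
        print("  " * depth + node)
        if node in visited:
            return
        visited = visited | {node}  # new set per path; sibling branches unaffected
        for child in graph.get(node, ()):
            walk(graph, child, visited, depth + 1)

    # The cycle a -> b -> a terminates instead of overflowing the stack:
    walk({"a": ["b"], "b": ["a"]}, "a", set())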
19
run-linters
@ -1,19 +0,0 @@
#!/bin/sh
set -eu

# Copyright 2023, Canonical Ltd.
# SPDX-License-Identifier: GPL-3.0

PYTHON_SCRIPTS=$(grep -l -r '^#! */usr/bin/python3$' .)

echo "Running black..."
black --check --diff . $PYTHON_SCRIPTS

echo "Running isort..."
isort --check-only --diff .

echo "Running flake8..."
flake8 --max-line-length=99 --ignore=E203,W503 . $PYTHON_SCRIPTS

echo "Running pylint..."
pylint $(find * -name '*.py') $PYTHON_SCRIPTS
@ -1,81 +0,0 @@
#!/usr/bin/python3
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-

# Authors:
#   Andy P. Whitcroft
#   Christian Ehrhardt
#   Chris Peterson <chris.peterson@canonical.com>
#
# Copyright (C) 2024 Canonical Ltd.
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Dumps a list of currently running tests in Autopkgtest"""

__example__ = """
Display first listed test running on amd64 hardware:
$ running-autopkgtests | grep amd64 | head -n1
R     0:01:40 systemd-upstream - focal amd64\
 upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb',\
 'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true',\
 'UPSTREAM_PULL_REQUEST=23153',\
 'GITHUB_STATUSES_URL=https://api.github.com/repos/\
systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
"""

import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter

from ubuntutools.running_autopkgtests import get_queued, get_running


def parse_args():
    description = (
        "Dumps a list of currently running and queued tests in Autopkgtest. "
        "Pass --running to only see running tests, or --queued to only see "
        "queued tests. Passing both will print both, which is the default behavior."
    )

    parser = ArgumentParser(
        prog="running-autopkgtests",
        description=description,
        epilog=f"example: {__example__}",
        formatter_class=RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "-r", "--running", action="store_true", help="Print running autopkgtests (default: true)"
    )
    parser.add_argument(
        "-q", "--queued", action="store_true", help="Print queued autopkgtests (default: true)"
    )

    options = parser.parse_args()

    # If neither flag was specified, default to showing both
    if not options.running and not options.queued:
        options.running = True
        options.queued = True

    return options


def main() -> int:
    args = parse_args()
    if args.running:
        print(get_running())
    if args.queued:
        print(get_queued())

    return 0


if __name__ == "__main__":
    sys.exit(main())
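One detail worth noting in `parse_args()` above: `RawDescriptionHelpFormatter` is what keeps the multi-line `__example__` epilog intact; the default formatter would re-wrap it. A minimal illustration (the prog name and epilog text here are made up):

    from argparse import ArgumentParser, RawDescriptionHelpFormatter

    parser = ArgumentParser(
        prog="demo",
        epilog="example:\n  $ demo | grep amd64 | head -n1",
        formatter_class=RawDescriptionHelpFormatter,
    )
    parser.print_help()  # the epilog's line breaks survive as written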
118
seeded-in-ubuntu
@ -14,53 +14,51 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
import collections
import gzip
import json
import optparse
import os
import time
import urllib.request

from ubuntutools import getLogger
from ubuntutools.lp.lpapicache import Distribution, Launchpad, PackageNotFoundException
from ubuntutools.lp.lpapicache import (Distribution, Launchpad,
                                       PackageNotFoundException)

from ubuntutools import getLogger
Logger = getLogger()

DATA_URL = "http://qa.ubuntuwire.org/ubuntu-seeded-packages/seeded.json.gz"
DATA_URL = 'http://qa.ubuntuwire.org/ubuntu-seeded-packages/seeded.json.gz'


def load_index(url):
    """Download a new copy of the image contents index, if necessary,
    '''Download a new copy of the image contents index, if necessary,
    and read it.
    """
    cachedir = os.path.expanduser("~/.cache/ubuntu-dev-tools")
    seeded = os.path.join(cachedir, "seeded.json.gz")
    '''
    cachedir = os.path.expanduser('~/.cache/ubuntu-dev-tools')
    fn = os.path.join(cachedir, 'seeded.json.gz')

    if not os.path.isfile(seeded) or time.time() - os.path.getmtime(seeded) > 60 * 60 * 2:
    if (not os.path.isfile(fn)
            or time.time() - os.path.getmtime(fn) > 60 * 60 * 2):
        if not os.path.isdir(cachedir):
            os.makedirs(cachedir)
        urllib.request.urlretrieve(url, seeded)
        urllib.request.urlretrieve(url, fn)

    try:
        with gzip.open(seeded, "r") as f:
        with gzip.open(fn, 'r') as f:
            return json.load(f)
    except Exception as e:  # pylint: disable=broad-except
        Logger.error(
            "Unable to parse seed data: %s. Deleting cached data, please try again.", str(e)
        )
        os.unlink(seeded)
        return None
    except Exception as e:
        Logger.error("Unable to parse seed data: %s. "
                     "Deleting cached data, please try again.",
                     str(e))
        os.unlink(fn)


def resolve_binaries(sources):
    """Return a dict of source:binaries for all binary packages built by
    '''Return a dict of source:binaries for all binary packages built by
    sources
    """
    archive = Distribution("ubuntu").getArchive()
    '''
    archive = Distribution('ubuntu').getArchive()
    binaries = {}
    for source in sources:
        try:
@ -68,84 +66,80 @@ def resolve_binaries(sources):
        except PackageNotFoundException as e:
            Logger.error(str(e))
            continue
        binaries[source] = sorted(set(bpph.getPackageName() for bpph in spph.getBinaries()))
        binaries[source] = sorted(set(bpph.getPackageName()
                                      for bpph in spph.getBinaries()))

    return binaries


def present_on(appearences):
    """Format a list of (flavor, type) tuples into a human-readable string"""
    '''Format a list of (flavor, type) tuples into a human-readable string'''
    present = collections.defaultdict(set)
    for flavor, type_ in appearences:
        present[flavor].add(type_)
    for flavor, types in present.items():
        if len(types) > 1:
            types.discard("supported")
    output = [f"  {flavor}: {', '.join(sorted(types))}" for flavor, types in present.items()]
            types.discard('supported')
    output = ['  %s: %s' % (flavor, ', '.join(sorted(types)))
              for flavor, types in present.items()]
    output.sort()
    return "\n".join(output)
    return '\n'.join(output)


def output_binaries(index, binaries):
    """Print binaries found in index"""
    '''Print binaries found in index'''
    for binary in binaries:
        if binary in index:
            Logger.info("%s is seeded in:", binary)
            Logger.info("%s is seeded in:" % binary)
            Logger.info(present_on(index[binary]))
        else:
            Logger.info("%s is not seeded (and may not exist).", binary)
            Logger.info("%s is not seeded (and may not exist)." % binary)


def output_by_source(index, by_source):
    """Print binaries found in index, grouped by source"""
    '''Print binaries found in index, grouped by source'''
    for source, binaries in by_source.items():
        seen = False
        if not binaries:
            Logger.info(
                "Status unknown: No binary packages built by the latest "
                "%s.\nTry again using -b and the expected binary packages.",
                source,
            )
            Logger.info("Status unknown: No binary packages built by the latest "
                        "%s.\nTry again using -b and the expected binary packages."
                        % source)
            continue
        for binary in binaries:
            if binary in index:
                seen = True
                Logger.info("%s (from %s) is seeded in:", binary, source)
                Logger.info("%s (from %s) is seeded in:" % (binary, source))
                Logger.info(present_on(index[binary]))
        if not seen:
            Logger.info("%s's binaries are not seeded.", source)
            Logger.info("%s's binaries are not seeded." % source)


def main():
    """Query which images the specified packages are on"""
    parser = argparse.ArgumentParser(usage="%(prog)s [options] package...")
    parser.add_argument(
        "-b",
        "--binary",
        default=False,
        action="store_true",
        help="Binary packages are being specified, not source packages (fast)",
    )
    parser.add_argument(
        "-u",
        "--data-url",
        metavar="URL",
        default=DATA_URL,
        help="URL for the seeded packages index. Default: UbuntuWire",
    )
    parser.add_argument("packages", metavar="package", nargs="+", help=argparse.SUPPRESS)
    args = parser.parse_args()
    '''Query which images the specified packages are on'''
    parser = optparse.OptionParser('%prog [options] package...')
    parser.add_option('-b', '--binary',
                      default=False, action='store_true',
                      help="Binary packages are being specified, "
                           "not source packages (fast)")
    parser.add_option('-u', '--data-url', metavar='URL',
                      default=DATA_URL,
                      help='URL for the seeded packages index. '
                           'Default: UbuntuWire')
    options, args = parser.parse_args()

    if len(args) < 1:
        parser.error("At least one package must be specified")

    # Login anonymously to LP
    Launchpad.login_anonymously()

    index = load_index(args.data_url)
    if args.binary:
        output_binaries(index, args.packages)
    index = load_index(options.data_url)
    if options.binary:
        output_binaries(index, args)
    else:
        binaries = resolve_binaries(args.packages)
        binaries = resolve_binaries(args)
        output_by_source(index, binaries)


if __name__ == "__main__":
if __name__ == '__main__':
    main()
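`load_index()` above implements a simple mtime-based cache: re-download only when the local file is missing or older than two hours. The policy in isolation, as a sketch (function and variable names here are illustrative, not from the script):

    import os
    import time
    import urllib.request

    CACHE_TTL = 60 * 60 * 2  # two hours, matching load_index()

    def fetch_cached(url, path):
        cachedir = os.path.dirname(path)
        if cachedir and not os.path.isdir(cachedir):
            os.makedirs(cachedir)
        stale = (not os.path.isfile(path)
                 or time.time() - os.path.getmtime(path) > CACHE_TTL)
        if stale:
            urllib.request.urlretrieve(url, path)
        return path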
@ -104,7 +104,7 @@ echo "In order to do packaging work, you'll need a minimal set of packages."
echo "Those, together with other packages which, though optional, have proven"
echo "to be useful, will now be installed."
echo
sudo apt-get install ubuntu-dev-tools devscripts debhelper patchutils pbuilder build-essential
sudo apt-get install ubuntu-dev-tools devscripts debhelper cdbs patchutils pbuilder build-essential
separator2

echo "Enabling the source repository"
145
setup.py
@ -1,100 +1,81 @@
#!/usr/bin/python3

from setuptools import setup
import glob
import pathlib
import os
import re

from setuptools import setup


def get_debian_version() -> str:
    """Look what Debian version we have."""
    changelog = pathlib.Path(__file__).parent / "debian" / "changelog"
    with changelog.open("r", encoding="utf-8") as changelog_f:
        head = changelog_f.readline()
# look/set what version we have
changelog = "debian/changelog"
if os.path.exists(changelog):
    head = open(changelog, 'r', encoding='utf-8').readline()
    match = re.compile(r".*\((.*)\).*").match(head)
    if not match:
        raise ValueError(f"Failed to extract Debian version from '{head}'.")
    return match.group(1)


def make_pep440_compliant(version: str) -> str:
    """Convert the version into a PEP440 compliant version."""
    public_version_re = re.compile(r"^([0-9][0-9.]*(?:(?:a|b|rc|.post|.dev)[0-9]+)*)\+?")
    _, public, local = public_version_re.split(version, maxsplit=1)
    if not local:
        return version
    sanitized_local = re.sub("[+~]+", ".", local).strip(".")
    pep440_version = f"{public}+{sanitized_local}"
    assert re.match("^[a-zA-Z0-9.]+$", sanitized_local), f"'{pep440_version}' not PEP440 compliant"
    return pep440_version

    if match:
        version = match.group(1)

scripts = [
    "backportpackage",
    "check-mir",
    "check-symbols",
    "dch-repeat",
    "grab-merge",
    "grep-merges",
    "import-bug-from-debian",
    "lp-bitesize",
    "merge-changelog",
    "mk-sbuild",
    "pbuilder-dist",
    "pbuilder-dist-simple",
    "pm-helper",
    "pull-pkg",
    "pull-debian-debdiff",
    "pull-debian-source",
    "pull-debian-debs",
    "pull-debian-ddebs",
    "pull-debian-udebs",
    "pull-lp-source",
    "pull-lp-debs",
    "pull-lp-ddebs",
    "pull-lp-udebs",
    "pull-ppa-source",
    "pull-ppa-debs",
    "pull-ppa-ddebs",
    "pull-ppa-udebs",
    "pull-uca-source",
    "pull-uca-debs",
    "pull-uca-ddebs",
    "pull-uca-udebs",
    "requestbackport",
    "requestsync",
    "reverse-depends",
    "running-autopkgtests",
    "seeded-in-ubuntu",
    "setup-packaging-environment",
    "sponsor-patch",
    "submittodebian",
    "syncpackage",
    "ubuntu-build",
    "ubuntu-iso",
    "ubuntu-upload-permission",
    "update-maintainer",
    'backportpackage',
    'bitesize',
    'check-mir',
    'check-symbols',
    'dch-repeat',
    'grab-merge',
    'grep-merges',
    'import-bug-from-debian',
    'merge-changelog',
    'mk-sbuild',
    'pbuilder-dist',
    'pbuilder-dist-simple',
    'pull-pkg',
    'pull-debian-debdiff',
    'pull-debian-source',
    'pull-debian-debs',
    'pull-debian-ddebs',
    'pull-debian-udebs',
    'pull-lp-source',
    'pull-lp-debs',
    'pull-lp-ddebs',
    'pull-lp-udebs',
    'pull-ppa-source',
    'pull-ppa-debs',
    'pull-ppa-ddebs',
    'pull-ppa-udebs',
    'pull-uca-source',
    'pull-uca-debs',
    'pull-uca-ddebs',
    'pull-uca-udebs',
    'requestbackport',
    'requestsync',
    'reverse-depends',
    'seeded-in-ubuntu',
    'setup-packaging-environment',
    'sponsor-patch',
    'submittodebian',
    'syncpackage',
    'ubuntu-build',
    'ubuntu-iso',
    'ubuntu-upload-permission',
    'update-maintainer',
]
data_files = [
    ("share/bash-completion/completions", glob.glob("bash_completion/*")),
    ("share/man/man1", glob.glob("doc/*.1")),
    ("share/man/man5", glob.glob("doc/*.5")),
    ("share/ubuntu-dev-tools", ["enforced-editing-wrapper"]),
    ('share/bash-completion/completions', glob.glob("bash_completion/*")),
    ('share/man/man1', glob.glob("doc/*.1")),
    ('share/man/man5', glob.glob("doc/*.5")),
    ('share/ubuntu-dev-tools', ['enforced-editing-wrapper']),
]

if __name__ == "__main__":
if __name__ == '__main__':
    setup(
        name="ubuntu-dev-tools",
        version=make_pep440_compliant(get_debian_version()),
        name='ubuntu-dev-tools',
        version=version,
        scripts=scripts,
        packages=[
            "ubuntutools",
            "ubuntutools/lp",
            "ubuntutools/requestsync",
            "ubuntutools/sponsor_patch",
            "ubuntutools/test",
            'ubuntutools',
            'ubuntutools/lp',
            'ubuntutools/requestsync',
            'ubuntutools/sponsor_patch',
            'ubuntutools/test',
        ],
        data_files=data_files,
        test_suite="ubuntutools.test",
        test_suite='ubuntutools.test',
    )
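The effect of `make_pep440_compliant()` above, worked through by hand (assuming the function is in scope; the outputs follow from the regex): the public part of the version is kept, and any Debian-style suffix is folded into a `+local` segment with `+` and `~` collapsed to dots.

    # Worked examples, not a test suite:
    print(make_pep440_compliant("0.193"))              # -> 0.193 (already valid)
    print(make_pep440_compliant("0.193ubuntu1"))       # -> 0.193+ubuntu1
    print(make_pep440_compliant("0.193ubuntu1~ppa1"))  # -> 0.193+ubuntu1.ppa1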
190
sponsor-patch
@ -14,153 +14,123 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
import logging
import optparse
import os
import shutil
import sys
import tempfile
import logging

from ubuntutools import getLogger
from ubuntutools.builder import get_builder
from ubuntutools.config import UDTConfig
from ubuntutools.sponsor_patch.sponsor_patch import check_dependencies, sponsor_patch
from ubuntutools.sponsor_patch.sponsor_patch import sponsor_patch, check_dependencies

from ubuntutools import getLogger
Logger = getLogger()


def parse(script_name):
    """Parse the command line parameters."""
    usage = (
        "%(prog)s [options] <bug number>\n"
        "One of --upload, --workdir, or --sponsor must be specified."
    )
    epilog = f"See {script_name}(1) for more info."
    parser = argparse.ArgumentParser(usage=usage, epilog=epilog)
    usage = ("%s [options] <bug number>\n" % (script_name)
             + "One of --upload, --workdir, or --sponsor must be specified.")
    epilog = "See %s(1) for more info." % (script_name)
    parser = optparse.OptionParser(usage=usage, epilog=epilog)

    parser.add_argument(
        "-b",
        "--build",
        dest="build",
        help="Build the package with the specified builder.",
        action="store_true",
    )
    parser.add_argument(
        "-B", "--builder", dest="builder", help="Specify the package builder (default pbuilder)"
    )
    parser.add_argument(
        "-e",
        "--edit",
        help="launch sub-shell to allow editing of the patch",
        dest="edit",
        action="store_true",
    )
    parser.add_argument(
        "-k", "--key", dest="keyid", help="Specify the key ID to be used for signing."
    )
    parser.add_argument(
        "-l",
        "--lpinstance",
        dest="lpinstance",
        help="Launchpad instance to connect to (default: production)",
        metavar="INSTANCE",
    )
    parser.add_argument(
        "--no-conf",
        dest="no_conf",
        help="Don't read config files or environment variables.",
        action="store_true",
    )
    parser.add_argument(
        "-s",
        "--sponsor",
        help="sponsoring; equals -b -u ubuntu",
        dest="sponsoring",
        action="store_true",
    )
    parser.add_argument(
        "-u", "--upload", dest="upload", help="Specify an upload destination (default none)."
    )
    parser.add_argument(
        "-U",
        "--update",
        dest="update",
        action="store_true",
        help="Update the build environment before building.",
    )
    parser.add_argument(
        "-v", "--verbose", help="print more information", dest="verbose", action="store_true"
    )
    parser.add_argument(
        "-w",
        "--workdir",
        dest="workdir",
        help="Specify a working directory (default is a "
        "temporary directory, deleted afterwards).",
    )
    parser.add_argument("bug_number", type=int, help=argparse.SUPPRESS)
    parser.add_option("-b", "--build", dest="build",
                      help="Build the package with the specified builder.",
                      action="store_true", default=False)
    parser.add_option("-B", "--builder", dest="builder", default=None,
                      help="Specify the package builder (default pbuilder)")
    parser.add_option("-e", "--edit",
                      help="launch sub-shell to allow editing of the patch",
                      dest="edit", action="store_true", default=False)
    parser.add_option("-k", "--key", dest="keyid", default=None,
                      help="Specify the key ID to be used for signing.")
    parser.add_option("-l", "--lpinstance", dest="lpinstance", default=None,
                      help="Launchpad instance to connect to "
                           "(default: production)",
                      metavar="INSTANCE")
    parser.add_option("--no-conf", dest="no_conf", default=False,
                      help="Don't read config files or environment variables.",
                      action="store_true")
    parser.add_option("-s", "--sponsor", help="sponsoring; equals -b -u ubuntu",
                      dest="sponsoring", action="store_true", default=False)
    parser.add_option("-u", "--upload", dest="upload", default=None,
                      help="Specify an upload destination (default none).")
    parser.add_option("-U", "--update", dest="update", default=False,
                      action="store_true",
                      help="Update the build environment before building.")
    parser.add_option("-v", "--verbose", help="print more information",
                      dest="verbose", action="store_true", default=False)
    parser.add_option("-w", "--workdir", dest="workdir", default=None,
                      help="Specify a working directory (default is a "
                           "temporary directory, deleted afterwards).")

    args = parser.parse_args()
    if args.verbose:
    (options, args) = parser.parse_args()
    if options.verbose:
        Logger.setLevel(logging.DEBUG)
    check_dependencies()

    config = UDTConfig(args.no_conf)
    if args.builder is None:
        args.builder = config.get_value("BUILDER")
    if args.lpinstance is None:
        args.lpinstance = config.get_value("LPINSTANCE")
    if not args.update:
        args.update = config.get_value("UPDATE_BUILDER", boolean=True)
    if args.workdir is None:
        args.workdir = config.get_value("WORKDIR")
    if args.keyid is None:
        args.keyid = config.get_value("KEYID")
    if len(args) == 0:
        Logger.error("No bug number specified.")
        sys.exit(1)
    elif len(args) > 1:
        Logger.error("Multiple bug numbers specified: %s" % (", ".join(args)))
        sys.exit(1)

    if args.sponsoring:
        args.build = True
        args.upload = "ubuntu"
    bug_number = args[0]
    if bug_number.isdigit():
        bug_number = int(bug_number)
    else:
        Logger.error("Invalid bug number specified: %s" % (bug_number))
        sys.exit(1)

    return args
    config = UDTConfig(options.no_conf)
    if options.builder is None:
        options.builder = config.get_value("BUILDER")
    if options.lpinstance is None:
        options.lpinstance = config.get_value("LPINSTANCE")
    if not options.update:
        options.update = config.get_value("UPDATE_BUILDER", boolean=True)
    if options.workdir is None:
        options.workdir = config.get_value("WORKDIR")
    if options.keyid is None:
        options.keyid = config.get_value("KEYID")

    if options.sponsoring:
        options.build = True
        options.upload = "ubuntu"

    return (options, bug_number)


def main():
    script_name = os.path.basename(sys.argv[0])
    args = parse(script_name)
    (options, bug_number) = parse(script_name)

    builder = get_builder(args.builder)
    builder = get_builder(options.builder)
    if not builder:
        sys.exit(1)

    if not args.upload and not args.workdir:
        Logger.error("Please specify either a working directory or an upload target!")
    if not options.upload and not options.workdir:
        Logger.error("Please specify either a working directory or an upload "
                     "target!")
        sys.exit(1)

    if args.workdir is None:
        workdir = tempfile.mkdtemp(prefix=script_name + "-")
    if options.workdir is None:
        workdir = tempfile.mkdtemp(prefix=script_name+"-")
    else:
        workdir = args.workdir
        workdir = options.workdir

    try:
        sponsor_patch(
            args.bug_number,
            args.build,
            builder,
            args.edit,
            args.keyid,
            args.lpinstance,
            args.update,
            args.upload,
            workdir,
        )
        sponsor_patch(bug_number, options.build, builder, options.edit,
                      options.keyid, options.lpinstance, options.update,
                      options.upload, workdir)
    except KeyboardInterrupt:
        Logger.error("User abort.")
        sys.exit(2)
    finally:
        if args.workdir is None:
        if options.workdir is None:
            shutil.rmtree(workdir)
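The switch to `parser.add_argument("bug_number", type=int, ...)` above lets argparse perform the validation the optparse version did by hand with `isdigit()` and a manual `int()` conversion. In brief (a standalone sketch, not the script itself):

    from argparse import ArgumentParser

    parser = ArgumentParser()
    parser.add_argument("bug_number", type=int)

    print(parser.parse_args(["12345"]).bug_number)  # -> 12345, already an int
    # parser.parse_args(["abc"]) exits with:
    #   error: argument bug_number: invalid int value: 'abc'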
190
submittodebian
@ -22,36 +22,32 @@
#
# ##################################################################

"""Submit the Ubuntu changes in a package to Debian.

Run inside an unpacked Ubuntu source package.
"""

import argparse
import optparse
import os
import re
import shutil
import sys
from subprocess import DEVNULL, PIPE, Popen, call, check_call, run

from subprocess import call, check_call, run, Popen, PIPE, DEVNULL

from tempfile import mkdtemp

from debian.changelog import Changelog
from distro_info import DistroDataOutdated, UbuntuDistroInfo
from distro_info import UbuntuDistroInfo, DistroDataOutdated

from ubuntutools.config import ubu_email
from ubuntutools.question import YesNoQuestion, EditFile
from ubuntutools.update_maintainer import update_maintainer, restore_maintainer

from ubuntutools import getLogger
from ubuntutools.config import ubu_email
from ubuntutools.question import EditFile, YesNoQuestion
from ubuntutools.update_maintainer import restore_maintainer, update_maintainer

Logger = getLogger()


def get_most_recent_debian_version(changelog):
    for block in changelog:
        version = block.version.full_version
        if not re.search("(ubuntu|build)", version):
        if not re.search('(ubuntu|build)', version):
            return version
    return None


def get_bug_body(changelog):
@ -69,20 +65,19 @@ In Ubuntu, the attached patch was applied to achieve the following:
%s

Thanks for considering the patch.
""" % (
        "\n".join(entry.changes())
    )
""" % ("\n".join([a for a in entry.changes()]))
    return msg


def build_source_package():
    if os.path.isdir(".bzr"):
        cmd = ["bzr", "bd", "--builder=dpkg-buildpackage", "-S", "--", "-uc", "-us", "-nc"]
    if os.path.isdir('.bzr'):
        cmd = ['bzr', 'bd', '--builder=dpkg-buildpackage', '-S',
               '--', '-uc', '-us', '-nc']
    else:
        cmd = ["dpkg-buildpackage", "-S", "-uc", "-us", "-nc"]
        cmd = ['dpkg-buildpackage', '-S', '-uc', '-us', '-nc']
    env = os.environ.copy()
    # Unset DEBEMAIL in case there's an @ubuntu.com e-mail address
    env.pop("DEBEMAIL", None)
    env.pop('DEBEMAIL', None)
    check_call(cmd, env=env)


@ -93,35 +88,30 @@ def gen_debdiff(tmpdir, changelog):
    newver = next(changelog_it).version
    oldver = next(changelog_it).version

    debdiff = os.path.join(tmpdir, f"{pkg}_{newver}.debdiff")
    debdiff = os.path.join(tmpdir, '%s_%s.debdiff' % (pkg, newver))

    diff_cmd = ["bzr", "diff", "-r", "tag:" + str(oldver)]
    diff_cmd = ['bzr', 'diff', '-r', 'tag:' + str(oldver)]
    if call(diff_cmd, stdout=DEVNULL, stderr=DEVNULL) == 1:
        Logger.info("Extracting bzr diff between %s and %s", oldver, newver)
        Logger.info("Extracting bzr diff between %s and %s" % (oldver, newver))
    else:
        if oldver.epoch is not None:
            oldver = str(oldver)[str(oldver).index(":") + 1 :]
            oldver = str(oldver)[str(oldver).index(":") + 1:]
        if newver.epoch is not None:
            newver = str(newver)[str(newver).index(":") + 1 :]
            newver = str(newver)[str(newver).index(":") + 1:]

        olddsc = f"../{pkg}_{oldver}.dsc"
        newdsc = f"../{pkg}_{newver}.dsc"
        olddsc = '../%s_%s.dsc' % (pkg, oldver)
        newdsc = '../%s_%s.dsc' % (pkg, newver)

        check_file(olddsc)
        check_file(newdsc)

        Logger.info("Generating debdiff between %s and %s", oldver, newver)
        diff_cmd = ["debdiff", olddsc, newdsc]
        Logger.info("Generating debdiff between %s and %s" % (oldver, newver))
        diff_cmd = ['debdiff', olddsc, newdsc]

    with Popen(diff_cmd, stdout=PIPE, encoding="utf-8") as diff:
        with open(debdiff, "w", encoding="utf-8") as debdiff_f:
            run(
                ["filterdiff", "-x", "*changelog*"],
                check=False,
                stdin=diff.stdout,
                stdout=debdiff_f,
                encoding="utf-8",
            )
    with Popen(diff_cmd, stdout=PIPE, encoding='utf-8') as diff:
        with open(debdiff, 'w', encoding='utf-8') as debdiff_f:
            run(['filterdiff', '-x', '*changelog*'],
                stdin=diff.stdout, stdout=debdiff_f, encoding='utf-8')

    return debdiff

@ -129,10 +119,11 @@ def gen_debdiff(tmpdir, changelog):
def check_file(fname, critical=True):
    if os.path.exists(fname):
        return fname
    if not critical:
        return False
    Logger.info("Couldn't find «%s».\n", fname)
    sys.exit(1)
    else:
        if not critical:
            return False
        Logger.info("Couldn't find «%s».\n" % fname)
        sys.exit(1)


def submit_bugreport(body, debdiff, deb_version, changelog):
@ -140,84 +131,76 @@ def submit_bugreport(body, debdiff, deb_version, changelog):
        devel = UbuntuDistroInfo().devel()
    except DistroDataOutdated as e:
        Logger.info(str(e))
        devel = ""
        devel = ''

    if os.path.dirname(sys.argv[0]).startswith("/usr/bin"):
        editor_path = "/usr/share/ubuntu-dev-tools"
    if os.path.dirname(sys.argv[0]).startswith('/usr/bin'):
        editor_path = '/usr/share/ubuntu-dev-tools'
    else:
        editor_path = os.path.dirname(sys.argv[0])
    env = dict(os.environ.items())
    if "EDITOR" in env:
        env["UDT_EDIT_WRAPPER_EDITOR"] = env["EDITOR"]
    if "VISUAL" in env:
        env["UDT_EDIT_WRAPPER_VISUAL"] = env["VISUAL"]
    env["EDITOR"] = os.path.join(editor_path, "enforced-editing-wrapper")
    env["VISUAL"] = os.path.join(editor_path, "enforced-editing-wrapper")
    env["UDT_EDIT_WRAPPER_TEMPLATE_RE"] = ".*REPLACE THIS WITH ACTUAL INFORMATION.*"
    env["UDT_EDIT_WRAPPER_FILE_DESCRIPTION"] = "bug report"
    if 'EDITOR' in env:
        env['UDT_EDIT_WRAPPER_EDITOR'] = env['EDITOR']
    if 'VISUAL' in env:
        env['UDT_EDIT_WRAPPER_VISUAL'] = env['VISUAL']
    env['EDITOR'] = os.path.join(editor_path, 'enforced-editing-wrapper')
    env['VISUAL'] = os.path.join(editor_path, 'enforced-editing-wrapper')
    env['UDT_EDIT_WRAPPER_TEMPLATE_RE'] = (
        '.*REPLACE THIS WITH ACTUAL INFORMATION.*')
    env['UDT_EDIT_WRAPPER_FILE_DESCRIPTION'] = 'bug report'

    # In external mua mode, attachments are lost (Reportbug bug: #679907)
    internal_mua = True
    for cfgfile in ("/etc/reportbug.conf", "~/.reportbugrc"):
    for cfgfile in ('/etc/reportbug.conf', '~/.reportbugrc'):
        cfgfile = os.path.expanduser(cfgfile)
        if not os.path.exists(cfgfile):
            continue
        with open(cfgfile, "r", encoding="utf-8") as f:
        with open(cfgfile, 'r') as f:
            for line in f:
                line = line.strip()
                if line in ("gnus", "mutt", "nmh") or line.startswith("mua "):
                if line in ('gnus', 'mutt', 'nmh') or line.startswith('mua '):
                    internal_mua = False
                    break

    cmd = (
        "reportbug",
        "--no-check-available",
        "--no-check-installed",
        "--pseudo-header",
        "User: ubuntu-devel@lists.ubuntu.com",
        "--pseudo-header",
        f"Usertags: origin-ubuntu {devel} ubuntu-patch",
        "--tag",
        "patch",
        "--bts",
        "debian",
        "--include",
        body,
        "--attach" if internal_mua else "--include",
        debdiff,
        "--package-version",
        deb_version,
        changelog.package,
    )
    cmd = ('reportbug',
           '--no-check-available',
           '--no-check-installed',
           '--pseudo-header', 'User: ubuntu-devel@lists.ubuntu.com',
           '--pseudo-header', 'Usertags: origin-ubuntu %s ubuntu-patch'
                              % devel,
           '--tag', 'patch',
           '--bts', 'debian',
           '--include', body,
           '--attach' if internal_mua else '--include', debdiff,
           '--package-version', deb_version,
           changelog.package)
    check_call(cmd, env=env)


def check_reportbug_config():
    reportbugrc_filename = os.path.expanduser("~/.reportbugrc")
    if os.path.exists(reportbugrc_filename):
    fn = os.path.expanduser('~/.reportbugrc')
    if os.path.exists(fn):
        return
    email = ubu_email()[1]
    reportbugrc = f"""# Reportbug configuration generated by submittodebian(1)
    reportbugrc = """# Reportbug configuration generated by submittodebian(1)
# See reportbug.conf(5) for the configuration file format.

# Use Debian's reportbug SMTP Server:
# Note: it's limited to 5 connections per hour, and cannot CC you at submission
# time. See /usr/share/doc/reportbug/README.Users.gz for more details.
smtphost reportbug.debian.org:587
header "X-Debbugs-CC: {email}"
header "X-Debbugs-CC: %s"
no-cc

# Use GMail's SMTP Server:
#smtphost smtp.googlemail.com:587
#smtpuser "<your address>@gmail.com"
#smtptls
"""
""" % email

    with open(reportbugrc_filename, "w", encoding="utf-8") as f:
    with open(fn, 'w') as f:
        f.write(reportbugrc)

    Logger.info(
        """\
    Logger.info("""\
You have not configured reportbug. Assuming this is the first time you have
used it. Writing a ~/.reportbugrc that will use Debian's mail server, and CC
the bug to you at <%s>
@ -228,43 +211,40 @@ the bug to you at <%s>

If this is not correct, please exit now and edit ~/.reportbugrc or run
reportbug --configure for its configuration wizard.
""",
        email,
        reportbugrc.strip(),
    )
""" % (email, reportbugrc.strip()))

    if YesNoQuestion().ask("Continue submitting this bug", "yes") == "no":
        sys.exit(1)


def main():
    parser = argparse.ArgumentParser(description=__doc__)
    description = 'Submit the Ubuntu changes in a package to Debian. ' + \
                  'Run inside an unpacked Ubuntu source package.'
    parser = optparse.OptionParser(description=description)
    parser.parse_args()

    if not os.path.exists("/usr/bin/reportbug"):
        Logger.error(
            "This utility requires the «reportbug» package, which isn't currently installed."
        )
    if not os.path.exists('/usr/bin/reportbug'):
        Logger.error("This utility requires the «reportbug» package, which isn't "
                     "currently installed.")
        sys.exit(1)

    check_reportbug_config()
    changelog_file = check_file("debian/changelog", critical=False) or check_file(
        "../debian/changelog"
    )
    with open(changelog_file, encoding="utf-8") as f:
    changelog_file = (check_file('debian/changelog', critical=False) or
                      check_file('../debian/changelog'))
    with open(changelog_file) as f:
        changelog = Changelog(f.read())

    deb_version = get_most_recent_debian_version(changelog)
    bug_body = get_bug_body(changelog)

    tmpdir = mkdtemp()
    body = os.path.join(tmpdir, "bug_body")
    with open(body, "wb") as f:
        f.write(bug_body.encode("utf-8"))
    body = os.path.join(tmpdir, 'bug_body')
    with open(body, 'wb') as f:
        f.write(bug_body.encode('utf-8'))

    restore_maintainer("debian")
    restore_maintainer('debian')
    build_source_package()
    update_maintainer("debian")
    update_maintainer('debian')

    debdiff = gen_debdiff(tmpdir, changelog)

@ -272,7 +252,7 @@ def main():
    # reverted in the most recent build
    build_source_package()

    EditFile(debdiff, "debdiff").edit(optional=True)
    EditFile(debdiff, 'debdiff').edit(optional=True)

    submit_bugreport(body, debdiff, deb_version, changelog)
    os.unlink(body)
@ -280,5 +260,5 @@ def main():
    shutil.rmtree(tmpdir)


if __name__ == "__main__":
if __name__ == '__main__':
    main()
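The epoch handling in `gen_debdiff()` above exists because .dsc file names never carry the epoch, so a version like `1:2.3-4` must be reduced to `2.3-4` before building the `../pkg_version.dsc` paths. The slice in isolation (the version string here is hypothetical):

    version = "1:2.3-4"  # a Debian version with an epoch
    print(version[version.index(":") + 1:])  # -> 2.3-4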
845
syncpackage
File diff suppressed because it is too large
482
ubuntu-build
@@ -2,16 +2,16 @@
#
#   ubuntu-build - command line interface for Launchpad buildd operations.
#
#   Copyright (C) 2007-2024 Canonical Ltd.
#   Copyright (C) 2007 Canonical Ltd.
#   Authors:
#    - Martin Pitt <martin.pitt@canonical.com>
#    - Jonathan Davies <jpds@ubuntu.com>
#    - Michael Bienia <geser@ubuntu.com>
#    - Steve Langasek <steve.langasek@canonical.com>
#
#   This program is free software: you can redistribute it and/or modify
#   it under the terms of the GNU General Public License as published by
#   the Free Software Foundation, version 3 of the License.
#   the Free Software Foundation, either version 3 of the License, or
#   (at your option) any later version.
#
#   This program is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
@@ -22,181 +22,106 @@
#   along with this program. If not, see <http://www.gnu.org/licenses/>.
#

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
# Our modules to import.
import sys

import lazr.restfulclient.errors
from launchpadlib.launchpad import Launchpad

from ubuntutools import getLogger
from ubuntutools.lp.udtexceptions import PocketDoesNotExistError
from optparse import OptionGroup
from optparse import OptionParser
from ubuntutools.lp.udtexceptions import (SeriesNotFoundException,
                                          PackageNotFoundException,
                                          PocketDoesNotExistError,)
from ubuntutools.lp.lpapicache import Distribution, PersonTeam
from ubuntutools.misc import split_release_pocket

from ubuntutools import getLogger
Logger = getLogger()


def get_build_states(pkg, archs):
    res = []

    for build in pkg.getBuilds():
        if build.arch_tag in archs:
            res.append(f"  {build.arch_tag}: {build.buildstate}")
    msg = "\n".join(res)
    return f"Build state(s) for '{pkg.source_package_name}':\n{msg}"


def rescore_builds(pkg, archs, score):
    res = []

    for build in pkg.getBuilds():
        arch = build.arch_tag
        if arch in archs:
            if not build.can_be_rescored:
                continue
            try:
                build.rescore(score=score)
                res.append(f"  {arch}: done")
            except lazr.restfulclient.errors.Unauthorized:
                Logger.error(
                    "You don't have the permissions to rescore builds."
                    " Ignoring your rescore request."
                )
                return None
            except lazr.restfulclient.errors.BadRequest:
                Logger.info("Cannot rescore build of %s on %s.", build.source_package_name, arch)
                res.append(f"  {arch}: failed")

    msg = "\n".join(res)
    return f"Rescoring builds of '{pkg.source_package_name}' to {score}:\n{msg}"


def retry_builds(pkg, archs):
    res = []
    for build in pkg.getBuilds():
        arch = build.arch_tag
        if arch in archs:
            try:
                build.retry()
                res.append(f"  {arch}: done")
            except lazr.restfulclient.errors.BadRequest:
                res.append(f"  {arch}: failed")
    msg = "\n".join(res)
    return f"Retrying builds of '{pkg.source_package_name}':\n{msg}"


def main():
    # Usage.
    usage = "%(prog)s <srcpackage> <release> <operation>\n\n"
    usage = "%prog <srcpackage> <release> <operation>\n\n"
    usage += "Where operation may be one of: rescore, retry, or status.\n"
    usage += "Only Launchpad Buildd Admins may rescore package builds."

    # Valid architectures.
    valid_archs = set(
        ["armhf", "arm64", "amd64", "i386", "powerpc", "ppc64el", "riscv64", "s390x"]
    )
    valid_archs = set([
        "armel", "armhf", "arm64", "amd64", "hppa", "i386", "ia64",
        "lpia", "powerpc", "ppc64el", "riscv64", "s390x", "sparc",
    ])

    # Prepare our option parser.
    parser = argparse.ArgumentParser(usage=usage)
    opt_parser = OptionParser(usage)

    parser.add_argument(
        "-a",
        "--arch",
        action="append",
        dest="architecture",
        help=f"Rebuild or rescore a specific architecture. Valid architectures "
        f"include: {', '.join(valid_archs)}.",
    )

    parser.add_argument("-A", "--archive", help="operate on ARCHIVE", default="ubuntu")
    # Retry options
    retry_rescore_options = OptionGroup(opt_parser, "Retry and rescore options",
                                        "These options may only be used with "
                                        "the 'retry' and 'rescore' operations.")
    retry_rescore_options.add_option("-a", "--arch", type="string",
                                     action="append", dest="architecture",
                                     help="Rebuild or rescore a specific "
                                          "architecture. Valid architectures "
                                          "include: %s." %
                                          ", ".join(valid_archs))

    # Batch processing options
    batch_options = parser.add_argument_group(
        "Batch processing",
        "These options and parameter ordering is only "
        "available in --batch mode.\nUsage: "
        "ubuntu-build --batch [options] <package>...",
    )
    batch_options.add_argument(
        "--batch", action="store_true", dest="batch", help="Enable batch mode"
    )
    batch_options.add_argument(
        "--series",
        action="store",
        dest="series",
        help="Selects the Ubuntu series to operate on (default: current development series)",
    )
    batch_options.add_argument(
        "--retry", action="store_true", dest="retry", help="Retry builds (give-back)."
    )
    batch_options.add_argument(
        "--rescore",
        action="store",
        dest="priority",
        type=int,
        help="Rescore builds to <priority>.",
    )
    batch_options.add_argument(
        "--state",
        action="store",
        dest="state",
        help="Act on builds that are in the specified state",
    )
    batch_options = OptionGroup(opt_parser, "Batch processing",
                                "These options and parameter ordering is only "
                                "available in --batch mode.\nUsage: "
                                "ubuntu-build --batch [options] <package>...")
    batch_options.add_option('--batch',
                             action='store_true', dest='batch', default=False,
                             help='Enable batch mode')
    batch_options.add_option('--series',
                             action='store', dest='series', type='string',
                             help='Selects the Ubuntu series to operate on '
                                  '(default: current development series)')
    batch_options.add_option('--retry',
                             action='store_true', dest='retry', default=False,
                             help='Retry builds (give-back).')
    batch_options.add_option('--rescore',
                             action='store', dest='priority', type='int',
                             help='Rescore builds to <priority>.')
    batch_options.add_option('--arch2', action='append', dest='architecture',
                             type='string',
                             help="Affect only 'architecture' (can be used "
                                  "several times). Valid architectures are: %s."
                                  % ', '.join(valid_archs))

    parser.add_argument("packages", metavar="package", nargs="*", help=argparse.SUPPRESS)
    # Add the retry options to the main group.
    opt_parser.add_option_group(retry_rescore_options)
    # Add the batch mode to the main group.
    opt_parser.add_option_group(batch_options)

    # Parse our options.
    args = parser.parse_args()
    (options, args) = opt_parser.parse_args()

    launchpad = Launchpad.login_with("ubuntu-dev-tools", "production", version="devel")
    ubuntu = launchpad.distributions["ubuntu"]

    if args.batch:
        release = args.series
        if not release:
            # ppas don't have a proposed pocket so just use the release pocket;
            # but for the main archive we default to -proposed
            release = ubuntu.getDevelopmentSeries()[0].name
            if args.archive == "ubuntu":
                release = f"{release}-proposed"
        try:
            (release, pocket) = split_release_pocket(release)
        except PocketDoesNotExistError as error:
            Logger.error(error)
            sys.exit(1)
    else:
        # Check we have the correct number of arguments.
        if len(args.packages) < 3:
            parser.error("Incorrect number of arguments.")

        try:
            package = str(args.packages[0]).lower()
            release = str(args.packages[1]).lower()
            operation = str(args.packages[2]).lower()
        except IndexError:
            parser.print_help()
            sys.exit(1)

        archive = launchpad.archives.getByReference(reference=args.archive)
        try:
            distroseries = ubuntu.getSeries(name_or_version=release)
        except lazr.restfulclient.errors.NotFound as error:
            Logger.error(error)
    if not len(args):
        opt_parser.print_help()
        sys.exit(1)

    if not args.batch:
    if not options.batch:
        # Check we have the correct number of arguments.
        if len(args) < 3:
            opt_parser.error("Incorrect number of arguments.")

        try:
            package = str(args[0]).lower()
            release = str(args[1]).lower()
            op = str(args[2]).lower()
        except IndexError:
            opt_parser.print_help()
            sys.exit(1)

        # Check our operation.
        if operation not in ("rescore", "retry", "status"):
            Logger.error("Invalid operation: %s.", operation)
        if op not in ("rescore", "retry", "status"):
            Logger.error("Invalid operation: %s." % op)
            sys.exit(1)

        # If the user has specified an architecture to build, we only wish to
        # rebuild it and nothing else.
        if args.architecture:
            if args.architecture[0] not in valid_archs:
                Logger.error("Invalid architecture specified: %s.", args.architecture[0])
        if options.architecture:
            if options.architecture[0] not in valid_archs:
                Logger.error("Invalid architecture specified: %s."
                             % options.architecture[0])
                sys.exit(1)
            else:
                one_arch = True
@@ -210,239 +135,148 @@ def main():
            Logger.error(error)
            sys.exit(1)

        # Get the ubuntu archive
        try:
            ubuntu_archive = Distribution('ubuntu').getArchive()
        # Will fail here if we have no credentials, bail out
        except IOError:
            sys.exit(1)
        # Get list of published sources for package in question.
        try:
            sources = archive.getPublishedSources(
                distro_series=distroseries,
                exact_match=True,
                pocket=pocket,
                source_name=package,
                status="Published",
            )[0]
        except IndexError:
            Logger.error("No publication found for package %s", package)
            sources = ubuntu_archive.getSourcePackage(package, release, pocket)
            distroseries = Distribution('ubuntu').getSeries(release)
        except (SeriesNotFoundException, PackageNotFoundException) as error:
            Logger.error(error)
            sys.exit(1)
        # Get list of builds for that package.
        builds = sources.getBuilds()

        # Find out the version and component in given release.
        version = sources.source_package_version
        component = sources.component_name
        version = sources.getVersion()
        component = sources.getComponent()

        # Operations that are remaining may only be done by Ubuntu developers
        # (retry) or buildd admins (rescore). Check if the proper permissions
        # are in place.
        if operation == "retry":
            necessary_privs = archive.checkUpload(
                component=sources.getComponent(),
                distroseries=distroseries,
                person=launchpad.me,
                pocket=pocket,
                sourcepackagename=sources.getPackageName(),
            )
            if not necessary_privs:
                Logger.error(
                    "You cannot perform the %s operation on a %s package as you"
                    " do not have the permissions to do this action.",
                    operation,
                    component,
                )
                sys.exit(1)
        me = PersonTeam.me
        if op == "rescore":
            necessary_privs = me.isLpTeamMember('launchpad-buildd-admins')
        if op == "retry":
            necessary_privs = me.canUploadPackage(ubuntu_archive, distroseries,
                                                  sources.getPackageName(),
                                                  sources.getComponent(),
                                                  pocket=pocket)

        if op in ('rescore', 'retry') and not necessary_privs:
            Logger.error("You cannot perform the %s operation on a %s "
                         "package as you do not have the permissions "
                         "to do this action." % (op, component))
            sys.exit(1)

        # Output details.
        Logger.info(
            "The source version for '%s' in %s (%s) is at %s.",
            package,
            release.capitalize(),
            component,
            version,
        )
        Logger.info("The source version for '%s' in %s (%s) is at %s." %
                    (package, release.capitalize(), component, version))

        Logger.info("Current build status for this package:")

        # Output list of arches for package and their status.
        done = False
        for build in builds:
            if one_arch and build.arch_tag != args.architecture[0]:
            if one_arch and build.arch_tag != options.architecture[0]:
                # Skip this architecture.
                continue

            done = True
            Logger.info("%s: %s.", build.arch_tag, build.buildstate)
            if operation == "rescore":
            Logger.info("%s: %s." % (build.arch_tag, build.buildstate))
            if op == 'rescore':
                if build.can_be_rescored:
                    # FIXME: make priority an option
                    priority = 5000
                    Logger.info("Rescoring build %s to %d...", build.arch_tag, priority)
                    try:
                        build.rescore(score=priority)
                    except lazr.restfulclient.errors.Unauthorized:
                        Logger.error(
                            "You don't have the permissions to rescore builds."
                            " Ignoring your rescore request."
                        )
                        break
                    Logger.info('Rescoring build %s to %d...' % (build.arch_tag, priority))
                    build.rescore(score=priority)
                else:
                    Logger.info("Cannot rescore build on %s.", build.arch_tag)
            if operation == "retry":
                    Logger.info('Cannot rescore build on %s.' % build.arch_tag)
            if op == 'retry':
                if build.can_be_retried:
                    Logger.info("Retrying build on %s...", build.arch_tag)
                    Logger.info('Retrying build on %s...' % build.arch_tag)
                    build.retry()
                else:
                    Logger.info("Cannot retry build on %s.", build.arch_tag)
                    Logger.info('Cannot retry build on %s.' % build.arch_tag)

        # We are done
        if done:
            sys.exit(0)

        Logger.info("No builds for '%s' found in the %s release", package, release.capitalize())
        Logger.info("No builds for '%s' found in the %s release" % (package, release.capitalize()))
        Logger.info("It may have been built in a former release.")
        sys.exit(0)

    # Batch mode

    if not args.architecture:
    if not options.architecture:
        # no specific architectures specified, assume all valid ones
        archs = valid_archs
    else:
        archs = set(args.architecture)
        archs = set(options.architecture)

    # filter out duplicate and invalid architectures
    archs.intersection_update(valid_archs)

    if not args.packages:
        retry_count = 0
        can_rescore = True
    release = options.series
    if not release:
        release = (Distribution('ubuntu').getDevelopmentSeries().name
                   + '-proposed')
    try:
        (release, pocket) = split_release_pocket(release)
    except PocketDoesNotExistError as error:
        Logger.error(error)
        sys.exit(1)

        if not args.state:
            if args.retry:
                args.state = "Failed to build"
            elif args.priority:
                args.state = "Needs building"
        # there is no equivalent to series.getBuildRecords() for a ppa.
        # however, we don't want to have to traverse all build records for
        # all series when working on the main archive, so we use
        # series.getBuildRecords() for ubuntu and handle ppas separately
        series = ubuntu.getSeries(name_or_version=release)
        if args.archive == "ubuntu":
            builds = series.getBuildRecords(build_state=args.state, pocket=pocket)
        else:
            builds = []
            for build in archive.getBuildRecords(build_state=args.state, pocket=pocket):
                if not build.current_source_publication:
                    continue
                if build.current_source_publication.distro_series == series:
                    builds.append(build)
        for build in builds:
            if build.arch_tag not in archs:
                continue
            if not build.current_source_publication:
                continue
            # fixme: refactor
            # Check permissions (part 2): check upload permissions for the
            # source package
            can_retry = args.retry and archive.checkUpload(
                component=build.current_source_publication.component_name,
                distroseries=series,
                person=launchpad.me,
                pocket=pocket,
                sourcepackagename=build.source_package_name,
            )
            if args.retry and not can_retry:
                Logger.error(
                    "You don't have the permissions to retry the build of '%s', skipping.",
                    build.source_package_name,
                )
                continue
            Logger.info(
                "The source version for '%s' in '%s' (%s) is: %s",
                build.source_package_name,
                release,
                pocket,
                build.source_package_version,
            )
    ubuntu_archive = Distribution('ubuntu').getArchive()
    try:
        distroseries = Distribution('ubuntu').getSeries(release)
    except SeriesNotFoundException as error:
        Logger.error(error)
        sys.exit(1)
    me = PersonTeam.me

            if args.retry and build.can_be_retried:
                Logger.info(
                    "Retrying build of %s on %s...", build.source_package_name, build.arch_tag
                )
                try:
                    build.retry()
                    retry_count += 1
                except lazr.restfulclient.errors.BadRequest:
                    Logger.info(
                        "Failed to retry build of %s on %s",
                        build.source_package_name,
                        build.arch_tag,
                    )
    # Check permisions (part 1): Rescoring can only be done by buildd admins
    can_rescore = ((options.priority
                    and me.isLpTeamMember('launchpad-buildd-admins'))
                   or False)
    if options.priority and not can_rescore:
        Logger.error("You don't have the permissions to rescore "
                     "builds. Ignoring your rescore request.")

            if args.priority and can_rescore:
                if build.can_be_rescored:
                    try:
                        build.rescore(score=args.priority)
                    except lazr.restfulclient.errors.Unauthorized:
                        Logger.error(
                            "You don't have the permissions to rescore builds."
                            " Ignoring your rescore request."
                        )
                        can_rescore = False
                    except lazr.restfulclient.errors.BadRequest:
                        Logger.info(
                            "Cannot rescore build of %s on %s.",
                            build.source_package_name,
                            build.arch_tag,
                        )

        Logger.info("")
        if args.retry:
            Logger.info("%d package builds retried", retry_count)
        sys.exit(0)

    for pkg in args.packages:
    for pkg in args:
        try:
            pkg = archive.getPublishedSources(
                distro_series=distroseries,
                exact_match=True,
                pocket=pocket,
                source_name=pkg,
                status="Published",
            )[0]
        except IndexError:
            Logger.error("No publication found for package %s", pkg)
            pkg = ubuntu_archive.getSourcePackage(pkg, release, pocket)
        except PackageNotFoundException as error:
            Logger.error(error)
            continue

        # Check permissions (part 2): check upload permissions for the source
        # package
        can_retry = args.retry and archive.checkUpload(
            component=pkg.component_name,
            distroseries=distroseries,
            person=launchpad.me,
            pocket=pocket,
            sourcepackagename=pkg.source_package_name,
        )
        if args.retry and not can_retry:
            Logger.error(
                "You don't have the permissions to retry the "
                "build of '%s'. Ignoring your request.",
                pkg.source_package_name,
            )
        can_retry = options.retry and me.canUploadPackage(ubuntu_archive,
                                                          distroseries,
                                                          pkg.getPackageName(),
                                                          pkg.getComponent())
        if options.retry and not can_retry:
            Logger.error("You don't have the permissions to retry the "
                         "build of '%s'. Ignoring your request."
                         % pkg.getPackageName())

        Logger.info(
            "The source version for '%s' in '%s' (%s) is: %s",
            pkg.source_package_name,
            release,
            pocket,
            pkg.source_package_version,
        )
        Logger.info("The source version for '%s' in '%s' (%s) is: %s" %
                    (pkg.getPackageName(), release, pocket, pkg.getVersion()))

        Logger.info(get_build_states(pkg, archs))
        Logger.info(pkg.getBuildStates(archs))
        if can_retry:
            Logger.info(retry_builds(pkg, archs))
        if args.priority:
            Logger.info(rescore_builds(pkg, archs, args.priority))
            Logger.info(pkg.retryBuilds(archs))
        if options.priority and can_rescore:
            Logger.info(pkg.rescoreBuilds(archs, options.priority))

        Logger.info("")
        Logger.info('')


if __name__ == "__main__":
if __name__ == '__main__':
    main()
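The hunk above ports ubuntu-build from optparse to argparse: OptionGroup becomes add_argument_group(), the loose positional list returned by opt_parser.parse_args() becomes a declared "packages" argument, and string "int" types become real int. A minimal standalone sketch of that migration pattern (illustrative names only, not the tool's actual code):

import argparse

parser = argparse.ArgumentParser(usage="%(prog)s <srcpackage> <release> <operation>")
# optparse's OptionGroup(parser, title, desc) maps to add_argument_group()
group = parser.add_argument_group("Batch processing", "Only available in --batch mode.")
group.add_argument("--batch", action="store_true", help="Enable batch mode")
group.add_argument("--rescore", dest="priority", type=int, help="Rescore builds to <priority>.")
# optparse left positionals in a separate list; argparse declares them explicitly
parser.add_argument("packages", metavar="package", nargs="*")

args = parser.parse_args(["--batch", "--rescore", "5000", "hello", "coreutils"])
assert args.batch and args.priority == 5000
assert args.packages == ["hello", "coreutils"]

One consequence visible in the diff is that option values and positionals now live on a single namespace (args.priority, args.packages) instead of the (options, args) pair optparse returned.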
36 ubuntu-iso
@@ -20,23 +20,19 @@
#
# ##################################################################

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
import optparse
import subprocess
import sys

from ubuntutools import getLogger

Logger = getLogger()


def extract(iso, path):
    command = ["isoinfo", "-R", "-i", iso, "-x", path]
    pipe = subprocess.run(
        command, check=False, encoding="utf-8", stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    command = ['isoinfo', '-R', '-i', iso, '-x', path]
    pipe = subprocess.run(command, encoding='utf-8',
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)

    if pipe.returncode != 0:
        sys.stderr.write(pipe.stderr)
@@ -46,22 +42,22 @@ def extract(iso, path):


def main():
    desc = "Given an ISO, %(prog)s will display the Ubuntu version information"
    parser = argparse.ArgumentParser(usage="%(prog)s [options] iso...", description=desc)
    parser.add_argument("isos", nargs="*", help=argparse.SUPPRESS)
    args = parser.parse_args()
    desc = 'Given an ISO, %prog will display the Ubuntu version information'
    parser = optparse.OptionParser(usage='%prog [options] iso...',
                                   description=desc)
    isos = parser.parse_args()[1]
    err = False

    for iso in args.isos:
        if len(args.isos) > 1:
            prefix = f"{iso}:"
    for iso in isos:
        if len(isos) > 1:
            prefix = '%s:' % iso
        else:
            prefix = ""
            prefix = ''

        version = extract(iso, "/.disk/info")
        version = extract(iso, '/.disk/info')

        if len(version) == 0:
            Logger.error("%s does not appear to be an Ubuntu ISO", iso)
            Logger.error('%s does not appear to be an Ubuntu ISO' % iso)
            err = True
            continue

@@ -71,6 +67,6 @@ def main():
        sys.exit(1)


if __name__ == "__main__":
if __name__ == '__main__':
    main()
    sys.exit(0)
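The new extract() passes check=False explicitly, so a non-zero exit from the external command is handled by inspecting returncode rather than by catching CalledProcessError. A self-contained sketch of that pattern (using a generic command rather than isoinfo):

import subprocess
import sys

# check=False: a failing command sets returncode instead of raising
pipe = subprocess.run(
    ["ls", "/nonexistent"],
    check=False, encoding="utf-8",
    stdout=subprocess.PIPE, stderr=subprocess.PIPE,
)
if pipe.returncode != 0:
    sys.stderr.write(pipe.stderr)  # surface the captured error output
else:
    print(pipe.stdout)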
ubuntu-upload-permission
@@ -14,159 +14,131 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
import optparse
import sys

from ubuntutools import getLogger
from ubuntutools.lp.lpapicache import (
    Distribution,
    Launchpad,
    PackageNotFoundException,
    Packageset,
    PersonTeam,
    SeriesNotFoundException,
)
from ubuntutools.lp.lpapicache import (Launchpad, Distribution, PersonTeam,
                                       Packageset, PackageNotFoundException,
                                       SeriesNotFoundException)
from ubuntutools.misc import split_release_pocket

from ubuntutools import getLogger
Logger = getLogger()


def parse_arguments():
    """Parse arguments and return (options, package)"""
    parser = argparse.ArgumentParser(usage="%(prog)s [options] package")
    parser.add_argument(
        "-r",
        "--release",
        metavar="RELEASE",
        help="Use RELEASE, rather than the current development release",
    )
    parser.add_argument(
        "-a",
        "--list-uploaders",
        action="store_true",
        help="List all the people/teams with upload rights",
    )
    parser.add_argument(
        "-t",
        "--list-team-members",
        action="store_true",
        help="List all team members of teams with upload rights (implies --list-uploaders)",
    )
    parser.add_argument("package", help=argparse.SUPPRESS)
    args = parser.parse_args()
    '''Parse arguments and return (options, package)'''
    parser = optparse.OptionParser('%prog [options] package')
    parser.add_option('-r', '--release', default=None, metavar='RELEASE',
                      help='Use RELEASE, rather than the current development '
                           'release')
    parser.add_option('-a', '--list-uploaders',
                      default=False, action='store_true',
                      help='List all the people/teams with upload rights')
    parser.add_option('-t', '--list-team-members',
                      default=False, action='store_true',
                      help='List all team members of teams with upload rights '
                           '(implies --list-uploaders)')
    options, args = parser.parse_args()

    if args.list_team_members:
        args.list_uploaders = True
    if len(args) != 1:
        parser.error("One (and only one) package must be specified")
    package = args[0]

    return args
    if options.list_team_members:
        options.list_uploaders = True

    return (options, package)


def main():
    """Query upload permissions"""
    args = parse_arguments()
    '''Query upload permissions'''
    options, package = parse_arguments()
    # Need to be logged in to see uploaders:
    Launchpad.login()

    ubuntu = Distribution("ubuntu")
    ubuntu = Distribution('ubuntu')
    archive = ubuntu.getArchive()
    if args.release is None:
        args.release = ubuntu.getDevelopmentSeries().name
    if options.release is None:
        options.release = ubuntu.getDevelopmentSeries().name
    try:
        release, pocket = split_release_pocket(args.release)
        release, pocket = split_release_pocket(options.release)
        series = ubuntu.getSeries(release)
    except SeriesNotFoundException as e:
        Logger.error(str(e))
        sys.exit(2)

    try:
        spph = archive.getSourcePackage(args.package)
        spph = archive.getSourcePackage(package)
    except PackageNotFoundException as e:
        Logger.error(str(e))
        sys.exit(2)
    component = spph.getComponent()
    if args.list_uploaders and (
        pocket != "Release"
        or series.status in ("Experimental", "Active Development", "Pre-release Freeze")
    ):
        component_uploader = archive.getUploadersForComponent(component_name=component)[0]
        Logger.info("All upload permissions for %s:", args.package)
        Logger.info("")
        Logger.info("Component (%s)", component)
        Logger.info("============%s", "=" * len(component))
        print_uploaders([component_uploader], args.list_team_members)
    if (options.list_uploaders and (pocket != 'Release' or series.status in
                                    ('Experimental', 'Active Development', 'Pre-release Freeze'))):

        packagesets = sorted(
            Packageset.setsIncludingSource(distroseries=series, sourcepackagename=args.package),
            key=lambda p: p.name,
        )
        component_uploader = archive.getUploadersForComponent(
            component_name=component)[0]
        Logger.info("All upload permissions for %s:" % package)
        Logger.info("")
        Logger.info("Component (%s)" % component)
        Logger.info("============" + ("=" * len(component)))
        print_uploaders([component_uploader], options.list_team_members)

        packagesets = sorted(Packageset.setsIncludingSource(
            distroseries=series,
            sourcepackagename=package), key=lambda p: p.name)
        if packagesets:
            Logger.info("")
            Logger.info("Packagesets")
            Logger.info("===========")
            for packageset in packagesets:
                Logger.info("")
                Logger.info("%s:", packageset.name)
                print_uploaders(
                    archive.getUploadersForPackageset(packageset=packageset),
                    args.list_team_members,
                )
                Logger.info("%s:" % packageset.name)
                print_uploaders(archive.getUploadersForPackageset(
                    packageset=packageset), options.list_team_members)

        ppu_uploaders = archive.getUploadersForPackage(source_package_name=args.package)
        ppu_uploaders = archive.getUploadersForPackage(
            source_package_name=package)
        if ppu_uploaders:
            Logger.info("")
            Logger.info("Per-Package-Uploaders")
            Logger.info("=====================")
            Logger.info("")
            print_uploaders(ppu_uploaders, args.list_team_members)
            print_uploaders(ppu_uploaders, options.list_team_members)
        Logger.info("")

    if PersonTeam.me.canUploadPackage(archive, series, args.package, component, pocket):
        Logger.info("You can upload %s to %s.", args.package, args.release)
    if PersonTeam.me.canUploadPackage(archive, series, package, component,
                                      pocket):
        Logger.info("You can upload %s to %s." % (package, options.release))
    else:
        Logger.info("You can not upload %s to %s, yourself.", args.package, args.release)
        if (
            series.status in ("Current Stable Release", "Supported", "Obsolete")
            and pocket == "Release"
        ):
            Logger.info(
                "%s is in the '%s' state. You may want to query the %s-proposed pocket.",
                release,
                series.status,
                release,
            )
        Logger.info("You can not upload %s to %s, yourself." % (package, options.release))
        if (series.status in ('Current Stable Release', 'Supported', 'Obsolete')
                and pocket == 'Release'):
            Logger.info("%s is in the '%s' state. You may want to query the %s-proposed pocket." %
                        (release, series.status, release))
        else:
            Logger.info(
                "But you can still contribute to it via the sponsorship "
                "process: https://wiki.ubuntu.com/SponsorshipProcess"
            )
        if not args.list_uploaders:
            Logger.info(
                "To see who has the necessary upload rights, "
                "use the --list-uploaders option."
            )
            Logger.info("But you can still contribute to it via the sponsorship "
                        "process: https://wiki.ubuntu.com/SponsorshipProcess")
        if not options.list_uploaders:
            Logger.info("To see who has the necessary upload rights, "
                        "use the --list-uploaders option.")
        sys.exit(1)


def print_uploaders(uploaders, expand_teams=False, prefix=""):
def print_uploaders(uploaders, expand_teams=False, prefix=''):
    """Given a list of uploaders, pretty-print them all
    Each line is prefixed with prefix.
    If expand_teams is set, recurse, adding more spaces to prefix on each
    recursion.
    """
    for uploader in sorted(uploaders, key=lambda p: p.display_name):
        Logger.info(
            "%s* %s (%s)%s",
            prefix,
            uploader.display_name,
            uploader.name,
            " [team]" if uploader.is_team else "",
        )
        Logger.info("%s* %s (%s)%s" %
                    (prefix, uploader.display_name, uploader.name,
                     ' [team]' if uploader.is_team else ''))
        if expand_teams and uploader.is_team:
            print_uploaders(uploader.participants, True, prefix=prefix + "  ")
            print_uploaders(uploader.participants, True, prefix=prefix + '  ')


if __name__ == "__main__":
if __name__ == '__main__':
    main()
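print_uploaders() above recurses into teams, widening the indent prefix by two spaces per level so members appear nested under their team. A self-contained sketch of that traversal, with plain dicts standing in for Launchpad person/team objects (hypothetical data, not the real LP API):

def print_uploaders(uploaders, expand_teams=False, prefix=""):
    for uploader in sorted(uploaders, key=lambda p: p["display_name"]):
        tag = " [team]" if uploader["is_team"] else ""
        print(f"{prefix}* {uploader['display_name']} ({uploader['name']}){tag}")
        if expand_teams and uploader["is_team"]:
            # recurse with a wider prefix so members render one level deeper
            print_uploaders(uploader["participants"], True, prefix=prefix + "  ")

team = {"display_name": "MOTU", "name": "motu", "is_team": True,
        "participants": [{"display_name": "Jane Dev", "name": "janedev",
                          "is_team": False, "participants": []}]}
print_uploaders([team], expand_teams=True)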
ubuntutools/__init__.py
@@ -7,8 +7,8 @@ import logging
import sys


def getLogger():  # pylint: disable=invalid-name
    """Get the logger instance for this module
def getLogger():
    ''' Get the logger instance for this module

    Quick guide for using this or not: if you want to call ubuntutools
    module code and have its output print to stdout/stderr ONLY, you can
@@ -33,12 +33,12 @@ def getLogger():  # pylint: disable=invalid-name
    This should only be used by runnable scripts provided by the
    ubuntu-dev-tools package, or other runnable scripts that want the behavior
    described above.
    """
    '''
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)
    logger.propagate = False

    fmt = logging.Formatter("%(message)s")
    fmt = logging.Formatter('%(message)s')

    stdout_handler = logging.StreamHandler(stream=sys.stdout)
    stdout_handler.setFormatter(fmt)
@@ -47,7 +47,7 @@ def getLogger():  # pylint: disable=invalid-name

    stderr_handler = logging.StreamHandler(stream=sys.stderr)
    stderr_handler.setFormatter(fmt)
    stderr_handler.setLevel(logging.INFO + 1)
    stderr_handler.setLevel(logging.INFO+1)
    logger.addHandler(stderr_handler)

    return logger
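The getLogger() hunk keeps the split-stream design: records at INFO and below go to stdout, anything above INFO to stderr, by giving the stderr handler a threshold of logging.INFO + 1. The stdout handler's upper bound sits in lines the hunk elides; the sketch below assumes it is a level filter. A self-contained demonstration of the pattern, independent of ubuntutools:

import logging
import sys

logger = logging.getLogger("split-demo")
logger.setLevel(logging.INFO)
logger.propagate = False
fmt = logging.Formatter("%(message)s")

stdout_handler = logging.StreamHandler(stream=sys.stdout)
stdout_handler.setFormatter(fmt)
# keep INFO and below on stdout (the elided lines presumably do this with a filter)
stdout_handler.addFilter(lambda record: record.levelno <= logging.INFO)
logger.addHandler(stdout_handler)

stderr_handler = logging.StreamHandler(stream=sys.stderr)
stderr_handler.setFormatter(fmt)
stderr_handler.setLevel(logging.INFO + 1)  # WARNING and up go to stderr
logger.addHandler(stderr_handler)

logger.info("goes to stdout")
logger.warning("goes to stderr")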
File diff suppressed because it is too large
ubuntutools/builder.py
@@ -18,10 +18,10 @@
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#

import logging
import os
import subprocess

import logging
Logger = logging.getLogger(__name__)


@@ -31,21 +31,20 @@ def _build_preparation(result_directory):
        os.makedirs(result_directory)


class Builder:
class Builder(object):
    def __init__(self, name):
        self.name = name
        cmd = ["dpkg-architecture", "-qDEB_BUILD_ARCH_CPU"]
        self.architecture = subprocess.check_output(cmd, encoding="utf-8").strip()
        self.architecture = subprocess.check_output(cmd, encoding='utf-8').strip()

    def _build_failure(self, returncode, dsc_file):
        if returncode != 0:
            Logger.error(
                "Failed to build %s from source with %s.", os.path.basename(dsc_file), self.name
            )
            Logger.error("Failed to build %s from source with %s." %
                         (os.path.basename(dsc_file), self.name))
        return returncode

    def exists_in_path(self):
        for path in os.environ.get("PATH", os.defpath).split(os.pathsep):
        for path in os.environ.get('PATH', os.defpath).split(os.pathsep):
            if os.path.isfile(os.path.join(path, self.name)):
                return True
        return False
@@ -58,7 +57,8 @@ class Builder:

    def _update_failure(self, returncode, dist):
        if returncode != 0:
            Logger.error("Failed to update %s chroot for %s.", dist, self.name)
            Logger.error("Failed to update %s chroot for %s." %
                         (dist, self.name))
        return returncode


@@ -68,39 +68,19 @@ class Pbuilder(Builder):

    def build(self, dsc_file, dist, result_directory):
        _build_preparation(result_directory)
        cmd = [
            "sudo",
            "-E",
            f"ARCH={self.architecture}",
            f"DIST={dist}",
            self.name,
            "--build",
            "--architecture",
            self.architecture,
            "--distribution",
            dist,
            "--buildresult",
            result_directory,
            dsc_file,
        ]
        Logger.debug(" ".join(cmd))
        cmd = ["sudo", "-E", "ARCH=" + self.architecture, "DIST=" + dist,
               self.name, "--build",
               "--architecture", self.architecture, "--distribution", dist,
               "--buildresult", result_directory, dsc_file]
        Logger.debug(' '.join(cmd))
        returncode = subprocess.call(cmd)
        return self._build_failure(returncode, dsc_file)

    def update(self, dist):
        cmd = [
            "sudo",
            "-E",
            f"ARCH={self.architecture}",
            f"DIST={dist}",
            self.name,
            "--update",
            "--architecture",
            self.architecture,
            "--distribution",
            dist,
        ]
        Logger.debug(" ".join(cmd))
        cmd = ["sudo", "-E", "ARCH=" + self.architecture, "DIST=" + dist,
               self.name, "--update",
               "--architecture", self.architecture, "--distribution", dist]
        Logger.debug(' '.join(cmd))
        returncode = subprocess.call(cmd)
        return self._update_failure(returncode, dist)

@@ -111,22 +91,15 @@ class Pbuilderdist(Builder):

    def build(self, dsc_file, dist, result_directory):
        _build_preparation(result_directory)
        cmd = [
            self.name,
            dist,
            self.architecture,
            "build",
            dsc_file,
            "--buildresult",
            result_directory,
        ]
        Logger.debug(" ".join(cmd))
        cmd = [self.name, dist, self.architecture,
               "build", dsc_file, "--buildresult", result_directory]
        Logger.debug(' '.join(cmd))
        returncode = subprocess.call(cmd)
        return self._build_failure(returncode, dsc_file)

    def update(self, dist):
        cmd = [self.name, dist, self.architecture, "update"]
        Logger.debug(" ".join(cmd))
        Logger.debug(' '.join(cmd))
        returncode = subprocess.call(cmd)
        return self._update_failure(returncode, dist)

@@ -138,40 +111,41 @@ class Sbuild(Builder):
    def build(self, dsc_file, dist, result_directory):
        _build_preparation(result_directory)
        workdir = os.getcwd()
        Logger.debug("cd %s", result_directory)
        Logger.debug("cd " + result_directory)
        os.chdir(result_directory)
        cmd = ["sbuild", "--arch-all", f"--dist={dist}", f"--arch={self.architecture}", dsc_file]
        Logger.debug(" ".join(cmd))
        cmd = ["sbuild", "--arch-all", "--dist=" + dist,
               "--arch=" + self.architecture, dsc_file]
        Logger.debug(' '.join(cmd))
        returncode = subprocess.call(cmd)
        Logger.debug("cd %s", workdir)
        Logger.debug("cd " + workdir)
        os.chdir(workdir)
        return self._build_failure(returncode, dsc_file)

    def update(self, dist):
        cmd = ["schroot", "--list"]
        Logger.debug(" ".join(cmd))
        process = subprocess.run(cmd, check=False, stdout=subprocess.PIPE, encoding="utf-8")
        Logger.debug(' '.join(cmd))
        process = subprocess.run(cmd, stdout=subprocess.PIPE, encoding='utf-8')
        chroots, _ = process.stdout.strip().split()
        if process.returncode != 0:
            return process.returncode

        params = {"dist": dist, "arch": self.architecture}
        for chroot in (
            "%(dist)s-%(arch)s-sbuild-source",
            "%(dist)s-sbuild-source",
            "%(dist)s-%(arch)s-source",
            "%(dist)s-source",
        ):
        for chroot in ("%(dist)s-%(arch)s-sbuild-source",
                       "%(dist)s-sbuild-source",
                       "%(dist)s-%(arch)s-source",
                       "%(dist)s-source"):
            chroot = chroot % params
            if chroot in chroots:
                break
        else:
            return 1

        commands = [["sbuild-update"], ["sbuild-distupgrade"], ["sbuild-clean", "-a", "-c"]]
        commands = [["sbuild-update"],
                    ["sbuild-distupgrade"],
                    ["sbuild-clean", "-a", "-c"]]
        for cmd in commands:
            # pylint: disable=W0631
            Logger.debug("%s %s", " ".join(cmd), chroot)
            Logger.debug(' '.join(cmd) + " " + chroot)
            ret = subprocess.call(cmd + [chroot])
            # pylint: enable=W0631
            if ret != 0:
@@ -182,9 +156,9 @@ class Sbuild(Builder):
_SUPPORTED_BUILDERS = {
    "cowbuilder": lambda: Pbuilder("cowbuilder"),
    "cowbuilder-dist": lambda: Pbuilderdist("cowbuilder-dist"),
    "pbuilder": Pbuilder,
    "pbuilder-dist": Pbuilderdist,
    "sbuild": Sbuild,
    "pbuilder": lambda: Pbuilder(),
    "pbuilder-dist": lambda: Pbuilderdist(),
    "sbuild": lambda: Sbuild(),
}


@@ -196,5 +170,5 @@ def get_builder(name):
        Logger.error("Builder doesn't appear to be installed: %s", name)
    else:
        Logger.error("Unsupported builder specified: %s.", name)
        Logger.error("Supported builders: %s", ", ".join(sorted(_SUPPORTED_BUILDERS.keys())))
    return None
        Logger.error("Supported builders: %s",
                     ", ".join(sorted(_SUPPORTED_BUILDERS.keys())))
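The last hunk simplifies the builder registry: where a subclass needs no constructor argument, the bare class now serves as its own factory, and only the cowbuilder variants keep a lambda to pass their name. A small sketch of the same registry-of-factories pattern (generic names, not the module's API):

class Builder:
    def __init__(self, name="generic"):
        self.name = name

class Special(Builder):
    pass

# Values only need to be zero-argument callables returning a Builder:
# a bare class works when no argument is needed, a lambda when one is.
SUPPORTED = {
    "special": Special,
    "named": lambda: Builder("named"),
}

def get(kind):
    factory = SUPPORTED.get(kind)
    return factory() if factory else None

assert get("special").name == "generic"
assert get("named").name == "named"
assert get("missing") is None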
ubuntutools/config.py
@@ -15,39 +15,39 @@
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.

import locale
import logging
import os
import pwd
import re
import shlex
import socket
import sys
import locale

import logging
Logger = logging.getLogger(__name__)


class UDTConfig:
class UDTConfig(object):
    """Ubuntu Dev Tools configuration file (devscripts config file) and
    environment variable parsing.
    """

    no_conf = False
    # Package wide configuration variables.
    # These are reqired to be used by at least two scripts.
    defaults = {
        "BUILDER": "pbuilder",
        "DEBIAN_MIRROR": "http://deb.debian.org/debian",
        "DEBSEC_MIRROR": "http://security.debian.org",
        "DEBIAN_DDEBS_MIRROR": "http://debug.mirrors.debian.org/debian-debug",
        "LPINSTANCE": "production",
        "MIRROR_FALLBACK": True,
        "UBUNTU_MIRROR": "http://archive.ubuntu.com/ubuntu",
        "UBUNTU_PORTS_MIRROR": "http://ports.ubuntu.com",
        "UBUNTU_DDEBS_MIRROR": "http://ddebs.ubuntu.com",
        "UPDATE_BUILDER": False,
        "WORKDIR": None,
        "KEYID": None,
        'BUILDER': 'pbuilder',
        'DEBIAN_MIRROR': 'http://deb.debian.org/debian',
        'DEBSEC_MIRROR': 'http://security.debian.org',
        'DEBIAN_DDEBS_MIRROR': 'http://debug.mirrors.debian.org/debian-debug',
        'LPINSTANCE': 'production',
        'MIRROR_FALLBACK': True,
        'UBUNTU_MIRROR': 'http://archive.ubuntu.com/ubuntu',
        'UBUNTU_PORTS_MIRROR': 'http://ports.ubuntu.com',
        'UBUNTU_INTERNAL_MIRROR': 'http://ftpmaster.internal/ubuntu',
        'UBUNTU_DDEBS_MIRROR': 'http://ddebs.ubuntu.com',
        'UPDATE_BUILDER': False,
        'WORKDIR': None,
        'KEYID': None,
    }
    # Populated from the configuration files:
    config = {}
@@ -55,32 +55,30 @@ class UDTConfig:
    def __init__(self, no_conf=False, prefix=None):
        self.no_conf = no_conf
        if prefix is None:
            prefix = os.path.basename(sys.argv[0]).upper().replace("-", "_")
            prefix = os.path.basename(sys.argv[0]).upper().replace('-', '_')
        self.prefix = prefix
        if not no_conf:
            self.config = self.parse_devscripts_config()

    @staticmethod
    def parse_devscripts_config():
    def parse_devscripts_config(self):
        """Read the devscripts configuration files, and return the values as a
        dictionary
        """
        config = {}
        for filename in ("/etc/devscripts.conf", "~/.devscripts"):
        for filename in ('/etc/devscripts.conf', '~/.devscripts'):
            try:
                with open(os.path.expanduser(filename), "r", encoding="utf-8") as f:
                    content = f.read()
                f = open(os.path.expanduser(filename), 'r')
            except IOError:
                continue
            try:
                tokens = shlex.split(content, comments=True)
            except ValueError as e:
                Logger.error("Error parsing %s: %s", filename, e)
                continue
            for token in tokens:
                if "=" in token:
                    key, value = token.split("=", 1)
            for line in f:
                parsed = shlex.split(line, comments=True)
                if len(parsed) > 1:
                    Logger.warning('Cannot parse variable assignment in %s: %s',
                                   getattr(f, 'name', '<config>'), line)
                if len(parsed) >= 1 and '=' in parsed[0]:
                    key, value = parsed[0].split('=', 1)
                    config[key] = value
            f.close()
        return config

    def get_value(self, key, default=None, boolean=False, compat_keys=()):
@@ -97,9 +95,9 @@ class UDTConfig:
        if default is None and key in self.defaults:
            default = self.defaults[key]

        keys = [f"{self.prefix}_{key}"]
        keys = [self.prefix + '_' + key]
        if key in self.defaults:
            keys.append(f"UBUNTUTOOLS_{key}")
            keys.append('UBUNTUTOOLS_' + key)
        keys += compat_keys

        for k in keys:
@@ -107,19 +105,16 @@ class UDTConfig:
            if k in store:
                value = store[k]
                if boolean:
                    if value in ("yes", "no"):
                        value = value == "yes"
                    if value in ('yes', 'no'):
                        value = value == 'yes'
                    else:
                        continue
                if k in compat_keys:
                    replacements = f"{self.prefix}_{key}"
                    replacements = self.prefix + '_' + key
                    if key in self.defaults:
                        replacements += f"or UBUNTUTOOLS_{key}"
                    Logger.warning(
                        "Using deprecated configuration variable %s. You should use %s.",
                        k,
                        replacements,
                    )
                        replacements += 'or UBUNTUTOOLS_' + key
                    Logger.warning('Using deprecated configuration variable %s. '
                                   'You should use %s.', k, replacements)
                return value
        return default

@@ -137,7 +132,7 @@ def ubu_email(name=None, email=None, export=True):

    Return name, email.
    """
    name_email_re = re.compile(r"^\s*(.+?)\s*<(.+@.+)>\s*$")
    name_email_re = re.compile(r'^\s*(.+?)\s*<(.+@.+)>\s*$')

    if email:
        match = name_email_re.match(email)
@@ -145,16 +140,11 @@ def ubu_email(name=None, email=None, export=True):
            name = match.group(1)
            email = match.group(2)

    if export and not name and not email and "UBUMAIL" not in os.environ:
    if export and not name and not email and 'UBUMAIL' not in os.environ:
        export = False

    for var, target in (
        ("UBUMAIL", "email"),
        ("DEBFULLNAME", "name"),
        ("DEBEMAIL", "email"),
        ("EMAIL", "email"),
        ("NAME", "name"),
    ):
    for var, target in (('UBUMAIL', 'email'), ('DEBFULLNAME', 'name'), ('DEBEMAIL', 'email'),
                        ('EMAIL', 'email'), ('NAME', 'name')):
        if name and email:
            break
        if var in os.environ:
@@ -164,30 +154,30 @@ def ubu_email(name=None, email=None, export=True):
                    name = match.group(1)
                if not email:
                    email = match.group(2)
            elif target == "name" and not name:
            elif target == 'name' and not name:
                name = os.environ[var].strip()
            elif target == "email" and not email:
            elif target == 'email' and not email:
                email = os.environ[var].strip()

    if not name:
        gecos_name = pwd.getpwuid(os.getuid()).pw_gecos.split(",")[0].strip()
        gecos_name = pwd.getpwuid(os.getuid()).pw_gecos.split(',')[0].strip()
        if gecos_name:
            name = gecos_name

    if not email:
        mailname = socket.getfqdn()
        if os.path.isfile("/etc/mailname"):
            mailname = open("/etc/mailname", "r", encoding="utf-8").read().strip()
        email = f"{pwd.getpwuid(os.getuid()).pw_name}@{mailname}"
        if os.path.isfile('/etc/mailname'):
            mailname = open('/etc/mailname', 'r').read().strip()
        email = pwd.getpwuid(os.getuid()).pw_name + '@' + mailname

    if export:
        os.environ["DEBFULLNAME"] = name
        os.environ["DEBEMAIL"] = email
        os.environ['DEBFULLNAME'] = name
        os.environ['DEBEMAIL'] = email

    # decode env var or gecos raw string with the current locale's encoding
    encoding = locale.getlocale()[1]
    encoding = locale.getdefaultlocale()[1]
    if not encoding:
        encoding = "utf-8"
        encoding = 'utf-8'
    if name and isinstance(name, bytes):
        name = name.decode(encoding)
    return name, email
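The parse_devscripts_config() rewrite reads each file once and tokenises the whole content with shlex.split(comments=True), instead of the old line-by-line parse. A standalone sketch of the tokenising approach, with a sample config inline rather than read from /etc/devscripts.conf:

import shlex

content = """
# devscripts-style config
UBUNTUTOOLS_BUILDER=sbuild
DEBEMAIL='someone@example.com'  # quotes and comments are handled by shlex
"""

config = {}
for token in shlex.split(content, comments=True):
    # each surviving token looks like KEY=VALUE; shlex has stripped quotes
    if "=" in token:
        key, value = token.split("=", 1)
        config[key] = value

assert config == {"UBUNTUTOOLS_BUILDER": "sbuild", "DEBEMAIL": "someone@example.com"}

Tokenising the whole file also lets a single try/except around shlex.split report unbalanced quotes once per file, which is what the new ValueError handler in the hunk does.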
ubuntutools/lp/__init__.py
@@ -2,5 +2,5 @@
#  ubuntu-dev-tools Launchpad Python modules.
#

SERVICE = "production"
API_VERSION = "devel"
service = 'production'
api_version = 'devel'
File diff suppressed because it is too large
ubuntutools/lp/udtexceptions.py
@@ -1,26 +1,33 @@
class PackageNotFoundException(BaseException):
    """Thrown when a package is not found"""
    """ Thrown when a package is not found """
    pass


class SeriesNotFoundException(BaseException):
    """Thrown when a distroseries is not found"""
    """ Thrown when a distroseries is not found """
    pass


class PocketDoesNotExistError(Exception):
    """Raised when a invalid pocket is used."""
    '''Raised when a invalid pocket is used.'''
    pass


class ArchiveNotFoundException(BaseException):
    """Thrown when an archive for a distibution is not found"""
    """ Thrown when an archive for a distibution is not found """
    pass


class AlreadyLoggedInError(Exception):
    """Raised when a second login is attempted."""
    '''Raised when a second login is attempted.'''
    pass


class ArchSeriesNotFoundException(BaseException):
    """Thrown when a distroarchseries is not found."""
    pass


class InvalidDistroValueError(ValueError):
    """Thrown when distro value is invalid"""
    """ Thrown when distro value is invalid """
    pass
@ -22,49 +22,51 @@
|
||||
#
|
||||
# ##################################################################
|
||||
|
||||
import distro_info
|
||||
import hashlib
|
||||
import locale
|
||||
import logging
|
||||
import os
|
||||
import requests
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from contextlib import suppress
|
||||
from pathlib import Path
|
||||
from subprocess import CalledProcessError, check_output
|
||||
from subprocess import check_output, CalledProcessError
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import distro_info
|
||||
import requests
|
||||
|
||||
from ubuntutools.lp.udtexceptions import PocketDoesNotExistError
|
||||
|
||||
import logging
|
||||
Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
DEFAULT_POCKETS = ("Release", "Security", "Updates", "Proposed")
|
||||
POCKETS = DEFAULT_POCKETS + ("Backports",)
|
||||
DEFAULT_POCKETS = ('Release', 'Security', 'Updates', 'Proposed')
|
||||
POCKETS = DEFAULT_POCKETS + ('Backports',)
|
||||
|
||||
DEFAULT_STATUSES = ("Pending", "Published")
|
||||
STATUSES = DEFAULT_STATUSES + ("Superseded", "Deleted", "Obsolete")
|
||||
DEFAULT_STATUSES = ('Pending', 'Published')
|
||||
STATUSES = DEFAULT_STATUSES + ('Superseded', 'Deleted', 'Obsolete')
|
||||
|
||||
UPLOAD_QUEUE_STATUSES = ("New", "Unapproved", "Accepted", "Done", "Rejected")
|
||||
UPLOAD_QUEUE_STATUSES = ('New', 'Unapproved', 'Accepted', 'Done', 'Rejected')
|
||||
|
||||
DOWNLOAD_BLOCKSIZE_DEFAULT = 8192
|
||||
|
||||
_SYSTEM_DISTRIBUTION_CHAIN: list[str] = []
|
||||
_system_distribution_chain = []
|
||||
|
||||
|
||||
class DownloadError(Exception):
|
||||
"Unable to pull a source package"
|
||||
pass
|
||||
|
||||
|
||||
class NotFoundError(DownloadError):
|
||||
"Source package not found"
|
||||
pass
|
||||
|
||||
|
||||
def system_distribution_chain():
|
||||
"""system_distribution_chain() -> [string]
|
||||
""" system_distribution_chain() -> [string]
|
||||
|
||||
Detect the system's distribution as well as all of its parent
|
||||
distributions and return them as a list of strings, with the
|
||||
@ -72,36 +74,31 @@ def system_distribution_chain():
|
||||
the distribution chain can't be determined, print an error message
|
||||
and return an empty list.
|
||||
"""
|
||||
if len(_SYSTEM_DISTRIBUTION_CHAIN) == 0:
|
||||
global _system_distribution_chain
|
||||
if len(_system_distribution_chain) == 0:
|
||||
try:
|
||||
vendor = check_output(("dpkg-vendor", "--query", "Vendor"), encoding="utf-8").strip()
|
||||
_SYSTEM_DISTRIBUTION_CHAIN.append(vendor)
|
||||
vendor = check_output(('dpkg-vendor', '--query', 'Vendor'),
|
||||
encoding='utf-8').strip()
|
||||
_system_distribution_chain.append(vendor)
|
||||
except CalledProcessError:
|
||||
Logger.error("Could not determine what distribution you are running.")
|
||||
Logger.error('Could not determine what distribution you are running.')
|
||||
return []
|
||||
|
||||
while True:
|
||||
try:
|
||||
parent = check_output(
|
||||
(
|
||||
"dpkg-vendor",
|
||||
"--vendor",
|
||||
_SYSTEM_DISTRIBUTION_CHAIN[-1],
|
||||
"--query",
|
||||
"Parent",
|
||||
),
|
||||
encoding="utf-8",
|
||||
).strip()
|
||||
parent = check_output((
|
||||
'dpkg-vendor', '--vendor', _system_distribution_chain[-1],
|
||||
'--query', 'Parent'), encoding='utf-8').strip()
|
||||
except CalledProcessError:
|
||||
# Vendor has no parent
|
||||
break
|
||||
_SYSTEM_DISTRIBUTION_CHAIN.append(parent)
|
||||
_system_distribution_chain.append(parent)
|
||||
|
||||
return _SYSTEM_DISTRIBUTION_CHAIN
|
||||
return _system_distribution_chain
|
||||
|
||||
|
||||
def system_distribution():
|
||||
"""system_distro() -> string
|
||||
""" system_distro() -> string
|
||||
|
||||
Detect the system's distribution and return it as a string. If the
|
||||
name of the distribution can't be determined, print an error message
|
||||
@ -111,40 +108,42 @@ def system_distribution():
|
||||
|
||||
|
||||
def host_architecture():
|
||||
"""host_architecture -> string
|
||||
""" host_architecture -> string
|
||||
|
||||
Detect the host's architecture and return it as a string. If the
|
||||
architecture can't be determined, print an error message and return None.
|
||||
"""
|
||||
|
||||
try:
|
||||
arch = check_output(("dpkg", "--print-architecture"), encoding="utf-8").strip()
|
||||
arch = check_output(('dpkg', '--print-architecture'),
|
||||
encoding='utf-8').strip()
|
||||
except CalledProcessError:
|
||||
arch = None
|
||||
|
||||
if not arch or "not found" in arch:
|
||||
Logger.error("Not running on a Debian based system; could not detect its architecture.")
|
||||
if not arch or 'not found' in arch:
|
||||
Logger.error('Not running on a Debian based system; '
|
||||
'could not detect its architecture.')
|
||||
return None
|
||||
|
||||
return arch
|
||||
|
||||
|
||||
def readlist(filename, uniq=True):
|
||||
"""readlist(filename, uniq) -> list
|
||||
""" readlist(filename, uniq) -> list
|
||||
|
||||
Read a list of words from the indicated file. If 'uniq' is True, filter
|
||||
out duplicated words.
|
||||
"""
|
||||
path = Path(filename)
|
||||
p = Path(filename)
|
||||
|
||||
if not path.is_file():
|
||||
Logger.error("File %s does not exist.", path)
|
||||
if not p.is_file():
|
||||
Logger.error(f'File {p} does not exist.')
|
||||
return False
|
||||
|
||||
content = path.read_text(encoding="utf-8").replace("\n", " ").replace(",", " ")
|
||||
content = p.read_text().replace('\n', ' ').replace(',', ' ')
|
||||
|
||||
if not content.strip():
|
||||
Logger.error("File %s is empty.", path)
|
||||
Logger.error(f'File {p} is empty.')
|
||||
return False
|
||||
|
||||
items = [item for item in content.split() if item]
|
||||
@ -155,44 +154,42 @@ def readlist(filename, uniq=True):
|
||||
return items
|
||||
|
||||
|
||||
def split_release_pocket(release, default="Release"):
|
||||
"""Splits the release and pocket name.
|
||||
def split_release_pocket(release, default='Release'):
|
||||
'''Splits the release and pocket name.
|
||||
|
||||
If the argument doesn't contain a pocket name then the 'Release' pocket
|
||||
is assumed.
|
||||
|
||||
Returns the release and pocket name.
|
||||
"""
|
||||
'''
|
||||
pocket = default
|
||||
|
||||
if release is None:
|
||||
raise ValueError("No release name specified")
|
||||
raise ValueError('No release name specified')
|
||||
|
||||
if "-" in release:
|
||||
(release, pocket) = release.rsplit("-", 1)
|
||||
if '-' in release:
|
||||
(release, pocket) = release.rsplit('-', 1)
|
||||
pocket = pocket.capitalize()
|
||||
|
||||
if pocket not in POCKETS:
|
||||
raise PocketDoesNotExistError(f"Pocket '{pocket}' does not exist.")
|
||||
raise PocketDoesNotExistError("Pocket '%s' does not exist." % pocket)
|
||||
|
||||
return (release, pocket)
|
||||
|
||||
|
||||
def require_utf8():
|
||||
"""Can be called by programs that only function in UTF-8 locales"""
|
||||
if locale.getpreferredencoding() != "UTF-8":
|
||||
'''Can be called by programs that only function in UTF-8 locales'''
|
||||
if locale.getpreferredencoding() != 'UTF-8':
|
||||
Logger.error("This program only functions in a UTF-8 locale. Aborting.")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
_vendor_to_distroinfo = {
|
||||
"Debian": distro_info.DebianDistroInfo,
|
||||
"Ubuntu": distro_info.UbuntuDistroInfo,
|
||||
}
|
||||
_vendor_to_distroinfo = {"Debian": distro_info.DebianDistroInfo,
|
||||
"Ubuntu": distro_info.UbuntuDistroInfo}
|
||||
|
||||
|
||||
def vendor_to_distroinfo(vendor):
|
||||
"""vendor_to_distroinfo(string) -> DistroInfo class
|
||||
""" vendor_to_distroinfo(string) -> DistroInfo class
|
||||
|
||||
Convert a string name of a distribution into a DistroInfo subclass
|
||||
representing that distribution, or None if the distribution is
|
||||
@ -202,7 +199,7 @@ def vendor_to_distroinfo(vendor):
|
||||
|
||||
|
||||
def codename_to_distribution(codename):
|
||||
"""codename_to_distribution(string) -> string
|
||||
""" codename_to_distribution(string) -> string
|
||||
|
||||
Finds a given release codename in your distribution's genaology
|
||||
(i.e. looking at the current distribution and its parents), or
|
||||
@ -215,11 +212,10 @@ def codename_to_distribution(codename):
|
||||
|
||||
if info().valid(codename):
|
||||
return distro
|
||||
return None
|
||||
|
||||
|
||||
def verify_file_checksums(pathname, checksums=None, size=0):
|
||||
"""verify checksums of file
|
||||
def verify_file_checksums(pathname, checksums={}, size=0):
|
||||
""" verify checksums of file
|
||||
|
||||
Any failure will log an error.
|
||||
|
||||
@ -232,39 +228,35 @@ def verify_file_checksums(pathname, checksums=None, size=0):
|
||||
|
||||
Returns True if all checks pass, False otherwise
|
||||
"""
|
||||
if checksums is None:
|
||||
checksums = {}
|
||||
path = Path(pathname)
|
||||
p = Path(pathname)
|
||||
|
||||
if not path.is_file():
|
||||
Logger.error("File %s not found", path)
|
||||
if not p.is_file():
|
||||
Logger.error(f'File {p} not found')
|
||||
return False
|
||||
filesize = path.stat().st_size
|
||||
filesize = p.stat().st_size
|
||||
if size and size != filesize:
|
||||
Logger.error("File %s incorrect size, got %s expected %s", path, filesize, size)
|
||||
Logger.error(f'File {p} incorrect size, got {filesize} expected {size}')
|
||||
return False
|
||||
|
||||
for alg, checksum in checksums.items():
|
||||
hash_ = hashlib.new(alg)
|
||||
with path.open("rb") as f:
|
||||
for (alg, checksum) in checksums.items():
|
||||
h = hashlib.new(alg)
|
||||
with p.open('rb') as f:
|
||||
while True:
|
||||
block = f.read(hash_.block_size)
|
||||
block = f.read(h.block_size)
|
||||
if len(block) == 0:
|
||||
break
|
||||
hash_.update(block)
|
||||
digest = hash_.hexdigest()
|
||||
h.update(block)
|
||||
digest = h.hexdigest()
|
||||
if digest == checksum:
|
||||
Logger.debug("File %s checksum (%s) verified: %s", path, alg, checksum)
|
||||
Logger.debug(f'File {p} checksum ({alg}) verified: {checksum}')
|
||||
else:
|
||||
Logger.error(
|
||||
"File %s checksum (%s) mismatch: got %s expected %s", path, alg, digest, checksum
|
||||
)
|
||||
Logger.error(f'File {p} checksum ({alg}) mismatch: got {digest} expected {checksum}')
|
||||
return False
|
||||
return True
|
||||
|
def verify_file_checksum(pathname, alg, checksum, size=0):
    """verify checksum of file
    """ verify checksum of file

    pathname: str or Path
        full path to file
@@ -281,7 +273,7 @@ def verify_file_checksum(pathname, alg, checksum, size=0):


def extract_authentication(url):
    """Remove plaintext authentication data from a URL
    """ Remove plaintext authentication data from a URL

    If the URL has a username:password in its netloc, this removes it
    and returns the remaining URL, along with the username and password
@@ -290,18 +282,14 @@ def extract_authentication(url):

    This returns a tuple in the form (url, username, password)
    """
    components = urlparse(url)
    if components.username or components.password:
        return (
            components._replace(netloc=components.hostname).geturl(),
            components.username,
            components.password,
        )
    u = urlparse(url)
    if u.username or u.password:
        return (u._replace(netloc=u.hostname).geturl(), u.username, u.password)
    return (url, None, None)


def download(src, dst, size=0, *, blocksize=DOWNLOAD_BLOCKSIZE_DEFAULT):
    """download/copy a file/url to local file
    """ download/copy a file/url to local file

    src: str or Path
        Source to copy from (file path or url)
@@ -327,148 +315,128 @@ def download(src, dst, size=0, *, blocksize=DOWNLOAD_BLOCKSIZE_DEFAULT):
        dst = dst / Path(parsedsrc.path).name

    # Copy if src is a local file
    if parsedsrc.scheme in ["", "file"]:
    if parsedsrc.scheme in ['', 'file']:
        src = Path(parsedsrc.path).expanduser().resolve()
        if src != parsedsrc.path:
            Logger.info("Parsed %s as %s", parsedsrc.path, src)
            Logger.info(f'Parsed {parsedsrc.path} as {src}')
        if not src.exists():
            raise NotFoundError(f"Source file {src} not found")
            raise NotFoundError(f'Source file {src} not found')
        if dst.exists():
            if src.samefile(dst):
                Logger.info("Using existing file %s", dst)
                Logger.info(f'Using existing file {dst}')
                return dst
            Logger.info("Replacing existing file %s", dst)
        Logger.info("Copying file %s to %s", src, dst)
            Logger.info(f'Replacing existing file {dst}')
        Logger.info(f'Copying file {src} to {dst}')
        shutil.copyfile(src, dst)
        return dst

    (src, username, password) = extract_authentication(src)
    auth = (username, password) if username or password else None

    with tempfile.TemporaryDirectory() as tmpdir:
        tmpdst = Path(tmpdir) / "dst"
    with tempfile.TemporaryDirectory() as d:
        tmpdst = Path(d) / 'dst'
        try:
            # We must use "Accept-Encoding: identity" so that Launchpad doesn't
            # compress changes files. See LP: #2025748.
            with requests.get(
                src, stream=True, timeout=60, auth=auth, headers={"accept-encoding": "identity"}
            ) as fsrc:
                with tmpdst.open("wb") as fdst:
                    fsrc.raise_for_status()
                    _download(fsrc, fdst, size, blocksize=blocksize)
        except requests.exceptions.HTTPError as error:
            if error.response is not None and error.response.status_code == 404:
                raise NotFoundError(f"URL {src} not found: {error}") from error
            raise DownloadError(error) from error
        except requests.exceptions.ConnectionError as error:
            with requests.get(src, stream=True, auth=auth) as fsrc, tmpdst.open('wb') as fdst:
                fsrc.raise_for_status()
                _download(fsrc, fdst, size, blocksize=blocksize)
        except requests.exceptions.HTTPError as e:
            if e.response is not None and e.response.status_code == 404:
                raise NotFoundError(f'URL {src} not found: {e}')
            raise DownloadError(e)
        except requests.exceptions.ConnectionError as e:
            # This is likely an archive hostname that doesn't resolve, like 'ftpmaster.internal'
            raise NotFoundError(f"URL {src} not found: {error}") from error
        except requests.exceptions.RequestException as error:
            raise DownloadError(error) from error
            raise NotFoundError(f'URL {src} not found: {e}')
        except requests.exceptions.RequestException as e:
            raise DownloadError(e)
        shutil.move(tmpdst, dst)
    return dst
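The download() helper above accepts either a local path or a URL and places the result at dst. A minimal sketch, assuming the main-branch signature shown and that NotFoundError and DownloadError are defined alongside it in ubuntutools.misc; the URL is an illustrative placeholder:

    from pathlib import Path

    from ubuntutools.misc import DownloadError, NotFoundError, download

    try:
        # dst may be a directory; the file keeps its basename from the URL path.
        path = download("https://example.com/pool/h/hello/hello_2.10.dsc", Path.cwd())
        print("saved to", path)
    except NotFoundError:
        print("no such file or URL")
    except DownloadError as e:
        print("download failed:", e)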
class _StderrProgressBar:
class _StderrProgressBar(object):
    BAR_WIDTH_MIN = 40
    BAR_WIDTH_DEFAULT = 60

    def __init__(self, max_width):
        self.full_width = min(max_width, self.BAR_WIDTH_DEFAULT)
        self.width = self.full_width - len("[] 99%")
        self.width = self.full_width - len('[] 99%')
        self.show_progress = self.full_width >= self.BAR_WIDTH_MIN

    def update(self, progress, total):
        if not self.show_progress:
            return
        pct = progress * 100 // total
        pctstr = f"{pct:>3}%"
        pctstr = f'{pct:>3}%'
        barlen = self.width * pct // 100
        barstr = "=" * barlen
        barstr = f"{barstr[:-1]}>"
        barstr = '=' * barlen
        barstr = barstr[:-1] + '>'
        barstr = barstr.ljust(self.width)
        fullstr = f"\r[{barstr}]{pctstr}"
        fullstr = f'\r[{barstr}]{pctstr}'
        sys.stderr.write(fullstr)
        sys.stderr.flush()

    def finish(self):
        if not self.show_progress:
            return
        sys.stderr.write("\n")
        sys.stderr.write('\n')
        sys.stderr.flush()


def _download(fsrc, fdst, size, *, blocksize):
    """helper method to download src to dst using requests library."""
    """ helper method to download src to dst using requests library. """
    url = fsrc.url
    Logger.debug("Using URL: %s", url)
    Logger.debug(f'Using URL: {url}')

    if not size:
        with suppress(AttributeError, TypeError, ValueError):
            size = int(fsrc.headers.get("Content-Length"))
            size = int(fsrc.headers.get('Content-Length'))

    parsed = urlparse(url)
    filename = Path(parsed.path).name
    hostname = parsed.hostname
    sizemb = f" ({size / 1024.0 / 1024:0.3f} MiB)" if size else ""
    Logger.info("Downloading %s from %s%s", filename, hostname, sizemb)
    sizemb = ' (%0.3f MiB)' % (size / 1024.0 / 1024) if size else ''
    Logger.info(f'Downloading {filename} from {hostname}{sizemb}')

    # Don't show progress if:
    #   logging INFO is suppressed
    #   stderr isn't a tty
    #   we don't know the total file size
    #   the file is content-encoded (i.e. compressed)
    show_progress = all(
        (
            Logger.isEnabledFor(logging.INFO),
            sys.stderr.isatty(),
            size > 0,
            "Content-Encoding" not in fsrc.headers,
        )
    )
    show_progress = all((Logger.isEnabledFor(logging.INFO),
                         sys.stderr.isatty(),
                         size > 0,
                         'Content-Encoding' not in fsrc.headers))

    terminal_width = 0
    if show_progress:
        try:
            terminal_width = os.get_terminal_size(sys.stderr.fileno()).columns
        except Exception as e:  # pylint: disable=broad-except
            Logger.error("Error finding stderr width, suppressing progress bar: %s", e)
        except Exception as e:
            Logger.error(f'Error finding stderr width, suppressing progress bar: {e}')
    progress_bar = _StderrProgressBar(max_width=terminal_width)

    downloaded = 0
    try:
        while True:
            # We use fsrc.raw so that compressed files stay compressed as we
            # write them to disk. For example, if this is a .diff.gz, then it
            # needs to remain compressed and unmodified to remain valid as part
            # of a source package later, even though Launchpad sends
            # "Content-Encoding: gzip" and the requests library therefore would
            # want to decompress it. See LP: #2025748.
            block = fsrc.raw.read(blocksize)
            if not block:
                break
        for block in fsrc.iter_content(blocksize):
            fdst.write(block)
            downloaded += len(block)
            progress_bar.update(downloaded, size)
    finally:
        progress_bar.finish()
        if size and size > downloaded:
            Logger.error(
                "Partial download: %0.3f MiB of %0.3f MiB",
                downloaded / 1024.0 / 1024,
                size / 1024.0 / 1024,
            )
            Logger.error('Partial download: %0.3f MiB of %0.3f MiB' %
                         (downloaded / 1024.0 / 1024,
                          size / 1024.0 / 1024))


def _download_text(src, binary, *, blocksize):
    with tempfile.TemporaryDirectory() as tmpdir:
        dst = Path(tmpdir) / "dst"
    with tempfile.TemporaryDirectory() as d:
        dst = Path(d) / 'dst'
        download(src, dst, blocksize=blocksize)
        return dst.read_bytes() if binary else dst.read_text()


def download_text(src, mode=None, *, blocksize=DOWNLOAD_BLOCKSIZE_DEFAULT):
    """Return the text content of a downloaded file
    """ Return the text content of a downloaded file

    src: str or Path
        Source to copy from (file path or url)
@@ -481,9 +449,9 @@ def download_text(src, mode=None, *, blocksize=DOWNLOAD_BLOCKSIZE_DEFAULT):

    Returns text content of downloaded file
    """
    return _download_text(src, binary="b" in (mode or ""), blocksize=blocksize)
    return _download_text(src, binary='b' in (mode or ''), blocksize=blocksize)


def download_bytes(src, *, blocksize=DOWNLOAD_BLOCKSIZE_DEFAULT):
    """Same as download_text() but returns bytes"""
    """ Same as download_text() but returns bytes """
    return _download_text(src, binary=True, blocksize=blocksize)
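A short sketch of the two text-download wrappers above, assuming both are importable from ubuntutools.misc and behave as their docstrings describe; the URL is illustrative:

    from ubuntutools.misc import download_bytes, download_text

    text = download_text("https://changelogs.ubuntu.com/meta-release")  # str
    raw = download_bytes("https://changelogs.ubuntu.com/meta-release")  # bytes
    assert raw.decode() == text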
@@ -22,58 +22,54 @@
# ##################################################################


import errno
import logging
import os
import re
import subprocess
import sys
import errno
import subprocess

from argparse import ArgumentParser
from urllib.parse import urlparse

from distro_info import DebianDistroInfo

from urllib.parse import urlparse

from ubuntutools.archive import (UbuntuSourcePackage, DebianSourcePackage,
                                 UbuntuCloudArchiveSourcePackage,
                                 PersonalPackageArchiveSourcePackage)
from ubuntutools.config import UDTConfig
from ubuntutools.lp.lpapicache import (Distribution, Launchpad)
from ubuntutools.lp.udtexceptions import (AlreadyLoggedInError,
                                          SeriesNotFoundException,
                                          PackageNotFoundException,
                                          PocketDoesNotExistError,
                                          InvalidDistroValueError)
from ubuntutools.misc import (split_release_pocket,
                              host_architecture,
                              download,
                              UPLOAD_QUEUE_STATUSES,
                              STATUSES)


# by default we use standard logging.getLogger() and only use
# ubuntutools.getLogger() in PullPkg().main()
from ubuntutools import getLogger as ubuntutools_getLogger
from ubuntutools.archive import (
    DebianSourcePackage,
    PersonalPackageArchiveSourcePackage,
    UbuntuCloudArchiveSourcePackage,
    UbuntuSourcePackage,
)
from ubuntutools.config import UDTConfig
from ubuntutools.lp.lpapicache import Distribution, Launchpad
from ubuntutools.lp.udtexceptions import (
    AlreadyLoggedInError,
    InvalidDistroValueError,
    PackageNotFoundException,
    PocketDoesNotExistError,
    SeriesNotFoundException,
)
from ubuntutools.misc import (
    STATUSES,
    UPLOAD_QUEUE_STATUSES,
    download,
    host_architecture,
    split_release_pocket,
)

import logging
Logger = logging.getLogger(__name__)

PULL_SOURCE = "source"
PULL_DEBS = "debs"
PULL_DDEBS = "ddebs"
PULL_UDEBS = "udebs"
PULL_LIST = "list"
PULL_SOURCE = 'source'
PULL_DEBS = 'debs'
PULL_DDEBS = 'ddebs'
PULL_UDEBS = 'udebs'
PULL_LIST = 'list'

VALID_PULLS = [PULL_SOURCE, PULL_DEBS, PULL_DDEBS, PULL_UDEBS, PULL_LIST]
VALID_BINARY_PULLS = [PULL_DEBS, PULL_DDEBS, PULL_UDEBS]

DISTRO_DEBIAN = "debian"
DISTRO_UBUNTU = "ubuntu"
DISTRO_UCA = "uca"
DISTRO_PPA = "ppa"
DISTRO_DEBIAN = 'debian'
DISTRO_UBUNTU = 'ubuntu'
DISTRO_UCA = 'uca'
DISTRO_PPA = 'ppa'

DISTRO_PKG_CLASS = {
    DISTRO_DEBIAN: DebianSourcePackage,
@@ -85,12 +81,12 @@ VALID_DISTROS = DISTRO_PKG_CLASS.keys()


class InvalidPullValueError(ValueError):
    """Thrown when --pull value is invalid"""
    """ Thrown when --pull value is invalid """
    pass


class PullPkg:
class PullPkg(object):
    """Class used to pull file(s) associated with a specific package"""

    @classmethod
    def main(cls, *args, **kwargs):
        """For use by stand-alone cmdline scripts.
@@ -105,74 +101,59 @@ class PullPkg:
        unexpected errors will flow up to the caller.
        On success, this simply returns.
        """
        logger = ubuntutools_getLogger()
        Logger = ubuntutools_getLogger()

        try:
            cls(*args, **kwargs).pull()
            return
        except KeyboardInterrupt:
            logger.info("User abort.")
        except (
            PackageNotFoundException,
            SeriesNotFoundException,
            PocketDoesNotExistError,
            InvalidDistroValueError,
            InvalidPullValueError,
        ) as error:
            logger.error(str(error))
            Logger.info('User abort.')
        except (PackageNotFoundException, SeriesNotFoundException,
                PocketDoesNotExistError, InvalidDistroValueError,
                InvalidPullValueError) as e:
            Logger.error(str(e))
            sys.exit(errno.ENOENT)
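The pull-* scripts in this tree are thin wrappers around PullPkg.main(). A minimal sketch of such a wrapper, assuming the keyword defaults shown above (pull/distro) are how the entry points select behavior; the script itself is hypothetical:

    #!/usr/bin/python3
    # hypothetical pull-lp-source-style entry point
    import sys

    from ubuntutools.pullpkg import PullPkg

    if __name__ == "__main__":
        # main() catches the exceptions listed above and exits nonzero itself.
        sys.exit(PullPkg.main(pull="source", distro="ubuntu"))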
    def __init__(self, *args, **kwargs):  # pylint: disable=unused-argument
        self._default_pull = kwargs.get("pull")
        self._default_distro = kwargs.get("distro")
        self._default_arch = kwargs.get("arch", host_architecture())
    def __init__(self, *args, **kwargs):
        self._default_pull = kwargs.get('pull')
        self._default_distro = kwargs.get('distro')
        self._default_arch = kwargs.get('arch', host_architecture())

    def parse_args(self, args):
        if args is None:
            args = sys.argv[1:]
        args = args[:]

        help_default_pull = "What to pull: " + ", ".join(VALID_PULLS)
        if self._default_pull:
            help_default_pull += f" (default: {self._default_pull})"
            help_default_pull += (" (default: %s)" % self._default_pull)
        help_default_distro = "Pull from: " + ", ".join(VALID_DISTROS)
        if self._default_distro:
            help_default_distro += f" (default: {self._default_distro})"
        help_default_arch = "Get binary packages for arch"
        help_default_arch += f"(default: {self._default_arch})"
            help_default_distro += (" (default: %s)" % self._default_distro)
        help_default_arch = ("Get binary packages for arch")
        help_default_arch += ("(default: %s)" % self._default_arch)

        # use add_help=False because we do parse_known_args() below, and if
        # that sees --help then it exits immediately
        parser = ArgumentParser(add_help=False)
        parser.add_argument("-L", "--login", action="store_true", help="Login to Launchpad")
        parser.add_argument(
            "-v", "--verbose", action="count", default=0, help="Increase verbosity/debug"
        )
        parser.add_argument(
            "-d", "--download-only", action="store_true", help="Do not extract the source package"
        )
        parser.add_argument("-m", "--mirror", action="append", help="Preferred mirror(s)")
        parser.add_argument(
            "--no-conf",
            action="store_true",
            help="Don't read config files or environment variables",
        )
        parser.add_argument(
            "--no-verify-signature",
            action="store_true",
            help="Don't fail if dsc signature can't be verified",
        )
        parser.add_argument(
            "-s",
            "--status",
            action="append",
            default=[],
            help="Search for packages with specific status(es)",
        )
        parser.add_argument("-a", "--arch", default=self._default_arch, help=help_default_arch)
        parser.add_argument("-p", "--pull", default=self._default_pull, help=help_default_pull)
        parser.add_argument(
            "-D", "--distro", default=self._default_distro, help=help_default_distro
        )
        parser.add_argument('-L', '--login', action='store_true',
                            help="Login to Launchpad")
        parser.add_argument('-v', '--verbose', action='count', default=0,
                            help="Increase verbosity/debug")
        parser.add_argument('-d', '--download-only', action='store_true',
                            help="Do not extract the source package")
        parser.add_argument('-m', '--mirror', action='append',
                            help='Preferred mirror(s)')
        parser.add_argument('--no-conf', action='store_true',
                            help="Don't read config files or environment variables")
        parser.add_argument('--no-verify-signature', action='store_true',
                            help="Don't fail if dsc signature can't be verified")
        parser.add_argument('-s', '--status', action='append', default=[],
                            help="Search for packages with specific status(es)")
        parser.add_argument('-a', '--arch', default=self._default_arch,
                            help=help_default_arch)
        parser.add_argument('-p', '--pull', default=self._default_pull,
                            help=help_default_pull)
        parser.add_argument('-D', '--distro', default=self._default_distro,
                            help=help_default_distro)

        # add distro-specific params
        try:
@@ -182,84 +163,75 @@ class PullPkg:
            distro = None

        if distro == DISTRO_UBUNTU:
            parser.add_argument(
                "--security",
                action="store_true",
                help="Pull from the Ubuntu Security Team (proposed) PPA",
            )
            parser.add_argument(
                "--upload-queue", action="store_true", help="Pull from the Ubuntu upload queue"
            )
            parser.add_argument('--security', action='store_true',
                                help='Pull from the Ubuntu Security Team (proposed) PPA')
            parser.add_argument('--upload-queue', action='store_true',
                                help='Pull from the Ubuntu upload queue')
        if distro == DISTRO_PPA:
            parser.add_argument("--ppa", help="PPA to pull from")
            parser.add_argument('--ppa', help='PPA to pull from')
            if parser.parse_known_args(args)[0].ppa is None:
                # check for any param starting with "ppa:"
                # if found, move it to a --ppa param
                for param in args:
                    if param.startswith("ppa:"):
                    if param.startswith('ppa:'):
                        args.remove(param)
                        args.insert(0, param)
                        args.insert(0, "--ppa")
                        args.insert(0, '--ppa')
                        break

        # add the positional params
        parser.add_argument("package", help="Package name to pull")
        parser.add_argument("release", nargs="?", help="Release to pull from")
        parser.add_argument("version", nargs="?", help="Package version to pull")
        parser.add_argument('package', help="Package name to pull")
        parser.add_argument('release', nargs='?', help="Release to pull from")
        parser.add_argument('version', nargs='?', help="Package version to pull")

        epilog = (
            "Note on --status: if a version is provided, all status types "
            "will be searched; if no version is provided, by default only "
            "'Pending' and 'Published' status will be searched."
        )
        epilog = ("Note on --status: if a version is provided, all status types "
                  "will be searched; if no version is provided, by default only "
                  "'Pending' and 'Published' status will be searched.")

        # since parser has no --help handler, create a new parser that does
        newparser = ArgumentParser(parents=[parser], epilog=epilog)

        return self.parse_options(vars(newparser.parse_args(args)))

    @staticmethod
    def parse_pull(pull):
    def parse_pull(self, pull):
        if not pull:
            raise InvalidPullValueError("Must specify --pull")

        # allow 'dbgsym' as alias for 'ddebs'
        if pull == "dbgsym":
        if pull == 'dbgsym':
            Logger.debug("Pulling '%s' for '%s'", PULL_DDEBS, pull)
            pull = PULL_DDEBS
        # assume anything starting with 'bin' means 'debs'
        if str(pull).startswith("bin"):
        if str(pull).startswith('bin'):
            Logger.debug("Pulling '%s' for '%s'", PULL_DEBS, pull)
            pull = PULL_DEBS
        # verify pull action is valid
        if pull not in VALID_PULLS:
            raise InvalidPullValueError(f"Invalid pull action '{pull}'")
            raise InvalidPullValueError("Invalid pull action '%s'" % pull)

        return pull

    @staticmethod
    def parse_distro(distro):
    def parse_distro(self, distro):
        if not distro:
            raise InvalidDistroValueError("Must specify --distro")

        distro = distro.lower()

        # allow 'lp' for 'ubuntu'
        if distro == "lp":
        if distro == 'lp':
            Logger.debug("Using distro '%s' for '%s'", DISTRO_UBUNTU, distro)
            distro = DISTRO_UBUNTU
        # assume anything with 'cloud' is UCA
        if re.match(r".*cloud.*", distro):
        if re.match(r'.*cloud.*', distro):
            Logger.debug("Using distro '%s' for '%s'", DISTRO_UCA, distro)
            distro = DISTRO_UCA
        # verify distro is valid
        if distro not in VALID_DISTROS:
            raise InvalidDistroValueError(f"Invalid distro '{distro}'")
            raise InvalidDistroValueError("Invalid distro '%s'" % distro)

        return distro
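The alias handling above can be exercised directly; a small sketch, assuming parse_pull and parse_distro are the main-branch staticmethods shown:

    from ubuntutools.pullpkg import PullPkg

    # 'dbgsym' and anything starting with 'bin' are normalized as shown above.
    assert PullPkg.parse_pull("dbgsym") == "ddebs"
    assert PullPkg.parse_pull("binaries") == "debs"
    # 'lp' and names containing 'cloud' are normalized distro aliases.
    assert PullPkg.parse_distro("LP") == "ubuntu"
    assert PullPkg.parse_distro("cloud-archive") == "uca"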
    @staticmethod
    def parse_release(distro, release):
    def parse_release(self, distro, release):
        if distro == DISTRO_UCA:
            return UbuntuCloudArchiveSourcePackage.parseReleaseAndPocket(release)

@@ -277,14 +249,15 @@ class PullPkg:

        if distro == DISTRO_PPA:
            # PPAs are part of Ubuntu distribution
            distribution = Distribution(DISTRO_UBUNTU)
            d = Distribution(DISTRO_UBUNTU)
        else:
            distribution = Distribution(distro)
            d = Distribution(distro)

        # let SeriesNotFoundException flow up
        distribution.getSeries(release)
        d.getSeries(release)

        Logger.debug("Using distro '%s' release '%s' pocket '%s'", distro, release, pocket)
        Logger.debug("Using distro '%s' release '%s' pocket '%s'",
                     distro, release, pocket)
        return (release, pocket)

    def parse_release_and_version(self, distro, release, version, try_swap=True):
@@ -308,196 +281,153 @@ class PullPkg:
        # they should all be provided, though the optional ones may be None

        # type bool
        assert "verbose" in options
        assert "download_only" in options
        assert "no_conf" in options
        assert "no_verify_signature" in options
        assert "status" in options
        assert 'verbose' in options
        assert 'download_only' in options
        assert 'no_conf' in options
        assert 'no_verify_signature' in options
        assert 'status' in options
        # type string
        assert "pull" in options
        assert "distro" in options
        assert "arch" in options
        assert "package" in options
        assert 'pull' in options
        assert 'distro' in options
        assert 'arch' in options
        assert 'package' in options
        # type string, optional
        assert "release" in options
        assert "version" in options
        assert 'release' in options
        assert 'version' in options
        # type list of strings, optional
        assert "mirror" in options
        assert 'mirror' in options

        options["pull"] = self.parse_pull(options["pull"])
        options["distro"] = self.parse_distro(options["distro"])
        options['pull'] = self.parse_pull(options['pull'])
        options['distro'] = self.parse_distro(options['distro'])

        # ensure these are always included so we can just check for None/False later
        options["ppa"] = options.get("ppa", None)
        options["security"] = options.get("security", False)
        options["upload_queue"] = options.get("upload_queue", False)
        options['ppa'] = options.get('ppa', None)
        options['security'] = options.get('security', False)
        options['upload_queue'] = options.get('upload_queue', False)

        return options

    def _get_params(self, options):
        distro = options["distro"]
        pull = options["pull"]
        distro = options['distro']
        pull = options['pull']

        params = {}
        params["package"] = options["package"]
        params["arch"] = options["arch"]
        params['package'] = options['package']

        if options["release"]:
            (release, version, pocket) = self.parse_release_and_version(
                distro, options["release"], options["version"]
            )
            params["series"] = release
            params["version"] = version
            params["pocket"] = pocket
        if options['release']:
            (r, v, p) = self.parse_release_and_version(distro, options['release'],
                                                       options['version'])
            params['series'] = r
            params['version'] = v
            params['pocket'] = p

        if params["package"].endswith(".dsc") and not params["series"] and not params["version"]:
            params["dscfile"] = params["package"]
            params.pop("package")
        if (params['package'].endswith('.dsc') and not params['series'] and not params['version']):
            params['dscfile'] = params['package']
            params.pop('package')

        if options["security"]:
            if options["ppa"]:
                Logger.warning("Both --security and --ppa specified, ignoring --ppa")
            Logger.debug("Checking Ubuntu Security PPA")
        if options['security']:
            if options['ppa']:
                Logger.warning('Both --security and --ppa specified, ignoring --ppa')
            Logger.debug('Checking Ubuntu Security PPA')
            # --security is just a shortcut for --ppa ppa:ubuntu-security-proposed/ppa
            options["ppa"] = "ubuntu-security-proposed/ppa"
            options['ppa'] = 'ubuntu-security-proposed/ppa'

        if options["ppa"]:
            if options["ppa"].startswith("ppa:"):
                params["ppa"] = options["ppa"][4:]
        if options['ppa']:
            if options['ppa'].startswith('ppa:'):
                params['ppa'] = options['ppa'][4:]
            else:
                params["ppa"] = options["ppa"]
                params['ppa'] = options['ppa']
        elif distro == DISTRO_PPA:
            raise ValueError("Must specify PPA to pull from")
            raise ValueError('Must specify PPA to pull from')

        mirrors = []
        if options["mirror"]:
            mirrors.extend(options["mirror"])
        if options['mirror']:
            mirrors.extend(options['mirror'])
        if pull == PULL_DDEBS:
            config = UDTConfig(options["no_conf"])
            ddebs_mirror = config.get_value(distro.upper() + "_DDEBS_MIRROR")
            config = UDTConfig(options['no_conf'])
            ddebs_mirror = config.get_value(distro.upper() + '_DDEBS_MIRROR')
            if ddebs_mirror:
                mirrors.append(ddebs_mirror)
        if mirrors:
            Logger.debug("using mirrors %s", ", ".join(mirrors))
            params["mirrors"] = mirrors
            params['mirrors'] = mirrors

        params["verify_signature"] = not options["no_verify_signature"]
        params['verify_signature'] = not options['no_verify_signature']

        params["status"] = STATUSES if "all" in options["status"] else options["status"]
        params['status'] = STATUSES if 'all' in options['status'] else options['status']

        # special handling for upload queue
        if options["upload_queue"]:
            if len(options["status"]) > 1:
                raise ValueError(
                    "Too many --status provided, can only search for a single status or 'all'"
                )
            if not options["status"]:
                params["status"] = None
            elif options["status"][0].lower() == "all":
                params["status"] = "all"
            elif options["status"][0].capitalize() in UPLOAD_QUEUE_STATUSES:
                params["status"] = options["status"][0].capitalize()
        if options['upload_queue']:
            if len(options['status']) > 1:
                raise ValueError("Too many --status provided, "
                                 "can only search for a single status or 'all'")
            if not options['status']:
                params['status'] = None
            elif options['status'][0].lower() == 'all':
                params['status'] = 'all'
            elif options['status'][0].capitalize() in UPLOAD_QUEUE_STATUSES:
                params['status'] = options['status'][0].capitalize()
            else:
                raise ValueError(
                    f"Invalid upload queue status '{options['status'][0]}':"
                    f" valid values are {', '.join(UPLOAD_QUEUE_STATUSES)}"
                )
                msg = ("Invalid upload queue status '%s': valid values are %s" %
                       (options['status'][0], ', '.join(UPLOAD_QUEUE_STATUSES)))
                raise ValueError(msg)

        return params
    def pull(self, args=None):
    def pull(self, args=sys.argv[1:]):
        """Pull (download) specified package file(s)"""
        options = self.parse_args(args)

        if options["verbose"]:
        if options['verbose']:
            Logger.setLevel(logging.DEBUG)
            if options["verbose"] > 1:
            if options['verbose'] > 1:
                logging.getLogger(__package__).setLevel(logging.DEBUG)

        Logger.debug("pullpkg options: %s", options)

        pull = options["pull"]
        distro = options["distro"]
        pull = options['pull']
        distro = options['distro']

        if options["login"]:
        if options['login']:
            Logger.debug("Logging in to Launchpad:")
            try:
                Launchpad.login()
            except AlreadyLoggedInError:
                Logger.error(
                    "Launchpad singleton has already performed a login, "
                    "and its design prevents another login"
                )
                Logger.error("Launchpad singleton has already performed a login, "
                             "and its design prevents another login")
                Logger.warning("Continuing anyway, with existing Launchpad instance")

        params = self._get_params(options)
        package = params["package"]
        package = params['package']

        if options["upload_queue"]:
        if options['upload_queue']:
            # upload queue API is different/simpler
            self.pull_upload_queue(  # pylint: disable=missing-kwoa
                pull, arch=options["arch"], download_only=options["download_only"], **params
            )
            self.pull_upload_queue(pull, arch=options['arch'],
                                   download_only=options['download_only'],
                                   **params)
            return

        # call implementation, and allow exceptions to flow up to caller
        srcpkg = DISTRO_PKG_CLASS[distro](**params)
        spph = srcpkg.lp_spph

        Logger.info("Found %s", spph.display_name)

        # The VCS detection logic was modeled after `apt source`
        for key in srcpkg.dsc.keys():
            original_key = key
            key = key.lower()

            if key.startswith("vcs-"):
                if key == "vcs-browser":
                    continue
                if key == "vcs-git":
                    vcs = "Git"
                elif key == "vcs-bzr":
                    vcs = "Bazaar"
                else:
                    continue

                uri = srcpkg.dsc[original_key]

                Logger.warning(
                    "\nNOTICE: '%s' packaging is maintained in "
                    "the '%s' version control system at:\n %s\n",
                    package,
                    vcs,
                    uri,
                )

                if vcs == "Bazaar":
                    vcscmd = " $ bzr branch " + uri
                elif vcs == "Git":
                    vcscmd = " $ git clone " + uri

                if vcscmd:
                    Logger.info(
                        "Please use:\n%s\n"
                        "to retrieve the latest (possibly unreleased) updates to the package.\n",
                        vcscmd,
                    )
        Logger.info('Found %s', spph.display_name)

        if pull == PULL_LIST:
            Logger.info("Source files:")
            for f in srcpkg.dsc["Files"]:
                Logger.info(" %s", f["name"])
            for f in srcpkg.dsc['Files']:
                Logger.info(" %s", f['name'])
            Logger.info("Binary files:")
            for f in spph.getBinaries(options["arch"]):
                archtext = ""
            for f in spph.getBinaries(options['arch']):
                archtext = ''
                name = f.getFileName()
                if name.rpartition(".")[0].endswith("all"):
                if name.rpartition('.')[0].endswith('all'):
                    archtext = f" ({f.arch})"
                Logger.info(" %s%s", name, archtext)
                Logger.info(f" {name}{archtext}")
        elif pull == PULL_SOURCE:
            # allow DownloadError to flow up to caller
            srcpkg.pull()
            if options["download_only"]:
            if options['download_only']:
                Logger.debug("--download-only specified, not extracting")
            else:
                srcpkg.unpack()
@@ -505,116 +435,104 @@ class PullPkg:
            name = None
            if package != spph.getPackageName():
                Logger.info("Pulling only binary package '%s'", package)
                Logger.info(
                    "Use package name '%s' to pull all binary packages", spph.getPackageName()
                )
                Logger.info("Use package name '%s' to pull all binary packages",
                            spph.getPackageName())
                name = package

            # e.g. 'debs' -> 'deb'
            ext = pull.rstrip("s")
            ext = pull.rstrip('s')

            if distro == DISTRO_DEBIAN:
                # Debian ddebs don't use .ddeb extension, unfortunately :(
                if pull in [PULL_DEBS, PULL_DDEBS]:
                    name = name or ".*"
                    ext = "deb"
                    name = name or '.*'
                    ext = 'deb'
                    if pull == PULL_DEBS:
                        name += r"(?<!-dbgsym)$"
                        name += r'(?<!-dbgsym)$'
                    if pull == PULL_DDEBS:
                        name += r"-dbgsym$"
                        name += r'-dbgsym$'

            # allow DownloadError to flow up to caller
            total = srcpkg.pull_binaries(name=name, ext=ext, arch=options["arch"])
            total = srcpkg.pull_binaries(name=name, ext=ext, arch=options['arch'])
            if total < 1:
                Logger.error("No %s found for %s %s", pull, package, spph.getVersion())
                Logger.error("No %s found for %s %s", pull,
                             package, spph.getVersion())
        else:
            Logger.error("Internal error: invalid pull value after parse_pull()")
            raise InvalidPullValueError(f"Invalid pull value '{pull}'")
            raise InvalidPullValueError("Invalid pull value '%s'" % pull)
    def pull_upload_queue(
        self,
        pull,
        *,
        package,
        version=None,
        arch=None,
        series=None,
        pocket=None,
        status=None,
        download_only=None,
        **kwargs,
    ):  # pylint: disable=no-self-use,unused-argument
    def pull_upload_queue(self, pull, *,
                          package, version=None, arch=None, series=None, pocket=None,
                          status=None, download_only=None, **kwargs):
        if not series:
            Logger.error("Using --upload-queue requires specifying series")
            return

        series = Distribution("ubuntu").getSeries(series)
        series = Distribution('ubuntu').getSeries(series)

        queueparams = {"name": package}
        queueparams = {'name': package}
        if pocket:
            queueparams["pocket"] = pocket
            queueparams['pocket'] = pocket

        if status == "all":
            queueparams["status"] = None
            queuetype = "any"
        if status == 'all':
            queueparams['status'] = None
            queuetype = 'any'
        elif status:
            queueparams["status"] = status
            queueparams['status'] = status
            queuetype = status
        else:
            queuetype = "Unapproved"
            queuetype = 'Unapproved'

        packages = [
            p
            for p in series.getPackageUploads(**queueparams)
            if p.package_version == version or str(p.id) == version or not version
        ]
        packages = [p for p in series.getPackageUploads(**queueparams) if
                    p.package_version == version or
                    str(p.id) == version or
                    not version]

        if pull == PULL_SOURCE:
            packages = [p for p in packages if p.contains_source]
        elif pull in VALID_BINARY_PULLS:
            packages = [
                p
                for p in packages
                if p.contains_build
                and (arch in ["all", "any"] or arch in p.display_arches.replace(",", "").split())
            ]
            packages = [p for p in packages if
                        p.contains_build and
                        (arch in ['all', 'any'] or
                         arch in p.display_arches.replace(',', '').split())]

        if not packages:
            msg = f"Package {package} not found in {queuetype} upload queue for {series.name}"
            msg = ("Package %s not found in %s upload queue for %s" %
                   (package, queuetype, series.name))
            if version:
                msg += f" with version/id {version}"
                msg += " with version/id %s" % version
            if pull in VALID_BINARY_PULLS:
                msg += f" for arch {arch}"
                msg += " for arch %s" % arch
            raise PackageNotFoundException(msg)

        if pull == PULL_LIST:
            for pkg in packages:
                msg = f"Found {pkg.package_name} {pkg.package_version} (ID {pkg.id})"
                if pkg.display_arches:
                    msg += f" arch {pkg.display_arches}"
            for p in packages:
                msg = "Found %s %s (ID %s)" % (p.package_name, p.package_version, p.id)
                if p.display_arches:
                    msg += " arch %s" % p.display_arches
                Logger.info(msg)
                url = pkg.changesFileUrl()
                url = p.changesFileUrl()
                if url:
                    Logger.info("Changes file:")
                    Logger.info(" %s", url)
                else:
                    Logger.info("No changes file")
                urls = pkg.sourceFileUrls()
                urls = p.sourceFileUrls()
                if urls:
                    Logger.info("Source files:")
                    for url in urls:
                        Logger.info(" %s", url)
                else:
                    Logger.info("No source files")
                urls = pkg.binaryFileUrls()
                urls = p.binaryFileUrls()
                if urls:
                    Logger.info("Binary files:")
                    for url in urls:
                        Logger.info(" %s", url)
                        Logger.info(" { %s }", pkg.binaryFileProperties(url))
                        Logger.info(" { %s }" % p.binaryFileProperties(url))
                else:
                    Logger.info("No binary files")
                urls = pkg.customFileUrls()
                urls = p.customFileUrls()
                if urls:
                    Logger.info("Custom files:")
                    for url in urls:
@@ -624,58 +542,53 @@ class PullPkg:
        if len(packages) > 1:
            msg = "Found multiple packages"
            if version:
                msg += f" with version {version}, please specify the ID instead"
                msg += " with version %s, please specify the ID instead" % version
            else:
                msg += ", please specify the version"
            Logger.error("Available package versions/ids are:")
            for pkg in packages:
                Logger.error("%s %s (id %s)", pkg.package_name, pkg.package_version, pkg.id)
            for p in packages:
                Logger.error("%s %s (id %s)" % (p.package_name, p.package_version, p.id))
            raise PackageNotFoundException(msg)

        pkg = packages[0]
        p = packages[0]

        urls = set(pkg.customFileUrls())
        if pkg.changesFileUrl():
            urls.add(pkg.changesFileUrl())
        urls = set(p.customFileUrls())
        if p.changesFileUrl():
            urls.add(p.changesFileUrl())

        if pull == PULL_SOURCE:
            urls |= set(pkg.sourceFileUrls())
            urls |= set(p.sourceFileUrls())
            if not urls:
                Logger.error("No source files to download")
            dscfile = None
            for url in urls:
                dst = download(url, os.getcwd())
                if dst.name.endswith(".dsc"):
                if dst.name.endswith('.dsc'):
                    dscfile = dst
            if download_only:
                Logger.debug("--download-only specified, not extracting")
            elif not dscfile:
                Logger.error("No source dsc file found, cannot extract")
            else:
                cmd = ["dpkg-source", "-x", dscfile.name]
                Logger.debug(" ".join(cmd))
                result = subprocess.run(
                    cmd,
                    check=False,
                    encoding="utf-8",
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                )
                cmd = ['dpkg-source', '-x', dscfile.name]
                Logger.debug(' '.join(cmd))
                result = subprocess.run(cmd, encoding='utf-8',
                                        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                if result.returncode != 0:
                    Logger.error("Source unpack failed.")
                    Logger.error('Source unpack failed.')
                    Logger.debug(result.stdout)
        else:
            name = ".*"
            name = '.*'
            if pull == PULL_DEBS:
                name = rf"{name}(?<!-di)(?<!-dbgsym)$"
                name = r'{}(?<!-di)(?<!-dbgsym)$'.format(name)
            elif pull == PULL_DDEBS:
                name += "-dbgsym$"
                name += '-dbgsym$'
            elif pull == PULL_UDEBS:
                name += "-di$"
                name += '-di$'
            else:
                raise InvalidPullValueError(f"Invalid pull value {pull}")
                raise InvalidPullValueError("Invalid pull value %s" % pull)

            urls |= set(pkg.binaryFileUrls())
            urls |= set(p.binaryFileUrls())
            if not urls:
                Logger.error("No binary files to download")
            for url in urls:
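A hedged usage sketch for the upload-queue path above, assuming PullPkg is constructed with the same keyword defaults the pull-* entry points use; the package and series names are illustrative:

    from ubuntutools.pullpkg import PullPkg

    # List (rather than download) what sits in the Unapproved queue for a series.
    PullPkg(pull="list", distro="ubuntu").pull(
        ["--upload-queue", "hello", "noble"]  # hypothetical package/series
    )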
@@ -16,14 +16,14 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import tempfile
import os
import re
import subprocess
import sys
import tempfile


class Question:
class Question(object):
    def __init__(self, options, show_help=True):
        assert len(options) >= 2
        self.options = [s.lower() for s in options]
@@ -31,9 +31,9 @@ class Question:

    def get_options(self):
        if len(self.options) == 2:
            options = f"{self.options[0]} or {self.options[1]}"
            options = self.options[0] + " or " + self.options[1]
        else:
            options = f"{', '.join(self.options[:-1])}, or {self.options[-1]}"
            options = ", ".join(self.options[:-1]) + ", or " + self.options[-1]
        return options

    def ask(self, question, default=None):
@@ -57,7 +57,7 @@ class Question:
            try:
                selected = input(question).strip().lower()
            except (EOFError, KeyboardInterrupt):
                print("\nAborting as requested.")
                print('\nAborting as requested.')
                sys.exit(1)
            if selected == "":
                selected = default
@@ -67,7 +67,7 @@ class Question:
                if selected == option[0]:
                    selected = option
            if selected not in self.options:
                print(f"Please answer the question with {self.get_options()}.")
                print("Please answer the question with " + self.get_options() + ".")
        return selected


@@ -78,7 +78,7 @@ class YesNoQuestion(Question):

def input_number(question, min_number, max_number, default=None):
    if default:
        question += f" [{default}]? "
        question += " [%i]? " % (default)
    else:
        question += "? "
    selected = None
@@ -86,7 +86,7 @@ def input_number(question, min_number, max_number, default=None):
        try:
            selected = input(question).strip()
        except (EOFError, KeyboardInterrupt):
            print("\nAborting as requested.")
            print('\nAborting as requested.')
            sys.exit(1)
        if default and selected == "":
            selected = default
@@ -94,40 +94,40 @@ def input_number(question, min_number, max_number, default=None):
        try:
            selected = int(selected)
            if selected < min_number or selected > max_number:
                print(f"Please input a number between {min_number} and {max_number}.")
                print("Please input a number between %i and %i." % (min_number, max_number))
        except ValueError:
            print("Please input a number.")
    assert isinstance(selected, int)
    assert type(selected) == int
    return selected


def confirmation_prompt(message=None, action=None):
    """Display message, or a stock message including action, and wait for the
    user to press Enter
    """
    '''Display message, or a stock message including action, and wait for the
    user to press Enter
    '''
    if message is None:
        if action is None:
            action = "continue"
        message = f"Press [Enter] to {action}. Press [Ctrl-C] to abort now."
            action = 'continue'
        message = 'Press [Enter] to %s. Press [Ctrl-C] to abort now.' % action
    try:
        input(message)
    except (EOFError, KeyboardInterrupt):
        print("\nAborting as requested.")
        print('\nAborting as requested.')
        sys.exit(1)


class EditFile:
class EditFile(object):
    def __init__(self, filename, description, placeholders=None):
        self.filename = filename
        self.description = description
        if placeholders is None:
            placeholders = (re.compile(r"^>>>.*<<<$", re.UNICODE),)
            placeholders = (re.compile(r'^>>>.*<<<$', re.UNICODE),)
        self.placeholders = placeholders

    def edit(self, optional=False):
        if optional:
            print(f"\n\nCurrently the {self.description} looks like:")
            with open(self.filename, "r", encoding="utf-8") as f:
            print("\n\nCurrently the %s looks like:" % self.description)
            with open(self.filename, 'r', encoding='utf-8') as f:
                print(f.read())
            if YesNoQuestion().ask("Edit", "no") == "no":
                return
@@ -135,65 +135,68 @@ class EditFile:
        done = False
        while not done:
            old_mtime = os.stat(self.filename).st_mtime
            subprocess.check_call(["sensible-editor", self.filename])
            subprocess.check_call(['sensible-editor', self.filename])
            modified = old_mtime != os.stat(self.filename).st_mtime
            placeholders_present = False
            if self.placeholders:
                with open(self.filename, "r", encoding="utf-8") as f:
                with open(self.filename, 'r', encoding='utf-8') as f:
                    for line in f:
                        for placeholder in self.placeholders:
                            if placeholder.search(line.strip()):
                                placeholders_present = True

            if placeholders_present:
                print(
                    f"Placeholders still present in the {self.description}. "
                    f"Please replace them with useful information."
                )
                confirmation_prompt(action="edit again")
                print("Placeholders still present in the %s. "
                      "Please replace them with useful information."
                      % self.description)
                confirmation_prompt(action='edit again')
            elif not modified:
                print(f"The {self.description} was not modified")
                print("The %s was not modified" % self.description)
                if YesNoQuestion().ask("Edit again", "yes") == "no":
                    done = True
            elif self.check_edit():
                done = True

    def check_edit(self):  # pylint: disable=no-self-use
        """Override this to implement extra checks on the edited report.
    def check_edit(self):
        '''Override this to implement extra checks on the edited report.
        Should return False if another round of editing is needed,
        and should prompt the user to confirm that, if necessary.
        """
        '''
        return True


class EditBugReport(EditFile):
    split_re = re.compile(r"^Summary.*?:\s+(.*?)\s+Description:\s+(.*)$", re.DOTALL | re.UNICODE)
    split_re = re.compile(r'^Summary.*?:\s+(.*?)\s+'
                          r'Description:\s+(.*)$',
                          re.DOTALL | re.UNICODE)

    def __init__(self, subject, body, placeholders=None):
        prefix = f"{os.path.basename(sys.argv[0])}_"
        tmpfile = tempfile.NamedTemporaryFile(prefix=prefix, suffix=".txt", delete=False)
        tmpfile.write((f"Summary (one line):\n{subject}\n\nDescription:\n{body}").encode("utf-8"))
        prefix = os.path.basename(sys.argv[0]) + '_'
        tmpfile = tempfile.NamedTemporaryFile(prefix=prefix, suffix='.txt',
                                              delete=False)
        tmpfile.write((u'Summary (one line):\n%s\n\nDescription:\n%s'
                       % (subject, body)).encode('utf-8'))
        tmpfile.close()
        super().__init__(tmpfile.name, "bug report", placeholders)
        super(EditBugReport, self).__init__(tmpfile.name, 'bug report',
                                            placeholders)

    def check_edit(self):
        with open(self.filename, "r", encoding="utf-8") as f:
        with open(self.filename, 'r', encoding='utf-8') as f:
            report = f.read()

        if self.split_re.match(report) is None:
            print(
                f"The {self.description} doesn't start with 'Summary:' and 'Description:' blocks"
            )
            confirmation_prompt("edit again")
            print("The %s doesn't start with 'Summary:' and 'Description:' "
                  "blocks" % self.description)
            confirmation_prompt('edit again')
            return False
        return True

    def get_report(self):
        with open(self.filename, "r", encoding="utf-8") as f:
        with open(self.filename, 'r', encoding='utf-8') as f:
            report = f.read()

        match = self.split_re.match(report)
        title = match.group(1).replace("\n", " ")
        title = match.group(1).replace(u'\n', u' ')
        report = (title, match.group(2))
        os.unlink(self.filename)
        return report
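A minimal interactive sketch for the Question helpers above, assuming they are importable from ubuntutools.question and that ask() accepts single-letter abbreviations as the loop shows:

    from ubuntutools.question import YesNoQuestion, input_number

    if YesNoQuestion().ask("Upload the package", "no") == "yes":
        count = input_number("How many builds", 1, 10, default=1)
        print(f"uploading {count} build(s)")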
@@ -22,12 +22,13 @@ class RDependsException(Exception):
    pass


def query_rdepends(package, release, arch, server="http://qa.ubuntuwire.org/rdepends"):
def query_rdepends(package, release, arch,
                   server='http://qa.ubuntuwire.org/rdepends'):
    """Look up a package's reverse-dependencies on the Ubuntuwire
    Reverse- webservice
    """

    url = os.path.join(server, "v1", release, arch, package)
    url = os.path.join(server, 'v1', release, arch, package)

    response, data = httplib2.Http().request(url)
    if response.status != 200:
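A sketch of calling the webservice helper above, assuming it returns parsed data on HTTP 200 and raises RDependsException otherwise, as the surrounding exception class suggests; the package, release, and arch values are illustrative:

    from ubuntutools.rdepends import RDependsException, query_rdepends

    try:
        # e.g. everything that depends on 'hello' on amd64 in a given release
        rdeps = query_rdepends("hello", "noble", "amd64")
        print(rdeps)
    except RDependsException as e:
        print("lookup failed:", e)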
@ -20,7 +20,6 @@
|
||||
# Please see the /usr/share/common-licenses/GPL-2 file for the full text
|
||||
# of the GNU General Public License license.
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
from debian.deb822 import Changes
|
||||
@ -28,19 +27,16 @@ from distro_info import DebianDistroInfo, DistroDataOutdated
|
||||
from httplib2 import Http, HttpLib2Error
|
||||
|
||||
from ubuntutools.lp import udtexceptions
|
||||
from ubuntutools.lp.lpapicache import (
|
||||
Distribution,
|
||||
DistributionSourcePackage,
|
||||
Launchpad,
|
||||
PersonTeam,
|
||||
)
|
||||
from ubuntutools.lp.lpapicache import (Launchpad, Distribution, PersonTeam,
|
||||
DistributionSourcePackage)
|
||||
from ubuntutools.question import confirmation_prompt
|
||||
|
||||
import logging
|
||||
Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_debian_srcpkg(name, release):
|
||||
debian = Distribution("debian")
|
||||
debian = Distribution('debian')
|
||||
debian_archive = debian.getArchive()
|
||||
|
||||
try:
|
||||
@ -51,83 +47,82 @@ def get_debian_srcpkg(name, release):
|
||||
return debian_archive.getSourcePackage(name, release)
|
||||
|
||||
|
||||
def get_ubuntu_srcpkg(name, release, pocket="Release"):
|
||||
ubuntu = Distribution("ubuntu")
|
||||
def get_ubuntu_srcpkg(name, release, pocket='Release'):
|
||||
ubuntu = Distribution('ubuntu')
|
||||
ubuntu_archive = ubuntu.getArchive()
|
||||
|
||||
try:
|
||||
return ubuntu_archive.getSourcePackage(name, release, pocket)
|
||||
except udtexceptions.PackageNotFoundException:
|
||||
if pocket != "Release":
|
||||
parent_pocket = "Release"
|
||||
if pocket == "Updates":
|
||||
parent_pocket = "Proposed"
|
||||
if pocket != 'Release':
|
||||
parent_pocket = 'Release'
|
||||
if pocket == 'Updates':
|
||||
parent_pocket = 'Proposed'
|
||||
return get_ubuntu_srcpkg(name, release, parent_pocket)
|
||||
raise
|
||||
|
||||
|
||||
def need_sponsorship(name, component, release):
|
||||
"""
|
||||
'''
|
||||
Check if the user has upload permissions for either the package
|
||||
itself or the component
|
||||
"""
|
||||
archive = Distribution("ubuntu").getArchive()
|
||||
distroseries = Distribution("ubuntu").getSeries(release)
|
||||
'''
|
||||
archive = Distribution('ubuntu').getArchive()
|
||||
distroseries = Distribution('ubuntu').getSeries(release)
|
||||
|
||||
need_sponsor = not PersonTeam.me.canUploadPackage(archive, distroseries, name, component)
|
||||
need_sponsor = not PersonTeam.me.canUploadPackage(archive, distroseries,
|
||||
name, component)
|
||||
if need_sponsor:
|
||||
print(
|
||||
"""You are not able to upload this package directly to Ubuntu.
|
||||
print('''You are not able to upload this package directly to Ubuntu.
|
||||
Your sync request shall require an approval by a member of the appropriate
|
||||
sponsorship team, who shall be subscribed to this bug report.
|
||||
This must be done before it can be processed by a member of the Ubuntu Archive
|
||||
team."""
|
||||
)
|
||||
team.''')
|
||||
confirmation_prompt()
|
||||
|
||||
return need_sponsor
|
||||
|
||||
|
||||
def check_existing_reports(srcpkg):
|
||||
"""
|
||||
'''
|
||||
Check existing bug reports on Launchpad for a possible sync request.
|
||||
|
||||
If found ask for confirmation on filing a request.
|
||||
"""
|
||||
'''
|
||||
|
||||
# Fetch the package's bug list from Launchpad
|
||||
pkg = Distribution("ubuntu").getSourcePackage(name=srcpkg)
|
||||
pkg_bug_list = pkg.searchTasks(
|
||||
status=["Incomplete", "New", "Confirmed", "Triaged", "In Progress", "Fix Committed"],
|
||||
        omit_duplicates=True,
    )
    pkg = Distribution('ubuntu').getSourcePackage(name=srcpkg)
    pkg_bug_list = pkg.searchTasks(status=["Incomplete", "New", "Confirmed",
                                           "Triaged", "In Progress",
                                           "Fix Committed"],
                                   omit_duplicates=True)

    # Search bug list for other sync requests.
    for bug in pkg_bug_list:
        # check for Sync or sync and the package name
        if not bug.is_complete and f"ync {srcpkg}" in bug.title:
            print(
                f"The following bug could be a possible duplicate sync bug on Launchpad:\n"
                f" * {bug.title} ({bug.web_link})\n"
                f"Please check the above URL to verify this before continuing."
            )
        if not bug.is_complete and 'ync %s' % srcpkg in bug.title:
            print('The following bug could be a possible duplicate sync bug '
                  'on Launchpad:\n'
                  ' * %s (%s)\n'
                  'Please check the above URL to verify this before '
                  'continuing.'
                  % (bug.title, bug.web_link))
            confirmation_prompt()


def get_ubuntu_delta_changelog(srcpkg):
    """
    '''
    Download the Ubuntu changelog and extract the entries since the last sync
    from Debian.
    """
    archive = Distribution("ubuntu").getArchive()
    spph = archive.getPublishedSources(
        source_name=srcpkg.getPackageName(), exact_match=True, pocket="Release"
    )
    '''
    archive = Distribution('ubuntu').getArchive()
    spph = archive.getPublishedSources(source_name=srcpkg.getPackageName(),
                                       exact_match=True, pocket='Release')
    debian_info = DebianDistroInfo()
    name_chars = "[-+0-9a-z.]"
    topline = re.compile(
        rf"^(\w%({name_chars})s*) \(([^\(\) \t]+)\)((\s+%({name_chars})s+)+)\;", re.IGNORECASE
    )
    topline = re.compile(r'^(\w%(name_chars)s*) \(([^\(\) \t]+)\)'
                         r'((\s+%(name_chars)s+)+)\;'
                         % {'name_chars': '[-+0-9a-z.]'},
                         re.IGNORECASE)
    delta = []
    for record in spph:
        changes_url = record.changesFileUrl()
@ -135,57 +130,61 @@ def get_ubuntu_delta_changelog(srcpkg):
            # Native sync
            break
        try:
            response = Http().request(changes_url)[0]
            response, body = Http().request(changes_url)
        except HttpLib2Error as e:
            Logger.error(str(e))
            break
        if response.status != 200:
            Logger.error("%s: %s %s", changes_url, response.status, response.reason)
            Logger.error("%s: %s %s", changes_url, response.status,
                         response.reason)
            break

        changes = Changes(Http().request(changes_url)[1])
        for line in changes["Changes"].splitlines():
        for line in changes['Changes'].splitlines():
            line = line[1:]
            match = topline.match(line)
            if match:
                distribution = match.group(3).split()[0].split("-")[0]
            m = topline.match(line)
            if m:
                distribution = m.group(3).split()[0].split('-')[0]
                if debian_info.valid(distribution):
                    break
            if line.startswith(" "):
            if line.startswith(u' '):
                delta.append(line)
        else:
            continue
        break

    return "\n".join(delta)
    return '\n'.join(delta)


def post_bug(srcpkg, subscribe, status, bugtitle, bugtext):
    """
    '''
    Use the LP API to file the sync request.
    """
    '''

    print(f"The final report is:\nSummary: {bugtitle}\nDescription:\n{bugtext}\n")
    print('The final report is:\nSummary: %s\nDescription:\n%s\n'
          % (bugtitle, bugtext))
    confirmation_prompt()

    if srcpkg:
        # pylint: disable=protected-access
        bug_target = DistributionSourcePackage(f"{Launchpad._root_uri}ubuntu/+source/{srcpkg}")
        bug_target = DistributionSourcePackage(
            '%subuntu/+source/%s' % (Launchpad._root_uri, srcpkg))
    else:
        # new source package
        bug_target = Distribution("ubuntu")
        bug_target = Distribution('ubuntu')

    # create bug
    bug = Launchpad.bugs.createBug(title=bugtitle, description=bugtext, target=bug_target())
    bug = Launchpad.bugs.createBug(title=bugtitle, description=bugtext,
                                   target=bug_target())

    # newly created bugreports have only one task
    task = bug.bug_tasks[0]
    # only members of ubuntu-bugcontrol can set importance
    if PersonTeam.me.isLpTeamMember("ubuntu-bugcontrol"):
        task.importance = "Wishlist"
    if PersonTeam.me.isLpTeamMember('ubuntu-bugcontrol'):
        task.importance = 'Wishlist'
    task.status = status
    task.lp_save()

    bug.subscribe(person=PersonTeam(subscribe)())

    print(f"Sync request filed as bug #{bug.id}: {bug.web_link}")
    print('Sync request filed as bug #%i: %s'
          % (bug.id, bug.web_link))
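
As a usage sketch, the three helpers above chain naturally: check for
duplicates, collect the Ubuntu delta, then file the bug. The driver below is
hypothetical (requestsync itself adds argument handling and templates); the
bug title, subscriber and status are illustrative placeholders, and srcpkg is
assumed to be an lpapicache source-package object providing getPackageName():

    def file_sync_request(srcpkg_name, srcpkg):
        # Manual duplicate check, then build the report from the Ubuntu delta.
        check_existing_reports(srcpkg_name)
        delta = get_ubuntu_delta_changelog(srcpkg)
        title = f"Sync {srcpkg_name} from Debian"  # placeholder title
        body = "Changelog entries since the last sync:\n" + delta
        post_bug(srcpkg_name, "ubuntu-sponsors", "Confirmed", title, body)
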
@ -20,13 +20,12 @@
# Please see the /usr/share/common-licenses/GPL-2 file for the full text
# of the GNU General Public License license.

import logging
import os
import re
import sys
import smtplib
import socket
import subprocess
import sys
import tempfile

from debian.changelog import Changelog
@ -34,18 +33,19 @@ from distro_info import DebianDistroInfo, DistroDataOutdated

from ubuntutools.archive import DebianSourcePackage, UbuntuSourcePackage
from ubuntutools.lp.udtexceptions import PackageNotFoundException
from ubuntutools.question import YesNoQuestion, confirmation_prompt
from ubuntutools.question import confirmation_prompt, YesNoQuestion

import logging
Logger = logging.getLogger(__name__)


__all__ = [
    "get_debian_srcpkg",
    "get_ubuntu_srcpkg",
    "need_sponsorship",
    "check_existing_reports",
    "get_ubuntu_delta_changelog",
    "mail_bug",
    'get_debian_srcpkg',
    'get_ubuntu_srcpkg',
    'need_sponsorship',
    'check_existing_reports',
    'get_ubuntu_delta_changelog',
    'mail_bug',
]


@ -67,86 +67,73 @@ def get_ubuntu_srcpkg(name, release):


def need_sponsorship(name, component, release):
    """
    '''
    Ask the user if he has upload permissions for the package or the
    component.
    """
    '''

    val = YesNoQuestion().ask(
        f"Do you have upload permissions for the '{component}' component or "
        f"the package '{name}' in Ubuntu {release}?\nIf in doubt answer 'n'.",
        "no",
    )
    return val == "no"
    val = YesNoQuestion().ask("Do you have upload permissions for the '%s' component or "
                              "the package '%s' in Ubuntu %s?\nIf in doubt answer 'n'." %
                              (component, name, release), 'no')
    return val == 'no'


def check_existing_reports(srcpkg):
    """
    '''
    Point the user to the URL to manually check for duplicate bug reports.
    """
    print(
        f"Please check on https://bugs.launchpad.net/ubuntu/+source/{srcpkg}/+bugs\n"
        f"for duplicate sync requests before continuing."
    )
    '''
    print('Please check on '
          'https://bugs.launchpad.net/ubuntu/+source/%s/+bugs\n'
          'for duplicate sync requests before continuing.' % srcpkg)
    confirmation_prompt()


def get_ubuntu_delta_changelog(srcpkg):
    """
    '''
    Download the Ubuntu changelog and extract the entries since the last sync
    from Debian.
    """
    '''
    changelog = Changelog(srcpkg.getChangelog())
    if changelog is None:
        return ""
        return ''
    delta = []
    debian_info = DebianDistroInfo()
    for block in changelog:
        distribution = block.distributions.split()[0].split("-")[0]
        distribution = block.distributions.split()[0].split('-')[0]
        if debian_info.valid(distribution):
            break
        delta += [str(change) for change in block.changes() if change.strip()]
        delta += [str(change) for change in block.changes()
                  if change.strip()]

    return "\n".join(delta)
    return '\n'.join(delta)


def mail_bug(
    srcpkg,
    subscribe,
    status,
    bugtitle,
    bugtext,
    bug_mail_domain,
    keyid,
    myemailaddr,
    mailserver_host,
    mailserver_port,
    mailserver_user,
    mailserver_pass,
):
    """
def mail_bug(srcpkg, subscribe, status, bugtitle, bugtext, bug_mail_domain,
             keyid, myemailaddr, mailserver_host, mailserver_port,
             mailserver_user, mailserver_pass):
    '''
    Submit the sync request per email.
    """
    '''

    to = f"new@{bug_mail_domain}"
    to = 'new@' + bug_mail_domain

    # generate mailbody
    if srcpkg:
        mailbody = f" affects ubuntu/{srcpkg}\n"
        mailbody = ' affects ubuntu/%s\n' % srcpkg
    else:
        mailbody = " affects ubuntu\n"
    mailbody += f"""\
 status {status}
    mailbody = ' affects ubuntu\n'
    mailbody += '''\
 status %s
 importance wishlist
 subscribe {subscribe}
 subscribe %s
 done

{bugtext}"""
%s''' % (status, subscribe, bugtext)

    # prepare sign command
    gpg_command = None
    for cmd in ("gnome-gpg", "gpg2", "gpg"):
        if os.access(f"/usr/bin/{cmd}", os.X_OK):
    for cmd in ('gnome-gpg', 'gpg2', 'gpg'):
        if os.access('/usr/bin/%s' % cmd, os.X_OK):
            gpg_command = [cmd]
            break

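
The changelog-based variant of get_ubuntu_delta_changelog above stops at the
first entry targeting a valid Debian distribution, i.e. the last sync point.
A self-contained sketch of that loop against a plain changelog file (the path
is illustrative):

    from debian.changelog import Changelog
    from distro_info import DebianDistroInfo

    def delta_since_debian(path="debian/changelog"):
        # Collect Ubuntu-only entries; stop at the first Debian-valid block.
        debian_info = DebianDistroInfo()
        delta = []
        with open(path, encoding="utf-8") as changelog_file:
            for block in Changelog(changelog_file):
                distribution = block.distributions.split()[0].split("-")[0]
                if debian_info.valid(distribution):
                    break
                delta.extend(str(c) for c in block.changes() if c.strip())
        return "\n".join(delta)
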
@ -154,130 +141,107 @@ def mail_bug(
        Logger.error("Cannot locate gpg, please install the 'gnupg' package!")
        sys.exit(1)

    gpg_command.append("--clearsign")
    gpg_command.append('--clearsign')
    if keyid:
        gpg_command.extend(("-u", keyid))
        gpg_command.extend(('-u', keyid))

    # sign the mail body
    gpg = subprocess.Popen(
        gpg_command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, encoding="utf-8"
    )
        gpg_command, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
        encoding='utf-8')
    signed_report = gpg.communicate(mailbody)[0]
    if gpg.returncode != 0:
        Logger.error("%s failed.", gpg_command[0])
        sys.exit(1)

    # generate email
    mail = f"""\
From: {myemailaddr}
To: {to}
Subject: {bugtitle}
    mail = '''\
From: %s
To: %s
Subject: %s
Content-Type: text/plain; charset=UTF-8

{signed_report}"""
%s''' % (myemailaddr, to, bugtitle, signed_report)

    print(f"The final report is:\n{mail}")
    print('The final report is:\n%s' % mail)
    confirmation_prompt()

    # save mail in temporary file
    backup = tempfile.NamedTemporaryFile(
        mode="w",
        mode='w',
        delete=False,
        prefix=f"requestsync-{re.sub('[^a-zA-Z0-9_-]', '', bugtitle.replace(' ', '_'))}",
        prefix='requestsync-' + re.sub(r'[^a-zA-Z0-9_-]', '', bugtitle.replace(' ', '_'))
    )
    with backup:
        backup.write(mail)

    Logger.info(
        "The e-mail has been saved in %s and will be deleted after succesful transmission",
        backup.name,
    )
    Logger.info('The e-mail has been saved in %s and will be deleted '
                'after succesful transmission', backup.name)

    # connect to the server
    while True:
        try:
            Logger.info("Connecting to %s:%s ...", mailserver_host, mailserver_port)
            smtp = smtplib.SMTP(mailserver_host, mailserver_port)
            Logger.info('Connecting to %s:%s ...', mailserver_host,
                        mailserver_port)
            s = smtplib.SMTP(mailserver_host, mailserver_port)
            break
        except smtplib.SMTPConnectError as error:
        except smtplib.SMTPConnectError as s:
            try:
                # py2 path
                # pylint: disable=unsubscriptable-object
                Logger.error(
                    "Could not connect to %s:%s: %s (%i)",
                    mailserver_host,
                    mailserver_port,
                    error[1],
                    error[0],
                )
                Logger.error('Could not connect to %s:%s: %s (%i)',
                             mailserver_host, mailserver_port, s[1], s[0])
            except TypeError:
                # pylint: disable=no-member
                Logger.error(
                    "Could not connect to %s:%s: %s (%i)",
                    mailserver_host,
                    mailserver_port,
                    error.strerror,
                    error.errno,
                )
            if error.smtp_code == 421:
                confirmation_prompt(
                    message="This is a temporary error, press [Enter] "
                    "to retry. Press [Ctrl-C] to abort now."
                )
        except socket.error as error:
            Logger.error('Could not connect to %s:%s: %s (%i)',
                         mailserver_host, mailserver_port, s.strerror, s.errno)
            if s.smtp_code == 421:
                confirmation_prompt(message='This is a temporary error, press [Enter] '
                                    'to retry. Press [Ctrl-C] to abort now.')
        except socket.error as s:
            try:
                # py2 path
                # pylint: disable=unsubscriptable-object
                Logger.error(
                    "Could not connect to %s:%s: %s (%i)",
                    mailserver_host,
                    mailserver_port,
                    error[1],
                    error[0],
                )
                Logger.error('Could not connect to %s:%s: %s (%i)',
                             mailserver_host, mailserver_port, s[1], s[0])
            except TypeError:
                # pylint: disable=no-member
                Logger.error(
                    "Could not connect to %s:%s: %s (%i)",
                    mailserver_host,
                    mailserver_port,
                    error.strerror,
                    error.errno,
                )
                Logger.error('Could not connect to %s:%s: %s (%i)',
                             mailserver_host, mailserver_port, s.strerror, s.errno)
            return

    if mailserver_user and mailserver_pass:
        try:
            smtp.login(mailserver_user, mailserver_pass)
            s.login(mailserver_user, mailserver_pass)
        except smtplib.SMTPAuthenticationError:
            Logger.error("Error authenticating to the server: invalid username and password.")
            smtp.quit()
            Logger.error('Error authenticating to the server: '
                         'invalid username and password.')
            s.quit()
            return
        except smtplib.SMTPException:
            Logger.error("Unknown SMTP error.")
            smtp.quit()
            Logger.error('Unknown SMTP error.')
            s.quit()
            return

    while True:
        try:
            smtp.sendmail(myemailaddr, to, mail.encode("utf-8"))
            smtp.quit()
            s.sendmail(myemailaddr, to, mail.encode('utf-8'))
            s.quit()
            os.remove(backup.name)
            Logger.info("Sync request mailed.")
            Logger.info('Sync request mailed.')
            break
        except smtplib.SMTPRecipientsRefused as smtperror:
            smtp_code, smtp_message = smtperror.recipients[to]
            Logger.error("Error while sending: %i, %s", smtp_code, smtp_message)
            Logger.error('Error while sending: %i, %s', smtp_code, smtp_message)
            if smtp_code == 450:
                confirmation_prompt(
                    message="This is a temporary error, press [Enter] "
                    "to retry. Press [Ctrl-C] to abort now."
                )
                confirmation_prompt(message='This is a temporary error, press [Enter] '
                                    'to retry. Press [Ctrl-C] to abort now.')
            else:
                return
        except smtplib.SMTPResponseException as error:
            Logger.error("Error while sending: %i, %s", error.smtp_code, error.smtp_error)
        except smtplib.SMTPResponseException as e:
            Logger.error('Error while sending: %i, %s',
                         e.smtp_code, e.smtp_error)
            return
        except smtplib.SMTPServerDisconnected:
            Logger.error("Server disconnected while sending the mail.")
            Logger.error('Server disconnected while sending the mail.')
            return
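
The signing step in mail_bug above is the usual pipe-through-gpg idiom. A
minimal standalone sketch of it, assuming a gpg binary on $PATH (the key id
is a placeholder):

    import subprocess

    def clearsign(text, keyid=None):
        # Mirror the mail_bug sign step: clearsign text via gpg stdin/stdout.
        cmd = ["gpg", "--clearsign"]
        if keyid:
            cmd.extend(("-u", keyid))
        gpg = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE, encoding="utf-8")
        signed, _ = gpg.communicate(text)
        if gpg.returncode != 0:
            raise RuntimeError("gpg failed")
        return signed
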
@ -1,95 +0,0 @@
# Copyright (C) 2024 Canonical Ltd.
# Author: Chris Peterson <chris.peterson@canonical.com>
# Author: Andy P. Whitcroft
# Author: Christian Ehrhardt
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.

import datetime
import json
import sys
import urllib
import urllib.request

URL_RUNNING = "http://autopkgtest.ubuntu.com/static/running.json"
URL_QUEUED = "http://autopkgtest.ubuntu.com/queues.json"


def _get_jobs(url: str) -> dict:
    request = urllib.request.Request(url, headers={"Cache-Control": "max-age-0"})
    with urllib.request.urlopen(request) as response:
        data = response.read()
        jobs = json.loads(data.decode("utf-8"))

    return jobs


def get_running():
    jobs = _get_jobs(URL_RUNNING)

    running = []
    for pkg in jobs:
        for handle in jobs[pkg]:
            for series in jobs[pkg][handle]:
                for arch in jobs[pkg][handle][series]:
                    jobinfo = jobs[pkg][handle][series][arch]
                    triggers = ",".join(jobinfo[0].get("triggers", "-"))
                    ppas = ",".join(jobinfo[0].get("ppas", "-"))
                    time = jobinfo[1]
                    env = jobinfo[0].get("env", "-")
                    time = str(datetime.timedelta(seconds=jobinfo[1]))
                    try:
                        line = (
                            f"R {time:6} {pkg:30} {'-':10} {series:8} {arch:8}"
                            f" {ppas:31} {triggers} {env}\n"
                        )
                        running.append((jobinfo[1], line))
                    except BrokenPipeError:
                        sys.exit(1)

    output = ""
    for time, row in sorted(running, reverse=True):
        output += f"{row}"

    return output


def get_queued():
    queues = _get_jobs(URL_QUEUED)
    output = ""
    for origin in queues:
        for series in queues[origin]:
            for arch in queues[origin][series]:
                n = 0
                for key in queues[origin][series][arch]:
                    if key == "private job":
                        pkg = triggers = ppas = "private job"
                    else:
                        (pkg, json_data) = key.split(maxsplit=1)
                        try:
                            jobinfo = json.loads(json_data)
                            triggers = ",".join(jobinfo.get("triggers", "-"))
                            ppas = ",".join(jobinfo.get("ppas", "-"))
                        except json.decoder.JSONDecodeError:
                            pkg = triggers = ppas = "failed to parse"
                            continue

                    n = n + 1
                    try:
                        output += (
                            f"Q{n:04d} {'-:--':>6} {pkg:30} {origin:10} {series:8} {arch:8}"
                            f" {ppas:31} {triggers}\n"
                        )
                    except BrokenPipeError:
                        sys.exit(1)
    return output
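
The module removed above is a plain JSON consumer; both helpers return
preformatted report lines. Assuming it is importable as
ubuntutools.running_autopkgtests (as the tree layout suggests), typical use
is just:

    from ubuntutools.running_autopkgtests import get_queued, get_running

    # Each helper returns one formatted row per test, or "" when idle.
    print(get_running(), end="")
    print(get_queued(), end="")
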
@ -15,7 +15,6 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import logging
import os
import re
from urllib.parse import unquote
@ -26,6 +25,7 @@ import httplib2

from ubuntutools.version import Version

import logging
Logger = logging.getLogger(__name__)


@ -37,7 +37,7 @@ def is_sync(bug):
    return "sync" in bug.title.lower().split(" ") or "sync" in bug.tags


class BugTask:
class BugTask(object):
    def __init__(self, bug_task, launchpad):
        self.bug_task = bug_task
        self.launchpad = launchpad
@ -58,7 +58,7 @@ class BugTask:
            self.series = components[2].lower()

        if self.package is None:
            title_re = r"^Sync ([a-z0-9+.-]+) [a-z0-9.+:~-]+ \([a-z]+\) from.*"
            title_re = r'^Sync ([a-z0-9+.-]+) [a-z0-9.+:~-]+ \([a-z]+\) from.*'
            match = re.match(title_re, self.get_bug_title(), re.U | re.I)
            if match is not None:
                self.package = match.group(1)
@ -68,42 +68,34 @@ class BugTask:
        dsc_file = ""
        for url in source_files:
            filename = unquote(os.path.basename(url))
            Logger.debug("Downloading %s...", filename)
            Logger.debug("Downloading %s..." % (filename))
            # HttpLib2 isn't suitable for large files (it reads into memory),
            # but we want its https certificate validation on the .dsc
            if url.endswith(".dsc"):
                response, data = httplib2.Http().request(url)
                assert response.status == 200
                with open(filename, "wb") as f:
                with open(filename, 'wb') as f:
                    f.write(data)

                dsc_file = os.path.join(os.getcwd(), filename)
            else:
                urlretrieve(url, filename)
        assert os.path.isfile(dsc_file), f"{dsc_file} does not exist."
        assert os.path.isfile(dsc_file), "%s does not exist." % (dsc_file)
        return dsc_file

    def get_branch_link(self):
        return "lp:" + self.project + "/" + self.get_series() + "/" + self.package
        return "lp:" + self.project + "/" + self.get_series() + "/" + \
               self.package

    def get_bug_title(self):
        """Returns the title of the related bug."""
        return self.bug_task.bug.title

    def get_long_info(self):
        return (
            "Bug task: "
            + str(self.bug_task)
            + "\n"
            + "Package: "
            + str(self.package)
            + "\n"
            + "Project: "
            + str(self.project)
            + "\n"
            + "Series: "
            + str(self.series)
        )
        return "Bug task: " + str(self.bug_task) + "\n" + \
               "Package: " + str(self.package) + "\n" + \
               "Project: " + str(self.project) + "\n" + \
               "Series: " + str(self.series)

    def get_lp_task(self):
        """Returns the Launchpad bug task object."""
@ -126,7 +118,8 @@ class BugTask:
        if self.series is None or latest_release:
            dist = self.launchpad.distributions[self.project]
            return dist.current_series.name
        return self.series
        else:
            return self.series

    def get_short_info(self):
        return self.bug_task.bug_target_name + ": " + self.bug_task.status
@ -144,16 +137,14 @@ class BugTask:
        dist = self.launchpad.distributions[project]
        archive = dist.getArchive(name="primary")
        distro_series = dist.getSeries(name_or_version=series)
        published = archive.getPublishedSources(
            source_name=self.package,
            distro_series=distro_series,
            status="Published",
            exact_match=True,
        )
        published = archive.getPublishedSources(source_name=self.package,
                                                distro_series=distro_series,
                                                status="Published",
                                                exact_match=True)

        latest_source = None
        for source in published:
            if source.pocket in ("Release", "Security", "Updates", "Proposed"):
            if source.pocket in ('Release', 'Security', 'Updates', 'Proposed'):
                latest_source = source
                break
        return latest_source
@ -165,7 +156,7 @@ class BugTask:
    def get_latest_released_version(self):
        source = self.get_source(True)
        if source is None:  # Not currently published in Ubuntu
            version = "~"
            version = '~'
        else:
            version = source.source_package_version
        return Version(version)
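
BugTask wraps a launchpadlib bug task together with a Launchpad client. A
hedged sketch of typical use (application name and bug number are
placeholders):

    from launchpadlib.launchpad import Launchpad

    lp = Launchpad.login_with("sponsor-patch-example", "production")
    bug = lp.bugs[1]  # placeholder bug number
    task = BugTask(bug.bug_tasks[0], lp)
    print(task.get_short_info())
    print(task.get_latest_released_version())
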
@ -15,27 +15,27 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import logging
import os
import re
import subprocess
from functools import reduce

from ubuntutools.sponsor_patch.question import ask_for_manual_fixing
from functools import reduce

import logging
Logger = logging.getLogger(__name__)


class Patch:
class Patch(object):
    """This object represents a patch that can be downloaded from Launchpad."""

    def __init__(self, patch):
        self._patch = patch
        self._patch_file = re.sub(" |/", "_", patch.title)
        if not reduce(
            lambda r, x: r or self._patch.title.endswith(x), (".debdiff", ".diff", ".patch"), False
        ):
            Logger.debug("Patch %s does not have a proper file extension.", self._patch.title)
        if not reduce(lambda r, x: r or self._patch.title.endswith(x),
                      (".debdiff", ".diff", ".patch"), False):
            Logger.debug("Patch %s does not have a proper file extension." %
                         (self._patch.title))
            self._patch_file += ".patch"
        self._full_path = os.path.realpath(self._patch_file)
        self._changed_files = None
@ -45,36 +45,21 @@ class Patch:
        assert self._changed_files is not None, "You forgot to download the patch."
        edit = False
        if self.is_debdiff():
            cmd = [
                "patch",
                "--merge",
                "--force",
                "-p",
                str(self.get_strip_level()),
                "-i",
                self._full_path,
            ]
            Logger.debug(" ".join(cmd))
            cmd = ["patch", "--merge", "--force", "-p",
                   str(self.get_strip_level()), "-i", self._full_path]
            Logger.debug(' '.join(cmd))
            if subprocess.call(cmd) != 0:
                Logger.error(
                    "Failed to apply debdiff %s to %s %s.",
                    self._patch_file,
                    task.package,
                    task.get_version(),
                )
                Logger.error("Failed to apply debdiff %s to %s %s.",
                             self._patch_file, task.package, task.get_version())
                if not edit:
                    ask_for_manual_fixing()
                    edit = True
        else:
            cmd = ["add-patch", self._full_path]
            Logger.debug(" ".join(cmd))
            Logger.debug(' '.join(cmd))
            if subprocess.call(cmd) != 0:
                Logger.error(
                    "Failed to apply diff %s to %s %s.",
                    self._patch_file,
                    task.package,
                    task.get_version(),
                )
                Logger.error("Failed to apply diff %s to %s %s.",
                             self._patch_file, task.package, task.get_version())
                if not edit:
                    ask_for_manual_fixing()
                    edit = True
@ -82,13 +67,13 @@ class Patch:

    def download(self):
        """Downloads the patch from Launchpad."""
        Logger.debug("Downloading %s.", self._patch_file)
        Logger.debug("Downloading %s." % (self._patch_file))
        patch_f = open(self._patch_file, "wb")
        patch_f.write(self._patch.data.open().read())
        patch_f.close()

        cmd = ["diffstat", "-l", "-p0", self._full_path]
        changed_files = subprocess.check_output(cmd, encoding="utf-8")
        changed_files = subprocess.check_output(cmd, encoding='utf-8')
        self._changed_files = [f for f in changed_files.split("\n") if f != ""]

    def get_strip_level(self):
@ -96,11 +81,13 @@ class Patch:
        assert self._changed_files is not None, "You forgot to download the patch."
        strip_level = None
        if self.is_debdiff():
            changelog = [f for f in self._changed_files if f.endswith("debian/changelog")][0]
            changelog = [f for f in self._changed_files
                         if f.endswith("debian/changelog")][0]
            strip_level = len(changelog.split(os.sep)) - 2
        return strip_level

    def is_debdiff(self):
        """Checks if the patch is a debdiff (= modifies debian/changelog)."""
        assert self._changed_files is not None, "You forgot to download the patch."
        return len([f for f in self._changed_files if f.endswith("debian/changelog")]) > 0
        return len([f for f in self._changed_files
                    if f.endswith("debian/changelog")]) > 0
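
The Patch life cycle above is download() first, then the debdiff checks and
apply(); the assertions enforce that ordering. A sketch under the assumption
that bug is a launchpadlib bug with at least one attachment and task is a
BugTask for it:

    patch = Patch(bug.attachments[0])  # placeholder attachment
    patch.download()                   # must precede the checks below
    if patch.is_debdiff():
        print("strip level:", patch.get_strip_level())
    patch.apply(task)
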
@ -37,7 +37,8 @@ def ask_for_ignoring_or_fixing():
def ask_for_manual_fixing():
    """Ask the user to resolve an issue manually."""

    answer = YesNoQuestion().ask("Do you want to resolve this issue manually", "yes")
    answer = YesNoQuestion().ask("Do you want to resolve this issue manually",
                                 "yes")
    if answer == "no":
        user_abort()

@ -15,7 +15,6 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import logging
import os
import re
import subprocess
@ -25,22 +24,21 @@ import debian.changelog
import debian.deb822

from ubuntutools.question import Question, YesNoQuestion
from ubuntutools.sponsor_patch.question import (
    ask_for_ignoring_or_fixing,
    ask_for_manual_fixing,
    user_abort,
)

from ubuntutools.sponsor_patch.question import (ask_for_ignoring_or_fixing,
                                                ask_for_manual_fixing,
                                                user_abort)

import logging
Logger = logging.getLogger(__name__)


def _get_series(launchpad):
    """Returns a tuple with the development and list of supported series."""
    ubuntu = launchpad.distributions["ubuntu"]
    ubuntu = launchpad.distributions['ubuntu']
    devel_series = ubuntu.current_series.name
    supported_series = [
        series.name for series in ubuntu.series if series.active and series.name != devel_series
    ]
    supported_series = [series.name for series in ubuntu.series
                        if series.active and series.name != devel_series]
    return (devel_series, supported_series)


@ -51,14 +49,14 @@ def strip_epoch(version):
    return "1.1.3-1".
    """

    parts = version.full_version.split(":")
    parts = version.full_version.split(':')
    if len(parts) > 1:
        del parts[0]
    version_without_epoch = ":".join(parts)
    version_without_epoch = ':'.join(parts)
    return version_without_epoch


class SourcePackage:
class SourcePackage(object):
    """This class represents a source package."""

    def __init__(self, package, builder, workdir, branch):
@ -76,10 +74,11 @@ class SourcePackage:
        if upload == "ubuntu":
            self._print_logs()
            question = Question(["yes", "edit", "no"])
            answer = question.ask("Do you want to acknowledge the sync request", "no")
            answer = question.ask("Do you want to acknowledge the sync request",
                                  "no")
            if answer == "edit":
                return False
            if answer == "no":
            elif answer == "no":
                user_abort()

            bug = task.bug
@ -91,32 +90,33 @@ class SourcePackage:

            msg = "Sync request ACK'd."
            if self._build_log:
                msg = (
                    f"{self._package} {self._version} builds"
                    f" on {self._builder.get_architecture()}. {msg}"
                )
                msg = ("%s %s builds on %s. " + msg) % \
                      (self._package, self._version,
                       self._builder.get_architecture())
            bug.newMessage(content=msg, subject="sponsor-patch")
            Logger.debug("Acknowledged sync request bug #%i.", bug.id)

            bug.subscribe(person=launchpad.people["ubuntu-archive"])
            bug.subscribe(person=launchpad.people['ubuntu-archive'])
            Logger.debug("Subscribed ubuntu-archive to bug #%i.", bug.id)

            bug.subscribe(person=launchpad.me)
            Logger.debug("Subscribed me to bug #%i.", bug.id)

            sponsorsteam = launchpad.people["ubuntu-sponsors"]
            sponsorsteam = launchpad.people['ubuntu-sponsors']
            for sub in bug.subscriptions:
                if sub.person == sponsorsteam and sub.canBeUnsubscribedByUser():
                    bug.unsubscribe(person=launchpad.people["ubuntu-sponsors"])
                    Logger.debug("Unsubscribed ubuntu-sponsors from bug #%i.", bug.id)
                    bug.unsubscribe(person=launchpad.people['ubuntu-sponsors'])
                    Logger.debug("Unsubscribed ubuntu-sponsors from bug #%i.",
                                 bug.id)
                elif sub.person == sponsorsteam:
                    Logger.debug("Couldn't unsubscribe ubuntu-sponsors from bug #%i.", bug.id)
                    Logger.debug("Couldn't unsubscribe ubuntu-sponsors from "
                                 "bug #%i.", bug.id)

            Logger.info("Successfully acknowledged sync request bug #%i.", bug.id)
            Logger.info("Successfully acknowledged sync request bug #%i.",
                        bug.id)
        else:
            Logger.error(
                "Sync requests can only be acknowledged when the upload target is Ubuntu."
            )
            Logger.error("Sync requests can only be acknowledged when the "
                         "upload target is Ubuntu.")
            sys.exit(1)
        return True

@ -135,35 +135,34 @@ class SourcePackage:
        else:
            target = upload
        question = Question(["yes", "edit", "no"])
        answer = question.ask(f"Do you want to upload the package to {target}", "no")
        answer = question.ask("Do you want to upload the package to %s" % target, "no")
        if answer == "edit":
            return False
        if answer == "no":
        elif answer == "no":
            user_abort()
        cmd = ["dput", "--force", upload, self._changes_file]
        Logger.debug(" ".join(cmd))
        Logger.debug(' '.join(cmd))
        if subprocess.call(cmd) != 0:
            Logger.error(
                "Upload of %s to %s failed.", os.path.basename(self._changes_file), upload
            )
            Logger.error("Upload of %s to %s failed." %
                         (os.path.basename(self._changes_file), upload))
            sys.exit(1)

        # Push the branch if the package is uploaded to the Ubuntu archive.
        if upload == "ubuntu" and self._branch:
            cmd = ["debcommit"]
            Logger.debug(" ".join(cmd))
            cmd = ['debcommit']
            Logger.debug(' '.join(cmd))
            if subprocess.call(cmd) != 0:
                Logger.error("Bzr commit failed.")
                Logger.error('Bzr commit failed.')
                sys.exit(1)
            cmd = ["bzr", "mark-uploaded"]
            Logger.debug(" ".join(cmd))
            cmd = ['bzr', 'mark-uploaded']
            Logger.debug(' '.join(cmd))
            if subprocess.call(cmd) != 0:
                Logger.error("Bzr tagging failed.")
                Logger.error('Bzr tagging failed.')
                sys.exit(1)
            cmd = ["bzr", "push", ":parent"]
            Logger.debug(" ".join(cmd))
            cmd = ['bzr', 'push', ':parent']
            Logger.debug(' '.join(cmd))
            if subprocess.call(cmd) != 0:
                Logger.error("Bzr push failed.")
                Logger.error('Bzr push failed.')
                sys.exit(1)
        return True

@ -176,10 +175,8 @@ class SourcePackage:

        if dist is None:
            dist = re.sub("-.*$", "", self._changelog.distributions)
        build_name = (
            f"{self._package}_{strip_epoch(self._version)}"
            f"_{self._builder.get_architecture()}.build"
        )
        build_name = "{}_{}_{}.build".format(self._package, strip_epoch(self._version),
                                             self._builder.get_architecture())
        self._build_log = os.path.join(self._buildresult, build_name)

        successful_built = False
@ -194,18 +191,20 @@ class SourcePackage:
            update = False

            # build package
            result = self._builder.build(self._dsc_file, dist, self._buildresult)
            result = self._builder.build(self._dsc_file, dist,
                                         self._buildresult)
            if result != 0:
                question = Question(["yes", "update", "retry", "no"])
                answer = question.ask("Do you want to resolve this issue manually", "yes")
                if answer == "yes":
                    break
                if answer == "update":
                elif answer == "update":
                    update = True
                    continue
                if answer == "retry":
                elif answer == "retry":
                    continue
                user_abort()
                else:
                    user_abort()
            successful_built = True
        if not successful_built:
            # We want to do a manual fix if the build failed.
@ -225,14 +224,13 @@ class SourcePackage:
        """

        if self._branch:
            cmd = ["bzr", "builddeb", "--builder=debuild", "-S", "--", "--no-lintian", "-nc"]
            cmd = ['bzr', 'builddeb', '--builder=debuild', '-S',
                   '--', '--no-lintian', '-nc']
        else:
            cmd = ["debuild", "--no-lintian", "-nc", "-S"]
            cmd = ['debuild', '--no-lintian', '-nc', '-S']
        cmd.append("-v" + previous_version.full_version)
        if (
            previous_version.upstream_version == self._changelog.upstream_version
            and upload == "ubuntu"
        ):
        if previous_version.upstream_version == \
           self._changelog.upstream_version and upload == "ubuntu":
            # FIXME: Add proper check that catches cases like changed
            # compression (.tar.gz -> tar.bz2) and multiple orig source tarballs
            cmd.append("-sd")
@ -241,9 +239,9 @@ class SourcePackage:
        if keyid is not None:
            cmd += ["-k" + keyid]
        env = os.environ
        if upload == "ubuntu":
            env["DEB_VENDOR"] = "Ubuntu"
        Logger.debug(" ".join(cmd))
        if upload == 'ubuntu':
            env['DEB_VENDOR'] = 'Ubuntu'
        Logger.debug(' '.join(cmd))
        if subprocess.call(cmd, env=env) != 0:
            Logger.error("Failed to build source tarball.")
            # TODO: Add a "retry" option
@ -254,9 +252,8 @@ class SourcePackage:
    @property
    def _changes_file(self):
        """Returns the file name of the .changes file."""
        return os.path.join(
            self._workdir, f"{self._package}_{strip_epoch(self._version)}_source.changes"
        )
        return os.path.join(self._workdir, "{}_{}_source.changes"
                            .format(self._package, strip_epoch(self._version)))

    def check_target(self, upload, launchpad):
        """Make sure that the target is correct.
@ -268,24 +265,18 @@ class SourcePackage:
        (devel_series, supported_series) = _get_series(launchpad)

        if upload == "ubuntu":
            allowed = (
                supported_series + [s + "-proposed" for s in supported_series] + [devel_series]
            )
            allowed = supported_series + \
                      [s + "-proposed" for s in supported_series] + \
                      [devel_series]
            if self._changelog.distributions not in allowed:
                Logger.error(
                    "%s is not an allowed series. It needs to be one of %s.",
                    self._changelog.distributions,
                    ", ".join(allowed),
                )
                Logger.error("%s is not an allowed series. It needs to be one of %s." %
                             (self._changelog.distributions, ", ".join(allowed)))
                return ask_for_ignoring_or_fixing()
        elif upload and upload.startswith("ppa/"):
            allowed = supported_series + [devel_series]
            if self._changelog.distributions not in allowed:
                Logger.error(
                    "%s is not an allowed series. It needs to be one of %s.",
                    self._changelog.distributions,
                    ", ".join(allowed),
                )
                Logger.error("%s is not an allowed series. It needs to be one of %s." %
                             (self._changelog.distributions, ", ".join(allowed)))
                return ask_for_ignoring_or_fixing()
        return True

@ -297,21 +288,18 @@ class SourcePackage:
        """

        if self._version <= previous_version:
            Logger.error(
                "The version %s is not greater than the already available %s.",
                self._version,
                previous_version,
            )
            Logger.error("The version %s is not greater than the already "
                         "available %s.", self._version, previous_version)
            return ask_for_ignoring_or_fixing()
        return True

    def check_sync_request_version(self, bug_number, task):
        """Check if the downloaded version of the package is mentioned in the
        bug title."""
        bug title."""

        if not task.title_contains(self._version):
            print(f"Bug #{bug_number} title: {task.get_bug_title()}")
            msg = f"Is {self._package} {self._version} the version that should be synced"
            print("Bug #%i title: %s" % (bug_number, task.get_bug_title()))
            msg = "Is %s %s the version that should be synced" % (self._package, self._version)
            answer = YesNoQuestion().ask(msg, "no")
            if answer == "no":
                user_abort()
@ -319,27 +307,32 @@ class SourcePackage:
    @property
    def _debdiff_filename(self):
        """Returns the file name of the .debdiff file."""
        debdiff_name = f"{self._package}_{strip_epoch(self._version)}.debdiff"
        debdiff_name = "{}_{}.debdiff".format(self._package, strip_epoch(self._version))
        return os.path.join(self._workdir, debdiff_name)

    @property
    def _dsc_file(self):
        """Returns the file name of the .dsc file."""
        return os.path.join(self._workdir, f"{self._package}_{strip_epoch(self._version)}.dsc")
        return os.path.join(self._workdir, "{}_{}.dsc".format(self._package,
                                                              strip_epoch(self._version)))

    def generate_debdiff(self, dsc_file):
        """Generates a debdiff between the given .dsc file and this source
        package."""
        package."""

        assert os.path.isfile(dsc_file), f"{dsc_file} does not exist."
        assert os.path.isfile(self._dsc_file), f"{self._dsc_file} does not exist."
        assert os.path.isfile(dsc_file), "%s does not exist." % (dsc_file)
        assert os.path.isfile(self._dsc_file), "%s does not exist." % \
                                               (self._dsc_file)
        cmd = ["debdiff", dsc_file, self._dsc_file]
        if not Logger.isEnabledFor(logging.DEBUG):
            cmd.insert(1, "-q")
        Logger.debug("%s > %s", " ".join(cmd), self._debdiff_filename)
        with open(self._debdiff_filename, "w", encoding="utf-8") as debdiff_file:
            debdiff = subprocess.run(cmd, check=False, stdout=debdiff_file)
        assert debdiff.returncode in (0, 1)
        Logger.debug(' '.join(cmd) + " > " + self._debdiff_filename)
        debdiff = subprocess.check_output(cmd, encoding='utf-8')

        # write debdiff file
        debdiff_file = open(self._debdiff_filename, "w")
        debdiff_file.writelines(debdiff)
        debdiff_file.close()

    def is_fixed(self, lp_bug):
        """Make sure that the given Launchpad bug is closed.
@ -348,8 +341,8 @@ class SourcePackage:
        change something.
        """

        assert os.path.isfile(self._changes_file), f"{self._changes_file} does not exist."
        changes = debian.deb822.Changes(open(self._changes_file, encoding="utf-8"))
        assert os.path.isfile(self._changes_file), "%s does not exist." % (self._changes_file)
        changes = debian.deb822.Changes(open(self._changes_file))
        fixed_bugs = []
        if "Launchpad-Bugs-Fixed" in changes:
            fixed_bugs = changes["Launchpad-Bugs-Fixed"].split(" ")
@ -361,7 +354,7 @@ class SourcePackage:
            lp_bug = lp_bug.duplicate_of

        if lp_bug.id not in fixed_bugs:
            Logger.error("Launchpad bug #%i is not closed by new version.", lp_bug.id)
            Logger.error("Launchpad bug #%i is not closed by new version." % (lp_bug.id))
            return ask_for_ignoring_or_fixing()
        return True

@ -369,7 +362,7 @@ class SourcePackage:
        """Print things that should be checked before uploading a package."""

        lintian_filename = self._run_lintian()
        print(f"\nPlease check {self._package} {self._version} carefully:")
        print("\nPlease check %s %s carefully:" % (self._package, self._version))
        if os.path.isfile(self._debdiff_filename):
            print("file://" + self._debdiff_filename)
        print("file://" + lintian_filename)
@ -386,9 +379,8 @@ class SourcePackage:
        # Check the changelog
        self._changelog = debian.changelog.Changelog()
        try:
            self._changelog.parse_changelog(
                open("debian/changelog", encoding="utf-8"), max_blocks=1, strict=True
            )
            self._changelog.parse_changelog(open("debian/changelog"),
                                            max_blocks=1, strict=True)
        except debian.changelog.ChangelogParseError as error:
            Logger.error("The changelog entry doesn't validate: %s", str(error))
            ask_for_manual_fixing()
@ -398,10 +390,8 @@ class SourcePackage:
        try:
            self._version = self._changelog.get_version()
        except IndexError:
            Logger.error(
                "Debian package version could not be determined. "
                "debian/changelog is probably malformed."
            )
            Logger.error("Debian package version could not be determined. "
                         "debian/changelog is probably malformed.")
            ask_for_manual_fixing()
            return False

@ -415,29 +405,25 @@ class SourcePackage:

        # Determine whether to use the source or binary build for lintian
        if self._build_log:
            build_changes = (
                self._package
                + "_"
                + strip_epoch(self._version)
                + "_"
                + self._builder.get_architecture()
                + ".changes"
            )
            build_changes = self._package + "_" + strip_epoch(self._version) + \
                            "_" + self._builder.get_architecture() + ".changes"
            changes_for_lintian = os.path.join(self._buildresult, build_changes)
        else:
            changes_for_lintian = self._changes_file

        # Check lintian
        assert os.path.isfile(changes_for_lintian), f"{changes_for_lintian} does not exist."
        cmd = ["lintian", "-IE", "--pedantic", "-q", "--profile", "ubuntu", changes_for_lintian]
        lintian_filename = os.path.join(
            self._workdir, self._package + "_" + strip_epoch(self._version) + ".lintian"
        )
        Logger.debug("%s > %s", " ".join(cmd), lintian_filename)
        report = subprocess.check_output(cmd, encoding="utf-8")
        assert os.path.isfile(changes_for_lintian), "%s does not exist." % \
                                                    (changes_for_lintian)
        cmd = ["lintian", "-IE", "--pedantic", "-q", "--profile", "ubuntu",
               changes_for_lintian]
        lintian_filename = os.path.join(self._workdir,
                                        self._package + "_" +
                                        strip_epoch(self._version) + ".lintian")
        Logger.debug(' '.join(cmd) + " > " + lintian_filename)
        report = subprocess.check_output(cmd, encoding='utf-8')

        # write lintian report file
        lintian_file = open(lintian_filename, "w", encoding="utf-8")
        lintian_file = open(lintian_filename, "w")
        lintian_file.writelines(report)
        lintian_file.close()

@ -447,25 +433,17 @@ class SourcePackage:
        """Does a sync of the source package."""

        if upload == "ubuntu":
            cmd = [
                "syncpackage",
                self._package,
                "-b",
                str(bug_number),
                "-f",
                "-s",
                requester,
                "-V",
                str(self._version),
                "-d",
                series,
            ]
            Logger.debug(" ".join(cmd))
            cmd = ["syncpackage", self._package, "-b", str(bug_number), "-f",
                   "-s", requester, "-V", str(self._version),
                   "-d", series]
            Logger.debug(' '.join(cmd))
            if subprocess.call(cmd) != 0:
                Logger.error("Syncing of %s %s failed.", self._package, str(self._version))
                Logger.error("Syncing of %s %s failed.", self._package,
                             str(self._version))
                sys.exit(1)
        else:
            # FIXME: Support this use case!
            Logger.error("Uploading a synced package other than to Ubuntu is not supported yet!")
            Logger.error("Uploading a synced package other than to Ubuntu "
                         "is not supported yet!")
            sys.exit(1)
        return True
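
strip_epoch above only drops the epoch prefix, which is why the .changes,
.dsc and .debdiff helpers use it to build file names. Per its docstring:

    from ubuntutools.version import Version

    print(strip_epoch(Version("1:1.1.3-1")))  # -> 1.1.3-1
    print(strip_epoch(Version("1.1.3-1")))    # unchanged -> 1.1.3-1
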
@ -15,7 +15,6 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import logging
import os
import pwd
import shutil
@ -23,43 +22,47 @@ import subprocess
import sys

from distro_info import UbuntuDistroInfo

from launchpadlib.launchpad import Launchpad

from ubuntutools.update_maintainer import (update_maintainer,
                                           MaintainerUpdateException)
from ubuntutools.question import input_number

from ubuntutools.sponsor_patch.bugtask import BugTask, is_sync
from ubuntutools.sponsor_patch.patch import Patch
from ubuntutools.sponsor_patch.question import ask_for_manual_fixing
from ubuntutools.sponsor_patch.source_package import SourcePackage
from ubuntutools.update_maintainer import MaintainerUpdateException, update_maintainer

import logging
Logger = logging.getLogger(__name__)


def is_command_available(command, check_sbin=False):
    "Is command in $PATH?"
    path = os.environ.get("PATH", "/usr/bin:/bin").split(":")
    path = os.environ.get('PATH', '/usr/bin:/bin').split(':')
    if check_sbin:
        path += [f"{directory[:-3]}sbin" for directory in path if directory.endswith("/bin")]
    return any(os.access(os.path.join(directory, command), os.X_OK) for directory in path)
        path += [directory[:-3] + 'sbin'
                 for directory in path if directory.endswith('/bin')]
    return any(os.access(os.path.join(directory, command), os.X_OK)
               for directory in path)


def check_dependencies():
    "Do we have all the commands we need for full functionality?"
    missing = []
    for cmd in ("patch", "bzr", "quilt", "dput", "lintian"):
    for cmd in ('patch', 'bzr', 'quilt', 'dput', 'lintian'):
        if not is_command_available(cmd):
            missing.append(cmd)
    if not is_command_available("bzr-buildpackage"):
        missing.append("bzr-builddeb")
    if not any(
        is_command_available(cmd, check_sbin=True) for cmd in ("pbuilder", "sbuild", "cowbuilder")
    ):
        missing.append("pbuilder/cowbuilder/sbuild")
    if not is_command_available('bzr-buildpackage'):
        missing.append('bzr-builddeb')
    if not any(is_command_available(cmd, check_sbin=True)
               for cmd in ('pbuilder', 'sbuild', 'cowbuilder')):
        missing.append('pbuilder/cowbuilder/sbuild')

    if missing:
        Logger.warning(
            "sponsor-patch requires %s to be installed for full functionality", ", ".join(missing)
        )
        Logger.warning("sponsor-patch requires %s to be installed for full "
                       "functionality", ', '.join(missing))


def get_source_package_name(bug_task):
@ -81,18 +84,15 @@ def get_user_shell():
def edit_source():
    # Spawn shell to allow modifications
    cmd = [get_user_shell()]
    Logger.debug(" ".join(cmd))
    print(
        f"""An interactive shell was launched in
file://{os.getcwd()}
    Logger.debug(' '.join(cmd))
    print("""An interactive shell was launched in
file://%s
Edit your files. When you are done, exit the shell. If you wish to abort the
process, exit the shell such that it returns an exit code other than zero.
""",
        end=" ",
    )
""" % (os.getcwd()), end=' ')
    returncode = subprocess.call(cmd)
    if returncode != 0:
        Logger.error("Shell exited with exit value %i.", returncode)
        Logger.error("Shell exited with exit value %i." % (returncode))
        sys.exit(1)


@ -100,26 +100,30 @@ def ask_for_patch_or_branch(bug, attached_patches, linked_branches):
    patch = None
    branch = None
    if len(attached_patches) == 0:
        msg = f"{len(linked_branches)} branches linked:"
        msg = "https://launchpad.net/bugs/%i has %i branches linked:" % \
              (bug.id, len(linked_branches))
    elif len(linked_branches) == 0:
        msg = f"{len(attached_patches)} patches attached:"
        msg = "https://launchpad.net/bugs/%i has %i patches attached:" % \
              (bug.id, len(attached_patches))
    else:
        branches = f"{len(linked_branches)} branch"
        branches = "%i branch" % len(linked_branches)
        if len(linked_branches) > 1:
            branches += "es"
        patches = f"{len(attached_patches)} patch"
        patches = "%i patch" % len(attached_patches)
        if len(attached_patches) > 1:
            patches += "es"
        msg = f"{branches} linked and {patches} attached:"
    Logger.info("https://launchpad.net/bugs/%i has %s", bug.id, msg)
        msg = "https://launchpad.net/bugs/%i has %s linked and %s attached:" % \
              (bug.id, branches, patches)
    Logger.info(msg)
    i = 0
    for linked_branch in linked_branches:
        i += 1
        print(f"{i}) {linked_branch.display_name}")
        print("%i) %s" % (i, linked_branch.display_name))
    for attached_patch in attached_patches:
        i += 1
        print(f"{i}) {attached_patch.title}")
    selected = input_number("Which branch or patch do you want to download", 1, i, i)
        print("%i) %s" % (i, attached_patch.title))
    selected = input_number("Which branch or patch do you want to download",
                            1, i, i)
    if selected <= len(linked_branches):
        branch = linked_branches[selected - 1].bzr_identity
    else:
@ -135,26 +139,21 @@ def get_patch_or_branch(bug):
    linked_branches = [b.branch for b in bug.linked_branches]
    if len(attached_patches) == 0 and len(linked_branches) == 0:
        if len(bug.attachments) == 0:
            Logger.error(
                "No attachment and no linked branch found on "
                "bug #%i. Add the tag sync to the bug if it is "
                "a sync request.",
                bug.id,
            )
            Logger.error("No attachment and no linked branch found on "
                         "bug #%i. Add the tag sync to the bug if it is "
                         "a sync request.", bug.id)
        else:
            Logger.error(
                "No attached patch and no linked branch found. "
                "Go to https://launchpad.net/bugs/%i and mark an "
                "attachment as patch.",
                bug.id,
            )
            Logger.error("No attached patch and no linked branch found. "
                         "Go to https://launchpad.net/bugs/%i and mark an "
                         "attachment as patch.", bug.id)
        sys.exit(1)
    elif len(attached_patches) == 1 and len(linked_branches) == 0:
        patch = Patch(attached_patches[0])
    elif len(attached_patches) == 0 and len(linked_branches) == 1:
        branch = linked_branches[0].bzr_identity
    else:
        patch, branch = ask_for_patch_or_branch(bug, attached_patches, linked_branches)
        patch, branch = ask_for_patch_or_branch(bug, attached_patches,
                                                linked_branches)
    return (patch, branch)


@ -163,9 +162,9 @@ def download_branch(branch):
    if os.path.isdir(dir_name):
        shutil.rmtree(dir_name)
    cmd = ["bzr", "branch", branch]
    Logger.debug(" ".join(cmd))
    Logger.debug(' '.join(cmd))
    if subprocess.call(cmd) != 0:
        Logger.error("Failed to download branch %s.", branch)
        Logger.error("Failed to download branch %s." % (branch))
        sys.exit(1)
    return dir_name

@ -173,21 +172,21 @@ def download_branch(branch):
def merge_branch(branch):
    edit = False
    cmd = ["bzr", "merge", branch]
    Logger.debug(" ".join(cmd))
    Logger.debug(' '.join(cmd))
    if subprocess.call(cmd) != 0:
        Logger.error("Failed to merge branch %s.", branch)
        Logger.error("Failed to merge branch %s." % (branch))
        ask_for_manual_fixing()
        edit = True
    return edit


def extract_source(dsc_file, verbose=False):
    cmd = ["dpkg-source", "--skip-patches", "-x", dsc_file]
    cmd = ["dpkg-source", "--no-preparation", "-x", dsc_file]
    if not verbose:
        cmd.insert(1, "-q")
    Logger.debug(" ".join(cmd))
    Logger.debug(' '.join(cmd))
    if subprocess.call(cmd) != 0:
        Logger.error("Extraction of %s failed.", os.path.basename(dsc_file))
        Logger.error("Extraction of %s failed." % (os.path.basename(dsc_file)))
        sys.exit(1)


@ -202,18 +201,19 @@ def get_open_ubuntu_bug_task(launchpad, bug, branch=None):
    ubuntu_tasks = [x for x in bug_tasks if x.is_ubuntu_task()]
    bug_id = bug.id
    if branch:
        branch = branch.split("/")
        branch = branch.split('/')
        # Non-production LP?
        if len(branch) > 5:
            branch = branch[3:]

    if len(ubuntu_tasks) == 0:
        Logger.error("No Ubuntu bug task found on bug #%i.", bug_id)
        Logger.error("No Ubuntu bug task found on bug #%i." % (bug_id))
        sys.exit(1)
    elif len(ubuntu_tasks) == 1:
        task = ubuntu_tasks[0]
    if len(ubuntu_tasks) > 1 and branch and branch[1] == "ubuntu":
        tasks = [t for t in ubuntu_tasks if t.get_series() == branch[2] and t.package == branch[3]]
    if len(ubuntu_tasks) > 1 and branch and branch[1] == 'ubuntu':
        tasks = [t for t in ubuntu_tasks if
                 t.get_series() == branch[2] and t.package == branch[3]]
        if len(tasks) > 1:
            # A bug targeted to the development series?
            tasks = [t for t in tasks if t.series is not None]
@ -221,26 +221,21 @@ def get_open_ubuntu_bug_task(launchpad, bug, branch=None):
        task = tasks[0]
    elif len(ubuntu_tasks) > 1:
        task_list = [t.get_short_info() for t in ubuntu_tasks]
        Logger.debug(
            "%i Ubuntu tasks exist for bug #%i.\n%s",
            len(ubuntu_tasks),
            bug_id,
            "\n".join(task_list),
        )
        Logger.debug("%i Ubuntu tasks exist for bug #%i.\n%s", len(ubuntu_tasks),
                     bug_id, "\n".join(task_list))
        open_ubuntu_tasks = [x for x in ubuntu_tasks if not x.is_complete()]
        if len(open_ubuntu_tasks) == 1:
            task = open_ubuntu_tasks[0]
        else:
            Logger.info(
                "https://launchpad.net/bugs/%i has %i Ubuntu tasks:", bug_id, len(ubuntu_tasks)
            )
            for i, ubuntu_task in enumerate(ubuntu_tasks):
                print(f"{i + 1}) {ubuntu_task.get_package_and_series()}")
            selected = input_number(
                "To which Ubuntu task does the patch belong", 1, len(ubuntu_tasks)
            )
            Logger.info("https://launchpad.net/bugs/%i has %i Ubuntu tasks:" %
                        (bug_id, len(ubuntu_tasks)))
            for i in range(len(ubuntu_tasks)):
                print("%i) %s" % (i + 1,
                                  ubuntu_tasks[i].get_package_and_series()))
            selected = input_number("To which Ubuntu task does the patch belong",
                                    1, len(ubuntu_tasks))
            task = ubuntu_tasks[selected - 1]
    Logger.debug("Selected Ubuntu task: %s", task.get_short_info())
    Logger.debug("Selected Ubuntu task: %s" % (task.get_short_info()))
    return task


@ -251,15 +246,11 @@ def _create_and_change_into(workdir):
    try:
        os.makedirs(workdir)
    except os.error as error:
        Logger.error(
            "Failed to create the working directory %s [Errno %i]: %s.",
            workdir,
            error.errno,
            error.strerror,
        )
        Logger.error("Failed to create the working directory %s [Errno %i]: %s." %
                     (workdir, error.errno, error.strerror))
        sys.exit(1)
    if workdir != os.getcwd():
        Logger.debug("cd %s", workdir)
        Logger.debug("cd " + workdir)
        os.chdir(workdir)


@ -276,7 +267,7 @@ def _update_maintainer_field():
def _update_timestamp():
    """Run dch to update the timestamp of debian/changelog."""
    cmd = ["dch", "--maintmaint", "--release", ""]
    Logger.debug(" ".join(cmd))
    Logger.debug(' '.join(cmd))
    if subprocess.call(cmd) != 0:
        Logger.debug("Failed to update timestamp in debian/changelog.")

@ -288,13 +279,13 @@ def _download_and_change_into(task, dsc_file, patch, branch):
        branch_dir = download_branch(task.get_branch_link())

        # change directory
        Logger.debug("cd %s", branch_dir)
        Logger.debug("cd " + branch_dir)
        os.chdir(branch_dir)
    else:
        if patch:
            patch.download()

        Logger.debug("Ubuntu package: %s", task.package)
        Logger.debug("Ubuntu package: %s" % (task.package))
        if task.is_merge():
            Logger.debug("The task is a merge request.")
        if task.is_sync():
@ -303,12 +294,13 @@ def _download_and_change_into(task, dsc_file, patch, branch):
        extract_source(dsc_file, Logger.isEnabledFor(logging.DEBUG))

        # change directory
        directory = f"{task.package}-{task.get_version().upstream_version}"
        Logger.debug("cd %s", directory)
        directory = task.package + '-' + task.get_version().upstream_version
        Logger.debug("cd " + directory)
        os.chdir(directory)


def sponsor_patch(bug_number, build, builder, edit, keyid, lpinstance, update, upload, workdir):
def sponsor_patch(bug_number, build, builder, edit, keyid, lpinstance, update,
                  upload, workdir):
    workdir = os.path.realpath(os.path.expanduser(workdir))
    _create_and_change_into(workdir)

@ -339,13 +331,17 @@ def sponsor_patch(bug_number, build, builder, edit, keyid, lpinstance, update, u
            update = False
        else:
            # We are going to run lintian, so we need a source package
            successful = source_package.build_source(None, upload, previous_version)
            successful = source_package.build_source(None, upload,
                                                     previous_version)

        if successful:
            series = task.get_debian_source_series()
            if source_package.sync(upload, series, bug_number, bug.owner.name):
                return
                edit = True
            else:
                edit = True
        else:
            edit = True

    if patch:
        edit |= patch.apply(task)
@ -367,7 +363,8 @@ def sponsor_patch(bug_number, build, builder, edit, keyid, lpinstance, update, u

    _update_timestamp()

    if not source_package.build_source(keyid, upload, task.get_previous_version()):
    if not source_package.build_source(keyid, upload,
                                       task.get_previous_version()):
        continue

    source_package.generate_debdiff(dsc_file)
@@ -17,76 +17,66 @@
import os
import subprocess
import tempfile
from pathlib import Path

from pathlib import Path
from ubuntutools.version import Version


class ExamplePackage:
def __init__(self, source="example", version="1.0-1", destdir="test-data"):
class ExamplePackage(object):
def __init__(self, source='example', version='1.0-1', destdir='test-data'):
self.source = source
self.version = Version(version)
self.destdir = Path(destdir)

self.env = dict(os.environ)
self.env["DEBFULLNAME"] = "Example"
self.env["DEBEMAIL"] = "example@example.net"
self.env['DEBFULLNAME'] = 'Example'
self.env['DEBEMAIL'] = 'example@example.net'

@property
def orig(self):
return self.destdir / f"{self.source}_{self.version.upstream_version}.orig.tar.xz"
return self.destdir / f'{self.source}_{self.version.upstream_version}.orig.tar.xz'

@property
def debian(self):
return self.destdir / f"{self.source}_{self.version}.debian.tar.xz"
return self.destdir / f'{self.source}_{self.version}.debian.tar.xz'

@property
def dsc(self):
return self.destdir / f"{self.source}_{self.version}.dsc"
return self.destdir / f'{self.source}_{self.version}.dsc'

@property
def dirname(self):
return f"{self.source}-{self.version.upstream_version}"
return f'{self.source}-{self.version.upstream_version}'

@property
def content_filename(self):
return "content"
return 'content'

@property
def content_text(self):
return "my content"
return 'my content'

def create(self):
with tempfile.TemporaryDirectory() as tmpdir:
self._create(Path(tmpdir))
with tempfile.TemporaryDirectory() as d:
self._create(Path(d))

def _create(self, directory: Path):
pkgdir = directory / self.dirname
def _create(self, d):
pkgdir = d / self.dirname
pkgdir.mkdir()
(pkgdir / self.content_filename).write_text(self.content_text)

# run dh_make to create orig tarball
subprocess.run(
"dh_make -sy --createorig".split(),
check=True,
env=self.env,
cwd=str(pkgdir),
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
subprocess.run('dh_make -sy --createorig'.split(),
check=True, env=self.env, cwd=str(pkgdir),
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

# run dpkg-source -b to create debian tar and dsc
subprocess.run(
f"dpkg-source -b {self.dirname}".split(),
check=True,
env=self.env,
cwd=str(directory),
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
subprocess.run(f'dpkg-source -b {self.dirname}'.split(),
check=True, env=self.env, cwd=str(d),
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

# move tarballs and dsc to destdir
self.destdir.mkdir(parents=True, exist_ok=True)
(directory / self.orig.name).rename(self.orig)
(directory / self.debian.name).rename(self.debian)
(directory / self.dsc.name).rename(self.dsc)
(d / self.orig.name).rename(self.orig)
(d / self.debian.name).rename(self.debian)
(d / self.dsc.name).rename(self.dsc)
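For orientation, the test helper above is driven by the archive tests like this (a sketch based on the test code further down; it assumes dh_make and dpkg-source are installed, since create() shells out to both):

```python
import tempfile
from pathlib import Path

from ubuntutools.test.example_package import ExamplePackage

with tempfile.TemporaryDirectory() as tmpdir:
    pkg = ExamplePackage(destdir=Path(tmpdir))
    pkg.create()    # writes the .orig.tar.xz, .debian.tar.xz and .dsc into destdir
    print(pkg.dsc)  # e.g. <tmpdir>/example_1.0-1.dsc
```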
@@ -1,10 +1,5 @@
[MASTER]

# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=apt_pkg

# Pickle collected data for later comparisons.
persistent=no

@@ -14,6 +9,10 @@ jobs=0

[MESSAGES CONTROL]

# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=HIGH

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
@@ -23,18 +22,7 @@ jobs=0
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=fixme,locally-disabled,missing-docstring,useless-option-value,
# TODO: Fix all following disabled checks!
invalid-name,
consider-using-with,
too-many-arguments,
too-many-branches,
too-many-statements,
too-many-locals,
duplicate-code,
too-many-instance-attributes,
too-many-nested-blocks,
too-many-lines,
disable=locally-disabled


[REPORTS]
@@ -43,6 +31,14 @@ disable=fixme,locally-disabled,missing-docstring,useless-option-value,
reports=no


[TYPECHECK]

# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
# lpapicache classes, urlparse
ignored-classes=Launchpad,BaseWrapper,PersonTeam,Distribution,Consumer,Credentials,ParseResult,apt_pkg,apt_pkg.Dependency,apt_pkg.BaseDependency


[FORMAT]

# Maximum number of characters on a single line.
@@ -56,10 +52,4 @@ indent-string='    '
[BASIC]

# Allow variables called e, f, lp
good-names=i,j,k,ex,Run,_,e,f,lp,me,to


[IMPORTS]

# Force import order to recognize a module as part of a third party library.
known-third-party=debian
good-names=i,j,k,ex,Run,_,e,f,lp
@@ -18,17 +18,19 @@
import filecmp
import tempfile
import unittest
from pathlib import Path

import ubuntutools.archive

from pathlib import Path

from ubuntutools.test.example_package import ExamplePackage


class BaseVerificationTestCase(unittest.TestCase):
def setUp(self):
tmpdir = tempfile.TemporaryDirectory()
self.addCleanup(tmpdir.cleanup)
self.pkg = ExamplePackage(destdir=Path(tmpdir.name))
d = tempfile.TemporaryDirectory()
self.addCleanup(d.cleanup)
self.pkg = ExamplePackage(destdir=Path(d.name))
self.pkg.create()
self.dsc = ubuntutools.archive.Dsc(self.pkg.dsc.read_bytes())

@@ -39,7 +41,7 @@ class DscVerificationTestCase(BaseVerificationTestCase):
self.assertTrue(self.dsc.verify_file(self.pkg.debian))

def test_missing(self):
self.assertFalse(self.dsc.verify_file(self.pkg.destdir / "does.not.exist"))
self.assertFalse(self.dsc.verify_file(self.pkg.destdir / 'does.not.exist'))

def test_bad(self):
data = self.pkg.orig.read_bytes()
@@ -49,13 +51,13 @@ class DscVerificationTestCase(BaseVerificationTestCase):
self.assertFalse(self.dsc.verify_file(self.pkg.orig))

def test_sha1(self):
del self.dsc["Checksums-Sha256"]
del self.dsc['Checksums-Sha256']
self.test_good()
self.test_bad()

def test_md5(self):
del self.dsc["Checksums-Sha256"]
del self.dsc["Checksums-Sha1"]
del self.dsc['Checksums-Sha256']
del self.dsc['Checksums-Sha1']
self.test_good()
self.test_bad()

@@ -65,17 +67,17 @@ class LocalSourcePackageTestCase(BaseVerificationTestCase):

def setUp(self):
super().setUp()
tmpdir = tempfile.TemporaryDirectory()
self.addCleanup(tmpdir.cleanup)
self.workdir = Path(tmpdir.name)
d = tempfile.TemporaryDirectory()
self.addCleanup(d.cleanup)
self.workdir = Path(d.name)

def pull(self, **kwargs):
"""Do the pull from pkg dir to the workdir, return the SourcePackage"""
''' Do the pull from pkg dir to the workdir, return the SourcePackage '''
srcpkg = self.SourcePackage(dscfile=self.pkg.dsc, workdir=self.workdir, **kwargs)
srcpkg.pull()
return srcpkg

def _test_pull(self, **kwargs):
def test_pull(self, **kwargs):
srcpkg = self.pull(**kwargs)
self.assertTrue(filecmp.cmp(self.pkg.dsc, self.workdir / self.pkg.dsc.name))
self.assertTrue(filecmp.cmp(self.pkg.orig, self.workdir / self.pkg.orig.name))
@@ -83,16 +85,16 @@ class LocalSourcePackageTestCase(BaseVerificationTestCase):
return srcpkg

def test_unpack(self, **kwargs):
srcpkg = kwargs.get("srcpkg", self.pull(**kwargs))
srcpkg = kwargs.get('srcpkg', self.pull(**kwargs))
srcpkg.unpack()
content = self.workdir / self.pkg.dirname / self.pkg.content_filename
self.assertEqual(self.pkg.content_text, content.read_text())
debian = self.workdir / self.pkg.dirname / "debian"
debian = self.workdir / self.pkg.dirname / 'debian'
self.assertTrue(debian.exists())
self.assertTrue(debian.is_dir())

def test_pull_and_unpack(self, **kwargs):
self.test_unpack(srcpkg=self._test_pull(**kwargs))
self.test_unpack(srcpkg=self.test_pull(**kwargs))

def test_with_package(self):
self.test_pull_and_unpack(package=self.pkg.source)
@@ -101,12 +103,12 @@ class LocalSourcePackageTestCase(BaseVerificationTestCase):
self.test_pull_and_unpack(package=self.pkg.source, version=self.pkg.version)

def test_with_package_version_component(self):
self.test_pull_and_unpack(
package=self.pkg.source, version=self.pkg.version, component="main"
)
self.test_pull_and_unpack(package=self.pkg.source,
version=self.pkg.version,
component='main')

def test_verification(self):
corruption = b"CORRUPTION"
corruption = b'CORRUPTION'

self.pull()

@@ -117,7 +119,7 @@ class LocalSourcePackageTestCase(BaseVerificationTestCase):
testfile.write_bytes(corruption)
self.assertEqual(testfile.read_bytes(), corruption)

self._test_pull()
self.test_pull()
self.assertTrue(testfile.exists())
self.assertTrue(testfile.is_file())
self.assertNotEqual(testfile.read_bytes(), corruption)
@@ -17,7 +17,9 @@

import locale
import os
# import sys
import unittest

from io import StringIO
from unittest import mock

@@ -25,25 +27,27 @@ from ubuntutools.config import UDTConfig, ubu_email


class ConfigTestCase(unittest.TestCase):
_config_files = {"system": "", "user": ""}
_config_files = {
'system': '',
'user': '',
}

def _fake_open(self, filename, mode="r", encoding=None):
self.assertTrue(encoding, f"encoding for {filename} not specified")
if mode != "r":
def _fake_open(self, filename, mode='r'):
if mode != 'r':
raise IOError("Read only fake-file")
files = {
"/etc/devscripts.conf": self._config_files["system"],
os.path.expanduser("~/.devscripts"): self._config_files["user"],
'/etc/devscripts.conf': self._config_files['system'],
os.path.expanduser('~/.devscripts'): self._config_files['user'],
}
if filename not in files:
raise IOError(f"No such file or directory: '{filename}'")
raise IOError("No such file or directory: '%s'" % filename)
return StringIO(files[filename])

def setUp(self):
super().setUp()
open_mock = mock.mock_open()
open_mock.side_effect = self._fake_open
patcher = mock.patch("builtins.open", open_mock)
super(ConfigTestCase, self).setUp()
m = mock.mock_open()
m.side_effect = self._fake_open
patcher = mock.patch('builtins.open', m)
self.addCleanup(patcher.stop)
patcher.start()

@@ -61,16 +65,14 @@ class ConfigTestCase(unittest.TestCase):
self.clean_environment()

def clean_environment(self):
self._config_files["system"] = ""
self._config_files["user"] = ""
self._config_files['system'] = ''
self._config_files['user'] = ''
for k in list(os.environ.keys()):
if k.startswith(("UBUNTUTOOLS_", "TEST_")):
if k.startswith(('UBUNTUTOOLS_', 'TEST_')):
del os.environ[k]

def test_config_parsing(self):
self._config_files[
"user"
] = """#COMMENT=yes
self._config_files['user'] = """#COMMENT=yes
\tTAB_INDENTED=yes
 SPACE_INDENTED=yes
SPACE_SUFFIX=yes
@@ -83,64 +85,59 @@ INHERIT=user
REPEAT=no
REPEAT=yes
"""
self._config_files["system"] = "INHERIT=system"
self.assertEqual(
UDTConfig(prefix="TEST").config,
{
"TAB_INDENTED": "yes",
"SPACE_INDENTED": "yes",
"SPACE_SUFFIX": "yes",
"SINGLE_QUOTE": "yes no",
"DOUBLE_QUOTE": "yes no",
"QUOTED_QUOTE": "it's",
"PAIR_QUOTES": "yes a no",
"COMMAND_EXECUTION": "a",
"INHERIT": "user",
"REPEAT": "yes",
},
)
self._config_files['system'] = 'INHERIT=system'
self.assertEqual(UDTConfig(prefix='TEST').config, {
'TAB_INDENTED': 'yes',
'SPACE_INDENTED': 'yes',
'SPACE_SUFFIX': 'yes',
'SINGLE_QUOTE': 'yes no',
'DOUBLE_QUOTE': 'yes no',
'QUOTED_QUOTE': "it's",
'PAIR_QUOTES': 'yes a no',
'COMMAND_EXECUTION': 'a',
'INHERIT': 'user',
'REPEAT': 'yes',
})
# errs = Logger.stderr.getvalue().strip()
# Logger.stderr = StringIO()
# self.assertEqual(len(errs.splitlines()), 1)
# self.assertRegex(errs,
#                  r'Warning: Cannot parse.*\bCOMMAND_EXECUTION=a')

@staticmethod
def get_value(*args, **kwargs):
config = UDTConfig(prefix="TEST")
def get_value(self, *args, **kwargs):
config = UDTConfig(prefix='TEST')
return config.get_value(*args, **kwargs)

def test_defaults(self):
self.assertEqual(self.get_value("BUILDER"), "pbuilder")
self.assertEqual(self.get_value('BUILDER'), 'pbuilder')

def test_provided_default(self):
self.assertEqual(self.get_value("BUILDER", default="foo"), "foo")
self.assertEqual(self.get_value('BUILDER', default='foo'), 'foo')

def test_scriptname_precedence(self):
self._config_files[
"user"
] = """TEST_BUILDER=foo
self._config_files['user'] = """TEST_BUILDER=foo
UBUNTUTOOLS_BUILDER=bar"""
self.assertEqual(self.get_value("BUILDER"), "foo")
self.assertEqual(self.get_value('BUILDER'), 'foo')

def test_configfile_precedence(self):
self._config_files["system"] = "UBUNTUTOOLS_BUILDER=foo"
self._config_files["user"] = "UBUNTUTOOLS_BUILDER=bar"
self.assertEqual(self.get_value("BUILDER"), "bar")
self._config_files['system'] = "UBUNTUTOOLS_BUILDER=foo"
self._config_files['user'] = "UBUNTUTOOLS_BUILDER=bar"
self.assertEqual(self.get_value('BUILDER'), 'bar')

def test_environment_precedence(self):
self._config_files["user"] = "UBUNTUTOOLS_BUILDER=bar"
os.environ["UBUNTUTOOLS_BUILDER"] = "baz"
self.assertEqual(self.get_value("BUILDER"), "baz")
self._config_files['user'] = "UBUNTUTOOLS_BUILDER=bar"
os.environ['UBUNTUTOOLS_BUILDER'] = 'baz'
self.assertEqual(self.get_value('BUILDER'), 'baz')

def test_general_environment_specific_config_precedence(self):
self._config_files["user"] = "TEST_BUILDER=bar"
os.environ["UBUNTUTOOLS_BUILDER"] = "foo"
self.assertEqual(self.get_value("BUILDER"), "bar")
self._config_files['user'] = "TEST_BUILDER=bar"
os.environ['UBUNTUTOOLS_BUILDER'] = 'foo'
self.assertEqual(self.get_value('BUILDER'), 'bar')

def test_compat_keys(self):
self._config_files["user"] = "COMPATFOOBAR=bar"
self.assertEqual(self.get_value("QUX", compat_keys=["COMPATFOOBAR"]), "bar")
self._config_files['user'] = 'COMPATFOOBAR=bar'
self.assertEqual(self.get_value('QUX', compat_keys=['COMPATFOOBAR']),
'bar')
# errs = Logger.stderr.getvalue().strip()
# Logger.stderr = StringIO()
# self.assertEqual(len(errs.splitlines()), 1)
@@ -148,16 +145,16 @@ REPEAT=yes
#                  r'deprecated.*\bCOMPATFOOBAR\b.*\bTEST_QUX\b')

def test_boolean(self):
self._config_files["user"] = "TEST_BOOLEAN=yes"
self.assertEqual(self.get_value("BOOLEAN", boolean=True), True)
self._config_files["user"] = "TEST_BOOLEAN=no"
self.assertEqual(self.get_value("BOOLEAN", boolean=True), False)
self._config_files["user"] = "TEST_BOOLEAN=true"
self.assertEqual(self.get_value("BOOLEAN", boolean=True), None)
self._config_files['user'] = "TEST_BOOLEAN=yes"
self.assertEqual(self.get_value('BOOLEAN', boolean=True), True)
self._config_files['user'] = "TEST_BOOLEAN=no"
self.assertEqual(self.get_value('BOOLEAN', boolean=True), False)
self._config_files['user'] = "TEST_BOOLEAN=true"
self.assertEqual(self.get_value('BOOLEAN', boolean=True), None)

def test_nonpackagewide(self):
self._config_files["user"] = "UBUNTUTOOLS_FOOBAR=a"
self.assertEqual(self.get_value("FOOBAR"), None)
self._config_files['user'] = 'UBUNTUTOOLS_FOOBAR=a'
self.assertEqual(self.get_value('FOOBAR'), None)

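Taken together, the precedence tests above pin the lookup order for UDTConfig: script-specific keys beat generic UBUNTUTOOLS_ ones, environment variables beat configuration files, and the user file beats the system file. A hedged sketch of one of those rules (values illustrative):

```python
import os

from ubuntutools.config import UDTConfig

os.environ["UBUNTUTOOLS_BUILDER"] = "sbuild"  # generic environment setting
config = UDTConfig(prefix="TEST")
# Per test_general_environment_specific_config_precedence above, a
# script-specific TEST_BUILDER entry in ~/.devscripts would still win
# over this generic environment variable.
print(config.get_value("BUILDER"))
```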

class UbuEmailTestCase(unittest.TestCase):
@@ -167,72 +164,72 @@ class UbuEmailTestCase(unittest.TestCase):
def tearDown(self):
self.clean_environment()

@staticmethod
def clean_environment():
for k in ("UBUMAIL", "DEBEMAIL", "DEBFULLNAME"):
def clean_environment(self):
for k in ('UBUMAIL', 'DEBEMAIL', 'DEBFULLNAME'):
if k in os.environ:
del os.environ[k]

def test_pristine(self):
os.environ["DEBFULLNAME"] = name = "Joe Developer"
os.environ["DEBEMAIL"] = email = "joe@example.net"
os.environ['DEBFULLNAME'] = name = 'Joe Developer'
os.environ['DEBEMAIL'] = email = 'joe@example.net'
self.assertEqual(ubu_email(), (name, email))

def test_two_hat(self):
os.environ["DEBFULLNAME"] = name = "Joe Developer"
os.environ["DEBEMAIL"] = "joe@debian.org"
os.environ["UBUMAIL"] = email = "joe@ubuntu.com"
os.environ['DEBFULLNAME'] = name = 'Joe Developer'
os.environ['DEBEMAIL'] = 'joe@debian.org'
os.environ['UBUMAIL'] = email = 'joe@ubuntu.com'
self.assertEqual(ubu_email(), (name, email))
self.assertEqual(os.environ["DEBFULLNAME"], name)
self.assertEqual(os.environ["DEBEMAIL"], email)
self.assertEqual(os.environ['DEBFULLNAME'], name)
self.assertEqual(os.environ['DEBEMAIL'], email)

def test_two_hat_cmdlineoverride(self):
os.environ["DEBFULLNAME"] = "Joe Developer"
os.environ["DEBEMAIL"] = "joe@debian.org"
os.environ["UBUMAIL"] = "joe@ubuntu.com"
name = "Foo Bar"
email = "joe@example.net"
os.environ['DEBFULLNAME'] = 'Joe Developer'
os.environ['DEBEMAIL'] = 'joe@debian.org'
os.environ['UBUMAIL'] = 'joe@ubuntu.com'
name = 'Foo Bar'
email = 'joe@example.net'
self.assertEqual(ubu_email(name, email), (name, email))
self.assertEqual(os.environ["DEBFULLNAME"], name)
self.assertEqual(os.environ["DEBEMAIL"], email)
self.assertEqual(os.environ['DEBFULLNAME'], name)
self.assertEqual(os.environ['DEBEMAIL'], email)

def test_two_hat_noexport(self):
os.environ["DEBFULLNAME"] = name = "Joe Developer"
os.environ["DEBEMAIL"] = demail = "joe@debian.org"
os.environ["UBUMAIL"] = uemail = "joe@ubuntu.com"
os.environ['DEBFULLNAME'] = name = 'Joe Developer'
os.environ['DEBEMAIL'] = demail = 'joe@debian.org'
os.environ['UBUMAIL'] = uemail = 'joe@ubuntu.com'
self.assertEqual(ubu_email(export=False), (name, uemail))
self.assertEqual(os.environ["DEBFULLNAME"], name)
self.assertEqual(os.environ["DEBEMAIL"], demail)
self.assertEqual(os.environ['DEBFULLNAME'], name)
self.assertEqual(os.environ['DEBEMAIL'], demail)

def test_two_hat_with_name(self):
os.environ["DEBFULLNAME"] = "Joe Developer"
os.environ["DEBEMAIL"] = "joe@debian.org"
name = "Joe Ubuntunista"
email = "joe@ubuntu.com"
os.environ["UBUMAIL"] = f"{name} <{email}>"
os.environ['DEBFULLNAME'] = 'Joe Developer'
os.environ['DEBEMAIL'] = 'joe@debian.org'
name = 'Joe Ubuntunista'
email = 'joe@ubuntu.com'
os.environ['UBUMAIL'] = '%s <%s>' % (name, email)
self.assertEqual(ubu_email(), (name, email))
self.assertEqual(os.environ["DEBFULLNAME"], name)
self.assertEqual(os.environ["DEBEMAIL"], email)
self.assertEqual(os.environ['DEBFULLNAME'], name)
self.assertEqual(os.environ['DEBEMAIL'], email)

def test_debemail_with_name(self):
name = "Joe Developer"
email = "joe@example.net"
os.environ["DEBEMAIL"] = orig = f"{name} <{email}>"
name = 'Joe Developer'
email = 'joe@example.net'
os.environ['DEBEMAIL'] = orig = '%s <%s>' % (name, email)
self.assertEqual(ubu_email(), (name, email))
self.assertEqual(os.environ["DEBEMAIL"], orig)
self.assertEqual(os.environ['DEBEMAIL'], orig)

def test_unicode_name(self):
encoding = locale.getlocale()[1]
encoding = locale.getdefaultlocale()[1]
if not encoding:
encoding = "utf-8"
name = "Jöe Déveloper"
encoding = 'utf-8'
name = 'Jöe Déveloper'
env_name = name
if isinstance(name, bytes):
name = "Jöe Déveloper".decode("utf-8")
name = 'Jöe Déveloper'.decode('utf-8')
env_name = name.encode(encoding)
try:
os.environ["DEBFULLNAME"] = env_name
os.environ['DEBFULLNAME'] = env_name
except UnicodeEncodeError:
self.skipTest("python interpreter is not running in a unicode capable locale")
os.environ["DEBEMAIL"] = email = "joe@example.net"
raise unittest.SkipTest("python interpreter is not running in a "
"unicode capable locale")
os.environ['DEBEMAIL'] = email = 'joe@example.net'
self.assertEqual(ubu_email(), (name, email))
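The two-hat behaviour exercised above boils down to: UBUMAIL overrides DEBEMAIL, and a "Name <address>" value carries the name along. A quick illustration mirroring test_two_hat:

```python
import os

from ubuntutools.config import ubu_email

os.environ["DEBFULLNAME"] = "Joe Developer"
os.environ["DEBEMAIL"] = "joe@debian.org"
os.environ["UBUMAIL"] = "joe@ubuntu.com"
# UBUMAIL wins over DEBEMAIL when both are set.
print(ubu_email())  # -> ('Joe Developer', 'joe@ubuntu.com')
```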
@@ -19,6 +19,7 @@ import unittest

from setup import scripts


TIMEOUT = 10


@@ -26,12 +27,10 @@ class HelpTestCase(unittest.TestCase):
def test_script(self):
for script in scripts:
with self.subTest(script=script):
result = subprocess.run(
[f"./{script}", "--help"],
encoding="UTF-8",
timeout=10,
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
result = subprocess.run([f'./{script}', '--help'],
encoding='UTF-8',
timeout=10,
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.assertFalse(result.stderr.strip())
@@ -1,33 +0,0 @@
# Copyright (C) 2024 Canonical Ltd.
# Author: Chris Peterson <chris.peterson@canonical.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.

import unittest


# Binary Tests
class BinaryTests(unittest.TestCase):

# The requestsync binary has the option of using the launchpad api
# to log in but requires python3-keyring in addition to
# python3-launchpadlib. Testing the integrated login functionality
# automatically isn't very feasible, but we can at least write a smoke
# test to make sure the required packages are installed.
# See LP: #2049217
def test_keyring_installed(self):
"""Smoke test for required lp api dependencies"""
try:
import keyring  # noqa: F401
except ModuleNotFoundError:
raise ModuleNotFoundError("package python3-keyring is not installed")
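An aside on the deleted smoke test: raising a fresh ModuleNotFoundError from inside the except block chains two tracebacks together. An alternative sketch (not what the removed test did) probes for the module with importlib and fails with a single readable message:

```python
import importlib.util
import unittest


class KeyringInstalledTestCase(unittest.TestCase):
    def test_keyring_installed(self):
        # find_spec() returns None when the module cannot be imported.
        if importlib.util.find_spec("keyring") is None:
            self.fail("package python3-keyring is not installed")
```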
@@ -1,128 +0,0 @@
# Copyright (C) 2024 Canonical Ltd.
# Author: Chris Peterson <chris.peterson@canonical.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
""" Tests for running_autopkgtests
Tests using cached data from autopkgtest servers.

These tests only ensure code changes don't change parsing behavior
of the response data. If the response format changes, then the cached
responses will need to change as well.
"""

import unittest
from unittest.mock import patch

from ubuntutools.running_autopkgtests import (
URL_QUEUED,
URL_RUNNING,
_get_jobs,
get_queued,
get_running,
)

# Cached binary response data from autopkgtest server
RUN_DATA = (
b'{"pyatem": {'
b" \"submit-time_2024-01-19 19:37:36;triggers_['python3-defaults/3.12.1-0ubuntu1'];\":"
b' {"noble": {"arm64": [{"triggers": ["python3-defaults/3.12.1-0ubuntu1"],'
b' "submit-time": "2024-01-19 19:37:36"}, 380, "<omitted log>"]}}}}'
)
QUEUED_DATA = (
b'{"ubuntu": {"noble": {"arm64": ["libobject-accessor-perl {\\"requester\\": \\"someone\\",'
b' \\"submit-time\\": \\"2024-01-18 01:08:55\\",'
b' \\"triggers\\": [\\"perl/5.38.2-3\\", \\"liblocale-gettext-perl/1.07-6build1\\"]}"]}}}'
)

# Expected result(s) of parsing the above JSON data
RUNNING_JOB = {
"pyatem": {
"submit-time_2024-01-19 19:37:36;triggers_['python3-defaults/3.12.1-0ubuntu1'];": {
"noble": {
"arm64": [
{
"triggers": ["python3-defaults/3.12.1-0ubuntu1"],
"submit-time": "2024-01-19 19:37:36",
},
380,
"<omitted log>",
]
}
}
}
}

QUEUED_JOB = {
"ubuntu": {
"noble": {
"arm64": [
'libobject-accessor-perl {"requester": "someone",'
' "submit-time": "2024-01-18 01:08:55",'
' "triggers": ["perl/5.38.2-3", "liblocale-gettext-perl/1.07-6build1"]}'
]
}
}
}


PRIVATE_JOB = {"ppa": {"noble": {"arm64": ["private job"]}}}


# Expected textual output of the program based on the above data
RUNNING_OUTPUT = (
"R 0:06:20 pyatem - noble arm64"
" - python3-defaults/3.12.1-0ubuntu1 -\n"
)
QUEUED_OUTPUT = (
"Q0001 -:-- libobject-accessor-perl ubuntu noble arm64"
" - perl/5.38.2-3,liblocale-gettext-perl/1.07-6build1\n"
)
PRIVATE_OUTPUT = (
"Q0001 -:-- private job ppa noble arm64"
" private job private job\n"
)


class RunningAutopkgtestTestCase(unittest.TestCase):
"""Assert helper functions parse data correctly"""

maxDiff = None

@patch("urllib.request.urlopen")
def test_get_running_jobs(self, mock_response):
"""Test: Correctly parse autopkgtest json data for running tests"""
mock_response.return_value.__enter__.return_value.read.return_value = RUN_DATA
jobs = _get_jobs(URL_RUNNING)
self.assertEqual(RUNNING_JOB, jobs)

@patch("urllib.request.urlopen")
def test_get_queued_jobs(self, mock_response):
"""Test: Correctly parse autopkgtest json data for queued tests"""
mock_response.return_value.__enter__.return_value.read.return_value = QUEUED_DATA
jobs = _get_jobs(URL_QUEUED)
self.assertEqual(QUEUED_JOB, jobs)

def test_get_running_output(self):
"""Test: Correctly print running tests"""
with patch("ubuntutools.running_autopkgtests._get_jobs", return_value=RUNNING_JOB):
self.assertEqual(get_running(), RUNNING_OUTPUT)

def test_get_queued_output(self):
"""Test: Correctly print queued tests"""
with patch("ubuntutools.running_autopkgtests._get_jobs", return_value=QUEUED_JOB):
self.assertEqual(get_queued(), QUEUED_OUTPUT)

def test_private_queued_job(self):
"""Test: Correctly print queued private job"""
with patch("ubuntutools.running_autopkgtests._get_jobs", return_value=PRIVATE_JOB):
self.assertEqual(get_queued(), PRIVATE_OUTPUT)
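The deleted tests above pin one contract for _get_jobs: fetch the queue/running endpoint and decode its JSON body into nested dicts keyed by package (or distro), release and architecture. Roughly, and only as a sketch of what the mocks imply (the real implementation lives in ubuntutools/running_autopkgtests.py and may differ in detail):

```python
import json
import urllib.request


def get_jobs_sketch(url: str) -> dict:
    # Mirrors what the tests mock via urllib.request.urlopen:
    # read the response body and parse it as JSON.
    with urllib.request.urlopen(url) as response:
        return json.loads(response.read().decode("utf-8"))
```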
@@ -17,7 +17,9 @@
"""Test suite for ubuntutools.update_maintainer"""

import os
# import sys
import unittest

from io import StringIO
from unittest import mock

@@ -165,7 +167,8 @@ Source: seahorse-plugins
Section: gnome
Priority: optional
Maintainer: Emilio Pozuelo Monfort <pochu@debian.org>
Build-Depends: debhelper (>= 5)
Build-Depends: debhelper (>= 5),
 cdbs (>= 0.4.41)
Standards-Version: 3.8.3
Homepage: http://live.gnome.org/Seahorse

@@ -183,7 +186,8 @@ Section: gnome
Priority: optional
Maintainer: Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>
XSBC-Original-Maintainer: Emilio Pozuelo Monfort <pochu@debian.org>
Build-Depends: debhelper (>= 5)
Build-Depends: debhelper (>= 5),
 cdbs (>= 0.4.41)
Standards-Version: 3.8.3
Homepage: http://live.gnome.org/Seahorse

@@ -196,25 +200,25 @@ class UpdateMaintainerTestCase(unittest.TestCase):
"""TestCase object for ubuntutools.update_maintainer"""

_directory = "/"
_files = {"changelog": None, "control": None, "control.in": None, "rules": None}
_files = {
"changelog": None,
"control": None,
"control.in": None,
"rules": None,
}

def _fake_isfile(self, filename):
"""Check only for existing fake files."""
directory, base = os.path.split(filename)
return (
directory == self._directory and base in self._files and self._files[base] is not None
)
return (directory == self._directory and base in self._files and
self._files[base] is not None)

def _fake_open(self, filename, mode="r", encoding=None):
def _fake_open(self, filename, mode='r'):
"""Provide StringIO objects instead of real files."""
self.assertTrue(encoding, f"encoding for {filename} not specified")
directory, base = os.path.split(filename)
if (
directory != self._directory
or base not in self._files
or (mode == "r" and self._files[base] is None)
):
raise IOError(f"No such file or directory: '{filename}'")
if (directory != self._directory or base not in self._files or
(mode == "r" and self._files[base] is None)):
raise IOError("No such file or directory: '%s'" % filename)
if mode == "w":
self._files[base] = StringIO()
self._files[base].close = lambda: None
@@ -224,11 +228,11 @@ class UpdateMaintainerTestCase(unittest.TestCase):
def setUp(self):
m = mock.mock_open()
m.side_effect = self._fake_open
patcher = mock.patch("builtins.open", m)
patcher = mock.patch('builtins.open', m)
self.addCleanup(patcher.stop)
patcher.start()
m = mock.MagicMock(side_effect=self._fake_isfile)
patcher = mock.patch("os.path.isfile", m)
patcher = mock.patch('os.path.isfile', m)
self.addCleanup(patcher.stop)
patcher.start()
self._files["rules"] = StringIO(_SIMPLE_RULES)
@@ -256,8 +260,8 @@ class UpdateMaintainerTestCase(unittest.TestCase):
def test_original_ubuntu_maintainer(self):
"""Test: Original maintainer is Ubuntu developer.

The Maintainer field needs to be updated even if
XSBC-Original-Maintainer has an @ubuntu.com address."""
The Maintainer field needs to be updated even if
XSBC-Original-Maintainer has an @ubuntu.com address."""
self._files["changelog"] = StringIO(_LUCID_CHANGELOG)
self._files["control"] = StringIO(_AXIS2C_CONTROL)
update_maintainer(self._directory)
@@ -284,11 +288,12 @@ class UpdateMaintainerTestCase(unittest.TestCase):

def test_comments_in_control(self):
"""Test: Update Maintainer field in a control file containing
comments."""
comments."""
self._files["changelog"] = StringIO(_LUCID_CHANGELOG)
self._files["control"] = StringIO(_SEAHORSE_PLUGINS_CONTROL)
update_maintainer(self._directory)
self.assertEqual(self._files["control"].getvalue(), _SEAHORSE_PLUGINS_UPDATED)
self.assertEqual(self._files["control"].getvalue(),
_SEAHORSE_PLUGINS_UPDATED)

def test_skip_smart_rules(self):
"""Test: Skip update when XSBC-Original in debian/rules."""
@@ -16,12 +16,12 @@

"""This module is for updating the Maintainer field of an Ubuntu package."""

import logging
import os
import re

import debian.changelog

import logging
Logger = logging.getLogger(__name__)

# Prior to May 2009 these Maintainers were used:
@@ -37,26 +37,26 @@ class MaintainerUpdateException(Exception):
pass


class Control:
class Control(object):
"""Represents a debian/control file"""

def __init__(self, filename):
assert os.path.isfile(filename), f"{filename} does not exist."
assert os.path.isfile(filename), "%s does not exist." % (filename)
self._filename = filename
self._content = open(filename, encoding="utf-8").read()
self._content = open(filename).read()

def get_maintainer(self):
"""Returns the value of the Maintainer field."""
maintainer = re.search("^Maintainer: ?(.*)$", self._content, re.MULTILINE)
maintainer = re.search("^Maintainer: ?(.*)$", self._content,
re.MULTILINE)
if maintainer:
maintainer = maintainer.group(1)
return maintainer

def get_original_maintainer(self):
"""Returns the value of the XSBC-Original-Maintainer field."""
orig_maintainer = re.search(
"^(?:[XSBC]*-)?Original-Maintainer: ?(.*)$", self._content, re.MULTILINE
)
orig_maintainer = re.search("^(?:[XSBC]*-)?Original-Maintainer: ?(.*)$",
self._content, re.MULTILINE)
if orig_maintainer:
orig_maintainer = orig_maintainer.group(1)
return orig_maintainer
@@ -65,38 +65,38 @@ class Control:
"""Saves the control file."""
if filename:
self._filename = filename
control_file = open(self._filename, "w", encoding="utf-8")
control_file = open(self._filename, "w")
control_file.write(self._content)
control_file.close()

def set_maintainer(self, maintainer):
"""Sets the value of the Maintainer field."""
pattern = re.compile("^Maintainer: ?.*$", re.MULTILINE)
self._content = pattern.sub(f"Maintainer: {maintainer}", self._content)
self._content = pattern.sub("Maintainer: " + maintainer, self._content)

def set_original_maintainer(self, original_maintainer):
"""Sets the value of the XSBC-Original-Maintainer field."""
original_maintainer = f"XSBC-Original-Maintainer: {original_maintainer}"
original_maintainer = "XSBC-Original-Maintainer: " + original_maintainer
if self.get_original_maintainer():
pattern = re.compile("^(?:[XSBC]*-)?Original-Maintainer:.*$", re.MULTILINE)
pattern = re.compile("^(?:[XSBC]*-)?Original-Maintainer:.*$",
re.MULTILINE)
self._content = pattern.sub(original_maintainer, self._content)
else:
pattern = re.compile("^(Maintainer:.*)$", re.MULTILINE)
self._content = pattern.sub(f"\\1\\n{original_maintainer}", self._content)
self._content = pattern.sub(r"\1\n" + original_maintainer,
self._content)

def remove_original_maintainer(self):
"""Strip out the XSBC-Original-Maintainer line"""
pattern = re.compile(
"^(?:[XSBC]*-)?Original-Maintainer:.*?$.*?^", re.MULTILINE | re.DOTALL
)
self._content = pattern.sub("", self._content)
pattern = re.compile("^(?:[XSBC]*-)?Original-Maintainer:.*?$.*?^",
re.MULTILINE | re.DOTALL)
self._content = pattern.sub('', self._content)


def _get_distribution(changelog_file):
"""get distribution of latest changelog entry"""
changelog = debian.changelog.Changelog(
open(changelog_file, encoding="utf-8"), strict=False, max_blocks=1
)
changelog = debian.changelog.Changelog(open(changelog_file), strict=False,
max_blocks=1)
distribution = changelog.distributions.split()[0]
# Strip things like "-proposed-updates" or "-security" from distribution
return distribution.split("-", 1)[0]
@@ -107,24 +107,25 @@ def _find_files(debian_directory, verbose):

Returns (changelog, control files list)
Raises an exception if none can be found.
"""
possible_contol_files = [os.path.join(debian_directory, f) for f in ["control.in", "control"]]
possible_contol_files = [os.path.join(debian_directory, f) for
f in ["control.in", "control"]]

changelog_file = os.path.join(debian_directory, "changelog")
control_files = [f for f in possible_contol_files if os.path.isfile(f)]

# Make sure that a changelog and control file is available
if len(control_files) == 0:
raise MaintainerUpdateException(f"No control file found in {debian_directory}.")
raise MaintainerUpdateException(
"No control file found in %s." % debian_directory)
if not os.path.isfile(changelog_file):
raise MaintainerUpdateException(f"No changelog file found in {debian_directory}.")
raise MaintainerUpdateException(
"No changelog file found in %s." % debian_directory)

# If the rules file accounts for XSBC-Original-Maintainer, we should not
# touch it in this package (e.g. the python package).
rules_file = os.path.join(debian_directory, "rules")
if (
os.path.isfile(rules_file)
and "XSBC-Original-" in open(rules_file, encoding="utf-8").read()
):
if os.path.isfile(rules_file) and \
'XSBC-Original-' in open(rules_file).read():
if verbose:
print("XSBC-Original is managed by 'rules' file. Doing nothing.")
control_files = []
@@ -160,8 +161,8 @@ def update_maintainer(debian_directory, verbose=False):

if original_maintainer.strip().lower() in _PREVIOUS_UBUNTU_MAINTAINER:
if verbose:
print(f"The old maintainer was: {original_maintainer}")
print(f"Resetting as: {_UBUNTU_MAINTAINER}")
print("The old maintainer was: %s" % original_maintainer)
print("Resetting as: %s" % _UBUNTU_MAINTAINER)
control.set_maintainer(_UBUNTU_MAINTAINER)
control.save()
continue
@@ -177,13 +178,12 @@ def update_maintainer(debian_directory, verbose=False):
return

if control.get_original_maintainer() is not None:
Logger.warning(
"Overwriting original maintainer: %s", control.get_original_maintainer()
)
Logger.warning("Overwriting original maintainer: %s",
control.get_original_maintainer())

if verbose:
print(f"The original maintainer is: {original_maintainer}")
print(f"Resetting as: {_UBUNTU_MAINTAINER}")
print("The original maintainer is: %s" % original_maintainer)
print("Resetting as: %s" % _UBUNTU_MAINTAINER)
control.set_original_maintainer(original_maintainer)
control.set_maintainer(_UBUNTU_MAINTAINER)
control.save()
@@ -194,7 +194,7 @@ def update_maintainer(debian_directory, verbose=False):
def restore_maintainer(debian_directory, verbose=False):
"""Restore the original maintainer"""
try:
control_files = _find_files(debian_directory, verbose)[1]
changelog_file, control_files = _find_files(debian_directory, verbose)
except MaintainerUpdateException as e:
Logger.error(str(e))
raise
@@ -205,7 +205,7 @@ def restore_maintainer(debian_directory, verbose=False):
if not orig_maintainer:
continue
if verbose:
print(f"Restoring original maintainer: {orig_maintainer}")
print("Restoring original maintainer: %s" % orig_maintainer)
control.set_maintainer(orig_maintainer)
control.remove_original_maintainer()
control.save()
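For reference, both entry points of the module above take the path to a debian/ directory; a hedged usage sketch (the path here is illustrative):

```python
from ubuntutools.update_maintainer import restore_maintainer, update_maintainer

# Point Maintainer at Ubuntu Developers, preserving the previous value
# in XSBC-Original-Maintainer:
update_maintainer("./debian", verbose=True)

# Undo: put the original maintainer back and drop the XSBC field.
restore_maintainer("./debian", verbose=True)
```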
@@ -1,79 +0,0 @@
# Copyright (C) 2019-2023 Canonical Ltd.
# Author: Brian Murray <brian.murray@canonical.com> et al.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Portions of archive related code that is re-used by various tools."""

import os
import re
import urllib.request
from datetime import datetime

import dateutil.parser
from dateutil.tz import tzutc


def get_cache_dir():
cache_dir = os.environ.get("XDG_CACHE_HOME", os.path.expanduser(os.path.join("~", ".cache")))
uat_cache = os.path.join(cache_dir, "ubuntu-archive-tools")
os.makedirs(uat_cache, exist_ok=True)
return uat_cache


def get_url(url, force_cached):
"""Return file to the URL, possibly caching it"""
cache_file = None

# ignore bileto urls wrt caching, they're usually too small to matter
# and we don't do proper cache expiry
m = re.search("ubuntu-archive-team.ubuntu.com/proposed-migration/([^/]*)/([^/]*)", url)
if m:
cache_dir = get_cache_dir()
cache_file = os.path.join(cache_dir, f"{m.group(1)}_{m.group(2)}")
else:
# test logs can be cached, too
m = re.search(
"https://autopkgtest.ubuntu.com/results/autopkgtest-[^/]*/([^/]*)/([^/]*)"
"/[a-z0-9]*/([^/]*)/([_a-f0-9]*)@/log.gz",
url,
)
if m:
cache_dir = get_cache_dir()
cache_file = os.path.join(
cache_dir, f"{m.group(1)}_{m.group(2)}_{m.group(3)}_{m.group(4)}.gz"
)

if cache_file:
try:
prev_mtime = os.stat(cache_file).st_mtime
except FileNotFoundError:
prev_mtime = 0
prev_timestamp = datetime.fromtimestamp(prev_mtime, tz=tzutc())
new_timestamp = datetime.now(tz=tzutc()).timestamp()
if force_cached:
return open(cache_file, "rb")

f = urllib.request.urlopen(url)

if cache_file:
remote_ts = dateutil.parser.parse(f.headers["last-modified"])
if remote_ts > prev_timestamp:
with open(f"{cache_file}.new", "wb") as new_cache:
for line in f:
new_cache.write(line)
os.rename(f"{cache_file}.new", cache_file)
os.utime(cache_file, times=(new_timestamp, new_timestamp))
f.close()
f = open(cache_file, "rb")
return f
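The removed get_url() helper returns an open binary file object whether the content came from the cache or the network, so callers can treat both cases identically. A hedged usage sketch (the URL is illustrative):

```python
url = ("https://ubuntu-archive-team.ubuntu.com/proposed-migration/"
       "noble/update_excuses.html")
# force_cached=True would return the cached copy (if one exists)
# without hitting the network at all.
with get_url(url, force_cached=False) as f:
    body = f.read()
```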
@@ -17,28 +17,28 @@ import debian.debian_support

class Version(debian.debian_support.Version):
def strip_epoch(self):
"""Removes the epoch from a Debian version string.
'''Removes the epoch from a Debian version string.

strip_epoch(1:1.52-1) will return "1.52-1" and strip_epoch(1.1.3-1)
will return "1.1.3-1".
"""
parts = self.full_version.split(":")
'''
parts = self.full_version.split(':')
if len(parts) > 1:
del parts[0]
version_without_epoch = ":".join(parts)
version_without_epoch = ':'.join(parts)
return version_without_epoch

def get_related_debian_version(self):
"""Strip the ubuntu-specific bits off the version"""
'''Strip the ubuntu-specific bits off the version'''
related_debian_version = self.full_version
uidx = related_debian_version.find("ubuntu")
uidx = related_debian_version.find('ubuntu')
if uidx > 0:
related_debian_version = related_debian_version[:uidx]
uidx = related_debian_version.find("build")
uidx = related_debian_version.find('build')
if uidx > 0:
related_debian_version = related_debian_version[:uidx]
return Version(related_debian_version)

def is_modified_in_ubuntu(self):
"""Did Ubuntu modify this (and mark the version appropriately)?"""
return "ubuntu" in self.full_version
'''Did Ubuntu modify this (and mark the version appropriately)?'''
return 'ubuntu' in self.full_version

@@ -14,18 +14,13 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

# pylint: disable=invalid-name
# pylint: enable=invalid-name

import argparse
import optparse
import os
import sys

from ubuntutools.update_maintainer import (
MaintainerUpdateException,
restore_maintainer,
update_maintainer,
)
from ubuntutools.update_maintainer import (update_maintainer,
restore_maintainer,
MaintainerUpdateException)


def find_debian_dir(depth=6):
@@ -35,42 +30,42 @@ def find_debian_dir(depth=6):
:rtype: str
:returns: a path to an existing debian/ directory, or None
"""
for path in ["../" * n or "./" for n in list(range(0, depth + 1))]:
debian_path = f"{path}debian"
if os.path.exists(os.path.join(debian_path, "control")) and os.path.exists(
os.path.join(debian_path, "changelog")
):
for path in ['../'*n or './' for n in list(range(0, depth+1))]:
debian_path = '{}debian'.format(path)
if os.path.exists(os.path.join(debian_path, 'control')) \
and os.path.exists(os.path.join(debian_path, 'changelog')):
return debian_path
return None


def main():
script_name = os.path.basename(sys.argv[0])
epilog = f"See {script_name}(1) for more info."
parser = argparse.ArgumentParser(epilog=epilog)
parser.add_argument(
"-d",
"--debian-directory",
dest="debian_directory",
help="location of the 'debian' directory (default: %(default)s).",
metavar="PATH",
default=find_debian_dir() or "./debian",
)
parser.add_argument(
"-r", "--restore", help="Restore the original maintainer", action="store_true"
)
parser.add_argument(
"-q", "--quiet", help="print no informational messages", dest="quiet", action="store_true"
)
args = parser.parse_args()
usage = "%s [options]" % (script_name)
epilog = "See %s(1) for more info." % (script_name)
parser = optparse.OptionParser(usage=usage, epilog=epilog)
parser.add_option("-d", "--debian-directory", dest="debian_directory",
help="location of the 'debian' directory (default: "
"%default).", metavar="PATH",
default=find_debian_dir() or './debian')
parser.add_option("-r", "--restore",
help="Restore the original maintainer",
action='store_true', default=False)
parser.add_option("-q", "--quiet", help="print no informational messages",
dest="quiet", action="store_true", default=False)
(options, args) = parser.parse_args()

if not args.restore:
if len(args) != 0:
print("%s: Error: Unsupported additional parameters specified: %s"
% (script_name, ", ".join(args)), file=sys.stderr)
sys.exit(1)

if not options.restore:
operation = update_maintainer
else:
operation = restore_maintainer

try:
operation(args.debian_directory, not args.quiet)
operation(options.debian_directory, not options.quiet)
except MaintainerUpdateException:
sys.exit(1)
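One detail of find_debian_dir() above that is easy to misread: the comprehension builds relative candidate paths walking upwards from the current directory. A standalone illustration:

```python
depth = 3
print(["../" * n or "./" for n in range(depth + 1)])
# ['./', '../', '../../', '../../../']
# Each candidate is then checked for debian/control and debian/changelog.
```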