Compare commits

No commits in common. "main" and "0.158" have entirely different histories.
main ... 0.158

147 changed files with 6575 additions and 10431 deletions


@@ -0,0 +1,2 @@
[BUILDDEB]
native = True

9
.bzrignore Normal file

@@ -0,0 +1,9 @@
/.shelf/
/build/
/python-build-stamp-*
/debian/files
/debian/ubuntu-dev-tools/
/debian/ubuntu-dev-tools.debhelper.log
/debian/ubuntu-dev-tools.*.debhelper
/debian/ubuntu-dev-tools.substvars
ubuntu_dev_tools.egg-info

2
.gitignore vendored

@@ -1,2 +0,0 @@
__pycache__
*.egg-info


@@ -1,65 +0,0 @@
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=apt_pkg
# Pickle collected data for later comparisons.
persistent=no
# Use all cpus, to speed up testing
jobs=0
[MESSAGES CONTROL]
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=fixme,locally-disabled,missing-docstring,useless-option-value,
# TODO: Fix all following disabled checks!
invalid-name,
consider-using-with,
too-many-arguments,
too-many-branches,
too-many-statements,
too-many-locals,
duplicate-code,
too-many-instance-attributes,
too-many-nested-blocks,
too-many-lines,
[REPORTS]
# Tells whether to display a full report or only the messages
reports=no
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=99
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
[BASIC]
# Allow variables called e, f, lp
good-names=i,j,k,ex,Run,_,e,f,lp,me,to
[IMPORTS]
# Force import order to recognize a module as part of a third party library.
known-third-party=debian
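
As a rough illustration (separate from the configuration above), these settings take effect when pylint is pointed at the rc file. A minimal sketch in Python, assuming pylint is installed, the configuration is saved as .pylintrc in the current directory, and "ubuntutools" is just an example target package:

import subprocess

# Run pylint with the configuration shown above; --jobs=0 mirrors the
# "use all cpus" setting. pylint exits non-zero whenever it emits messages,
# so the return code is reported rather than treated as a hard failure.
result = subprocess.run(["pylint", "--rcfile=.pylintrc", "--jobs=0", "ubuntutools"])
print("pylint exit status:", result.returncode)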

177
404main Executable file

@@ -0,0 +1,177 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2006-2007 (C) Pete Savage <petesavage@ubuntu.com>
# Copyright 2007 (C) Siegfried-A. Gevatter <rainct@ubuntu.com>
# Copyright 2009 (C) Canonical Ltd. (by Colin Watson <cjwatson@ubuntu.com>)
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL for more details.
#
# ##################################################################
#
# This script is used to check if a package and all its build
# dependencies are in main or not.
import sys
import apt_pkg
import apt
from ubuntutools import subprocess
def process_deps(cache, deps):
"""Takes a list of (build) dependencies and processes it."""
for basedep in [d.or_dependencies[0] for d in deps]:
if not packages.has_key(basedep.name) and basedep.name != '':
# Check the (build) dependencies recursively
find_main(cache, basedep.name)
def get_package_version(cache, distro, pack):
if pack not in cache:
return None
for version in (cache[pack].candidate, cache[pack].installed):
if not version:
continue
for origin in version.origins:
if origin.archive == distro:
return version
return None
# Cache::CompTypeDeb isn't exposed via python-apt
def comp_type_deb(op):
ops = ("", "<=", ">=", "<<", ">>", "=", "!=")
if (op & 15) < 7:
return ops[op & 15]
return ""
def find_main(cache, pack):
"""Searches the dependencies and build dependencies of a package recursively
to determine if they are all in the 'main' component or not."""
global packages
if pack in packages:
return
# Retrieve information about the package
version = get_package_version(cache, distro, pack)
if not version:
packages[pack] = False
return
elif [origin for origin in version.origins if origin.component == 'main']:
packages[pack] = True
return
else:
if not packages.has_key(pack):
packages[pack] = False
# Retrieve package dependencies
process_deps(cache, version.dependencies)
# Retrieve package build dependencies. There's no handy
# attribute on version for this, so unfortunately we have to
# do a lot of messing about with apt.
deps = []
src_records = apt_pkg.SourceRecords()
got_src = False
while src_records.lookup(version.source_name):
if pack in src_records.binaries:
got_src = True
break
if got_src:
# pylint: disable=E1101
for _, all_deps in src_records.build_depends.iteritems():
# pylint: enable=E1101
for or_deps in all_deps:
base_deps = []
for (name, ver, op) in or_deps:
base_deps.append(apt.package.BaseDependency(name, op,
ver, False))
deps.append(apt.package.Dependency(base_deps))
process_deps(cache, deps)
def usage(exit_code):
print 'Usage: %s <package name> [<distribution>]' % sys.argv[0]
sys.exit(exit_code)
def main():
global packages, distro
# Check if the amount of arguments is correct
if len(sys.argv) > 1 and sys.argv[1] in ('help', '-h', '--help'):
usage(0)
if len(sys.argv) < 2 or len(sys.argv) > 3:
usage(1)
cache = apt.cache.Cache()
if len(sys.argv) == 3 and sys.argv[2]:
distro = sys.argv[2]
if not get_package_version(cache, distro, 'bash'):
print u'«%s» is not a valid distribution.' % distro
print ('Remember that for 404main to work with a certain '
'distribution it must be in your /etc/apt/sources.list '
'file.')
sys.exit(1)
else:
cmd = ['lsb_release', '-cs']
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
distro = process.stdout.read().strip('\n')
if not get_package_version(cache, distro, sys.argv[1]):
print (u"Can't find package «%s» in distribution «%s»."
% (sys.argv[1], distro))
sys.exit(1)
print (u'Checking package «%s» in distribution «%s»...'
% (sys.argv[1], distro))
find_main(cache, sys.argv[1])
# True if everything checked until the point is in main
all_in_main = True
for package in packages:
if not packages[package]:
if all_in_main:
print 'The following packages aren\'t in main:'
all_in_main = False
print ' ', package
if all_in_main:
print (u'Package «%s» and all its dependencies and build dependencies '
u'are in main.') % sys.argv[1]
if __name__ == '__main__':
# Global variable to hold the status of all packages
packages = {}
# Global variable to hold the target distribution
distro = ''
try:
main()
except KeyboardInterrupt:
print 'Aborted.'
sys.exit(1)
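
As a minimal standalone sketch (not part of the script above): the recursive walk in 404main ultimately reduces to asking which archive component a package version is published in. The same check with python-apt, assuming python3-apt is installed and using "bash" as an arbitrary example package:

import apt

cache = apt.Cache()
candidate = cache["bash"].candidate
# Each origin records the archive component (main, universe, ...) in which
# the candidate version is published.
components = {origin.component for origin in candidate.origins if origin.component}
print("bash is published in:", ", ".join(sorted(components)) or "unknown")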


@@ -1,12 +1,12 @@
 Updating the ubuntu-dev-tools package
 -------------------------------------
-Here are the 10 steps that are recommended to take when updating the
+Here are the steps that are recommended to take when updating the
 ubuntu-dev-tools package in Ubuntu.
-1) Make sure there are no new commits to the package's master branch in git:
+1) Make sure that there are no new revisions to the package's trunk in Bazaar:
-     git pull
+     bzr pull lp:ubuntu-dev-tools
 2) Check to make sure that all approved merges have been merged:
@@ -15,7 +15,6 @@ ubuntu-dev-tools package in Ubuntu.
 3) Make sure that there is no low lying fruit that can be fixed at:
      https://bugs.launchpad.net/ubuntu/+source/ubuntu-dev-tools
-     https://bugs.debian.org/src:ubuntu-dev-tools
 4) Check that the test suite passes
@@ -28,27 +27,31 @@ ubuntu-dev-tools package in Ubuntu.
    If there is no UNRELEASED entry, make sure that the version for the current
    one has not been uploaded by someone else already:
-     https://tracker.debian.org/pkg/ubuntu-dev-tools
+     http://packages.qa.debian.org/u/ubuntu-dev-tools.html
      https://launchpad.net/ubuntu/+source/ubuntu-dev-tools/+publishinghistory
-6) Once the target release has been changed, commit it to git (where X.YY is
+6) Once the target release has been changed, commit it to Bazaar (where X.YY is
    the new package version):
-     git commit -a -m "Uploaded X.YY to RELEASE."
+     bzr commit -m "Uploaded X.YY to RELEASE."
-7) Create the new source package and tag the new release in git:
+7) Tag the new release in Bazaar:
-     gbp buildpackage -S --git-tag
+     bzr tag
-   For a full list of tags, please see: 'git tag -l'. This is so we can track
-   which git commit is in which release and makes bug triaging easier.
+   For a full list of tags, please see: 'bzr tags'. This is so we can track
+   which Bazaar revision is in which release and makes bug triaging easier.
-8) Upload the package to Debian with dput as normal:
+8) Create the new source package:
+     bzr bd -S
+9) Upload the package to Debian with dput as normal:
      dput ftp-master ubuntu-dev-tools_X.YY_$arch.changes
-9) Create a new blank entry with dch -i and mark it as UNRELEASED.
+10) Create a new blank entry with dch -i and mark it as UNRELEASED.
-10) After it's been dinstalled in Debian, sync to Ubuntu:
+11) After it's been dinstalled in Debian, sync to Ubuntu:
      syncpackage ubuntu-dev-tools


@@ -1,4 +1,4 @@
#!/usr/bin/python3 #!/usr/bin/python
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# ################################################################## # ##################################################################
# #
@@ -18,286 +18,241 @@
# #
# ################################################################## # ##################################################################
import argparse
import glob import glob
import optparse
import os import os
import shutil import shutil
import subprocess
import sys import sys
import tempfile import tempfile
from urllib.parse import quote
try: import lsb_release
import lsb_release
except ImportError:
lsb_release = None
from distro_info import DebianDistroInfo, UbuntuDistroInfo
from httplib2 import Http, HttpLib2Error from httplib2 import Http, HttpLib2Error
from ubuntutools import getLogger from ubuntutools.archive import (SourcePackage, DebianSourcePackage,
from ubuntutools.archive import DebianSourcePackage, DownloadError, UbuntuSourcePackage UbuntuSourcePackage, DownloadError)
from ubuntutools.builder import get_builder
from ubuntutools.config import UDTConfig, ubu_email from ubuntutools.config import UDTConfig, ubu_email
from ubuntutools.lp.lpapicache import ( from ubuntutools.builder import get_builder
Distribution, from ubuntutools.lp.lpapicache import (Launchpad, Distribution,
Launchpad, SeriesNotFoundException,
PackageNotFoundException, PackageNotFoundException)
SeriesNotFoundException, from ubuntutools.logger import Logger
) from ubuntutools.misc import (system_distribution, vendor_to_distroinfo,
from ubuntutools.misc import codename_to_distribution, system_distribution, vendor_to_distroinfo codename_to_distribution)
from ubuntutools.question import YesNoQuestion from ubuntutools.question import YesNoQuestion
from ubuntutools import subprocess
Logger = getLogger() def error(msg):
Logger.error(msg)
def error(msg, *args):
Logger.error(msg, *args)
sys.exit(1) sys.exit(1)
def check_call(cmd, *args, **kwargs): def check_call(cmd, *args, **kwargs):
Logger.debug(" ".join(cmd)) Logger.command(cmd)
ret = subprocess.call(cmd, *args, **kwargs) ret = subprocess.call(cmd, *args, **kwargs)
if ret != 0: if ret != 0:
error("%s returned %d.", cmd[0], ret) error('%s returned %d.' % (cmd[0], ret))
def parse(argv): def check_program_exists(name, package=None):
usage = "%(prog)s [options] <source package name or .dsc URL/file>" paths = set(os.environ['PATH'].split(':'))
parser = argparse.ArgumentParser(usage=usage) paths |= set(('/sbin', '/usr/sbin', '/usr/local/sbin'))
parser.add_argument( if not any(os.path.exists(os.path.join(p, name)) for p in paths):
"-d", Logger.error('Could not find "%s". Please install the package "%s" '
"--destination", 'to use this functionality.',
metavar="DEST", name, package or name)
dest="dest_releases", sys.exit(1)
default=[],
action="append",
help="Backport to DEST release (default: current release)",
)
parser.add_argument(
"-s",
"--source",
metavar="SOURCE",
dest="source_release",
help="Backport from SOURCE release (default: devel release)",
)
parser.add_argument(
"-S",
"--suffix",
metavar="SUFFIX",
help="Suffix to append to version number (default: ~ppa1 when uploading to a PPA)",
)
parser.add_argument(
"-e",
"--message",
metavar="MESSAGE",
default="No-change",
help='Changelog message to use instead of "No-change" '
"(default: No-change backport to DEST.)",
)
parser.add_argument(
"-b",
"--build",
default=False,
action="store_true",
help="Build the package before uploading (default: %(default)s)",
)
parser.add_argument(
"-B",
"--builder",
metavar="BUILDER",
help="Specify the package builder (default: pbuilder)",
)
parser.add_argument(
"-U",
"--update",
default=False,
action="store_true",
help="Update the build environment before attempting to build",
)
parser.add_argument("-u", "--upload", metavar="UPLOAD", help="Specify an upload destination")
parser.add_argument(
"-k", "--key", dest="keyid", help="Specify the key ID to be used for signing."
)
parser.add_argument(
"--dont-sign", dest="keyid", action="store_false", help="Do not sign the upload."
)
parser.add_argument(
"-y",
"--yes",
dest="prompt",
default=True,
action="store_false",
help="Do not prompt before uploading to a PPA",
)
parser.add_argument(
"-v", "--version", metavar="VERSION", help="Package version to backport (or verify)"
)
parser.add_argument(
"-w",
"--workdir",
metavar="WORKDIR",
help="Specify a working directory (default: temporary dir)",
)
parser.add_argument(
"-r",
"--release-pocket",
default=False,
action="store_true",
help="Target the release pocket in the .changes file. "
"Necessary (and default) for uploads to PPAs",
)
parser.add_argument(
"-c", "--close", metavar="BUG", help="Bug to close in the changelog entry."
)
parser.add_argument(
"-m", "--mirror", metavar="URL", help="Preferred mirror (default: Launchpad)"
)
parser.add_argument(
"-l",
"--lpinstance",
metavar="INSTANCE",
help="Launchpad instance to connect to (default: production)",
)
parser.add_argument(
"--no-conf",
default=False,
action="store_true",
help="Don't read config files or environment variables",
)
parser.add_argument("package_or_dsc", help=argparse.SUPPRESS)
args = parser.parse_args(argv)
config = UDTConfig(args.no_conf)
if args.builder is None:
args.builder = config.get_value("BUILDER")
if not args.update:
args.update = config.get_value("UPDATE_BUILDER", boolean=True)
if args.workdir is None:
args.workdir = config.get_value("WORKDIR")
if args.lpinstance is None:
args.lpinstance = config.get_value("LPINSTANCE")
if args.upload is None:
args.upload = config.get_value("UPLOAD")
if args.keyid is None:
args.keyid = config.get_value("KEYID")
if not args.upload and not args.workdir:
parser.error("Please specify either a working dir or an upload target!")
if args.upload and args.upload.startswith("ppa:"):
args.release_pocket = True
return args, config
def find_release_package(mirror, workdir, package, version, source_release, config): def parse(args):
usage = 'Usage: %prog [options] <source package name or .dsc URL/file>'
parser = optparse.OptionParser(usage)
parser.add_option('-d', '--destination',
metavar='DEST',
dest='dest_releases',
default=[],
action='append',
help='Backport to DEST release '
'(default: current release)')
parser.add_option('-s', '--source',
metavar='SOURCE',
dest='source_release',
help='Backport from SOURCE release '
'(default: devel release)')
parser.add_option('-S', '--suffix',
metavar='SUFFIX',
help='Suffix to append to version number '
'(default: ~ppa1 when uploading to a PPA)')
parser.add_option('-b', '--build',
default=False,
action='store_true',
help='Build the package before uploading '
'(default: %default)')
parser.add_option('-B', '--builder',
metavar='BUILDER',
help='Specify the package builder (default: pbuilder)')
parser.add_option('-U', '--update',
default=False,
action='store_true',
help='Update the build environment before '
'attempting to build')
parser.add_option('-u', '--upload',
metavar='UPLOAD',
help='Specify an upload destination')
parser.add_option("-k", "--key",
dest='keyid',
help="Specify the key ID to be used for signing.")
parser.add_option('--dont-sign',
dest='keyid', action='store_false',
help='Do not sign the upload.')
parser.add_option('-y', '--yes',
dest='prompt',
default=True,
action='store_false',
help='Do not prompt before uploading to a PPA')
parser.add_option('-v', '--version',
metavar='VERSION',
help='Package version to backport (or verify)')
parser.add_option('-w', '--workdir',
metavar='WORKDIR',
help='Specify a working directory '
'(default: temporary dir)')
parser.add_option('-r', '--release-pocket',
default=False,
action='store_true',
help='Target the release pocket in the .changes file. '
'Necessary (and default) for uploads to PPAs')
parser.add_option('-c', '--close',
metavar='BUG',
help='Bug to close in the changelog entry.')
parser.add_option('-m', '--mirror',
metavar='URL',
help='Preferred mirror (default: Launchpad)')
parser.add_option('-l', '--lpinstance',
metavar='INSTANCE',
help='Launchpad instance to connect to '
'(default: production)')
parser.add_option('--no-conf',
default=False,
action='store_true',
help="Don't read config files or environment variables")
opts, args = parser.parse_args(args)
if len(args) != 1:
parser.error('You must specify a single source package or a .dsc '
'URL/path.')
config = UDTConfig(opts.no_conf)
if opts.builder is None:
opts.builder = config.get_value('BUILDER')
if not opts.update:
opts.update = config.get_value('UPDATE_BUILDER', boolean=True)
if opts.workdir is None:
opts.workdir = config.get_value('WORKDIR')
if opts.lpinstance is None:
opts.lpinstance = config.get_value('LPINSTANCE')
if opts.upload is None:
opts.upload = config.get_value('UPLOAD')
if opts.keyid is None:
opts.keyid = config.get_value('KEYID')
if not opts.upload and not opts.workdir:
parser.error('Please specify either a working dir or an upload target!')
if opts.upload and opts.upload.startswith('ppa:'):
opts.release_pocket = True
if opts.upload:
check_program_exists('dput')
return opts, args, config
def find_release_package(mirror, workdir, package, version, source_release,
config):
srcpkg = None srcpkg = None
if source_release: if source_release:
distribution = codename_to_distribution(source_release) distribution = codename_to_distribution(source_release)
if not distribution: if not distribution:
error("Unknown release codename %s", source_release) error('Unknown release codename %s' % source_release)
info = vendor_to_distroinfo(distribution)() info = vendor_to_distroinfo(distribution)()
source_release = info.codename(source_release, default=source_release) source_release = info.codename(source_release, default=source_release)
else: else:
distribution = system_distribution() distribution = system_distribution()
mirrors = [mirror] if mirror else [] mirrors = [mirror] if mirror else []
mirrors.append(config.get_value(f"{distribution.upper()}_MIRROR")) mirrors.append(config.get_value('%s_MIRROR' % distribution.upper()))
if not version: if not version:
archive = Distribution(distribution.lower()).getArchive() archive = Distribution(distribution.lower()).getArchive()
try: try:
spph = archive.getSourcePackage(package, source_release) spph = archive.getSourcePackage(package, source_release)
except (SeriesNotFoundException, PackageNotFoundException) as e: except (SeriesNotFoundException, PackageNotFoundException), e:
error("%s", str(e)) error(str(e))
version = spph.getVersion() version = spph.getVersion()
if distribution == "Debian": if distribution == 'Debian':
srcpkg = DebianSourcePackage(package, version, workdir=workdir, mirrors=mirrors) srcpkg = DebianSourcePackage(package,
elif distribution == "Ubuntu": version,
srcpkg = UbuntuSourcePackage(package, version, workdir=workdir, mirrors=mirrors) workdir=workdir,
mirrors=mirrors)
elif distribution == 'Ubuntu':
srcpkg = UbuntuSourcePackage(package,
version,
workdir=workdir,
mirrors=mirrors)
return srcpkg return srcpkg
def find_package(mirror, workdir, package, version, source_release, config): def find_package(mirror, workdir, package, version, source_release, config):
"Returns the SourcePackage" "Returns the SourcePackage"
if package.endswith(".dsc"): if package.endswith('.dsc'):
# Here we are using UbuntuSourcePackage just because we don't have any return SourcePackage(version=version, dscfile=package,
# "general" class that is safely instantiable (as SourcePackage is an workdir=workdir, mirrors=(mirror,))
# abstract class). None of the distribution-specific details within
# UbuntuSourcePackage is relevant for this use case.
return UbuntuSourcePackage(
version=version, dscfile=package, workdir=workdir, mirrors=(mirror,)
)
if not source_release and not version: if not source_release and not version:
info = vendor_to_distroinfo(system_distribution()) info = vendor_to_distroinfo(system_distribution())
source_release = info().devel() source_release = info().devel()
srcpkg = find_release_package(mirror, workdir, package, version, source_release, config) srcpkg = find_release_package(mirror, workdir, package, version,
source_release, config)
if version and srcpkg.version != version: if version and srcpkg.version != version:
error( error('Requested backport of version %s but version of %s in %s is %s'
"Requested backport of version %s but version of %s in %s is %s", % (version, package, source_release, srcpkg.version))
version,
package,
source_release,
srcpkg.version,
)
return srcpkg return srcpkg
def get_backport_version(version, suffix, upload, release): def get_backport_version(version, suffix, upload, release):
distribution = codename_to_distribution(release) distribution = codename_to_distribution(release)
if not distribution: if not distribution:
error("Unknown release codename %s", release) error('Unknown release codename %s' % release)
if distribution == "Debian": series = Distribution(distribution.lower()).\
debian_distro_info = DebianDistroInfo() getSeries(name_or_version=release)
debian_codenames = debian_distro_info.supported()
if release in debian_codenames:
release_version = debian_distro_info.version(release)
if not release_version:
error("Can't find the release version for %s", release)
backport_version = f"{version}~bpo{release_version}+1"
else:
error("%s is not a supported release (%s)", release, debian_codenames)
elif distribution == "Ubuntu":
series = Distribution(distribution.lower()).getSeries(name_or_version=release)
backport_version = f"{version}~bpo{series.version}.1" backport_version = version + ('~%s%s.1' % (distribution.lower(), series.version))
else:
error("Unknown distribution «%s» for release «%s»", distribution, release)
if suffix is not None: if suffix is not None:
backport_version += suffix backport_version += suffix
elif upload and upload.startswith("ppa:"): elif upload and upload.startswith('ppa:'):
backport_version += "~ppa1" backport_version += '~ppa1'
return backport_version return backport_version
def get_old_version(source, release): def get_old_version(source, release):
try: try:
distribution = codename_to_distribution(release) distribution = codename_to_distribution(release)
archive = Distribution(distribution.lower()).getArchive() archive = Distribution(distribution.lower()).getArchive()
pkg = archive.getSourcePackage( pkg = archive.getSourcePackage(source,
source, release, ("Release", "Security", "Updates", "Proposed", "Backports") release,
) ('Release', 'Security', 'Updates',
'Proposed', 'Backports'))
return pkg.getVersion() return pkg.getVersion()
except (SeriesNotFoundException, PackageNotFoundException): except (SeriesNotFoundException, PackageNotFoundException), e:
pass pass
return None
def get_backport_dist(release, release_pocket): def get_backport_dist(release, release_pocket):
if release_pocket: if release_pocket:
return release return release
return f"{release}-backports" else:
return '%s-backports' % release
def do_build(workdir, dsc, release, builder, update): def do_build(workdir, dsc, release, builder, update):
builder = get_builder(builder) builder = get_builder(builder)
if not builder: if not builder:
return None return
if update: if update:
if 0 != builder.update(release): if 0 != builder.update(release):
@@ -305,121 +260,101 @@ def do_build(workdir, dsc, release, builder, update):
# builder.build is going to chdir to buildresult: # builder.build is going to chdir to buildresult:
workdir = os.path.realpath(workdir) workdir = os.path.realpath(workdir)
return builder.build(os.path.join(workdir, dsc), release, os.path.join(workdir, "buildresult")) return builder.build(os.path.join(workdir, dsc),
release,
os.path.join(workdir, "buildresult"))
def do_upload(workdir, package, bp_version, changes, upload, prompt): def do_upload(workdir, package, bp_version, changes, upload, prompt):
print(f"Please check {package} {bp_version} in file://{workdir} carefully!") print 'Please check %s %s in file://%s carefully!' % \
if prompt or upload == "ubuntu": (package, bp_version, workdir)
question = f"Do you want to upload the package to {upload}" if prompt or upload == 'ubuntu':
question = 'Do you want to upload the package to %s' % upload
answer = YesNoQuestion().ask(question, "yes") answer = YesNoQuestion().ask(question, "yes")
if answer == "no": if answer == "no":
return return
check_call(["dput", upload, changes], cwd=workdir) check_call(['dput', upload, changes], cwd=workdir)
def orig_needed(upload, workdir, pkg): def orig_needed(upload, workdir, pkg):
"""Avoid a -sa if possible""" '''Avoid a -sa if possible'''
if not upload or not upload.startswith("ppa:"): if not upload or not upload.startswith('ppa:'):
return True return True
ppa = upload.split(":", 1)[1] ppa = upload.split(':', 1)[1]
user, ppa = ppa.split("/", 1) user, ppa = ppa.split('/', 1)
version = pkg.version.upstream_version version = pkg.version.upstream_version
http = Http() h = Http()
for filename in glob.glob(os.path.join(workdir, f"{pkg.source}_{version}.orig*")): for filename in glob.glob(os.path.join(workdir,
url = ( '%s_%s.orig*' % (pkg.source, version))):
f"https://launchpad.net/~{quote(user)}/+archive/{quote(ppa)}/+sourcefiles" url = ('https://launchpad.net/~%s/+archive/%s/+files/%s'
f"/{quote(pkg.source)}/{quote(pkg.version.full_version)}" % (user, ppa, filename))
f"/{quote(os.path.basename(filename))}"
)
try: try:
headers = http.request(url, "HEAD")[0] headers, body = h.request(url, 'HEAD')
if headers.status != 200 or not headers["content-location"].startswith( if (headers.status != 200 or
"https://launchpadlibrarian.net" not headers['content-location'].
): startswith('https://launchpadlibrarian.net')):
return True return True
except HttpLib2Error as e: except HttpLib2Error, e:
Logger.debug(e) Logger.info(e)
return True return True
return False return False
def do_backport(workdir, pkg, suffix, close, release, release_pocket, build,
def do_backport( builder, update, upload, keyid, prompt):
workdir, dirname = '%s-%s' % (pkg.source, release)
pkg,
suffix,
message,
close,
release,
release_pocket,
build,
builder,
update,
upload,
keyid,
prompt,
):
dirname = f"{pkg.source}-{release}"
srcdir = os.path.join(workdir, dirname) srcdir = os.path.join(workdir, dirname)
if os.path.exists(srcdir): if os.path.exists(srcdir):
question = f"Working directory {srcdir} already exists. Delete it?" question = 'Working directory %s already exists. Delete it?' % srcdir
if YesNoQuestion().ask(question, "no") == "no": if YesNoQuestion().ask(question, 'no') == 'no':
sys.exit(1) sys.exit(1)
shutil.rmtree(srcdir) shutil.rmtree(srcdir)
pkg.unpack(dirname) pkg.unpack(dirname)
bp_version = get_backport_version(pkg.version.full_version, suffix, upload, release) bp_version = get_backport_version(pkg.version.full_version, suffix,
upload, release)
old_version = get_old_version(pkg.source, release) old_version = get_old_version(pkg.source, release)
bp_dist = get_backport_dist(release, release_pocket) bp_dist = get_backport_dist(release, release_pocket)
changelog = f"{message} backport to {release}." changelog = 'No-change backport to %s' % (release,)
if close: if close:
changelog += f" (LP: #{close})" changelog += ' (LP: #%s)' % (close,)
check_call( check_call(['dch',
[ '--force-bad-version',
"dch", '--force-distribution',
"--force-bad-version", '--preserve',
"--force-distribution", '--newversion', bp_version,
"--preserve", '--distribution', bp_dist,
"--newversion", changelog],
bp_version, cwd=srcdir)
"--distribution",
bp_dist,
changelog,
],
cwd=srcdir,
)
cmd = ["debuild", "--no-lintian", "-S", "-nc", "-uc", "-us"] cmd = ['debuild', '--no-lintian', '-S', '-nc', '-uc', '-us']
if orig_needed(upload, workdir, pkg): if orig_needed(upload, workdir, pkg):
cmd.append("-sa") cmd.append('-sa')
else: else:
cmd.append("-sd") cmd.append('-sd')
if old_version: if old_version:
cmd.append(f"-v{old_version}") cmd.append('-v%s' % old_version)
env = os.environ.copy() env = os.environ.copy()
# An ubuntu.com e-mail address would make dpkg-buildpackage fail if there # An ubuntu.com e-mail address would make dpkg-buildpackage fail if there
# wasn't an Ubuntu maintainer for an ubuntu-versioned package. LP: #1007042 # wasn't an Ubuntu maintainer for an ubuntu-versioned package. LP: #1007042
env.pop("DEBEMAIL", None) env.pop('DEBEMAIL', None)
check_call(cmd, cwd=srcdir, env=env) check_call(cmd, cwd=srcdir, env=env)
fn_base = pkg.source + "_" + bp_version.split(":", 1)[-1] fn_base = pkg.source + '_' + bp_version.split(':', 1)[-1]
changes = fn_base + "_source.changes" changes = fn_base + '_source.changes'
if build: if build:
if 0 != do_build(workdir, fn_base + ".dsc", release, builder, update): if 0 != do_build(workdir, fn_base + '.dsc', release, builder, update):
sys.exit(1) sys.exit(1)
# None: sign with the default signature. False: don't sign # None: sign with the default signature. False: don't sign
if keyid is not False: if keyid is not False:
cmd = ["debsign"] cmd = ['debsign']
if keyid: if keyid:
cmd.append("-k" + keyid) cmd.append('-k' + keyid)
cmd.append(changes) cmd.append(changes)
check_call(cmd, cwd=workdir) check_call(cmd, cwd=workdir)
if upload: if upload:
@@ -427,69 +362,55 @@ def do_backport(
shutil.rmtree(srcdir) shutil.rmtree(srcdir)
def main(args):
def main(argv):
ubu_email() ubu_email()
args, config = parse(argv[1:]) opts, (package_or_dsc,), config = parse(args[1:])
Launchpad.login_anonymously(service=args.lpinstance) Launchpad.login_anonymously(service=opts.lpinstance)
if not args.dest_releases: if not opts.dest_releases:
if lsb_release: distinfo = lsb_release.get_distro_information()
distinfo = lsb_release.get_distro_information() try:
try: opts.dest_releases = [distinfo['CODENAME']]
current_distro = distinfo["ID"] except KeyError:
except KeyError: error('No destination release specified and unable to guess yours.')
error("No destination release specified and unable to guess yours.")
else:
err, current_distro = subprocess.getstatusoutput("lsb_release --id --short")
if err:
error("Could not run lsb_release to retrieve distribution")
if current_distro == "Ubuntu": if opts.workdir:
args.dest_releases = [UbuntuDistroInfo().lts()] workdir = os.path.expanduser(opts.workdir)
elif current_distro == "Debian":
args.dest_releases = [DebianDistroInfo().stable()]
else:
error("Unknown distribution %s, can't guess target release", current_distro)
if args.workdir:
workdir = os.path.expanduser(args.workdir)
else: else:
workdir = tempfile.mkdtemp(prefix="backportpackage-") workdir = tempfile.mkdtemp(prefix='backportpackage-')
if not os.path.exists(workdir): if not os.path.exists(workdir):
os.makedirs(workdir) os.makedirs(workdir)
try: try:
pkg = find_package( pkg = find_package(opts.mirror,
args.mirror, workdir, args.package_or_dsc, args.version, args.source_release, config workdir,
) package_or_dsc,
opts.version,
opts.source_release,
config)
pkg.pull() pkg.pull()
for release in args.dest_releases: for release in opts.dest_releases:
do_backport( do_backport(workdir,
workdir, pkg,
pkg, opts.suffix,
args.suffix, opts.close,
args.message, release,
args.close, opts.release_pocket,
release, opts.build,
args.release_pocket, opts.builder,
args.build, opts.update,
args.builder, opts.upload,
args.update, opts.keyid,
args.upload, opts.prompt)
args.keyid, except DownloadError, e:
args.prompt, error(str(e))
)
except DownloadError as e:
error("%s", str(e))
finally: finally:
if not args.workdir: if not opts.workdir:
shutil.rmtree(workdir) shutil.rmtree(workdir)
if __name__ == '__main__':
if __name__ == "__main__":
sys.exit(main(sys.argv)) sys.exit(main(sys.argv))
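
For context on the version mangling in get_backport_version() above (the newer, main-side column): the backport version appends a ~bpo suffix derived from the target release so that it sorts below a future real upload. A small hypothetical helper illustrating the same convention; the function name and example values are made up for illustration and are not part of the tool:

def backport_version(version: str, release_version: str, distribution: str) -> str:
    # Mirror the convention above: Debian backports end in "+1",
    # Ubuntu backports end in ".1" after the release version.
    if distribution == "Debian":
        return f"{version}~bpo{release_version}+1"
    if distribution == "Ubuntu":
        return f"{version}~bpo{release_version}.1"
    raise ValueError(f"unknown distribution: {distribution}")

print(backport_version("1.2-3", "12", "Debian"))     # 1.2-3~bpo12+1
print(backport_version("1.2-3", "22.04", "Ubuntu"))  # 1.2-3~bpo22.04.1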


@@ -36,7 +36,7 @@ _pbuilder-dist()
     for distro in $(ubuntu-distro-info --all; debian-distro-info --all) stable testing unstable; do
         for builder in pbuilder cowbuilder; do
             echo "$builder-$distro"
-            for arch in i386 amd64 armhf; do
+            for arch in i386 amd64 armel armhf; do
                 echo "$builder-$distro-$arch"
             done
         done

88
bitesize Executable file

@@ -0,0 +1,88 @@
#!/usr/bin/python
"""Add 'bitesize' tag to bugs and add a comment."""
# Copyright (c) 2011 Canonical Ltd.
#
# bitesize is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any
# later version.
#
# bitesize is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with bitesize; see the file COPYING. If not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
# Authors:
# Daniel Holbach <daniel.holbach@canonical.com>
import sys
from optparse import OptionParser
from launchpadlib.launchpad import Launchpad
from launchpadlib.errors import HTTPError
from ubuntutools.config import UDTConfig
from ubuntutools.logger import Logger
def error_out(msg):
Logger.error(msg)
sys.exit(1)
def save_entry(entry):
try:
entry.lp_save()
except HTTPError, error:
error_out(error.content)
def tag_bug(bug):
bug.tags = bug.tags + ['bitesize'] # LP: #254901 workaround
save_entry(bug)
def main():
usage = "Usage: %prog <bug number>"
opt_parser = OptionParser(usage)
opt_parser.add_option("-l", "--lpinstance", metavar="INSTANCE",
help="Launchpad instance to connect to "
"(default: production)",
dest="lpinstance", default=None)
opt_parser.add_option("--no-conf",
help="Don't read config files or "
"environment variables.",
dest="no_conf", default=False, action="store_true")
(options, args) = opt_parser.parse_args()
config = UDTConfig(options.no_conf)
if options.lpinstance is None:
options.lpinstance = config.get_value("LPINSTANCE")
if len(args) < 1:
opt_parser.error("Need at least one bug number.")
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
if launchpad is None:
error_out("Couldn't authenticate to Launchpad.")
# check that the new main bug isn't a duplicate
try:
bug = launchpad.bugs[args[0]]
except HTTPError, error:
if error.response.status == 401:
error_out("Don't have enough permissions to access bug %s. %s" % \
(args[0], error.content))
else:
raise
if 'bitesize' in bug.tags:
error_out("Bug is already marked as 'bitesize'.")
bug.newMessage(content="I'm marking this bug as 'bitesize' as it looks "
"like an issue that is easy to fix and suitable "
"for newcomers in Ubuntu development. If you need "
"any help with fixing it, talk to me about it.")
bug.subscribe(person=launchpad.me)
tag_bug(launchpad.bugs[bug.id]) # fresh bug object, LP: #336866 workaround
if __name__ == '__main__':
main()
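
bitesize requires an authenticated Launchpad login because it modifies bugs; for read-only experiments, launchpadlib also offers anonymous access. A minimal sketch (separate from the script above), assuming python3-launchpadlib is installed and using an arbitrary bug number:

from launchpadlib.launchpad import Launchpad

# Anonymous sessions can read public bug data but cannot tag, comment on,
# or subscribe to bugs the way the script above does.
lp = Launchpad.login_anonymously("udt-example", "production", version="devel")
bug = lp.bugs[1]
print(bug.id, bug.title, list(bug.tags))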

142
check-mir

@@ -1,4 +1,4 @@
#!/usr/bin/python3 #!/usr/bin/python
# #
# Check components of build dependencies and warn about universe/multiverse # Check components of build dependencies and warn about universe/multiverse
# ones, for a package destined for main/restricted # ones, for a package destined for main/restricted
@@ -21,116 +21,71 @@
# this program; if not, write to the Free Software Foundation, Inc., # this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# pylint: disable=invalid-name from __future__ import print_function
# pylint: enable=invalid-name
"""Check if any of a package's build or binary dependencies are in universe or multiverse.
Run this inside an unpacked source package
"""
import argparse
import os.path
import sys import sys
import optparse
import os.path
import apt import apt
def check_support(apt_cache, pkgname, alt=False): def check_support(apt_cache, pkgname, alt=False):
"""Check if pkgname is in main or restricted. '''Check if pkgname is in main or restricted.
This prints messages if a package is not in main/restricted, or only This prints messages if a package is not in main/restricted, or only
partially (i. e. source in main, but binary in universe). partially (i. e. source in main, but binary in universe).
""" '''
if alt: if alt:
prefix = " ... alternative " + pkgname prefix = ' ... alternative ' + pkgname
else: else:
prefix = " * " + pkgname prefix = ' * ' + pkgname
prov_packages = apt_cache.get_providing_packages(pkgname) try:
if pkgname in apt_cache:
pkg = apt_cache[pkgname] pkg = apt_cache[pkgname]
except KeyError:
# If this is a virtual package, iterate through the binary packages that print(prefix, 'does not exist (pure virtual?)', file=sys.stderr)
# provide this, and ensure they are all in Main. Source packages in and of
# themselves cannot provide virtual packages, only binary packages can.
elif len(prov_packages) > 0:
supported, unsupported = [], []
for pkg in prov_packages:
candidate = pkg.candidate
if candidate:
section = candidate.section
if section.startswith("universe") or section.startswith("multiverse"):
unsupported.append(pkg.name)
else:
supported.append(pkg.name)
if len(supported) > 0:
msg = "is a virtual package, which is provided by the following "
msg += "candidates in Main: " + " ".join(supported)
print(prefix, msg)
elif len(unsupported) > 0:
msg = "is a virtual package, but is only provided by the "
msg += "following non-Main candidates: " + " ".join(unsupported)
print(prefix, msg, file=sys.stderr)
return False
else:
msg = "is a virtual package that exists but is not provided by "
msg += "package currently in the archive. Proceed with caution."
print(prefix, msg, file=sys.stderr)
return False
else:
print(prefix, "does not exist", file=sys.stderr)
return False return False
section = pkg.candidate.section section = pkg.candidate.section
if section.startswith("universe") or section.startswith("multiverse"): if section.startswith('universe') or section.startswith('multiverse'):
# check if the source package is in main and thus will only need binary # check if the source package is in main and thus will only need binary
# promotion # promotion
source_records = apt.apt_pkg.SourceRecords() source_records = apt.apt_pkg.SourceRecords()
if not source_records.lookup(pkg.candidate.source_name): if not source_records.lookup(pkg.candidate.source_name):
print("ERROR: Cannot lookup source package for", pkg.name, file=sys.stderr) print('ERROR: Cannot lookup source package for', pkg.name,
print(prefix, "package is in", section.split("/")[0]) file=sys.stderr)
print(prefix, 'package is in', section.split('/')[0])
return False return False
src = apt.apt_pkg.TagSection(source_records.record) src = apt.apt_pkg.TagSection(source_records.record)
if src["Section"].startswith("universe") or src["Section"].startswith("multiverse"): if (src['Section'].startswith('universe') or
print(prefix, "binary and source package is in", section.split("/")[0]) src['Section'].startswith('multiverse')):
print(prefix, 'binary and source package is in',
section.split('/')[0])
return False return False
else:
print( print(prefix, 'is in', section.split('/')[0] + ', but its source',
prefix, pkg.candidate.source_name,
"is in", 'is already in main; file an ubuntu-archive bug for '
section.split("/")[0] + ", but its source", 'promoting the current preferred alternative')
pkg.candidate.source_name, return True
"is already in main; file an ubuntu-archive bug for "
"promoting the current preferred alternative",
)
return True
if alt: if alt:
print(prefix, "is already in main; consider preferring it") print(prefix, 'is already in main; consider preferring it')
return True return True
def check_build_dependencies(apt_cache, control): def check_build_dependencies(apt_cache, control):
print("Checking support status of build dependencies...") print('Checking support status of build dependencies...')
any_unsupported = False any_unsupported = False
for field in ("Build-Depends", "Build-Depends-Indep"): for field in ('Build-Depends', 'Build-Depends-Indep'):
if field not in control.section: if field not in control.section:
continue continue
for or_group in apt.apt_pkg.parse_src_depends(control.section[field]): for or_group in apt.apt_pkg.parse_src_depends(control.section[field]):
pkgname = or_group[0][0] pkgname = or_group[0][0]
# debhelper-compat is expected to be a build dependency of every
# package, so it is a red herring to display it in this report.
# (src:debhelper is in Ubuntu Main anyway)
if pkgname == "debhelper-compat":
continue
if not check_support(apt_cache, pkgname): if not check_support(apt_cache, pkgname):
# check non-preferred alternatives # check non-preferred alternatives
for altpkg in or_group[1:]: for altpkg in or_group[1:]:
@@ -145,19 +100,20 @@ def check_build_dependencies(apt_cache, control):
def check_binary_dependencies(apt_cache, control): def check_binary_dependencies(apt_cache, control):
any_unsupported = False any_unsupported = False
print("\nChecking support status of binary dependencies...") print('\nChecking support status of binary dependencies...')
while True: while True:
try: try:
next(control) control.next()
except StopIteration: except StopIteration:
break break
for field in ("Depends", "Pre-Depends", "Recommends"): for field in ('Depends', 'Pre-Depends', 'Recommends'):
if field not in control.section: if field not in control.section:
continue continue
for or_group in apt.apt_pkg.parse_src_depends(control.section[field]): for or_group in apt.apt_pkg.parse_src_depends(
control.section[field]):
pkgname = or_group[0][0] pkgname = or_group[0][0]
if pkgname.startswith("$"): if pkgname.startswith('$'):
continue continue
if not check_support(apt_cache, pkgname): if not check_support(apt_cache, pkgname):
# check non-preferred alternatives # check non-preferred alternatives
@@ -171,33 +127,31 @@ def check_binary_dependencies(apt_cache, control):
def main(): def main():
parser = argparse.ArgumentParser(description=__doc__) description = "Check if any of a package's build or binary " + \
"dependencies are in universe or multiverse. " + \
"Run this inside an unpacked source package"
parser = optparse.OptionParser(description=description)
parser.parse_args() parser.parse_args()
apt_cache = apt.Cache() apt_cache = apt.Cache()
if not os.path.exists("debian/control"): if not os.path.exists('debian/control'):
print( print('debian/control not found. You need to run this tool in a '
"debian/control not found. You need to run this tool in a source package directory", 'source package directory', file=sys.stderr)
file=sys.stderr,
)
sys.exit(1) sys.exit(1)
# get build dependencies from debian/control # get build dependencies from debian/control
control = apt.apt_pkg.TagFile(open("debian/control", encoding="utf-8")) control = apt.apt_pkg.TagFile(open('debian/control'))
next(control) control.next()
unsupported_build_deps = check_build_dependencies(apt_cache, control) unsupported_build_deps = check_build_dependencies(apt_cache, control)
unsupported_binary_deps = check_binary_dependencies(apt_cache, control) unsupported_binary_deps = check_binary_dependencies(apt_cache, control)
if unsupported_build_deps or unsupported_binary_deps: if unsupported_build_deps or unsupported_binary_deps:
print( print('\nPlease check https://wiki.ubuntu.com/MainInclusionProcess if '
"\nPlease check https://wiki.ubuntu.com/MainInclusionProcess if " 'this source package needs to get into in main/restricted, or '
"this source package needs to get into in main/restricted, or " 'reconsider if the package really needs above dependencies.')
"reconsider if the package really needs above dependencies."
)
else: else:
print("All dependencies are supported in main or restricted.") print('All dependencies are supported in main or restricted.')
if __name__ == '__main__':
if __name__ == "__main__":
main() main()
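
Both sides of the check-mir diff lean on python-apt's TagFile and parse_src_depends to walk debian/control. A minimal sketch of just that parsing step, independent of the tool, assuming python3-apt is installed and the snippet is run inside an unpacked source package:

import apt_pkg

apt_pkg.init()
with open("debian/control", encoding="utf-8") as fh:
    control = apt_pkg.TagFile(fh)
    next(control)  # position on the source stanza
    if "Build-Depends" in control.section:
        for or_group in apt_pkg.parse_src_depends(control.section["Build-Depends"]):
            # Print only the first (preferred) alternative of each group,
            # which is also the alternative check-mir inspects first.
            print(or_group[0][0])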

1
debian/.gitignore vendored

@@ -1 +0,0 @@
files

26
debian/README.source vendored

@@ -1,26 +0,0 @@
Changelog generation and releasing
----------------------------------
The changelog is generated by the uploader using `gbp dch' from
`git-buildpackage'. To invoke, just run
$ gbp dch
and then edit the changelog as appropriate - wrap lines, remove Signed-Off-By,
and so on. Then finalise the changelog, e.g.
$ dch -D unstable --release ""
commit it
$ git commit debian/changelog -m "Releasing 0.foo"
and tag/sign this commit
$ gbp buildpackage --git-tag-only
then build using (for example)
$ gbp buildpackage -S
and test/upload as normal.

870
debian/changelog vendored

@@ -1,859 +1,3 @@
ubuntu-dev-tools (0.206) unstable; urgency=medium
[ Dan Bungert ]
* mk-sbuild: enable pkgmaintainermangler
[ Shengjing Zhu ]
* import-bug-from-debian: package option is overridden and not used
[ Fernando Bravo Hernández ]
* Parsing arch parameter to getBinaryPackage() (LP: #2081861)
[ Simon Quigley ]
* Read ~/.devscripts in a more robust way, to ideally pick up multi-line
variables (Closes: #725418).
* mk-sbuild: default to using UTC for schroots (LP: #2097159).
* syncpackage: s/syncblacklist/syncblocklist/g
* syncpackage: Cache the sync blocklist in-memory, so it's not fetched
multiple times when syncing more than one package.
* syncpackage: Catch exceptions cleanly, simply skipping to the next
package (erring on the side of caution) if there is an error doing the
download (LP: #1943286).
-- Simon Quigley <tsimonq2@debian.org> Tue, 04 Mar 2025 13:43:15 -0600
ubuntu-dev-tools (0.205) unstable; urgency=medium
* [syncpackage] When syncing multiple packages, if one of the packages is in
the sync blocklist, do not exit, simply continue.
* [syncpackage] Do not use exit(1) on an error or exception unless it
applies to all packages, instead return None so we can continue to the
next package.
* [syncpackage] Add support for -y or --yes, noted that it should be used
with care.
* Update Standards-Version to 4.7.2, no changes needed.
-- Simon Quigley <tsimonq2@debian.org> Sat, 01 Mar 2025 11:29:54 -0600
ubuntu-dev-tools (0.204) unstable; urgency=medium
[ Simon Quigley ]
* Update Standards-Version to 4.7.1, no changes needed.
* Add several Lintian overrides related to .pyc files.
* Add my name to the copyright file.
* Rename bitesize to lp-bitesize (Closes: #1076224).
* Add a manpage for running-autopkgtests.
* Add a large warning at the top of mk-sbuild encouraging the use of the
unshare backend. This is to provide ample warning to users.
* Remove mail line from default ~/.sbuildrc, to resolve the undeclared
dependency on sendmail (Closes: #1074632).
[ Julien Plissonneau Duquène ]
* Fix reverse-depends -b crash on packages that b-d on themselves
(Closes: #1087760).
-- Simon Quigley <tsimonq2@debian.org> Mon, 24 Feb 2025 19:54:39 -0600
ubuntu-dev-tools (0.203) unstable; urgency=medium
[ Steve Langasek ]
* ubuntu-build: handle TOCTOU issue with the "can be retried" value on
builds.
* Recommend sbuild over pbuilder. sbuild is the tool recommended by
Ubuntu developers whose behavior most closely approximates Launchpad
builds.
[ Florent 'Skia' Jacquet ]
* import-bug-from-debian: handle multipart message (Closes: #969510)
[ Benjamin Drung ]
* import-bug-from-debian: add type hints
* Bump Standards-Version to 4.7.0
* Bump year and add missing files to copyright
* setup.py: add pm-helper
* Format code with black and isort
* Address several issues pointed out by Pylint
* Depend on python3-yaml for pm-helper
-- Benjamin Drung <bdrung@debian.org> Sat, 02 Nov 2024 18:19:24 +0100
ubuntu-dev-tools (0.202) unstable; urgency=medium
[ Steve Langasek ]
* ubuntu-build: support --batch with no package names to retry all
* ubuntu-build: in batch mode, print a count of packages retried
* ubuntu-build: make the --arch option top-level.
This gets rid of the fugly --arch2 option
* ubuntu-build: support retrying builds in other states that failed-to-build
* ubuntu-build: Handling of proposed vs release pocket default for ppas
* ubuntu-build: update manpage
[ Chris Peterson ]
* Replace Depends on python3-launchpadlib with Depends on
python3-launchpadlib-desktop (LP: #2049217)
-- Simon Quigley <tsimonq2@ubuntu.com> Fri, 12 Apr 2024 23:33:14 -0500
ubuntu-dev-tools (0.201) unstable; urgency=medium
* running-autopkgtests: fix packaging to make the script available
(LP: #2055466)
-- Chris Peterson <chris.peterson@canonical.com> Thu, 29 Feb 2024 11:09:14 -0800
ubuntu-dev-tools (0.200) unstable; urgency=medium
[ Gianfranco Costamagna ]
* Team upload
[ Chris Peterson ]
* Add support to see currently running autopkgtests (running-autopkgtests)
* running-autopkgtests: use f-strings
[ Athos Ribeiro ]
* syncpackage: log LP authentication errors before halting.
[ Ying-Chun Liu (PaulLiu) ]
* Drop qemu-debootstrap
qemu-debootstrap is deprecated for a while. In newer qemu release
the command is totally removed. We can use debootstrap directly.
Signed-off-by: Ying-Chun Liu (PaulLiu) <paulliu@debian.org>
[ Logan Rosen ]
* Don't rely on debootstrap for validating Ubuntu distro
-- Gianfranco Costamagna <locutusofborg@debian.org> Thu, 15 Feb 2024 17:53:48 +0100
ubuntu-dev-tools (0.199) unstable; urgency=medium
[ Simon Quigley ]
* Add my name to Uploaders.
[ Steve Langasek ]
* Introduce a pm-helper tool.
-- Simon Quigley <tsimonq2@debian.org> Mon, 29 Jan 2024 10:03:22 -0600
ubuntu-dev-tools (0.198) unstable; urgency=medium
* In check-mir, ignore debhelper-compat when checking the build
dependencies. This is expected to be a build dependency of all packages,
so warning about it in any way is surely a red herring.
* Add proper support for virtual packages in check-mir, basing the
determination solely off of binary packages. This is not expected to be a
typical case.
-- Simon Quigley <tsimonq2@debian.org> Wed, 10 Jan 2024 20:04:02 -0600
ubuntu-dev-tools (0.197) unstable; urgency=medium
* Update the manpage for syncpackage to reflect the ability to sync
multiple packages at once.
* When using pull-*-source to grab a package which already has a defined
Vcs- field, display the exact same warning message `apt source` does.
-- Simon Quigley <tsimonq2@debian.org> Tue, 03 Oct 2023 14:01:25 -0500
ubuntu-dev-tools (0.196) unstable; urgency=medium
* Allow the user to sync multiple packages at one time (LP: #1756748).
-- Simon Quigley <tsimonq2@debian.org> Fri, 04 Aug 2023 14:37:59 -0500
ubuntu-dev-tools (0.195) unstable; urgency=medium
* Add support for the non-free-firmware components in all tools already
referencing non-free.
-- Simon Quigley <tsimonq2@debian.org> Wed, 26 Jul 2023 13:03:31 -0500
ubuntu-dev-tools (0.194) unstable; urgency=medium
[ Gianfranco Costamagna ]
* ubuntu-build: For some reasons, now you need to be authenticated before
trying to use the "PersonTeam" class features.
Do it at the begin instead of replicating the same code inside the
tool itself.
[ Steve Langasek ]
* Remove references to deprecated
http://people.canonical.com/~ubuntu-archive.
* Remove references to architectures not supported in any active
Ubuntu release.
* Remove references to ftpmaster.internal. When this name is resolvable
but firewalled, syncpackage hangs; and these are tools for developers,
not for running in an automated context in the DCs where
ftpmaster.internal is reachable.
* Excise all references to cdbs (including in test cases)
* Set apt preferences for the -proposed pocket in mk-sbuild so that
it works as expected for lunar and forward.
[ Robie Basak ]
* ubuntutools/misc: swap iter_content for raw stream with "Accept-Encoding:
identity" to fix .diff.gz downloads (LP: #2025748).
[ Vladimir Petko ]
* Fix a typo introduced in the last upload that made mk-sbuild fail
unconditionally. LP: #2017177.
-- Gianfranco Costamagna <locutusofborg@debian.org> Sat, 08 Jul 2023 08:42:05 +0200
ubuntu-dev-tools (0.193) unstable; urgency=medium
* Don't run linters at build time, or in autopkgtests. (Closes: #1031436).
-- Stefano Rivera <stefanor@debian.org> Sat, 25 Feb 2023 13:19:56 -0400
ubuntu-dev-tools (0.192) unstable; urgency=medium
[ Benjamin Drung ]
* sponsor-patch:
+ Ignore exit code 1 of debdiff call.
+ Use --skip-patches instead of --no-preparation with dpkg-source -x.
* Demote bzr/brz from Recommends to Suggests, as nowadays git is the way.
Closes: #940531
* Use PEP440 compliant version in setup.py (LP: #1991606)
* Fix issues found by flake8 on the Python scripts
* Check Python scripts with flake8 again
* Format Python code with black and run black during package build
* Sort Python imports with isort and run isort during package build
* Replace deprecated optparse with argparse
* requestbackport: Remove useless loop from locate_package
* reverse-depends: Restore field titles format
* test: Fix deprecated return value for test case
* Fix all errors and warnings found by pylint and implement most refactorings
and conventions. Run pylint during package build again.
* Bump Standards-Version to 4.6.2
* Drop unneeded X-Python3-Version from d/control
[ Masahiro Yamada ]
* mk-sbuild:
+ Handle the new location of the Debian bullseye security archive.
Closes: #1001832; LP: #1955116
[ Mattia Rizzolo ]
* requestbackport:
+ Apply patch from Krytarik Raido and Unit 193 to update the template and
workflow after the new Ubuntu Backport process has been established.
LP: #1959115
-- Benjamin Drung <bdrung@debian.org> Wed, 01 Feb 2023 12:45:15 +0100
ubuntu-dev-tools (0.191) unstable; urgency=medium
[ Dan Streetman ]
* lpapicache:
+ Make sure that login() actually logins and doesn't use cached credentials.
* ubuntu-build:
+ Fix crash caused by a change in lpapicache that changed the default
operation mode from authenticated to anonymous. LP: #1984113
[ Stefano Rivera ]
* backportpackage:
+ Add support for lsb-release-minimal, which doesn't have a Python module.
Thanks to Gioele Barabucci for the patch. Closes: #1020901; LP: #1991828
[ Mattia Rizzolo ]
* ubuntutools/archive.py:
+ Fix operation of SourcePackage._source_urls() (as used, for example, in
SourcePackage.pull() called by backportpackage) to also work when the
class is instantiated with a URL as .dsc. Fixes regression from v0.184.
Thanks to Unit 193 for the initial patch.
-- Mattia Rizzolo <mattia@debian.org> Tue, 11 Oct 2022 13:56:03 +0200
ubuntu-dev-tools (0.190) unstable; urgency=medium
[ Dimitri John Ledkov ]
* mk-sbuild:
+ For ubuntu, fix the debootstrap script to "gutsy", so to allow using
mk-sbuild for newer releases without requiring a newer debootstrap.
[ Gianfranco Costamagna ]
* pbuilder-dist: fix typo kernal/kernel
[ Benjamin Drung ]
* Add missing files to debian/copyright
* Bump Standards-Version to 4.6.1
-- Benjamin Drung <bdrung@debian.org> Thu, 16 Jun 2022 10:55:17 +0200
ubuntu-dev-tools (0.189) unstable; urgency=medium
[ Heinrich Schuchardt ]
* mk-sbuild: don't require pkg-config-<target>. LP: #1966881.
[ Tobias Heider ]
* mk-sbuild: document SCHROOT_TYPE zfs in the manpage.
-- Mattia Rizzolo <mattia@debian.org> Mon, 04 Apr 2022 15:03:31 +0200
ubuntu-dev-tools (0.188) unstable; urgency=medium
[ Mattia Rizzolo ]
* archive.py:
+ Support Python 3.6 by calling functools.lru_cache() as a function, and
avoid using @functools.cached_property (both new in Python 3.8).
[ Graham Inggs ]
* lpapicache.py:
+ Use collections.abc.Callable instead of the long deprecated
collections.Callable. LP: #1959541
-- Mattia Rizzolo <mattia@debian.org> Mon, 07 Feb 2022 16:30:07 +0100
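For the Python 3.6 compatibility item in the 0.188 entry above, a minimal sketch of the idiom; the decorated function is a made-up placeholder, not the archive.py code:

    import functools


    # On Python 3.6/3.7 the decorator has to be *called*: the bare
    # "@functools.lru_cache" form (without parentheses) and
    # functools.cached_property both need Python >= 3.8.
    @functools.lru_cache()
    def default_mirrors():
        return ("http://deb.debian.org/debian",)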
ubuntu-dev-tools (0.187) unstable; urgency=medium
[ Paride Legovini ]
* mk-sbuild:
+ Add support for zfs-snapshot schroots. LP: #1945349
[ Mattia Rizzolo ]
* mk-sbuild:
+ Apply patch from Peter Pentchev to avoid a broken log message.
Closes: #968316
* backportpackage:
+ Support backporting to Debian releases. Closes: #776442; LP: #974132
+ Fix the guessing algorithm for the target release:
- for Debian: pick the current stable release.
- for Ubuntu: pick the current LTS release.
[ Unit 193 ]
* backportpackage:
+ Change the generated Ubuntu version following the new policy from the
Backporters team.
[ Dan Streetman ]
* misc:
+ Refactor download progress bar code.
+ Save files that have Content-Encoding correctly,
such as the changes file from upload queue packages.
* pullpkg:
+ Extract source packages pulled from upload queue.
* hugdaylist:
+ Remove long unused and non-working script.
-- Mattia Rizzolo <mattia@debian.org> Sun, 05 Dec 2021 15:58:15 +0100
ubuntu-dev-tools (0.186) unstable; urgency=medium
* Replace nose with pytest (see: #997758).
-- Stefano Rivera <stefanor@debian.org> Sun, 24 Oct 2021 16:10:44 -0700
ubuntu-dev-tools (0.185) unstable; urgency=medium
[ Alex Murray ]
* ubuntutools/archive.py:
+ Fix crash due to PersonalPackageArchiveSourcePackage() returning the
wrong object when requesting a download url. LP: #1938659
[ Krytarik Raido ]
* merge-changelog: Fix setting of newlines.
[ Dan Streetman ]
* misc: download to tmp file, to avoid leftover 0-size file on error
* misc: handle ConnectionError as NotFoundError
* archive: use proper component; source packages sometimes have a different
component than their bpphs, so use the correct component when downloading
binaries (LP: #1943819)
* misc: fix flake8 complaints
[ Stefano Rivera ]
* Bump Standards-Version to 4.6.0, no changes needed.
-- Stefano Rivera <stefanor@debian.org> Fri, 17 Sep 2021 15:53:02 -0700
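The "download to tmp file" item in the 0.185 entry above refers to a common write-then-rename pattern; below is a minimal sketch with a hypothetical download() helper, assuming requests, not the actual ubuntutools/misc implementation:

    import pathlib
    import tempfile

    import requests


    def download(url, dest):
        """Write to a temp file and rename into place only on success."""
        dest = pathlib.Path(dest)
        tmp = tempfile.NamedTemporaryFile(dir=dest.parent, delete=False)
        try:
            with requests.get(url, stream=True, timeout=30) as response:
                response.raise_for_status()
                for chunk in response.iter_content(chunk_size=64 * 1024):
                    tmp.write(chunk)
            tmp.close()
            pathlib.Path(tmp.name).rename(dest)  # dest only appears when complete
        except BaseException:
            tmp.close()
            pathlib.Path(tmp.name).unlink(missing_ok=True)
            raise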
ubuntu-dev-tools (0.184) experimental; urgency=medium
[ Dan Streetman ]
* Drop never packaged ubuntu-archive-assistant.
* Add support for downloading from private PPAs:
+ ubuntutools/misc:
- Refactor to use Pathlib and f-strings.
- Refactor to use requests instead of urllib (for the easier auth)
+ ubuntutools/archive:
- Refactor to use Pathlib.
- Add support for the special URLs of private PPAs.
* Don't use existing files without verifying their checksums.
* tests: recreate the test package files on demand.
* Remove no longer used dependencies on python3-termcolor and python3-yaml
[ Mattia Rizzolo ]
* pbuilder-dist: use shutil.which instead of
distutils.spawn.find_executable() to save a dependency. LP: #1936697
* d/control:
+ Drop redundant Recommends that are already in Depends.
+ Bump debhelper compat level to 13.
[ Marco Trevisan (Treviño) ]
* mk-sbuild:
+ Enable debugging in the finish.sh script if --debug is used.
+ Add support to configure ccache for each schroot.
-- Mattia Rizzolo <mattia@debian.org> Sat, 17 Jul 2021 17:31:19 +0200
ubuntu-dev-tools (0.183) unstable; urgency=medium
[ Dan Streetman ]
* pbuilder-dist: include missing import
-- Stefano Rivera <stefanor@debian.org> Tue, 08 Jun 2021 10:09:11 -0400
ubuntu-dev-tools (0.182) unstable; urgency=medium
[ Dan Streetman ]
* syncpackage, ubuntutools/archive.py:
Don't save dsc file to disk until requested with pull()
(LP: #1928946)
* syncpackage:
Don't log in to LP if using --simulate
* d/t/control: Add minimum flake8 version
The --extend-exclude parameter is first available in flake8 3.8.0
* ubuntutools/archive.py: Fix flake8 test failure
* d/rules, d/control: Override build tests to use flake8 and nosetests3
[ Stefano Rivera ]
* Respect nocheck in DEB_BUILD_OPTIONS, again.
-- Stefano Rivera <stefanor@debian.org> Sun, 06 Jun 2021 19:52:18 -0400
ubuntu-dev-tools (0.181) unstable; urgency=medium
[ Logan Rosen ]
* Fix a couple of remaining issues from the py2→py3 move.
[ Krytarik Raido ]
* Fix typo in the logging configuration.
[ Dan Streetman ]
* pbuilder: Handle debian change from /updates to -security. LP: #1916633
Starting in bullseye, the security suite is -security instead of /updates.
* backportpackage: Don't use SourcePackage() directly. Closes: #983854
As the warning from 2010 says, don't use this class directly.
[ Balint Reczey ]
* mk-sbuild:
+ Use eatmydata only with the dpkg command.
Eatmydata wrapping the build as well could break tests.
Thanks to Julian Andres Klode for suggesting this solution.
+ Use eatmydata by default.
Since only dpkg is wrapped in eatmydata, it should be a safe and
fast default. Eatmydata is widely used around apt, thus it would be a
serious bug if a package can't be installed with eatmydata in use.
[ Marco Trevisan (Treviño) ]
* doc/mk-sbuild.1: Add documentation for --debootstrap-proxy and
DEBOOTSTRAP_PROXY. LP: #1926166
-- Mattia Rizzolo <mattia@debian.org> Sun, 02 May 2021 19:56:48 +0200
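For the /updates vs -security change noted in the 0.181 entry above, a minimal sketch of the naming rule only; the helper is illustrative and not the pbuilder-dist code:

    def debian_security_suite(codename):
        """Return the apt suite holding security updates for a Debian release."""
        # Up to buster the security archive used "<codename>/updates";
        # starting with bullseye it is "<codename>-security".
        if codename in ("jessie", "stretch", "buster"):
            return f"{codename}/updates"
        return f"{codename}-security"


    print(debian_security_suite("buster"))    # buster/updates
    print(debian_security_suite("bullseye"))  # bullseye-security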
ubuntu-dev-tools (0.180) unstable; urgency=medium
* Drop coverage in the autopkgtest, as python3-nose-cov is not in Debian.
-- Mattia Rizzolo <mattia@debian.org> Fri, 19 Feb 2021 12:12:33 +0100
ubuntu-dev-tools (0.179) unstable; urgency=medium
[ Stefano Rivera ]
* archive.py: Evaluate the filter(), fixing Debian source history queries.
LP: #1913330
[ Dan Streetman ]
* allow running tests using tox
* add autopkgtests to run tests
* simplify/combine archive download functions
* add support for private ppa by logging into lp
* improve support for pull-uca-*
* fix logging/printing output to stdout/stderr
-- Dan Streetman <ddstreet@canonical.com> Mon, 01 Feb 2021 11:59:03 -0500
ubuntu-dev-tools (0.178) unstable; urgency=medium
[ Dan Streetman ]
* pullpkg: also catch and deal with InvalidPullValueError. LP: #1908770
[ Mattia Rizzolo ]
* d/control: Bump Standards-Version to 4.5.1, no changes needed.
* ubuntu-archive-assistant/mir: Fix a SyntaxWarning.
* backportpackage:
+ Add a -e/--message option to change the default "No-change"
in "No-change backport to DIST".
Thanks to Unit 193 for the initial patch.
[ You-Sheng Yang ]
* Add a dependency on tzdata, used by mk-sbuild.
[ Logan Rosen ]
* import-bug-from-debian:
+ Limit bug description length to 50k chars to support Launchpad's limits.
LP: #1193941
[ Dimitri John Ledkov ]
* pullpkg.py: fix --mirror option parsing.
* config.py: add UBUNTU_INTERNAL_MIRROR option, for launchpad internal
mirror.
* archive.py: use Regular, Ports, and Internal mirrors by default, thus
enabling pull-lp-debs to work with ports architectures, and inside
Launchpad builds too.
[ Michael R. Crusoe ]
* pbuilder-dist:
+ Use `arch-test` to determine whether the current system can run binaries
of the requested architecture, instead of hardcoding an ever-growing
list of whether something requires qemu or not. Add the "arch-test"
package to Recommends to that effect.
-- Dimitri John Ledkov <xnox@ubuntu.com> Mon, 25 Jan 2021 23:28:24 +0000
ubuntu-dev-tools (0.177) unstable; urgency=medium
[ Dan Streetman ]
* Verify checksums for downloaded binary files
* pullpkg: support pulling from Ubuntu upload queues
[ Mattia Rizzolo ]
* ubuntu-build:
+ Add support for riscv64.
* syncpackage:
+ Fix the new flake8 E741. Closes: #963310
[ Bryce Harrington ]
* update-maintainer:
+ Try to recurse upwards to find a valid debian directory. LP: #1885233
-- Mattia Rizzolo <mattia@debian.org> Sun, 28 Jun 2020 15:52:27 +0200
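The checksum item in the 0.177 entry above comes down to hashing the downloaded file and comparing digests; a minimal sketch with a hypothetical helper, not the ubuntutools code:

    import hashlib
    import pathlib


    def sha256_matches(path, expected_hexdigest):
        """Hash the file in chunks and compare against the expected digest."""
        digest = hashlib.sha256()
        with pathlib.Path(path).open("rb") as f:
            for chunk in iter(lambda: f.read(1 << 16), b""):
                digest.update(chunk)
        return digest.hexdigest() == expected_hexdigest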
ubuntu-dev-tools (0.176) unstable; urgency=medium
[ Debian Janitor ]
* Fix day-of-week for changelog entry 0.66.
[ Mattia Rizzolo ]
* pbuilder-dist:
+ Add support for riscv64. LP: #1859277
* d/control: Bump Standards-Version to 4.5.0, no changes needed.
[ Colin Watson ]
* Use +sourcefiles URLs where possible. LP: #1860456
[ Dan Streetman ]
* submittodebian:
+ Open file in binary mode before writing utf-8 encoded bytes. LP: #1863119
* ubuntu-upload-permission:
+ Explicitly sort packagesets by name. LP: #1862372
* pullpkg:
+ For "-p list", show bpph arch for files built for 'all' arch.
* archive.py:
+ If using a local file, avoid an error trying to copy the file to itself.
+ Allow pull_binaries() to accept arch=None.
* lpapicache:
+ Remove SPPH _have_all_binaries flag, as there are cases where it yields
unexpected results.
+ Remove fallback_arch from getBinaries.
+ Allow getBinaries(arch=None) to get all archs. LP: #1862286
-- Mattia Rizzolo <mattia@debian.org> Sun, 23 Feb 2020 13:03:21 +0100
ubuntu-dev-tools (0.175) unstable; urgency=medium
[ Mattia Rizzolo ]
* Trust the installed debian-keyring when checking validity of dsc
signatures.
* requestbackport:
+ Error out nicely when a tracking project doesn't exist. LP: #1852901
* d/control: Bump Standards-Version to 4.4.1, no changes needed.
[ Stefano Rivera ]
* merge-changelog: rewrite the changelog handling to use python3-debian.
[ Dan Streetman ]
* tests/pylint.conf: use jobs=0 to speed up tests.
* submittodebian: use a context manager while opening a file.
* d/control: add dependency on python3-lazr.restfulclient.
* Big refactor/rewrite of the whole archive.py module, together with a
restructuring of all the pull-pkg-* commands.
* Unify the logging using the standard python logging module, and remove the
local ubuntutools.logger module.
-- Mattia Rizzolo <mattia@debian.org> Sun, 01 Dec 2019 19:36:23 +0100
ubuntu-dev-tools (0.174) unstable; urgency=medium
[ Stefano Rivera ]
* reverse-depends:
+ Support reverse test dependencies as well. LP: #1843614
* ubuntutools.misc:
+ Replace Popen() calls with check_output(). Closes: #940040
+ Use a context manager to open file, to be sure to close them.
[ Dan Streetman ]
* Update setup.py to also use python3.
* reverse-depends:
+ Move from optparse to argparse.
+ Rename the typoed --recursive-deph to --recursive-depth.
+ Use list comprehensions to simplify del-during-iteration functions.
* import-bug-from-debian:
+ Migrate to argparse.
+ Add --verbose option.
+ Actually make --dry-run do a dry run.
+ Handle multiple bug numbers in the command line.
+ Correctly get the bug summary.
-- Mattia Rizzolo <mattia@debian.org> Thu, 26 Sep 2019 11:05:53 +0200
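The Popen()-to-check_output() item in the 0.174 entry above is the usual simplification sketched below; it assumes it runs inside an unpacked Debian source tree so dpkg-parsechangelog has a changelog to read:

    import subprocess

    # Manual plumbing with Popen():
    proc = subprocess.Popen(
        ["dpkg-parsechangelog", "--show-field", "Version"],
        stdout=subprocess.PIPE,
        text=True,
    )
    version, _ = proc.communicate()

    # The same with check_output(), which also raises CalledProcessError on failure:
    version = subprocess.check_output(
        ["dpkg-parsechangelog", "--show-field", "Version"], text=True
    ).strip()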
ubuntu-dev-tools (0.173) unstable; urgency=medium
[ Stefano Rivera ]
* pull-debian-debdiff:
+ Don't unpack the older source package; it will often use the same
directory as the newer one, and break.
* merge-changelog:
+ Use ubuntutools.version.Version, to support Python 3.
* Drop 404main, it's been totally broken for years.
* Port all the Python scripts to Python 3, and remove Python 2 support.
Closes: #938740, LP: #1099537
[ Dan Streetman ]
* pull-pkg:
+ Use ubuntutools.version.Version which has strip_epoch() instead
of debian.debian_support.Version.
* Have ubuntu-dev-tools depend on the matching version of python3-ubuntutools.
[ Scott Kitterman ]
* Update requestsync to python3. Closes: #927147
[ Mattia Rizzolo ]
* Explicitly require Python3 >= 3.6.
-- Mattia Rizzolo <mattia@debian.org> Tue, 10 Sep 2019 15:35:06 +0200
ubuntu-dev-tools (0.172) unstable; urgency=medium
[ Mattia Rizzolo ]
* autopkgtest: Add an allow-stderr restriction, as things log to stderr.
[ Stefano Rivera ]
* Build-Depend on pylint (>= 2, the Python 3 version), it replaced pylint3.
* Explicitly declare python dependencies in the python library modules (the
setup.py doesn't provide these) and ubuntu-dev-tools itself.
* dh_python2 doesn't generate a python dependency in ubuntu-dev-tools, now
that it's scripts-only, so manually declare one.
* Install pull-debian-source as python 3. It was ported, and doesn't work
under python 2 any more. LP: #1841127
* Use dh_install to split the build into multiple binary packages.
-- Mattia Rizzolo <mattia@debian.org> Wed, 04 Sep 2019 08:44:51 +0200
ubuntu-dev-tools (0.171) unstable; urgency=medium
* Add an autopkgtest running the package tests.
Currently it runs the tests directly over the sources, instead of the
installed package.
* ubuntutools/archive: Disable dsc signature verification for non-Debian.
Ubuntu doesn't have a unified keyring of developers like Debian has, so
it is not feasible to check for the dsc signatures.
-- Mattia Rizzolo <mattia@debian.org> Mon, 12 Aug 2019 13:42:31 +0200
ubuntu-dev-tools (0.170) unstable; urgency=medium
[ Robie Basak ]
* pull-debian-source:
+ Add a new --no-verify-signature option, to download a source
package without checking its signature.
+ Port to Python 3. LP: #1700846
[ Mattia Rizzolo ]
* d/control:
+ Bump debhelper compat level to 12.
* reverse-depends:
+ Prevent a crash when specifying a specific architecture. Closes: #933018
* ubuntutools/archive:
+ Default to checking signatures while pulling a .dsc.
-- Mattia Rizzolo <mattia@debian.org> Mon, 05 Aug 2019 13:28:23 +0200
ubuntu-dev-tools (0.169) unstable; urgency=medium
[ Colin Watson ]
* mk-sbuild:
+ Set personality=linux32 by default on armel and armhf as well.
[ Sahid Orentino Ferdjaoui ]
* reverse-depends:
+ New ability to find reverse-depends recursively, and print a tree.
[ Mattia Rizzolo ]
* d/control:
+ Add myself to uploaders.
+ Bump Standards-Version to 4.4.0, no changes needed.
-- Mattia Rizzolo <mattia@debian.org> Sat, 20 Jul 2019 11:18:00 +0200
ubuntu-dev-tools (0.168) eoan; urgency=medium
* grep-merges: flake8-clean.
-- Steve Langasek <steve.langasek@ubuntu.com> Tue, 07 May 2019 21:26:05 -0700
ubuntu-dev-tools (0.167) eoan; urgency=medium
[ Colin Watson ]
* syncpackage:
+ Support wildcards in sync-blacklist (LP: #1807992).
[ Steve Langasek ]
* grep-merges:
+ Support grepping by team name (full name match), now that MoM exposes
this.
-- Steve Langasek <steve.langasek@ubuntu.com> Tue, 07 May 2019 18:53:46 -0700
ubuntu-dev-tools (0.166) unstable; urgency=medium
* Team upload.
[ Felipe Reyes ]
* pbuilder-dist:
+ Fix handling of --othermirror when a local archive is found. LP: #1314076
[ Jelmer Vernooij ]
* Recommend Bazaar (bzr) or Breezy (brz); the latter provides a
command-line compatible interface.
[ Mathieu Trudel-Lapierre ]
* Add a new "ubuntu-archive-assistant" tool for
proposed-migration / mir review. More information on:
https://lists.ubuntu.com/archives/ubuntu-devel/2018-September/040492.html
The tool is not currently installed, see the launchpad bug #1799568.
[ Benjamin Drung ]
* ubuntutools/test: Introduce get_source_files helper function.
* Update pylint and flake8 unittests.
* Fix invalid escape sequences '\(' or '\)'. Closes: #911689
[ Mattia Rizzolo ]
* Add missing dependencies on sensible-utils (thanks lintian!).
* wrap-and-sort -ast.
* Bump Standards-Version to 4.2.1, no changes needed.
* Use the new debhelper-compat(=11) notation and drop d/compat.
* Clarify package descriptions for Python libraries.
Thanks to Ben Finney for the patch. Closes: #804198, #804199
* Add a recommends on ubuntu-keyring | ubuntu-archive-keyring.
Closes: #838254
* mk-sbuild: disable recommends also within the chroot.
Thanks to Steve Beattie for the patch. LP: #1268684
-- Mattia Rizzolo <mattia@debian.org> Tue, 23 Oct 2018 22:08:04 +0200
ubuntu-dev-tools (0.165) unstable; urgency=medium
* Team upload.
* Bump debhelper compat level to 11.
* Fix FTBFS due to newest tar being pickier about argument order.
Closes: #897478
-- Mattia Rizzolo <mattia@debian.org> Thu, 10 May 2018 10:40:49 +0200
ubuntu-dev-tools (0.164) unstable; urgency=medium
* mk-sbuild: Initialise ubuntu_dist_ge vars so unknown releases work.
-- Adam Conrad <adconrad@ubuntu.com> Tue, 24 Apr 2018 05:24:43 -0600
ubuntu-dev-tools (0.163) unstable; urgency=medium
* mk-sbuild: Add ubuntu_dist_ge and use it to set BUILD_PKGS for Ubuntu.
-- Adam Conrad <adconrad@ubuntu.com> Tue, 24 Apr 2018 05:11:18 -0600
ubuntu-dev-tools (0.162) unstable; urgency=medium
[ Mattia Rizzolo ]
* Team upload.
* Fix test failures with newer flake8 and pylint. Closes: #891721
* Bump Standards-Version to 4.1.4, no changes needed.
* Set Rules-Requires-Root:no.
* Drop superseded X-Python(3)-Version fields.
* Add a dependency on sensible-utils, as we use sensible-editor(1).
* Change Maintainer address to ubuntu-dev-tools@packages.debian.org, to
assist with the alioth deprecation.
[ Dimitri John Ledkov ]
* mk-sbuild: add support for 'overlay' in favor of older 'overlayfs'.
Closes: #799267
[ Scott Kitterman ]
* pbuilder-dist: add a --backports option to make it easier to build for
backports when dependencies from backports are needed.
-- Mattia Rizzolo <mattia@debian.org> Thu, 05 Apr 2018 18:58:15 +0200
ubuntu-dev-tools (0.161) unstable; urgency=medium
* Team upload.
* Upload to unstable.
* [939c2a2] Bump Standards-Version to 4.0.0, no changes needed.
-- Mattia Rizzolo <mattia@debian.org> Tue, 18 Jul 2017 07:38:01 +0200
ubuntu-dev-tools (0.160) experimental; urgency=medium
* [798a36c] subprocess: Use getfullargspec on python3. Closes: #867901
-- Iain Lane <laney@ubuntu.com> Tue, 13 Jun 2017 10:14:14 +0100
ubuntu-dev-tools (0.159) experimental; urgency=medium
[ Mattia Rizzolo ]
* [1c6b989] Move packaging to git.
* [3c138f6] Bump Standards-Version to 3.9.8, no changes needed.
* [88fbffa] Bump debhelper compat level to 10.
[ Unit 193 ]
* [f67601c] mk-sbuild, pull-debian-{debdiff,source}: Switch from
httpredir.debian.org to deb.debian.org as the default mirror for Debian.
[ Ursula Junque (Ursinha) ]
* [6fea8fb] Fix behavior of getBinaryPackage in lpapicache. It was using
the same parameters to get source and binary package build histories, but
source packages need a distro series and binary packages need a distro arch
series, as the results are arch-dependent.
[ Anatoly Techtonik ]
* [bf52bd6] backportpackage: improve python3 compatibility.
[ Benjamin Drung ]
* [6ee0915] Add .gitignore
* [ba16daf] Repair pylint test case. The --include-ids parameter was
dropped from pylint and thus the command failed and was skipped. Repair the
pylint check and add support for Python 3.
* [67c353d] Raise maximum line length to 99
* [3a6cd3a] Fix pylint3 error (for Python 2 support code)
* [cc7170e] Fix all flake8 issues
* [18ae4d8] Add flake8 check to test suite
[ Mattia Rizzolo ]
* [001d108] Recommend cowbuilder, not cowdancer.
[ Iain Lane ]
* [a6043a6] Remove the `harvest' command, and all other integration with the
Harvest service, since it has been shut down.
* [4471193] ubuntu-build: Pass the pocket through to the archive permission
check. So that we can retry builds in releases where the release pocket is
frozen - for example so that backporters can retry backports builds.
* [179f45c] Add some more ignores for pylint. It doesn't work very well
with apt_pkg.
* [44dc0a9] debian/gbp.conf: Add gbp-dch configuration
* [d41602b] debian/README.source: Add, with some instructions about the changelog
* [0a3738c] Fix some 2/3 differences and run pylint with confidence=HIGH.
Too many false positives otherwise.
-- Iain Lane <laney@ubuntu.com> Tue, 30 May 2017 11:23:48 +0100
ubuntu-dev-tools (0.158) experimental; urgency=medium ubuntu-dev-tools (0.158) experimental; urgency=medium
* Team upload. * Team upload.
@ -871,7 +15,6 @@ ubuntu-dev-tools (0.158) experimental; urgency=medium
[ Corey Bryant ] [ Corey Bryant ]
* pull-uca-source: Added to pull source from Ubuntu Cloud Archive. * pull-uca-source: Added to pull source from Ubuntu Cloud Archive.
(LP: #1661324)
-- Mattia Rizzolo <mattia@debian.org> Wed, 29 Mar 2017 20:07:38 +0200 -- Mattia Rizzolo <mattia@debian.org> Wed, 29 Mar 2017 20:07:38 +0200
@ -2941,7 +2084,7 @@ ubuntu-dev-tools (0.66) jaunty; urgency=low
[ Jonathan Davies ] [ Jonathan Davies ]
* Added grab-merge from merges.ubuntu.com (LP: #155098). * Added grab-merge from merges.ubuntu.com (LP: #155098).
-- Jonathan Davies <jpds@ubuntu.com> Mon, 09 Mar 2009 17:01:19 +0000 -- Jonathan Davies <jpds@ubuntu.com> Thu, 09 Mar 2009 17:01:19 +0000
ubuntu-dev-tools (0.65) jaunty; urgency=low ubuntu-dev-tools (0.65) jaunty; urgency=low
@ -3768,10 +2911,10 @@ ubuntu-dev-tools (0.25) hardy; urgency=low
didn't work (LP: #175183) didn't work (LP: #175183)
- added support for --http-proxy, honours now $http_proxy or $HTTP_PROXY - added support for --http-proxy, honours now $http_proxy or $HTTP_PROXY
- removed $COMPONENTS_LINE from pbuilder call, data is crippled in the - removed $COMPONENTS_LINE from pbuilder call, data is crippled in the
pbuilder chroot. Instead of this behaviour add pbuilder chroot.
$BASE_DIR/etc/$DISTRIBUTION/apt.conf/ directory and install a sane Instead of this behaviour add $BASE_DIR/etc/$DISTRIBUTION/apt.conf/
sources.list, depending on the releases of Ubuntu and add --aptconfdir to directory and install a sane sources.list, depending on the releases of Ubuntu
pbuilder call (LP: #175183) and add --aptconfdir to pbuilder call (LP: #175183)
- add support for gksudo|kdesudo|sudo depending on $DESKTOP_SESSION. - add support for gksudo|kdesudo|sudo depending on $DESKTOP_SESSION.
or if $PBUILDAUTH is set to something else, it will be used instead of or if $PBUILDAUTH is set to something else, it will be used instead of
sudo|gksudo|kdesudo (LP: #172943) sudo|gksudo|kdesudo (LP: #172943)
@ -4019,8 +3162,7 @@ ubuntu-dev-tools (0.11) gutsy; urgency=low
[ Siegfried-Angel Gevatter Pujals (RainCT) ] [ Siegfried-Angel Gevatter Pujals (RainCT) ]
* Added a manpage for suspicious-source. * Added a manpage for suspicious-source.
* Fixed a bug in pbuilder-dist (it needed ftp.debian.org in sources.list to * Fixed a bug in pbuilder-dist (it needed ftp.debian.org in sources.list to work with Debian).
work with Debian).
-- Daniel Holbach <daniel.holbach@ubuntu.com> Mon, 24 Sep 2007 09:39:24 +0200 -- Daniel Holbach <daniel.holbach@ubuntu.com> Mon, 24 Sep 2007 09:39:24 +0200

3
debian/clean vendored
View File

@ -1 +1,2 @@
*.egg-info/ *.egg-info/*
test-data/example_*

1
debian/compat vendored Normal file
View File

@ -0,0 +1 @@
9

191
debian/control vendored
View File

@ -1,92 +1,87 @@
Source: ubuntu-dev-tools Source: ubuntu-dev-tools
Section: devel Section: devel
Priority: optional Priority: optional
Maintainer: Ubuntu Developers <ubuntu-dev-tools@packages.debian.org> Maintainer: Ubuntu Developers <ubuntu-dev-team@lists.alioth.debian.org>
Uploaders: Uploaders: Benjamin Drung <bdrung@debian.org>,
Benjamin Drung <bdrung@debian.org>, Stefano Rivera <stefanor@debian.org>
Stefano Rivera <stefanor@debian.org>, Vcs-Bzr: lp:ubuntu-dev-tools
Mattia Rizzolo <mattia@debian.org>, Vcs-Browser: https://code.launchpad.net/~ubuntu-dev/ubuntu-dev-tools/trunk
Simon Quigley <tsimonq2@debian.org>, Build-Depends: dctrl-tools,
Build-Depends: debhelper (>= 9),
black <!nocheck>, devscripts (>= 2.11.0~),
dctrl-tools, dh-python,
debhelper-compat (= 13), distro-info (>= 0.2~),
devscripts (>= 2.11.0~), libwww-perl,
dh-make, lsb-release,
dh-python, pylint,
distro-info (>= 0.2~), python-all (>= 2.6.5-13~),
flake8, python-apt (>= 0.7.93~),
isort <!nocheck>, python-debian (>= 0.1.20~),
lsb-release, python-distro-info (>= 0.4~),
pylint <!nocheck>, python-httplib2,
python3-all, python-launchpadlib (>= 1.5.7),
python3-apt, python-mock,
python3-dateutil, python-setuptools,
python3-debian, python-soappy,
python3-debianbts, python-unittest2,
python3-distro-info, python3-all,
python3-httplib2, python3-apt,
python3-launchpadlib-desktop, python3-debian,
python3-pytest, python3-distro-info,
python3-requests <!nocheck>, python3-httplib2,
python3-setuptools, python3-launchpadlib,
python3-yaml <!nocheck>, python3-mock,
Standards-Version: 4.7.2 python3-setuptools
Rules-Requires-Root: no X-Python-Version: >= 2.6
Vcs-Git: https://git.launchpad.net/ubuntu-dev-tools X-Python3-Version: >= 3.2
Vcs-Browser: https://git.launchpad.net/ubuntu-dev-tools
Homepage: https://launchpad.net/ubuntu-dev-tools Homepage: https://launchpad.net/ubuntu-dev-tools
Standards-Version: 3.9.6
Package: ubuntu-dev-tools Package: ubuntu-dev-tools
Architecture: all Architecture: all
Depends: Depends: binutils,
binutils, dctrl-tools,
dctrl-tools, devscripts (>= 2.11.0~),
devscripts (>= 2.11.0~), diffstat,
diffstat, distro-info (>= 0.2~),
distro-info (>= 0.2~), dpkg-dev,
dpkg-dev, lsb-release,
dput, python-apt (>= 0.7.93~),
lsb-release, python-debian (>= 0.1.20~),
python3, python-distro-info (>= 0.4~),
python3-apt, python-httplib2,
python3-debian, python-launchpadlib (>= 1.5.7),
python3-debianbts, python-lazr.restfulclient,
python3-distro-info, python-ubuntutools,
python3-httplib2, sudo,
python3-launchpadlib-desktop, ${misc:Depends},
python3-lazr.restfulclient, ${perl:Depends},
python3-ubuntutools (= ${binary:Version}), ${python:Depends}
python3-yaml, Recommends: bzr,
sensible-utils, bzr-builddeb,
sudo, ca-certificates,
tzdata, debian-archive-keyring,
${misc:Depends}, debian-keyring,
${perl:Depends}, debootstrap,
Recommends: dput,
arch-test, genisoimage,
ca-certificates, libwww-perl,
debian-archive-keyring, lintian,
debian-keyring, patch,
debootstrap, pbuilder | cowdancer | sbuild,
genisoimage, python-dns,
lintian, python-soappy,
patch, quilt,
sbuild | pbuilder | cowbuilder, reportbug (>= 3.39ubuntu1)
python3-dns, Suggests: python-simplejson | python (>= 2.7), qemu-user-static
quilt,
reportbug (>= 3.39ubuntu1),
ubuntu-keyring | ubuntu-archive-keyring,
Suggests:
bzr | brz,
bzr-builddeb | brz-debian,
qemu-user-static,
Description: useful tools for Ubuntu developers Description: useful tools for Ubuntu developers
This is a collection of useful tools that Ubuntu developers use to make their This is a collection of useful tools that Ubuntu developers use to make their
packaging work a lot easier. packaging work a lot easier.
. .
Such tools include: Such tools include:
. .
- 404main - used to check what components a package's deps are in, for
doing a main inclusion report for example.
- backportpackage - helper to test package backports - backportpackage - helper to test package backports
- bitesize - add the 'bitesize' tag to a bug and comment that you are - bitesize - add the 'bitesize' tag to a bug and comment that you are
willing to help fix it. willing to help fix it.
@ -96,6 +91,9 @@ Description: useful tools for Ubuntu developers
- dch-repeat - used to repeat a change log into an older release. - dch-repeat - used to repeat a change log into an older release.
- grab-merge - grabs a merge from merges.ubuntu.com easily. - grab-merge - grabs a merge from merges.ubuntu.com easily.
- grep-merges - search for pending merges from Debian. - grep-merges - search for pending merges from Debian.
- harvest - grabs information about development opportunities from
http://harvest.ubuntu.com
- hugdaylist - compile HugDay lists from bug list URLs.
- import-bug-from-debian - copy a bug from the Debian BTS to Launchpad - import-bug-from-debian - copy a bug from the Debian BTS to Launchpad
- merge-changelog - manually merges two Debian changelogs with the same base - merge-changelog - manually merges two Debian changelogs with the same base
version. version.
@ -107,19 +105,12 @@ Description: useful tools for Ubuntu developers
a Debian package and its immediate parent to generate a debdiff. a Debian package and its immediate parent to generate a debdiff.
- pull-debian-source - downloads the latest source package available in - pull-debian-source - downloads the latest source package available in
Debian of a package. Debian of a package.
- pull-lp-source - downloads source package from Launchpad. - pull-lp-source - downloads latest source package from Launchpad.
- pull-lp-debs - downloads debs package(s) from Launchpad. - pull-revu-source - downloads the latest source package from REVU
- pull-lp-ddebs - downloads dbgsym/ddebs package(s) from Launchpad.
- pull-lp-udebs - downloads udebs package(s) from Launchpad.
- pull-debian-* - same as pull-lp-* but for Debian packages.
- pull-uca-* - same as pull-lp-* but for Ubuntu Cloud Archive packages.
- pull-pkg - common script that provides above pull-* functionality.
- requestbackport - file a backporting request. - requestbackport - file a backporting request.
- requestsync - files a sync request with Debian changelog and rationale. - requestsync - files a sync request with Debian changelog and rationale.
- reverse-depends - find the reverse dependencies (or build dependencies) of - reverse-depends - find the reverse dependencies (or build dependencies) of
a package. a package.
- running-autopkgtests - lists the currently running and/or queued
autopkgtests on the Ubuntu autopkgtest infrastructure
- seeded-in-ubuntu - query if a package is safe to upload during a freeze. - seeded-in-ubuntu - query if a package is safe to upload during a freeze.
- setup-packaging-environment - assistant to get an Ubuntu installation - setup-packaging-environment - assistant to get an Ubuntu installation
ready for packaging work. ready for packaging work.
@ -134,22 +125,24 @@ Description: useful tools for Ubuntu developers
package. package.
- update-maintainer - script to update maintainer field in ubuntu packages. - update-maintainer - script to update maintainer field in ubuntu packages.
Package: python3-ubuntutools Package: python-ubuntutools
Architecture: all Architecture: all
Section: python Section: python
Depends: Depends: ${misc:Depends}, ${python:Depends}
python3-dateutil, Breaks: ubuntu-dev-tools (<< 0.154)
python3-debian, Replaces: ubuntu-dev-tools (<< 0.154)
python3-distro-info, Description: useful library of APIs for Ubuntu developer tools (Python 2)
python3-httplib2,
python3-launchpadlib-desktop,
python3-lazr.restfulclient,
python3-requests,
sensible-utils,
${misc:Depends},
${python3:Depends},
Description: useful APIs for Ubuntu developer tools — Python 3 library
This package ships a collection of APIs, helpers and wrappers used to This package ships a collection of APIs, helpers and wrappers used to
develop useful utilities for Ubuntu developers. develop useful utilities for Ubuntu developers.
. .
This package installs the library for Python 3. Python 2 variant.
Package: python3-ubuntutools
Architecture: all
Section: python
Depends: ${misc:Depends}, ${python3:Depends}
Description: useful library of APIs for Ubuntu developer tools
This package ships a collection of APIs, helpers and wrappers used to
develop useful utilities for Ubuntu developers.
.
Python 3 variant.

68
debian/copyright vendored
View File

@ -3,30 +3,26 @@ Upstream-Name: Ubuntu Developer Tools
Upstream-Contact: Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com> Upstream-Contact: Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>
Source: https://launchpad.net/ubuntu-dev-tools Source: https://launchpad.net/ubuntu-dev-tools
Files: backportpackage Files: *
backportpackage
bash_completion/pbuilder-dist bash_completion/pbuilder-dist
check-symbols check-symbols
debian/*
doc/backportpackage.1 doc/backportpackage.1
doc/check-symbols.1 doc/check-symbols.1
doc/requestsync.1 doc/requestsync.1
doc/ubuntu-iso.1 doc/ubuntu-iso.1
doc/running-autopkgtests.1
GPL-2
README.updates
requestsync requestsync
setup.py setup.py
TODO
ubuntu-iso ubuntu-iso
ubuntutools/requestsync/*.py ubuntutools/requestsync/lp.py
ubuntutools/requestsync/mail.py
Copyright: 2007, Albert Damen <albrt@gmx.net> Copyright: 2007, Albert Damen <albrt@gmx.net>
2010-2024, Benjamin Drung <bdrung@ubuntu.com> 2010, Benjamin Drung <bdrung@ubuntu.com>
2007-2023, Canonical Ltd. 2007-2010, Canonical Ltd.
2006-2007, Daniel Holbach <daniel.holbach@ubuntu.com> 2006-2007, Daniel Holbach <daniel.holbach@ubuntu.com>
2010, Evan Broder <evan@ebroder.net> 2010, Evan Broder <evan@ebroder.net>
2006-2007, Luke Yelavich <themuso@ubuntu.com> 2006-2007, Luke Yelavich <themuso@ubuntu.com>
2009-2010, Michael Bienia <geser@ubuntu.com> 2009-2010, Michael Bienia <geser@ubuntu.com>
2024-2025, Simon Quigley <tsimonq2@debian.org>
2010-2011, Stefano Rivera <stefanor@ubuntu.com> 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
2008, Stephan Hermann <sh@sourcecode.de> 2008, Stephan Hermann <sh@sourcecode.de>
2007, Steve Kowalik <stevenk@ubuntu.com> 2007, Steve Kowalik <stevenk@ubuntu.com>
@ -43,7 +39,9 @@ License: GPL-2
On Debian systems, the complete text of the GNU General Public License On Debian systems, the complete text of the GNU General Public License
version 2 can be found in the /usr/share/common-licenses/GPL-2 file. version 2 can be found in the /usr/share/common-licenses/GPL-2 file.
Files: doc/import-bug-from-debian.1 Files: 404main
doc/404main.1
doc/import-bug-from-debian.1
doc/pbuilder-dist-simple.1 doc/pbuilder-dist-simple.1
doc/pbuilder-dist.1 doc/pbuilder-dist.1
doc/submittodebian.1 doc/submittodebian.1
@ -74,28 +72,23 @@ License: GPL-2+
On Debian systems, the complete text of the GNU General Public License On Debian systems, the complete text of the GNU General Public License
version 2 can be found in the /usr/share/common-licenses/GPL-2 file. version 2 can be found in the /usr/share/common-licenses/GPL-2 file.
Files: doc/lp-bitesize.1 Files: doc/bitesize.1
doc/check-mir.1
doc/grab-merge.1 doc/grab-merge.1
doc/harvest.1
doc/hugdaylist.1
doc/merge-changelog.1 doc/merge-changelog.1
doc/pm-helper.1
doc/setup-packaging-environment.1 doc/setup-packaging-environment.1
doc/syncpackage.1 doc/syncpackage.1
lp-bitesize bitesize
check-mir
GPL-3
grab-merge grab-merge
harvest
hugdaylist
merge-changelog merge-changelog
pm-helper
pyproject.toml
run-linters
running-autopkgtests
setup-packaging-environment setup-packaging-environment
syncpackage syncpackage
ubuntutools/running_autopkgtests.py ubuntutools/harvest.py
ubuntutools/utils.py Copyright: 2010, Benjamin Drung <bdrung@ubuntu.com>
Copyright: 2010-2024, Benjamin Drung <bdrung@ubuntu.com> 2007-2011, Canonical Ltd.
2007-2024, Canonical Ltd.
2008, Jonathan Patrick Davies <jpds@ubuntu.com> 2008, Jonathan Patrick Davies <jpds@ubuntu.com>
2008-2010, Martin Pitt <martin.pitt@canonical.com> 2008-2010, Martin Pitt <martin.pitt@canonical.com>
2009, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com> 2009, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
@ -117,23 +110,17 @@ Files: dch-repeat
doc/dch-repeat.1 doc/dch-repeat.1
doc/grep-merges.1 doc/grep-merges.1
doc/mk-sbuild.1 doc/mk-sbuild.1
doc/pull-pkg.1 doc/pull-lp-source.1
doc/pull-revu-source.1
doc/ubuntu-build.1 doc/ubuntu-build.1
grep-merges grep-merges
mk-sbuild mk-sbuild
pull-pkg pull-lp-source
pull-*debs pull-revu-source
pull-*-source
requirements.txt
test-requirements.txt
tox.ini
ubuntu-build ubuntu-build
ubuntutools/__init__.py ubuntutools/lp/libsupport.py
ubuntutools/lp/__init__.py
ubuntutools/lp/lpapicache.py ubuntutools/lp/lpapicache.py
ubuntutools/lp/udtexceptions.py
ubuntutools/misc.py ubuntutools/misc.py
ubuntutools/pullpkg.py
Copyright: 2007-2010, Canonical Ltd. Copyright: 2007-2010, Canonical Ltd.
2008-2009, Iain Lane <iain@orangesquash.org.uk> 2008-2009, Iain Lane <iain@orangesquash.org.uk>
2006, John Dong <jdong@ubuntu.com> 2006, John Dong <jdong@ubuntu.com>
@ -159,6 +146,7 @@ License: GPL-3+
version 3 can be found in the /usr/share/common-licenses/GPL-3 file. version 3 can be found in the /usr/share/common-licenses/GPL-3 file.
Files: doc/pull-debian-debdiff.1 Files: doc/pull-debian-debdiff.1
doc/pull-debian-source.1
doc/requestbackport.1 doc/requestbackport.1
doc/reverse-depends.1 doc/reverse-depends.1
doc/seeded-in-ubuntu.1 doc/seeded-in-ubuntu.1
@ -168,10 +156,12 @@ Files: doc/pull-debian-debdiff.1
doc/update-maintainer.1 doc/update-maintainer.1
enforced-editing-wrapper enforced-editing-wrapper
pull-debian-debdiff pull-debian-debdiff
pull-debian-source
requestbackport requestbackport
reverse-depends reverse-depends
seeded-in-ubuntu seeded-in-ubuntu
sponsor-patch sponsor-patch
test-data/*
ubuntu-upload-permission ubuntu-upload-permission
ubuntutools/archive.py ubuntutools/archive.py
ubuntutools/builder.py ubuntutools/builder.py
@ -181,15 +171,11 @@ Files: doc/pull-debian-debdiff.1
ubuntutools/sponsor_patch/* ubuntutools/sponsor_patch/*
ubuntutools/test/* ubuntutools/test/*
ubuntutools/update_maintainer.py ubuntutools/update_maintainer.py
ubuntutools/version.py
update-maintainer update-maintainer
.pylintrc Copyright: 2009-2011, Benjamin Drung <bdrung@ubuntu.com>
Copyright: 2009-2024, Benjamin Drung <bdrung@ubuntu.com>
2010, Evan Broder <evan@ebroder.net> 2010, Evan Broder <evan@ebroder.net>
2008, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com> 2008, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
2010-2011, Stefano Rivera <stefanor@ubuntu.com> 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
2017-2021, Dan Streetman <ddstreet@canonical.com>
2024, Canonical Ltd.
License: ISC License: ISC
Permission to use, copy, modify, and/or distribute this software for any Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above purpose with or without fee is hereby granted, provided that the above

9
debian/gbp.conf vendored
View File

@ -1,9 +0,0 @@
[DEFAULT]
debian-tag = %(version)s
debian-branch = master
sign-tags = True
[dch]
meta = True
auto = True
full = True

View File

@ -1 +0,0 @@
/usr/lib/python3.*

18
debian/rules vendored
View File

@ -1,14 +1,12 @@
#!/usr/bin/make -f #!/usr/bin/make -f
override_dh_auto_clean: export PYBUILD_NAME=ubuntutools
dh_auto_clean
rm -f .coverage
rm -rf .tox
override_dh_auto_test:
ifeq (,$(filter nocheck,$(DEB_BUILD_OPTIONS)))
python3 -m pytest -v ubuntutools
endif
%: %:
dh $@ --with python3 --buildsystem=pybuild dh $@ --with python2,python3 --buildsystem=pybuild
override_dh_install:
dh_install
mkdir -p debian/ubuntu-dev-tools/usr
mv debian/python-ubuntutools/usr/bin debian/ubuntu-dev-tools/usr/
mv debian/python-ubuntutools/usr/share debian/ubuntu-dev-tools/usr/

View File

@ -1,3 +0,0 @@
# pyc files are machine-generated; they're expected to have long lines and have unstated copyright
source: file-without-copyright-information *.pyc [debian/copyright]
source: very-long-line-length-in-source-file * > 512 [*.pyc:*]

View File

@ -1,7 +0,0 @@
Test-Command: python3 -m pytest -v ubuntutools
Depends:
dh-make,
python3-pytest,
python3-setuptools,
@,
Restrictions: allow-stderr

View File

@ -1,2 +0,0 @@
/usr/bin
/usr/share

29
doc/404main.1 Normal file
View File

@ -0,0 +1,29 @@
.TH 404main 1 "February 17, 2008" "ubuntu-dev-tools"
.SH NAME
404main \- check if all build dependencies of a package are in main
.SH SYNOPSIS
\fB404main\fP <\fIpackage name\fP> [<\fIdistribution\fP>]
.SH DESCRIPTION
\fB404main\fP is a script that can be used to check if a package and
all its build dependencies are in Ubuntu's main component or not.
.SH CAVEATS
\fB404main\fP will take the dependencies and build dependencies of the
packages from the distribution you have first in your
/etc/apt/sources.list file.
.PP
Also, because of this the <\fIdistribution\fP> option is NOT trustworthy; if
the dependencies changed YOU WILL GET INCORRECT RESULTS.
.SH SEE ALSO
.BR apt-cache (8)
.SH AUTHORS
\fB404main\fP was written by Pete Savage <petesavage@ubuntu.com> and
this manpage by Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>.
.PP
Both are released under the GNU General Public License, version 2 or
later.

View File

@ -1,21 +1,21 @@
.TH lp-bitesize "1" "May 9 2010" "ubuntu-dev-tools" .TH bitesize "1" "May 9 2010" "ubuntu-dev-tools"
.SH NAME .SH NAME
lp-bitesize \- Add \fBbitesize\fR tag to bugs and add a comment. bitesize \- Add \fBbitesize\fR tag to bugs and add a comment.
.SH SYNOPSIS .SH SYNOPSIS
.B lp-bitesize \fR<\fIbug number\fR> .B bitesize \fR<\fIbug number\fR>
.br .br
.B lp-bitesize \-\-help .B bitesize \-\-help
.SH DESCRIPTION .SH DESCRIPTION
\fBlp-bitesize\fR adds a bitesize tag to the bug, if it's not there yet. It \fBbitesize\fR adds a bitesize tag to the bug, if it's not there yet. It
also adds a comment to the bug indicating that you are willing to help with also adds a comment to the bug indicating that you are willing to help with
fixing it. fixing it.
It checks for permission to operate on a given bug first, It checks for permission to operate on a given bug first,
then performs required tasks on Launchpad. then performs required tasks on Launchpad.
.SH OPTIONS .SH OPTIONS
Listed below are the command line options for \fBlp-bitesize\fR: Listed below are the command line options for \fBbitesize\fR:
.TP .TP
.BR \-h ", " \-\-help .BR \-h ", " \-\-help
Display a help message and exit. Display a help message and exit.
@ -48,7 +48,7 @@ The default value for \fB--lpinstance\fR.
.BR ubuntu\-dev\-tools (5) .BR ubuntu\-dev\-tools (5)
.SH AUTHORS .SH AUTHORS
\fBlp-bitesize\fR and this manual page were written by Daniel Holbach \fBbitesize\fR and this manual page were written by Daniel Holbach
<daniel.holbach@canonical.com>. <daniel.holbach@canonical.com>.
.PP .PP
Both are released under the terms of the GNU General Public License, version 3. Both are released under the terms of the GNU General Public License, version 3.

19
doc/harvest.1 Normal file
View File

@ -0,0 +1,19 @@
.TH harvest 1 "March 21, 2011" "ubuntu-dev-tools"
.SH NAME
harvest \- grabs information about a given source package from harvest.ubuntu.com.
.SH SYNOPSIS
\fBharvest\fP <\fIsource package name\fP>
.SH DESCRIPTION
\fBharvest\fP is a script that downloads information about development
opportunities from harvest.ubuntu.com and gives a summary of the types of
opportunities.
.SH AUTHORS
\fBharvest\fP and its manpage were written by Daniel Holbach
<daniel.holbach@ubuntu.com>.
.PP
Both are released under the GNU General Public License, version 3 or
later.

26
doc/hugdaylist.1 Normal file
View File

@ -0,0 +1,26 @@
.TH HUGDAYLIST "1" "August 27, 2008" "ubuntu-dev-tools"
.SH NAME
hugdaylist \- produce MoinMoin wiki formatted tables based on a Launchpad bug list
.SH SYNOPSIS
.B hugdaylist [\fB\-n\fP|\fB\-\-number <NUMBER>\fP] \fBlaunchpad-buglist-url\fP
.SH DESCRIPTION
\fBhugdaylist\fP produces MoinMoin wiki formatted tables based on a
Launchpad bug list
.SH OPTIONS
.TP
\fB\-\-number=<NUMBER>\fP
This option allows you to specify the number of entries to output.
.TP
\fBlaunchpad-buglist-url\fP
Required, this option is a URL pointing to a launchpad bug list.
.SH AUTHOR
\fBhugdaylist\fP has been written by Canonical Ltd., Daniel Holbach
<daniel.holbach@canonical.com> and Jonathan Patrick Davies <jpds@ubuntu.com>.
This manual page was written by Ryan Kavanagh <ryanakca@kubuntu.org>.
.PP
Both are released under the GNU General Public License, version 3.

View File

@ -64,15 +64,6 @@ Disable checking gpg signatures of downloaded Release files by using
debootstrap's \fB\-\-no\-check\-gpg\fR option. See \fBdebootstrap\fR (8) debootstrap's \fB\-\-no\-check\-gpg\fR option. See \fBdebootstrap\fR (8)
for more details. for more details.
.TP .TP
.B \-\-debootstrap\-proxy\fR=\fIPROXY
Use \fIPROXY\fR as apt proxy.
.TP
.B \-\-eatmydata
Install and use eatmydata (default)
.TP
.B \-\-skip\-eatmydata
Don't install and use eatmydata
.TP
.B \-\-distro\fR=\fIDISTRO .B \-\-distro\fR=\fIDISTRO
Enable distro-specific logic. Enable distro-specific logic.
When not provided, the distribution is determined from \fIrelease\fR. When not provided, the distribution is determined from \fIrelease\fR.
@ -83,31 +74,10 @@ Specify a volume group, and subsequently use a default \fBSCHROOT_TYPE\fR of
"\fBlvm-snapshot\fR" rather than "\fBdirectory\fR" (via overlayfs or "\fBlvm-snapshot\fR" rather than "\fBdirectory\fR" (via overlayfs or
aufs) mounts. aufs) mounts.
.TP .TP
.B \-\-zfs-dataset=\fIDATASET
Specify a zfs dataset, and subsequently use a default \fBSCHROOT_TYPE\fR of
"\fBzfs-snapshot\fR" rather than "\fBdirectory\fR" (via overlayfs or
aufs) mounts.
.TP
.B \-\-type\fR=\fISCHROOT_TYPE .B \-\-type\fR=\fISCHROOT_TYPE
Specify a \fBSCHROOT_TYPE\fR. Supported values are "\fBdirectory\fR" Specify a \fBSCHROOT_TYPE\fR. Supported values are "\fBdirectory\fR"
(default if \fB\-\-vg\fR not specified), "\fBlvm-snapshot\fR" (default (default if \fB\-\-vg\fR not specified), "\fBlvm-snapshot\fR" (default
if \fB\-\-vg\fR specified), "\fBbtrfs-snapshot\fR", "\fBzfs-snapshot\fR" if \fB\-\-vg\fR specified), "\fBbtrfs-snapshot\fR", and "\fBfile\fR".
and "\fBfile\fR".
.TP
.B \-\-ccache
Enable usage of \fBccache\fR by default. See \fBccache\fR (1) for
more details.
.TP
.B \-\-ccache-dir=\fIPATH
Use \fBPATH\fR as schroot ccache directory. This directory can be
safely shared by multiple schroots, but they will all use the same
\fBCCACHE_MAXSIZE\fR.
Defaults to /var/cache/ccache-sbuild.
See \fBccache\fR (1) for more details.
.TP
.B \-\-ccache-size=\fISIZE
Sets \fBSIZE\fR as the schroot \fBCCACHE_DIR\fR max-size used by ccache.
See \fBccache\fR (1) for more details.
.SH ENVIRONMENT VARIABLES .SH ENVIRONMENT VARIABLES
.TP .TP
@ -150,14 +120,6 @@ Keyring file to use for checking gpg signatures of retrieved release files
Disable gpg verification of retrieved release files (same as Disable gpg verification of retrieved release files (same as
\fB\-\-debootstrap\-no\-check\-gpg\fR) \fB\-\-debootstrap\-no\-check\-gpg\fR)
.TP .TP
.B DEBOOTSTRAP_PROXY
Proxy to use for apt. (same as
\fB\-\-debootstrap\-proxy\fR)
.TP
.B EATMYDATA
Enable or disable eatmydata usage, see \fB\-\-eatmydata\fR
and \fB\-\-skip\-eatmydata\fR
.TP
.B SOURCE_CHROOTS_DIR .B SOURCE_CHROOTS_DIR
Use \fBSOURCE_CHROOTS_DIR\fR as home of schroot source directories. Use \fBSOURCE_CHROOTS_DIR\fR as home of schroot source directories.
(default \fB/var/lib/schroot/chroots\fR) (default \fB/var/lib/schroot/chroots\fR)
@ -169,18 +131,6 @@ Use \fBSOURCE_CHROOTS_TGZ\fR as home of schroot source tarballs.
.B CHROOT_SNAPSHOT_DIR .B CHROOT_SNAPSHOT_DIR
Use \fBCHROOT_SNAPSHOT_DIR\fR as home of mounted btrfs snapshots. Use \fBCHROOT_SNAPSHOT_DIR\fR as home of mounted btrfs snapshots.
(default \fB/var/lib/schroot/snapshots\fR) (default \fB/var/lib/schroot/snapshots\fR)
.TP
.B CCACHE
Enable \fBccache\fR (1) by default.
(defaults to \fB0\fR)
.TP
.B CCACHE_DIR
Use \fBCCACHE_DIR\fR as the \fBccache\fR (1) directory.
(default \fB/var/cache/ccache-sbuild\fR)
.TP
.B CCACHE_SIZE
Use \fBCCACHE_SIZE\fR as the \fBccache\fR (1) max-size.
(defaults to \fB4G\fR)
.SH FILES .SH FILES

View File

@ -20,7 +20,7 @@ like for example \fBpbuilder\-feisty\fP, \fBpbuilder\-sid\fP, \fBpbuilder\-gutsy
.PP .PP
The same applies to \fBcowbuilder\-dist\fP, which uses cowbuilder. The main The same applies to \fBcowbuilder\-dist\fP, which uses cowbuilder. The main
difference between both is that pbuilder compresses the created chroot as a difference between both is that pbuilder compresses the created chroot as a
tarball, thus using less disc space but needing to uncompress (and possibly a tarball, thus using less disc space but needing to uncompress (and possibly
compress) its contents again on each run, and cowbuilder doesn't do this. compress) its contents again on each run, and cowbuilder doesn't do this.
.SH USAGE .SH USAGE
@ -38,7 +38,7 @@ This optional parameter will attempt to construct a chroot in a foreign
architecture. architecture.
For some architecture pairs (e.g. i386 on an amd64 install), the chroot For some architecture pairs (e.g. i386 on an amd64 install), the chroot
will be created natively. will be created natively.
For others (e.g. arm64 on an amd64 install), qemu\-user\-static will be For others (e.g. armel on an i386 install), qemu\-user\-static will be
used. used.
Note that some combinations (e.g. amd64 on an i386 install) require Note that some combinations (e.g. amd64 on an i386 install) require
special separate kernel handling, and may break in unexpected ways. special separate kernel handling, and may break in unexpected ways.
@ -85,9 +85,6 @@ Suitable environment for preparing security updates.
\fB\-\-updates\-only\fP \fB\-\-updates\-only\fP
Only use the release, security, and updates pocket. Only use the release, security, and updates pocket.
Not the proposed\-updates pocket. Not the proposed\-updates pocket.
.TP
\fB\-\-backports\fP
Also use the backports archive.
.SH EXAMPLES .SH EXAMPLES
.TP .TP

View File

@ -1,44 +0,0 @@
.\" Copyright (C) 2023, Canonical Ltd.
.\"
.\" This program is free software; you can redistribute it and/or
.\" modify it under the terms of the GNU General Public License, version 3.
.\"
.\" This program is distributed in the hope that it will be useful,
.\" but WITHOUT ANY WARRANTY; without even the implied warranty of
.\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
.\" General Public License for more details.
.\"
.\" You should have received a copy of the GNU General Public License
.\" along with this program. If not, see <http://www.gnu.org/licenses/>.
.TH pm\-helper 1 "June 2023" ubuntu\-dev\-tools
.SH NAME
pm\-helper \- helper to guide a developer through proposed\-migration work
.SH SYNOPSIS
.B pm\-helper \fR[\fIoptions\fR] [\fIpackage\fR]
.SH DESCRIPTION
Claim a package from proposed\-migration to work on and get additional
information (such as the state of the package in Debian) that may be helpful
in unblocking it.
.PP
This tool is incomplete and under development.
.SH OPTIONS
.TP
.B \-l \fIINSTANCE\fR, \fB\-\-launchpad\fR=\fIINSTANCE\fR
Use the specified instance of Launchpad (e.g. "staging"), instead of
the default of "production".
.TP
.B \-v\fR, \fB--verbose\fR
be more verbose
.TP
\fB\-h\fR, \fB\-\-help\fR
Display a help message and exit
.SH AUTHORS
\fBpm\-helper\fR and this manpage were written by Steve Langasek
<steve.langasek@ubuntu.com>.
.PP
Both are released under the GPLv3 license.

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

89
doc/pull-debian-source.1 Normal file
View File

@ -0,0 +1,89 @@
.\" Copyright (C) 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
.\"
.\" Permission to use, copy, modify, and/or distribute this software for any
.\" purpose with or without fee is hereby granted, provided that the above
.\" copyright notice and this permission notice appear in all copies.
.\"
.\" THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
.\" REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
.\" AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
.\" INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
.\" LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
.\" OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
.\" PERFORMANCE OF THIS SOFTWARE.
.TH PULL\-DEBIAN\-SOURCE "1" "22 January 2011" "ubuntu\-dev\-tools"
.SH NAME
pull\-debian\-source \- download and extract a source package from Debian
.SH SYNOPSIS
.B pull\-debian\-source \fR[\fIoptions\fR] <\fIsource package\fR>
[\fIrelease\fR|\fIversion\fR]
.SH DESCRIPTION
\fBpull\-debian\-source\fR downloads and extracts the specified
\fIversion\fR of \fIsource package\fR, or the latest version in the
specified Debian \fIrelease\fR.
.P
\fBpull\-debian\-source\fR will try the preferred mirror, default
mirror, security mirror, and fall back to \fBLaunchpad\fR or
\fBsnapshot.debian.org\fR, in search of the requested version.
.SH OPTIONS
.TP
.I source package
The source package to download from Debian.
.TP
.I release
The release to download the source package from. Defaults to
\fBunstable\fR.
.TP
.I version
The specific version of the package to download.
.TP
.BR \-d ", " \-\-download\-only
Do not extract the source package.
.TP
.B \-m \fIDEBIAN_MIRROR\fR, \fB\-\-mirror\fR=\fIDEBIAN_MIRROR\fR
Use the specified mirror.
Should be in the form \fBhttp://ftp.debian.org/debian\fR.
If the package isn't found on this mirror, \fBpull\-debian\-source\fR
will fall back to the default mirror.
.TP
.B \-s \fIDEBSEC_MIRROR\fR, \fB\-\-security\-mirror\fR=\fIDEBSEC_MIRROR\fR
Use the specified security mirror.
Should be in the form \fBhttp://security.debian.org\fR.
If the package isn't found on this mirror, \fBpull\-debian\-source\fR
will fall back to the default mirror.
.TP
.B \-\-no\-conf
Do not read any configuration files, or configuration from environment
variables.
.TP
.BR \-h ", " \-\-help
Display the usage instructions and exit.
.SH ENVIRONMENT
All of the \fBCONFIGURATION VARIABLES\fR below are also supported as
environment variables.
Variables in the environment take precedence to those in configuration
files.
.SH CONFIGURATION VARIABLES
The following variables can be set in the environment or in
.BR ubuntu\-dev\-tools (5)
configuration files.
In each case, the script\-specific variable takes precedence over the
package\-wide variable.
.TP
.BR PULL_DEBIAN_SOURCE_DEBIAN_MIRROR ", " UBUNTUTOOLS_DEBIAN_MIRROR
The default value for \fB\-\-mirror\fR.
.TP
.BR PULL_DEBIAN_SOURCE_DEBSEC_MIRROR ", " UBUNTUTOOLS_DEBSEC_MIRROR
The default value for \fB\-\-security\-mirror\fR.
.SH SEE ALSO
.BR dget (1),
.BR pull\-debian\-debdiff (1),
.BR pull\-lp\-source (1),
.BR ubuntu\-dev\-tools (5)

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

79
doc/pull-lp-source.1 Normal file
View File

@ -0,0 +1,79 @@
.TH PULL\-LP\-SOURCE "1" "4 August 2008" "ubuntu-dev-tools"
.SH NAME
pull\-lp\-source \- download a source package from Launchpad
.SH SYNOPSIS
.B pull\-lp\-source \fR[\fIoptions\fR]\fB \fBsource package\fR
[\fIrelease\fR|\fIversion\fR]
.SH DESCRIPTION
\fBpull\-lp\-source\fR downloads and extracts the specified
\fIversion\fR of <\fBsource package\fR> from Launchpad, or the latest
version of the specified \fIrelease\fR.
To request a version from a particular pocket say
\fIrelease\fB\-\fIpocket\fR (with a magic \fB\-release\fR for only the
release pocket).
If no \fIversion\fR or \fIrelease\fR is specified, the latest version in
the development release will be downloaded.
.SH OPTIONS
Listed below are the command line options for pull\-lp\-source:
.TP
.B source package
This is the source package that you would like to be downloaded from Launchpad.
.TP
.B version
This is the version of the source package to be downloaded.
.TP
.B release
This is the release that you would like the source package to be downloaded from.
This value defaults to the current development release.
.TP
.BR \-h ", " \-\-help
Display a help message and exit.
.TP
.BR \-d ", " \-\-download\-only
Do not extract the source package.
.TP
.B \-m \fIUBUNTU_MIRROR\fR, \fB\-\-mirror\fR=\fIUBUNTU_MIRROR\fR
Use the specified Ubuntu mirror.
Should be in the form \fBhttp://archive.ubuntu.com/ubuntu\fR.
If the package isn't found on this mirror, \fBpull\-lp\-source\fR will
fall back to Launchpad, as its name implies.
.TP
.B \-\-no\-conf
Do not read any configuration files, or configuration from environment
variables.
.SH ENVIRONMENT
All of the \fBCONFIGURATION VARIABLES\fR below are also supported as
environment variables.
Variables in the environment take precedence to those in configuration
files.
.TP
.B
DIST
Specifies the default target.
.SH CONFIGURATION VARIABLES
The following variables can be set in the environment or in
.BR ubuntu\-dev\-tools (5)
configuration files.
In each case, the script\-specific variable takes precedence over the
package\-wide variable.
.TP
.BR PULL_LP_SOURCE_UBUNTU_MIRROR ", " UBUNTUTOOLS_UBUNTU_MIRROR
The default value for \fB\-\-mirror\fR.
.SH SEE ALSO
.BR dget (1),
.BR pull\-debian\-source (1),
.BR pull\-debian\-debdiff (1),
.BR ubuntu\-dev\-tools (5)
.SH AUTHOR
.PP
\fBpull\-lp\-source\fR and this manual page were written by Iain Lane
<iain@orangesquash.org.uk>.
Both are released under the GNU General Public License, version 3 or later.

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1,147 +0,0 @@
.TH PULL\-PKG "1" "28 August 2017" "ubuntu-dev-tools"
.SH NAME
pull\-pkg \- download a package for Debian, Ubuntu, UCA, or a PPA
.SH SYNOPSIS
.B pull\-pkg \fR[\fIoptions\fR]\fR <\fIpackage name\fR>
[\fIrelease\fR|\fIversion\fR]
.SH DESCRIPTION
\fBpull\-pkg\fR downloads the specified \fIversion\fR of
<\fIpackage name\fR>, or the latest version from the
specified \fIrelease\fR. To request a version from
a particular pocket say \fIrelease\fB\-\fIpocket\fR (with a magic
\fB\-release\fR for only the release pocket). If no \fIpocket\fR is
specified, all pockets will be searched except -backports.
If no \fIversion\fR or \fIrelease\fR is specified, the latest version in
the development release will be downloaded.
There are convenience scripts that set pull type and distribution
appropriately: these are
\fBpull\-lp\-source\fR, \fBpull\-lp\-debs\fR, \fBpull\-lp\-ddebs\fR,
and \fBpull\-lp\-udebs\fR, which all pull Ubuntu packages;
\fBpull\-debian\-source\fR, \fBpull\-debian\-debs\fR, \fBpull\-debian\-ddebs\fR,
and \fBpull\-debian\-udebs\fR, which all pull Debian packages;
\fBpull\-uca\-source\fR, \fBpull\-uca\-debs\fR, \fBpull\-uca\-ddebs\fR,
and \fBpull\-uca\-udebs\fR, which all pull Ubuntu Cloud Archive packages;
and \fBpull\-ppa\-source\fR, \fBpull\-ppa\-debs\fR, \fBpull\-ppa\-ddebs\fR,
and \fBpull\-ppa\-udebs\fR, which all pull from a specified Personal Package
Archive on Launchpad. Each script pulls the file type in its name, i.e.
\fIsource\fR, \fIdebs\fR, \fIddebs\fR, or \fIudebs\fR.
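For example, the following pairs of invocations would be roughly equivalent (a sketch; package and release names are placeholders):

    pull-pkg --distro=ubuntu --pull=source hello xenial
    pull-lp-source hello xenial

    pull-pkg --distro=debian --pull=debs hello unstable
    pull-debian-debs hello unstable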
.SH OPTIONS
Listed below are the command line options for pull\-pkg:
.TP
.I package name
This is the name of the package to download.
You can use either the source package name or a binary package name.
.TP
.I version
This is the version of the package to download.
.TP
.I release
This is the release to download from.
For Debian, you can use either a release name like \fBjessie\fR
or \fBsid\fR, or one of the special release names \fBunstable\fR,
\fBstable\fR, or \fBtesting\fR.
For Ubuntu, you can use either a release name like \fBxenial\fR
or a release-pocket like \fBxenial-proposed\fR.
For the Ubuntu Cloud Archive (UCA), you can use either a UCA release
name like \fBmitaka\fR or a combined Ubuntu and UCA release name like
\fBtrusty-mitaka\fR. Defaults to the current development release.
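A brief sketch of the naming schemes above (the package name is a placeholder):

    pull-pkg --distro=debian hello unstable      # Debian, special release name
    pull-pkg hello xenial-proposed               # Ubuntu, release-pocket form
    pull-pkg --distro=uca hello trusty-mitaka    # Ubuntu Cloud Archive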
.TP
.BR \-h ", " \-\-help
Display a help message and exit.
.TP
.BR \-v ", " \-\-verbose
Be verbose about what is being done.
.TP
.BR \-d ", " \-\-download\-only
Do not extract the source package (applies only to source packages).
.TP
.B \-m \fIMIRROR\fR, \fB\-\-mirror\fR=\fIMIRROR\fR
Use the specified mirror server.
Should be in the form \fBhttp://archive.ubuntu.com/ubuntu\fR or
\fBhttp://deb.debian.org/debian\fR. If not specified or if the
package is not found on the specified mirror, this will fall
back to the default mirror(s) and/or mirror(s) from environment
variables, and then will fall back to Launchpad or Debian Snapshot.
This can be specified multiple times to try multiple mirrors.
.TP
.B \-\-no\-conf
Do not use mirrors from the default configuration, or from
any environment variables.
.TP
.B \-a \fIARCH\fR, \fB\-\-arch\fR=\fIARCH\fR
Get binary packages from the \fIARCH\fR architecture.
Defaults to the local architecture, if it can be detected.
.TP
.B \-p \fIPULL\fR, \fB\-\-pull\fR=\fIPULL\fR
What to pull: \fBsource\fR, \fBdebs\fR, \fBddebs\fR, \fBudebs\fR,
or \fBlist\fR. The \fBlist\fR action lists all of a package's
source and binary files, but does not actually download any.
Defaults to \fBsource\fR.
.TP
.B \-D \fIDISTRO\fR, \fB\-\-distro\fR=\fIDISTRO\fR
Pull from: \fBdebian\fR, \fBuca\fR, \fBubuntu\fR, or a \fBppa\fR.
\fBlp\fR can be used instead of \fBubuntu\fR.
Any string containing \fBcloud\fR can be used instead of \fBuca\fR.
If pulling from a ppa, you must specify the PPA. Defaults to \fBubuntu\fR.
.TP
.B \-\-ppa\fR=ppa:\fIUSER/NAME\fR
Applies only when \fBdistro\fR is \fIppa\fR. The PPA can be given either as
a value to the \fB\-\-ppa\fR option, or as a plain argument
(like \fIrelease\fR or \fIversion\fR). When given as a plain argument,
the form must be \fBppa:USER/NAME\fR; when given as a value to the
\fB\-\-ppa\fR option, the leading \fBppa:\fR is optional.
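In other words, both of the following forms would be accepted (a sketch; the PPA and package names are placeholders):

    pull-pkg --distro=ppa ppa:someuser/someppa hello
    pull-pkg --distro=ppa --ppa=someuser/someppa hello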
.SH ENVIRONMENT
All of the \fBCONFIGURATION VARIABLES\fR below are also supported as
environment variables.
Variables in the environment take precedence over those in configuration
files.
.SH CONFIGURATION VARIABLES
The following variables can be set in the environment or in
.BR ubuntu\-dev\-tools (5)
configuration files.
In each case, the script\-specific variable takes precedence over the
package\-wide variable.
.TP
.BR UBUNTUTOOLS_UBUNTU_MIRROR
The default mirror.
.TP
.BR PULL_PKG_UBUNTU_MIRROR
The default mirror when using the \fBpull\-pkg\fR script.
.TP
.BR PULL_[LP|DEBIAN|PPA|UCA]_[SOURCE|DEBS|DDEBS|UDEBS]_MIRROR
The default mirror when using the associated script.
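For instance (a sketch; the mirror URLs are placeholders):

    # used by every pull script unless a more specific variable is set
    export UBUNTUTOOLS_UBUNTU_MIRROR=http://mirror.example.com/ubuntu
    # used only by pull-lp-debs, overriding the variable above
    export PULL_LP_DEBS_MIRROR=http://other-mirror.example.com/ubuntu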
.SH SEE ALSO
.BR dget (1),
.BR pull\-lp\-source (1),
.BR pull\-lp\-debs (1),
.BR pull\-lp\-ddebs (1),
.BR pull\-lp\-udebs (1),
.BR pull\-debian\-source (1),
.BR pull\-debian\-debs (1),
.BR pull\-debian\-ddebs (1),
.BR pull\-debian\-udebs (1),
.BR pull\-ppa\-source (1),
.BR pull\-ppa\-debs (1),
.BR pull\-ppa\-ddebs (1),
.BR pull\-ppa\-udebs (1),
.BR pull\-uca\-source (1),
.BR pull\-uca\-debs (1),
.BR pull\-uca\-ddebs (1),
.BR pull\-uca\-udebs (1),
.BR pull\-debian\-debdiff (1),
.BR ubuntu\-dev\-tools (5)
.SH AUTHOR
.PP
\fBpull\-pkg\fR was written by Dan Streetman <ddstreet@canonical.com>,
based on the original \fBpull\-lp\-source\fR; it and this manual page
were written by Iain Lane <iain@orangesquash.org.uk>.
All are released under the GNU General Public License, version 3 or later.

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

27
doc/pull-revu-source.1 Normal file
View File

@ -0,0 +1,27 @@
.TH PULL\-REVU\-SOURCE "1" "30 August 2009" "ubuntu-dev-tools"
.SH NAME
pull\-revu\-source \- download a source package from REVU
.SH SYNOPSIS
.B pull\-revu\-source \fR[\fB\-h\fR]\fB <\fBsource package\fR>
.SH DESCRIPTION
\fBpull\-revu\-source\fR downloads and extracts the latest version of
<\fBsource package\fR> from REVU.
.SH OPTIONS
Listed below are the command line options for pull\-revu\-source:
.TP
.B \-h, \-\-help
Display the usage instructions and exit.
.TP
.B <source package>
This is the source package to download from REVU.
.SH AUTHOR
.PP
\fBpull\-revu\-source\fR and this manual page were written by Nathan Handler
<nhandler@ubuntu.com>. \fBpull\-revu\-source\fR is based on \fBrevupull\fR in
\fBkubuntu\-dev\-tools\fR, written by Harald Sitter <apachelogger@ubuntu.com>.
Both are released under the GNU General Public License, version 3 or later.

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -0,0 +1,13 @@
.TH REVERSE-BUILD-DEPENDS "1" "June 2012" "ubuntu-dev-tools"
.SH NAME
reverse-build-depends \- find packages that depend on a specific package to
build (reverse build depends)
.SH SYNOPSIS
.TP
.B reverse-build-depends \fR[\fIoptions\fR] \fIpackage
.SH DESCRIPTION
\fBreverse-build-depends\fR has been replaced by \fBreverse-depends \-b\fR.
This script now wraps \fBreverse-depends\fR;
please use \fBreverse-depends\fR directly in the future.
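A sketch of the suggested replacement (the package name is a placeholder):

    reverse-build-depends libfoo-dev    # old form, still accepted via this wrapper
    reverse-depends -b libfoo-dev       # preferred form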
.SH SEE ALSO
.BR reverse-depends (1)

View File

@ -1,15 +0,0 @@
.TH running\-autopkgtests "1" "18 January 2024" "ubuntu-dev-tools"
.SH NAME
running\-autopkgtests \- dumps a list of currently running autopkgtests
.SH SYNOPSIS
.B running\-autopkgtests
.SH DESCRIPTION
Dumps a list of currently running and queued tests in autopkgtest.
Pass --running to see only running tests, or --queued to see only
queued tests. Passing both prints both, which is the default behavior.
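For example (a sketch):

    running-autopkgtests               # running and queued tests (the default)
    running-autopkgtests --running     # only tests that are currently running
    running-autopkgtests --queued      # only tests waiting in the queue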
.SH AUTHOR
.B running\-autopkgtests
was written by Chris Peterson <chris.peterson@canonical.com>.

View File

@ -6,13 +6,7 @@
 \fBsetup-packaging-environment\fR
 .SH DESCRIPTION
-\fBsetup-packaging-environment\fR aims to make it more straightforward for new
-contributors to get their Ubuntu installation ready for packaging work. It
-ensures that all four components from Ubuntu's official repositories are enabled
-along with their corresponding source repositories. It also installs a minimal
-set of packages needed for Ubuntu packaging work (ubuntu-dev-tools, devscripts,
-debhelper, patchutils, pbuilder, and build-essential). Finally, it assists
-in defining the DEBEMAIL and DEBFULLNAME environment variables.
+\fBsetup-packaging-environment\fR aims to make it more straightforward for new contributors to get their Ubuntu installation ready for packaging work. It ensures that all four components from Ubuntu's official repositories are enabled along with their corresponding source repositories. It also installs a minimal set of packages needed for Ubuntu packaging work (ubuntu-dev-tools, devscripts, debhelper, cdbs, patchutils, pbuilder, and build-essential). Finally, it assists in defining the DEBEMAIL and DEBFULLNAME environment variables.
 .SH AUTHORS
 \fBsetup-packaging-environment\fR was written by Siegfried-A. Gevatter <rainct@ubuntu.com>.

View File

@ -4,11 +4,11 @@ syncpackage \- copy source packages from Debian to Ubuntu
.\" .\"
.SH SYNOPSIS .SH SYNOPSIS
.B syncpackage .B syncpackage
[\fIoptions\fR] \fI<.dsc URL/path or package name(s)>\fR [\fIoptions\fR] \fI<.dsc URL/path or package name>\fR
.\" .\"
.SH DESCRIPTION .SH DESCRIPTION
\fBsyncpackage\fR causes one or more source package(s) to be copied from Debian \fBsyncpackage\fR causes a source package to be copied from Debian to
to Ubuntu. Ubuntu.
.PP .PP
\fBsyncpackage\fR allows you to upload files with the same checksums of the \fBsyncpackage\fR allows you to upload files with the same checksums of the
Debian ones, as the common script used by Ubuntu archive administrators does, Debian ones, as the common script used by Ubuntu archive administrators does,
@ -58,7 +58,7 @@ Display more progress information.
\fB\-F\fR, \fB\-\-fakesync\fR \fB\-F\fR, \fB\-\-fakesync\fR
Perform a fakesync, to work around a tarball mismatch between Debian and Perform a fakesync, to work around a tarball mismatch between Debian and
Ubuntu. Ubuntu.
This option ignores blocklisting, and performs a local sync. This option ignores blacklisting, and performs a local sync.
It implies \fB\-\-no\-lp\fR, and will leave a signed \fB.changes\fR file It implies \fB\-\-no\-lp\fR, and will leave a signed \fB.changes\fR file
for you to upload. for you to upload.
.TP .TP

View File

@ -1,14 +1,9 @@
-.TH UBUNTU-BUILD "1" "Mar 2024" "ubuntu-dev-tools"
+.TH UBUNTU-BUILD "1" "June 2010" "ubuntu-dev-tools"
 .SH NAME
 ubuntu-build \- command-line interface to Launchpad build operations
 .SH SYNOPSIS
-.nf
-\fBubuntu-build\fR <srcpackage> <release> <operation>
-\fBubuntu-build\fR --batch [--retry] [--rescore \fIPRIORITY\fR] [--arch \fIARCH\fR [...]]
-[--series \fISERIES\fR] [--state \fIBUILD-STATE\fR]
-[-A \fIARCHIVE\fR] [pkg]...
-.fi
+.B ubuntu-build <srcpackage> <release> <operation>
 .SH DESCRIPTION
 \fBubuntu-build\fR provides a command line interface to the Launchpad build
@ -42,8 +37,8 @@ operations.
 .IP
 \fB\-a\fR ARCHITECTURE, \fB\-\-arch\fR=\fIARCHITECTURE\fR
 Rebuild or rescore a specific architecture. Valid
-architectures are:
-armhf, arm64, amd64, i386, powerpc, ppc64el, riscv64, s390x.
+architectures include: amd64, sparc, powerpc, i386,
+armel, armhf, arm64, ia64, lpia, hppa, ppc64el, s390x.
 .TP
 Batch processing:
 .IP
@ -63,16 +58,14 @@ Retry builds (give\-back).
 \fB\-\-rescore\fR=\fIPRIORITY\fR
 Rescore builds to <priority>.
 .IP
-\fB\-\-arch\fR=\fIARCHITECTURE\fR
+\fB\-\-arch2\fR=\fIARCHITECTURE\fR
 Affect only 'architecture' (can be used several
-times). Valid architectures are:
-arm64, amd64, i386, powerpc, ppc64el, riscv64, s390x.
-.IP
-\fB\-A=\fIARCHIVE\fR
-Act on the named archive (ppa) instead of on the main Ubuntu archive.
+times). Valid architectures are: amd64, sparc,
+powerpc, i386, armel, armhf, arm64, ia64, lpia, hppa.
 .SH AUTHORS
 \fBubuntu-build\fR was written by Martin Pitt <martin.pitt@canonical.com>, and
 this manual page was written by Jonathan Patrick Davies <jpds@ubuntu.com>.
 .PP
-Both are released under the terms of the GNU General Public License, version 3.
+Both are released under the terms of the GNU General Public License, version 3
+or (at your option) any later version.

View File

@ -1,4 +1,4 @@
#!/usr/bin/python3 #!/usr/bin/python
# #
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com> # Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
# #
@ -22,10 +22,7 @@
# UDT_EDIT_WRAPPER_TEMPLATE_RE: An extra boilerplate-detecting regex. # UDT_EDIT_WRAPPER_TEMPLATE_RE: An extra boilerplate-detecting regex.
# UDT_EDIT_WRAPPER_FILE_DESCRIPTION: The type of file being edited. # UDT_EDIT_WRAPPER_FILE_DESCRIPTION: The type of file being edited.
# pylint: disable=invalid-name import optparse
# pylint: enable=invalid-name
import argparse
import os import os
import re import re
@ -33,30 +30,33 @@ from ubuntutools.question import EditFile
def main(): def main():
parser = argparse.ArgumentParser(usage="%(prog)s [options] filename") parser = optparse.OptionParser('%prog [options] filename')
parser.add_argument("filename", help=argparse.SUPPRESS) options, args = parser.parse_args()
args = parser.parse_args()
if not os.path.isfile(args.filename):
parser.error(f"File {args.filename} does not exist")
if "UDT_EDIT_WRAPPER_EDITOR" in os.environ: if len(args) != 1:
os.environ["EDITOR"] = os.environ["UDT_EDIT_WRAPPER_EDITOR"] parser.error('A filename must be specified')
else: body = args[0]
del os.environ["EDITOR"] if not os.path.isfile(body):
parser.error('File %s does not exist' % body)
if "UDT_EDIT_WRAPPER_VISUAL" in os.environ: if 'UDT_EDIT_WRAPPER_EDITOR' in os.environ:
os.environ["VISUAL"] = os.environ["UDT_EDIT_WRAPPER_VISUAL"] os.environ['EDITOR'] = os.environ['UDT_EDIT_WRAPPER_EDITOR']
else: else:
del os.environ["VISUAL"] del os.environ['EDITOR']
if 'UDT_EDIT_WRAPPER_VISUAL' in os.environ:
os.environ['VISUAL'] = os.environ['UDT_EDIT_WRAPPER_VISUAL']
else:
del os.environ['VISUAL']
placeholders = [] placeholders = []
if "UDT_EDIT_WRAPPER_TEMPLATE_RE" in os.environ: if 'UDT_EDIT_WRAPPER_TEMPLATE_RE' in os.environ:
placeholders.append(re.compile(os.environ["UDT_EDIT_WRAPPER_TEMPLATE_RE"])) placeholders.append(re.compile(
os.environ['UDT_EDIT_WRAPPER_TEMPLATE_RE']))
description = os.environ.get("UDT_EDIT_WRAPPER_FILE_DESCRIPTION", "file") description = os.environ.get('UDT_EDIT_WRAPPER_FILE_DESCRIPTION', 'file')
EditFile(args.filename, description, placeholders).edit() EditFile(body, description, placeholders).edit()
if __name__ == '__main__':
if __name__ == "__main__":
main() main()

View File

@ -1,4 +1,4 @@
#! /usr/bin/python3 #! /usr/bin/python
# #
# grep-merges - search for pending merges from Debian # grep-merges - search for pending merges from Debian
# #
@ -19,70 +19,56 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=invalid-name import optparse
# pylint: enable=invalid-name
import argparse
import json
import sys import sys
import json
from httplib2 import Http, HttpLib2Error from httplib2 import Http, HttpLib2Error
import ubuntutools.misc import ubuntutools.misc
from ubuntutools import getLogger
Logger = getLogger()
def main(): def main():
parser = argparse.ArgumentParser( parser = optparse.OptionParser(usage='%prog [options] [string]',
usage="%(prog)s [options] [string]", description='List pending merges from Debian matching string')
description="List pending merges from Debian matching string", args = parser.parse_args()[1]
)
parser.add_argument("string", nargs="?", help=argparse.SUPPRESS) if len(args) > 1:
args = parser.parse_args() parser.error('Too many arguments')
elif len(args) == 1:
match = args[0]
else:
match = None
ubuntutools.misc.require_utf8() ubuntutools.misc.require_utf8()
for component in ( for component in ('main', 'main-manual',
"main", 'restricted', 'restricted-manual',
"main-manual", 'universe', 'universe-manual',
"restricted", 'multiverse', 'multiverse-manual'):
"restricted-manual",
"universe", url = 'https://merges.ubuntu.com/%s.json' % component
"universe-manual",
"multiverse",
"multiverse-manual",
):
url = f"https://merges.ubuntu.com/{component}.json"
try: try:
headers, page = Http().request(url) headers, page = Http().request(url)
except HttpLib2Error as e: except HttpLib2Error, e:
Logger.exception(e) print >> sys.stderr, str(e)
sys.exit(1) sys.exit(1)
if headers.status != 200: if headers.status != 200:
Logger.error("%s: %s %s", url, headers.status, headers.reason) print >> sys.stderr, "%s: %s %s" % (url, headers.status,
headers.reason)
sys.exit(1) sys.exit(1)
for merge in json.loads(page): for merge in json.loads(page):
package = merge["source_package"] package = merge['source_package']
author, uploader = "", "" author, uploader = '', ''
if merge.get("user"): if merge.get('user'):
author = merge["user"] author = merge['user']
if merge.get("uploader"): if merge.get('uploader'):
uploader = f"({merge['uploader']})" uploader = '(%s)' % merge['uploader']
teams = merge.get("teams", []) pretty_uploader = u'{} {}'.format(author, uploader)
if (match is None or
match in package or match in author or match in uploader):
print '%s\t%s' % (package.encode("utf-8"), pretty_uploader.encode("utf-8"))
pretty_uploader = f"{author} {uploader}" if __name__ == '__main__':
if (
args.string is None
or args.string in package
or args.string in author
or args.string in uploader
or args.string in teams
):
Logger.info("%s\t%s", package, pretty_uploader)
if __name__ == "__main__":
main() main()

50
harvest Executable file
View File

@ -0,0 +1,50 @@
#!/usr/bin/python
# Copyright (C) 2011 Canonical Ltd., Daniel Holbach
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL-3 for more details.
#
# ##################################################################
#
#
# harvest - grabs information about development opportunities from
# harvest.ubuntu.com
#
#
# Daniel Holbach
# (c) 2011 Canonical
from optparse import OptionParser
import sys
from ubuntutools.harvest import Harvest
from ubuntutools.logger import Logger
def main():
usage = "usage: %prog source-package-name"
opt_parser = OptionParser(usage)
args = opt_parser.parse_args()[1]
if len(args) != 1:
opt_parser.print_help()
sys.exit(1)
pkg = args[0].strip()
print Harvest(pkg).report()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
Logger.error("Aborted.")
sys.exit(1)

136
hugdaylist Executable file
View File

@ -0,0 +1,136 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Canonical Ltd., Daniel Holbach
# Copyright (C) 2008 Jonathan Patrick Davies <jpds@ubuntu.com>
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL-3 for more details.
#
# ##################################################################
#
#
# hugdaylist - produces MoinMoin wiki formatted tables based on a Launchpad bug
# list.
#
# hugdaylist <url>
# - produces lists like https://wiki.ubuntu.com/UbuntuBugDay/20070912?action=raw
#
# hugdaylist -n <howmany> <url>
# - will only list <howmany> URLs.
import sys
from optparse import OptionParser
from launchpadlib.launchpad import Launchpad
from ubuntutools.lp.libsupport import translate_web_api
def check_args():
howmany = -1
url = ""
# Our usage options.
usage = "usage: %prog [-n <number>] launchpad-buglist-url"
opt_parser = OptionParser(usage)
# Options - namely just the number of bugs to output.
opt_parser.add_option("-n", "--number", type="int",
dest="number", help="Number of entries to output.")
# Parse arguments.
(options, args) = opt_parser.parse_args()
# Check if we want a number other than the default.
howmany = options.number
# Check that we have an URL.
if not args:
print >> sys.stderr, "An URL pointing to a Launchpad bug list is " \
"required."
opt_parser.print_help()
sys.exit(1)
else:
url = args[0]
return (howmany, url)
def filter_unsolved(task):
# TODO: don't use this filter here, only check status and assignee of
# the given task
# Filter out special types of bugs:
# - https://wiki.ubuntu.com/Bugs/HowToTriage#Special%20types%20of%20bugs
# this is expensive, parse name out of self_link instead?
subscriptions = set(s.person.name for s in task.bug.subscriptions)
if (task.status != "Fix Committed" and
(not task.assignee or task.assignee.name in ['motu','desktop-bugs']) and
'ubuntu-sponsors' not in subscriptions and
'ubuntu-archive' not in subscriptions):
return True
return False
def main():
(howmany, url) = check_args()
if len(url.split("?", 1)) == 2:
# search options not supported, because there is no mapping web ui
# options <-> API options
print >> sys.stderr, "Options in url are not supported, url: %s" % url
sys.exit(1)
launchpad = None
try:
launchpad = Launchpad.login_with("ubuntu-dev-tools", 'production')
except IOError, error:
print error
sys.exit(1)
api_url = translate_web_api(url, launchpad)
try:
product = launchpad.load(api_url)
except Exception, error:
response = getattr(error, "response", {})
if response.get("status", None) == "404":
print >> sys.stderr, ("The URL at '%s' does not appear to be a "
"valid url to a product") % url
sys.exit(1)
else:
raise
bug_list = [b for b in product.searchTasks() if filter_unsolved(b)]
if not bug_list:
print "Bug list of %s is empty." % url
sys.exit(0)
if howmany == -1:
howmany = len(bug_list)
print """
## ||<rowbgcolor="#CCFFCC"> This task is done || somebody || ||
## ||<rowbgcolor="#FFFFCC"> This task is assigned || somebody || <status> ||
## ||<rowbgcolor="#FFEBBB"> This task isn't || ... || ||
## ||<rowbgcolor="#FFCCCC"> This task is blocked on something || somebody || <explanation> ||
|| Bug || Subject || Triager ||"""
for i in list(bug_list)[:howmany]:
bug = i.bug
print '||<rowbgcolor="#FFEBBB"> [%s %s] || %s || ||' % \
(bug.web_link, bug.id, bug.title)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print >> sys.stderr, "Aborted."
sys.exit(1)

View File

@ -1,4 +1,5 @@
#!/usr/bin/python3 #!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright © 2009 James Westby <james.westby@ubuntu.com>, # Copyright © 2009 James Westby <james.westby@ubuntu.com>,
# 2010, 2011 Stefano Rivera <stefanor@ubuntu.com> # 2010, 2011 Stefano Rivera <stefanor@ubuntu.com>
@ -21,66 +22,66 @@
# #
# ################################################################## # ##################################################################
# pylint: disable=invalid-name from optparse import OptionParser, SUPPRESS_HELP
# pylint: enable=invalid-name
import argparse
import logging
import re import re
import sys import sys
import webbrowser import webbrowser
from collections.abc import Iterable
from email.message import EmailMessage
import debianbts
from launchpadlib.launchpad import Launchpad from launchpadlib.launchpad import Launchpad
from ubuntutools import getLogger
from ubuntutools.config import UDTConfig from ubuntutools.config import UDTConfig
from ubuntutools.logger import Logger
Logger = getLogger() try:
ATTACHMENT_MAX_SIZE = 2000 import SOAPpy
except ImportError:
Logger.error("Please install 'python-soappy' in order to use this utility.")
sys.exit(1)
def main():
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
"-b",
"--browserless",
action="store_true",
help="Don't open the bug in the browser at the end",
)
parser.add_argument(
"-l",
"--lpinstance",
metavar="INSTANCE",
help="LP instance to connect to (default: production)",
)
parser.add_argument(
"-v", "--verbose", action="store_true", help="Print info about the bug being imported"
)
parser.add_argument(
"-n",
"--dry-run",
action="store_true",
help="Don't actually open a bug (also sets verbose)",
)
parser.add_argument(
"-p", "--package", help="Launchpad package to file bug against (default: Same as Debian)"
)
parser.add_argument(
"--no-conf", action="store_true", help="Don't read config files or environment variables."
)
parser.add_argument("bugs", nargs="+", help="Bug number(s) or URL(s)")
return parser.parse_args()
def get_bug_numbers(bug_list: Iterable[str]) -> list[int]:
bug_re = re.compile(r"bug=(\d+)") bug_re = re.compile(r"bug=(\d+)")
url = 'http://bugs.debian.org/cgi-bin/soap.cgi'
namespace = 'Debbugs/SOAP'
debbugs = SOAPpy.SOAPProxy(url, namespace)
# debug
#debbugs.config.dumpSOAPOut = 1
#debbugs.config.dumpSOAPIn = 1
parser = OptionParser(usage="%prog [option] bug ...")
parser.add_option("-b", "--browserless",
help="Don't open the bug in the browser at the end",
dest="browserless", action="store_true")
parser.add_option("-l", "--lpinstance", metavar="INSTANCE",
help="Launchpad instance to connect to "
"(default: production)",
dest="lpinstance", default=None)
parser.add_option("-n", "--dry-run",
help=SUPPRESS_HELP,
dest="lpinstance", action="store_const", const="staging")
parser.add_option("-p", "--package", metavar="PACKAGE",
help="Launchpad package to file bug against "
"(default: Same as Debian)",
dest="package", default=None)
parser.add_option("--no-conf", dest="no_conf", default=False,
help="Don't read config files or environment variables.",
action="store_true")
(options, args) = parser.parse_args()
config = UDTConfig(options.no_conf)
if options.lpinstance is None:
options.lpinstance = config.get_value("LPINSTANCE")
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
debian = launchpad.distributions['debian']
ubuntu = launchpad.distributions['ubuntu']
lp_debbugs = launchpad.bug_trackers.getByName(name='debbugs')
bug_nums = [] bug_nums = []
for bug_num in bug_list: for bug_num in args:
if bug_num.startswith("http"): if bug_num.startswith("http"):
# bug URL # bug URL
match = bug_re.search(bug_num) match = bug_re.search(bug_num)
@ -89,168 +90,48 @@ def get_bug_numbers(bug_list: Iterable[str]) -> list[int]:
sys.exit(1) sys.exit(1)
bug_num = match.groups()[0] bug_num = match.groups()[0]
bug_num = bug_num.lstrip("#") bug_num = bug_num.lstrip("#")
bug_nums.append(int(bug_num)) bug_num = int(bug_num)
bug_nums.append(bug_num)
return bug_nums bugs = debbugs.get_status(*bug_nums)
if len(bug_nums) > 1:
def walk_multipart_message(message: EmailMessage) -> tuple[str, list[tuple[int, EmailMessage]]]: bugs = bugs[0]
summary = ""
attachments = []
i = 1
for part in message.walk():
content_type = part.get_content_type()
if content_type.startswith("multipart/"):
# we're already iterating on multipart items
# let's just skip the multipart extra metadata
continue
if content_type == "application/pgp-signature":
# we're not interested in importing pgp signatures
continue
if part.is_attachment():
attachments.append((i, part))
elif content_type.startswith("image/"):
# images here are not attachment, they are inline, but Launchpad can't handle that,
# so let's add them as attachments
summary += f"Message part #{i}\n"
summary += f"[inline image '{part.get_filename()}']\n\n"
attachments.append((i, part))
elif content_type.startswith("text/html"):
summary += f"Message part #{i}\n"
summary += "[inline html]\n\n"
attachments.append((i, part))
elif content_type == "text/plain":
summary += f"Message part #{i}\n"
summary += part.get_content() + "\n"
else:
raise RuntimeError(
f"""Unknown message part
Your Debian bug is too weird to be imported in Launchpad, sorry.
You can fix that by patching this script in ubuntu-dev-tools.
Faulty message part:
{part}"""
)
i += 1
return summary, attachments
def process_bugs(
bugs: Iterable[debianbts.Bugreport],
launchpad: Launchpad,
package: str,
dry_run: bool = True,
browserless: bool = False,
) -> bool:
debian = launchpad.distributions["debian"]
ubuntu = launchpad.distributions["ubuntu"]
lp_debbugs = launchpad.bug_trackers.getByName(name="debbugs")
err = False
for bug in bugs:
ubupackage = bug.source
if package:
ubupackage = package
bug_num = bug.bug_num
subject = bug.subject
log = debianbts.get_bug_log(bug_num)
message = log[0]["message"]
assert isinstance(message, EmailMessage)
attachments: list[tuple[int, EmailMessage]] = []
if message.is_multipart():
summary, attachments = walk_multipart_message(message)
else:
summary = str(message.get_payload())
target = ubuntu.getSourcePackage(name=ubupackage)
if target is None:
Logger.error(
"Source package '%s' is not in Ubuntu. Please specify "
"the destination source package with --package",
ubupackage,
)
err = True
continue
description = f"Imported from Debian bug http://bugs.debian.org/{bug_num}:\n\n{summary}"
# LP limits descriptions to 50K chars
description = (description[:49994] + " [...]") if len(description) > 50000 else description
Logger.debug("Target: %s", target)
Logger.debug("Subject: %s", subject)
Logger.debug("Description: ")
Logger.debug(description)
for i, attachment in attachments:
Logger.debug("Attachment #%s (%s)", i, attachment.get_filename() or "inline")
Logger.debug("Content:")
if attachment.get_content_type() == "text/plain":
content = attachment.get_content()
if len(content) > ATTACHMENT_MAX_SIZE:
content = (
content[:ATTACHMENT_MAX_SIZE]
+ f" [attachment cropped after {ATTACHMENT_MAX_SIZE} characters...]"
)
Logger.debug(content)
else:
Logger.debug("[data]")
if dry_run:
Logger.info("Dry-Run: not creating Ubuntu bug.")
continue
u_bug = launchpad.bugs.createBug(target=target, title=subject, description=description)
for i, attachment in attachments:
name = f"#{i}-{attachment.get_filename() or "inline"}"
content = attachment.get_content()
if isinstance(content, str):
# Launchpad only wants bytes
content = content.encode()
u_bug.addAttachment(
filename=name,
data=content,
comment=f"Imported from Debian bug http://bugs.debian.org/{bug_num}",
)
d_sp = debian.getSourcePackage(name=package)
if d_sp is None and package:
d_sp = debian.getSourcePackage(name=package)
d_task = u_bug.addTask(target=d_sp)
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
d_task.bug_watch = d_watch
d_task.lp_save()
Logger.info("Opened %s", u_bug.web_link)
if not browserless:
webbrowser.open(u_bug.web_link)
return err
def main() -> None:
options = parse_args()
config = UDTConfig(options.no_conf)
if options.lpinstance is None:
options.lpinstance = config.get_value("LPINSTANCE")
if options.dry_run:
launchpad = Launchpad.login_anonymously("ubuntu-dev-tools")
options.verbose = True
else:
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
if options.verbose:
Logger.setLevel(logging.DEBUG)
bugs = debianbts.get_status(get_bug_numbers(options.bugs))
if not bugs: if not bugs:
Logger.error("Cannot find any of the listed bugs") Logger.error("Cannot find any of the listed bugs")
sys.exit(1) sys.exit(1)
if process_bugs(bugs, launchpad, options.package, options.dry_run, options.browserless): for bug in bugs:
sys.exit(1) bug = bug.value
ubupackage = package = bug.source
if options.package:
ubupackage = options.package
bug_num = bug.bug_num
subject = bug.subject
log = debbugs.get_bug_log(bug_num)
summary = log[0][0]
target = ubuntu.getSourcePackage(name=ubupackage)
if target is None:
Logger.error("Source package '%s' is not in Ubuntu. Please specify "
"the destination source package with --package",
ubupackage)
sys.exit(1)
u_bug = launchpad.bugs.createBug(target=target, title=subject,
description='Imported from Debian bug '
'http://bugs.debian.org/%d:\n\n%s'
% (bug_num, summary))
d_sp = debian.getSourcePackage(name=package)
if d_sp is None and options.package:
d_sp = debian.getSourcePackage(name=options.package)
d_task = u_bug.addTask(target=d_sp)
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
d_task.bug_watch = d_watch
d_task.lp_save()
Logger.normal("Opened %s", u_bug.web_link)
if not options.browserless:
webbrowser.open(u_bug.web_link)
if __name__ == "__main__": if __name__ == '__main__':
main() main()

View File

@ -1,105 +0,0 @@
#!/usr/bin/python3
"""Add 'bitesize' tag to bugs and add a comment."""
# Copyright (c) 2011 Canonical Ltd.
#
# bitesize is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any
# later version.
#
# bitesize is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with bitesize; see the file COPYING. If not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
# Authors:
# Daniel Holbach <daniel.holbach@canonical.com>
import argparse
import sys
from launchpadlib.errors import HTTPError
from launchpadlib.launchpad import Launchpad
from ubuntutools import getLogger
from ubuntutools.config import UDTConfig
Logger = getLogger()
def error_out(msg, *args):
Logger.error(msg, *args)
sys.exit(1)
def save_entry(entry):
try:
entry.lp_save()
except HTTPError as error:
error_out("%s", error.content)
def tag_bug(bug):
bug.tags = bug.tags + ["bitesize"] # LP: #254901 workaround
save_entry(bug)
def main():
parser = argparse.ArgumentParser(usage="%(prog)s [options] <bug number>")
parser.add_argument(
"-l",
"--lpinstance",
metavar="INSTANCE",
help="Launchpad instance to connect to (default: production)",
dest="lpinstance",
default=None,
)
parser.add_argument(
"--no-conf",
help="Don't read config files or environment variables.",
dest="no_conf",
default=False,
action="store_true",
)
parser.add_argument("bug_number", help=argparse.SUPPRESS)
args = parser.parse_args()
config = UDTConfig(args.no_conf)
if args.lpinstance is None:
args.lpinstance = config.get_value("LPINSTANCE")
launchpad = Launchpad.login_with("ubuntu-dev-tools", args.lpinstance)
if launchpad is None:
error_out("Couldn't authenticate to Launchpad.")
# check that the new main bug isn't a duplicate
try:
bug = launchpad.bugs[args.bug_number]
except HTTPError as error:
if error.response.status == 401:
error_out(
"Don't have enough permissions to access bug %s. %s",
args.bug_number,
error.content,
)
else:
raise
if "bitesize" in bug.tags:
error_out("Bug is already marked as 'bitesize'.")
bug.newMessage(
content="I'm marking this bug as 'bitesize' as it looks "
"like an issue that is easy to fix and suitable "
"for newcomers in Ubuntu development. If you need "
"any help with fixing it, talk to me about it."
)
bug.subscribe(person=launchpad.me)
tag_bug(launchpad.bugs[bug.id]) # fresh bug object, LP: #336866 workaround
if __name__ == "__main__":
main()

View File

@ -1,4 +1,4 @@
#!/usr/bin/python3 #!/usr/bin/python
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Copyright © 2008 Canonical Ltd. # Copyright © 2008 Canonical Ltd.
# Author: Scott James Remnant <scott at ubuntu.com>. # Author: Scott James Remnant <scott at ubuntu.com>.
@ -18,67 +18,248 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=invalid-name import re
# pylint: enable=invalid-name
import sys import sys
from debian.changelog import Changelog
from ubuntutools import getLogger
Logger = getLogger()
def usage(exit_code=1): def usage(exit_code=1):
Logger.info( print '''Usage: merge-changelog <left changelog> <right changelog>
"""Usage: merge-changelog <left changelog> <right changelog>
merge-changelog takes two changelogs that once shared a common source, merge-changelog takes two changelogs that once shared a common source,
merges them back together, and prints the merged result to stdout. This merges them back together, and prints the merged result to stdout. This
is useful if you need to manually merge a ubuntu package with a new is useful if you need to manually merge a ubuntu package with a new
Debian release of the package. Debian release of the package.
""" '''
)
sys.exit(exit_code) sys.exit(exit_code)
######################################################################## ########################################################################
# Changelog Management # Changelog Management
######################################################################## ########################################################################
# Regular expression for top of debian/changelog
CL_RE = re.compile(r'^(\w[-+0-9a-z.]*) \(([^\(\) \t]+)\)((\s+[-0-9a-z]+)+)\;',
re.IGNORECASE)
def merge_changelog(left_changelog, right_changelog): def merge_changelog(left_changelog, right_changelog):
"""Merge a changelog file.""" """Merge a changelog file."""
with open(left_changelog, encoding="utf-8") as f: left_cl = read_changelog(left_changelog)
left_cl = Changelog(f) right_cl = read_changelog(right_changelog)
with open(right_changelog, encoding="utf-8") as f:
right_cl = Changelog(f)
left_versions = set(left_cl.versions) for right_ver, right_text in right_cl:
right_versions = set(right_cl.versions) while len(left_cl) and left_cl[0][0] > right_ver:
left_blocks = iter(left_cl) (left_ver, left_text) = left_cl.pop(0)
right_blocks = iter(right_cl) print left_text
clist = sorted(left_versions | right_versions, reverse=True) while len(left_cl) and left_cl[0][0] == right_ver:
remaining = len(clist) (left_ver, left_text) = left_cl.pop(0)
for version in clist:
remaining -= 1
if version in left_versions:
block = next(left_blocks)
if version in right_versions:
next(right_blocks)
else:
block = next(right_blocks)
assert block.version == version print right_text
Logger.info("%s%s", str(block).strip(), "\n" if remaining else "") for _, left_text in left_cl:
print left_text
return False
def read_changelog(filename):
"""Return a parsed changelog file."""
entries = []
changelog_file = open(filename)
try:
(ver, text) = (None, "")
for line in changelog_file:
match = CL_RE.search(line)
if match:
try:
ver = Version(match.group(2))
except ValueError:
ver = None
text += line
elif line.startswith(" -- "):
if ver is None:
ver = Version("0")
text += line
entries.append((ver, text))
(ver, text) = (None, "")
elif len(line.strip()) or ver is not None:
text += line
finally:
changelog_file.close()
if len(text):
entries.append((ver, text))
return entries
########################################################################
# Version parsing code
########################################################################
# Regular expressions make validating things easy
VALID_EPOCH = re.compile(r'^[0-9]+$')
VALID_UPSTREAM = re.compile(r'^[A-Za-z0-9+:.~-]*$')
VALID_REVISION = re.compile(r'^[A-Za-z0-9+.~]+$')
# Character comparison table for upstream and revision components
CMP_TABLE = "~ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz+-.:"
class Version(object):
"""Debian version number.
This class is designed to be reasonably transparent and allow you
to write code like:
| s.version >= '1.100-1'
The comparison will be done according to Debian rules, so '1.2' will
compare lower.
Properties:
epoch Epoch
upstream Upstream version
revision Debian/local revision
"""
def __init__(self, ver):
"""Parse a string or number into the three components."""
self.epoch = 0
self.upstream = None
self.revision = None
ver = str(ver)
if not len(ver):
raise ValueError
# Epoch is component before first colon
idx = ver.find(":")
if idx != -1:
self.epoch = ver[:idx]
if not len(self.epoch):
raise ValueError
if not VALID_EPOCH.search(self.epoch):
raise ValueError
ver = ver[idx+1:]
# Revision is component after last hyphen
idx = ver.rfind("-")
if idx != -1:
self.revision = ver[idx+1:]
if not len(self.revision):
raise ValueError
if not VALID_REVISION.search(self.revision):
raise ValueError
ver = ver[:idx]
# Remaining component is upstream
self.upstream = ver
if not len(self.upstream):
raise ValueError
if not VALID_UPSTREAM.search(self.upstream):
raise ValueError
self.epoch = int(self.epoch)
def get_without_epoch(self):
"""Return the version without the epoch."""
string = self.upstream
if self.revision is not None:
string += "-%s" % (self.revision,)
return string
without_epoch = property(get_without_epoch)
def __str__(self):
"""Return the class as a string for printing."""
string = ""
if self.epoch > 0:
string += "%d:" % (self.epoch,)
string += self.upstream
if self.revision is not None:
string += "-%s" % (self.revision,)
return string
def __repr__(self):
"""Return a debugging representation of the object."""
return "<%s epoch: %d, upstream: %r, revision: %r>" \
% (self.__class__.__name__, self.epoch,
self.upstream, self.revision)
def __cmp__(self, other):
"""Compare two Version classes."""
other = Version(other)
result = cmp(self.epoch, other.epoch)
if result != 0:
return result
result = deb_cmp(self.upstream, other.upstream)
if result != 0:
return result
result = deb_cmp(self.revision or "", other.revision or "")
if result != 0:
return result
return 0
def strcut(string, idx, accept):
"""Cut characters from string that are entirely in accept."""
ret = ""
while idx < len(string) and string[idx] in accept:
ret += string[idx]
idx += 1
return (ret, idx)
def deb_order(string, idx):
"""Return the comparison order of two characters."""
if idx >= len(string):
return 0
elif string[idx] == "~":
return -1
else:
return CMP_TABLE.index(string[idx])
def deb_cmp_str(x, y):
"""Compare two strings in a deb version."""
idx = 0
while (idx < len(x)) or (idx < len(y)):
result = deb_order(x, idx) - deb_order(y, idx)
if result < 0:
return -1
elif result > 0:
return 1
idx += 1
return 0
def deb_cmp(x, y):
"""Implement the string comparison outlined by Debian policy."""
x_idx = y_idx = 0
while x_idx < len(x) or y_idx < len(y):
# Compare strings
(x_str, x_idx) = strcut(x, x_idx, CMP_TABLE)
(y_str, y_idx) = strcut(y, y_idx, CMP_TABLE)
result = deb_cmp_str(x_str, y_str)
if result != 0:
return result
# Compare numbers
(x_str, x_idx) = strcut(x, x_idx, "0123456789")
(y_str, y_idx) = strcut(y, y_idx, "0123456789")
result = cmp(int(x_str or "0"), int(y_str or "0"))
if result != 0:
return result
return 0
def main(): def main():
if len(sys.argv) > 1 and sys.argv[1] in ("-h", "--help"): if len(sys.argv) > 1 and sys.argv[1] in ('-h', '--help'):
usage(0) usage(0)
if len(sys.argv) != 3: if len(sys.argv) != 3:
usage(1) usage(1)
@ -89,6 +270,5 @@ def main():
merge_changelog(left_changelog, right_changelog) merge_changelog(left_changelog, right_changelog)
sys.exit(0) sys.exit(0)
if __name__ == '__main__':
if __name__ == "__main__":
main() main()

346
mk-sbuild
View File

@ -26,7 +26,7 @@
# ################################################################## # ##################################################################
# #
# This script creates chroots designed to be used in a snapshot mode # This script creates chroots designed to be used in a snapshot mode
# (with LVM, btrfs, zfs, overlay, overlay or aufs) with schroot and sbuild. # (with LVM, btrfs, overlayfs, or aufs) with schroot and sbuild.
# Much love to "man sbuild-setup", https://wiki.ubuntu.com/PbuilderHowto, # Much love to "man sbuild-setup", https://wiki.ubuntu.com/PbuilderHowto,
# and https://help.ubuntu.com/community/SbuildLVMHowto. # and https://help.ubuntu.com/community/SbuildLVMHowto.
# #
@ -40,18 +40,15 @@ SOURCE_CHROOTS_DIR="/var/lib/schroot/chroots"
SOURCE_CHROOTS_TGZ="/var/lib/schroot/tarballs" SOURCE_CHROOTS_TGZ="/var/lib/schroot/tarballs"
CHROOT_SNAPSHOT_DIR="/var/lib/schroot/snapshots" CHROOT_SNAPSHOT_DIR="/var/lib/schroot/snapshots"
SCHROOT_PROFILE="sbuild" SCHROOT_PROFILE="sbuild"
CCACHE_DIR="/var/cache/ccache-sbuild"
CCACHE_SIZE="4G"
function usage() function usage()
{ {
echo "Usage: $0 [OPTIONS] Release" echo "Usage: $0 [OPTIONS] Release" >&2
echo "Options:" echo "Options:"
echo " --arch=ARCH What architecture to select" echo " --arch=ARCH What architecture to select"
echo " --name=NAME Base name for the schroot (arch is appended)" echo " --name=NAME Base name for the schroot (arch is appended)"
echo " --personality=PERSONALITY What personality to use (defaults to match --arch)" echo " --personality=PERSONALITY What personality to use (defaults to match --arch)"
echo " --vg=VG use LVM snapshots, with group VG" echo " --vg=VG use LVM snapshots, with group VG"
echo " --zfs-dataset=DATASET use ZFS snapshots, with parent dataset DATASET"
echo " --debug Turn on script debugging" echo " --debug Turn on script debugging"
echo " --skip-updates Do not include -updates pocket in sources.list" echo " --skip-updates Do not include -updates pocket in sources.list"
echo " --skip-security Do not include -security pocket in sources.list" echo " --skip-security Do not include -security pocket in sources.list"
@ -65,21 +62,14 @@ function usage()
echo " --debootstrap-keyring=KEYRING" echo " --debootstrap-keyring=KEYRING"
echo " Use KEYRING to check signatures of retrieved Release files" echo " Use KEYRING to check signatures of retrieved Release files"
echo " --debootstrap-no-check-gpg Disables checking gpg signatures of retrieved Release files" echo " --debootstrap-no-check-gpg Disables checking gpg signatures of retrieved Release files"
echo " --skip-eatmydata Don't install and use eatmydata" echo " --eatmydata Install and use eatmydata"
echo " --eatmydata Install and use eatmydata (default)"
echo " --ccache Install configure and use ccache as default"
echo " --ccache-dir=PATH Sets the CCACHE_DIR to PATH"
echo " (can be shared between all schroots, defaults to ${CCACHE_DIR})"
echo " --ccache-size=SIZE Sets the ccache max-size to SIZE"
echo " (shared by each CCACHE_DIR, defaults to ${CCACHE_SIZE})"
echo " --distro=DISTRO Install specific distro:" echo " --distro=DISTRO Install specific distro:"
echo " 'ubuntu' or 'debian' " echo " 'ubuntu' or 'debian' "
echo " (defaults to determining from release name)" echo " (defaults to determining from release name)"
echo " --target=ARCH Target architecture for cross-building" echo " --target=ARCH Target architecture for cross-building"
echo " --type=SCHROOT_TYPE Define the schroot type:" echo " --type=SCHROOT_TYPE Define the schroot type:"
echo " 'directory' (default), 'file', or 'btrfs-snapshot'." echo " 'directory'(default), 'file', or 'btrfs-snapshot'"
echo " 'lvm-snapshot' is selected via --vg" echo " 'lvm-snapshot' is selected via --vg"
echo " 'zfs-snapshot' is selected via --zfs-dataset"
echo "" echo ""
echo "Configuration (via ~/.mk-sbuild.rc)" echo "Configuration (via ~/.mk-sbuild.rc)"
echo " LV_SIZE Size of source LVs (default ${LV_SIZE})" echo " LV_SIZE Size of source LVs (default ${LV_SIZE})"
@ -99,12 +89,7 @@ function usage()
echo " DEBOOTSTRAP_PROXY Apt proxy (same as --debootstrap-proxy)" echo " DEBOOTSTRAP_PROXY Apt proxy (same as --debootstrap-proxy)"
echo " DEBOOTSTRAP_KEYRING GPG keyring (same as --debootstrap-keyring)" echo " DEBOOTSTRAP_KEYRING GPG keyring (same as --debootstrap-keyring)"
echo " DEBOOTSTRAP_NO_CHECK_GPG Disable GPG verification (same as --debootstrap-no-check-gpg)" echo " DEBOOTSTRAP_NO_CHECK_GPG Disable GPG verification (same as --debootstrap-no-check-gpg)"
echo " EATMYDATA Enable or disable eatmydata usage, see --eatmydata and --skip-eatmydata" echo " EATMYDATA Enable --eatmydata"
echo " CCACHE Enable --ccache"
echo " CCACHE_DIR Path for ccache (can be shared between all schroots, "
echo " same as --ccache-dir, default ${CCACHE_DIR})"
echo " CCACHE_SIZE Sets the ccache max-size (shared by each CCACHE_DIR, "
echo " same as --ccache-size, default ${CCACHE_SIZE})"
echo " TEMPLATE_SOURCES A template for sources.list" echo " TEMPLATE_SOURCES A template for sources.list"
echo " TEMPLATE_SCHROOTCONF A template for schroot.conf stanza" echo " TEMPLATE_SCHROOTCONF A template for schroot.conf stanza"
if [ -z "$1" ]; then if [ -z "$1" ]; then
@ -117,62 +102,28 @@ function usage()
if [ -z "$1" ]; then if [ -z "$1" ]; then
usage usage
fi fi
supported_options=( OPTS=`getopt -o 'h' --long "help,debug,skip-updates,skip-security,skip-proposed,eatmydata,arch:,name:,source-template:,debootstrap-mirror:,debootstrap-include:,debootstrap-exclude:,debootstrap-opts:,debootstrap-proxy:,debootstrap-no-check-gpg,debootstrap-keyring:,personality:,distro:,vg:,type:,target:" -- "$@"`
help
debug
skip-updates
skip-security
skip-proposed
skip-eatmydata
ccache
arch:
name:
source-template:
debootstrap-mirror:
debootstrap-include:
debootstrap-exclude:
debootstrap-opts:
debootstrap-proxy:
debootstrap-no-check-gpg
debootstrap-keyring:
personality:
distro:
vg:
zfs-dataset:
type:
target:
ccache-dir:
ccache-size:
)
OPTS=$(getopt -o 'h' --long "$(IFS=, && echo "${supported_options[*]}")" -- "$@")
eval set -- "$OPTS" eval set -- "$OPTS"
VG="" VG=""
DISTRO="" DISTRO=""
COMMAND_PREFIX=""
name="" name=""
proxy="_unset_" proxy="_unset_"
DEBOOTSTRAP_NO_CHECK_GPG=0 DEBOOTSTRAP_NO_CHECK_GPG=0
EATMYDATA=1 EATMYDATA=0
CCACHE=0
USE_PKGBINARYMANGLER=0
while :; do while :; do
case "$1" in case "$1" in
--debug) --debug)
DEBUG=1
set -x set -x
shift shift
;; ;;
--arch) --arch)
CHROOT_ARCH="$2" CHROOT_ARCH="$2"
case $2 in if [ "$2" = "i386" ] || [ "$2" = "lpia" ] && [ -z "$personality" ];
armhf|i386) then
if [ -z "$personality" ]; then personality="linux32"
personality="linux32" fi
fi
;;
esac
shift 2 shift 2
;; ;;
--personality) --personality)
@ -232,12 +183,8 @@ while :; do
DEBOOTSTRAP_NO_CHECK_GPG=1 DEBOOTSTRAP_NO_CHECK_GPG=1
shift shift
;; ;;
--skip-eatmydata) --eatmydata)
EATMYDATA=0 EATMYDATA=1
shift
;;
--ccache)
CCACHE=1
shift shift
;; ;;
--distro) --distro)
@ -248,10 +195,6 @@ while :; do
VG="$2" VG="$2"
shift 2 shift 2
;; ;;
--zfs-dataset)
ZFS_PARENT_DATASET="$2"
shift 2
;;
--type) --type)
SCHROOT_TYPE="$2" SCHROOT_TYPE="$2"
shift 2 shift 2
@ -260,14 +203,6 @@ while :; do
TARGET_ARCH="$2" TARGET_ARCH="$2"
shift 2 shift 2
;; ;;
--ccache-dir)
CCACHE_DIR="$2"
shift 2
;;
--ccache-size)
CCACHE_SIZE="$2"
shift 2
;;
--) --)
shift shift
break break
@ -304,27 +239,11 @@ if [ ! -w /var/lib/sbuild ]; then
# Prepare a usable default .sbuildrc # Prepare a usable default .sbuildrc
if [ ! -e ~/.sbuildrc ]; then if [ ! -e ~/.sbuildrc ]; then
cat > ~/.sbuildrc <<EOM cat > ~/.sbuildrc <<EOM
# *** THIS COMMAND IS DEPRECATED ***
#
# In sbuild 0.87.0 and later, the unshare backend is available. This is
# expected to become the default in a future release.
#
# This is the new preferred way of building Debian packages, making the manual
# creation of schroots no longer necessary. To retain the default behavior,
# you may remove this comment block and continue.
#
# To test the unshare backend while retaining the default settings, run sbuild
# with --chroot-mode=unshare like this:
# $ sbuild --chroot-mode=unshare --dist=unstable hello
#
# To switch to the unshare backend by default (recommended), uncomment the
# following lines and delete the rest of the file (with the exception of the
# last two lines):
#\$chroot_mode = 'unshare';
#\$unshare_mmdebstrap_keep_tarball = 1;
# *** VERIFY AND UPDATE \$mailto and \$maintainer_name BELOW *** # *** VERIFY AND UPDATE \$mailto and \$maintainer_name BELOW ***
# Mail address where logs are sent to (mandatory, no default!)
\$mailto = '$USER';
# Name to use as override in .changes files for the Maintainer: field # Name to use as override in .changes files for the Maintainer: field
#\$maintainer_name='$USER <$USER@localhost>'; #\$maintainer_name='$USER <$USER@localhost>';
@ -411,48 +330,6 @@ elif [ -z "$DISTRO" ]; then
exit 1 exit 1
fi fi
# By default DEBOOTSTRAP_SCRIPT must match RELEASE
DEBOOTSTRAP_SCRIPT="$RELEASE"
dist_ge() {
local releases="$($3-distro-info --all)"
local left=999
local right=0
local seq=1
for i in $releases; do
if [ $1 = $i ]; then
local left=$seq
break
fi
seq=$((seq+1))
done
seq=1
for i in $releases; do
if [ $2 = $i ]; then
local right=$seq
break
fi
seq=$((seq+1))
done
[ $left -ge $right ] && return 0 || return 1
}
ubuntu_dist_ge () {
dist_ge $1 $2 ubuntu
}
debian_dist_ge () {
dist_ge $1 $2 debian
}
if [ "$DISTRO" = "ubuntu" ]; then
# On Ubuntu, set DEBOOTSTRAP_SCRIPT to gutsy to allow building new RELEASES without new debootstrap
DEBOOTSTRAP_SCRIPT=gutsy
fi
# By default, name the schroot the same as the release # By default, name the schroot the same as the release
if [ -z "$name" ]; then if [ -z "$name" ]; then
name="$RELEASE" name="$RELEASE"
@ -497,58 +374,10 @@ if [ $EATMYDATA -eq 1 ]; then
esac esac
fi fi
if [ $CCACHE -eq 1 ]; then
if [ -z "$CCACHE_DIR" ] || [[ "$(dirname "$CCACHE_DIR")" == '/' ]]; then
echo "Invalid ccache dir: ${CCACHE_DIR}" >&2
exit 1
fi
# We can safely use a global cache path, in such case changing size applies
# to all the schroots
setup_script="$CCACHE_DIR"/mk-sbuild-setup
if [ -d "$CCACHE_DIR" ]; then
echo "Reusing $CCACHE_DIR as CCACHE_DIR, will be configured to use max-size=${CCACHE_SIZE}"
rm -f "$setup_script"
else
echo "Configuring $CCACHE_DIR as CCACHE_DIR with max-size=${CCACHE_SIZE}"
sudo install --group=sbuild --mode=2775 -d "$CCACHE_DIR"
fi
if [ ! -x "$setup_script" ]; then
cat <<END | sudo tee "$setup_script" 1>/dev/null
#!/bin/sh
export CCACHE_DIR="$CCACHE_DIR"
export CCACHE_MAXSIZE="${CCACHE_SIZE}"
export CCACHE_UMASK=002
export CCACHE_COMPRESS=1
unset CCACHE_HARDLINK
export CCACHE_NOHARDLINK=1
export PATH="/usr/lib/ccache:\$PATH"
exec "\$@"
END
sudo chmod a+rx "$setup_script"
fi
if ! sudo grep -qs "$CCACHE_DIR" /etc/schroot/sbuild/fstab; then
# This acts on host configuration, but there is no other way to handle
# this, however it won't affect anything
cat <<END | sudo tee -a /etc/schroot/sbuild/fstab 1>/dev/null
${CCACHE_DIR} ${CCACHE_DIR} none rw,bind 0 0
END
fi
DEBOOTSTRAP_INCLUDE="${DEBOOTSTRAP_INCLUDE:+$DEBOOTSTRAP_INCLUDE,}ccache"
BUILD_PKGS="$BUILD_PKGS ccache"
COMMAND_PREFIX="${COMMAND_PREFIX:+$COMMAND_PREFIX,}$setup_script"
fi
if [ -z "$SCHROOT_TYPE" ]; then if [ -z "$SCHROOT_TYPE" ]; then
# To build the LV, we need to know which volume group to use # To build the LV, we need to know which volume group to use
if [ -n "$VG" ]; then if [ -n "$VG" ]; then
SCHROOT_TYPE="lvm-snapshot" SCHROOT_TYPE="lvm-snapshot"
# To build the ZFS dataset, we need to know which parent to use
elif [ -n "$ZFS_PARENT_DATASET" ]; then
SCHROOT_TYPE="zfs-snapshot"
else else
SCHROOT_TYPE="directory" SCHROOT_TYPE="directory"
fi fi
@ -595,7 +424,7 @@ case "$SCHROOT_TYPE" in
# Set up some variables for use in the paths and names # Set up some variables for use in the paths and names
CHROOT_PATH="${SOURCE_CHROOTS_TGZ}/${CHROOT_NAME}.tgz" CHROOT_PATH="${SOURCE_CHROOTS_TGZ}/${CHROOT_NAME}.tgz"
;; ;;
"btrfs-snapshot" | "zfs-snapshot") "btrfs-snapshot")
if [ ! -d "${SOURCE_CHROOTS_DIR}" ]; then if [ ! -d "${SOURCE_CHROOTS_DIR}" ]; then
sudo mkdir -p "${SOURCE_CHROOTS_DIR}" sudo mkdir -p "${SOURCE_CHROOTS_DIR}"
fi fi
@ -612,8 +441,8 @@ esac
# Is the specified release known to debootstrap? # Is the specified release known to debootstrap?
variant_opt="--variant=buildd" variant_opt="--variant=buildd"
if [ ! -r "/usr/share/debootstrap/scripts/$DEBOOTSTRAP_SCRIPT" ]; then if [ ! -r "/usr/share/debootstrap/scripts/$RELEASE" ]; then
echo "Specified release ($DEBOOTSTRAP_SCRIPT) not known to debootstrap" >&2 echo "Specified release ($RELEASE) not known to debootstrap" >&2
exit 1 exit 1
fi fi
@ -665,27 +494,19 @@ ubuntu)
esac esac
fi fi
# Add edgy+ buildd tools # Add edgy+ buildd tools
if ubuntu_dist_ge "$RELEASE" "edgy"; then if [ "$RELEASE" != "breezy" ] && [ "$RELEASE" != "dapper" ]; then
# Add pkgbinarymangler (edgy and later)
BUILD_PKGS="$BUILD_PKGS pkgbinarymangler"
USE_PKGBINARYMANGLER=1
# Disable recommends for a smaller chroot (gutsy and later only) # Disable recommends for a smaller chroot (gutsy and later only)
if ubuntu_dist_ge "$RELEASE" "gutsy"; then BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
BUILD_PKGS="--no-install-recommends $BUILD_PKGS" # Add buildd tools
SKIP_RECOMMENDS=1 BUILD_PKGS="$BUILD_PKGS pkg-create-dbgsym pkgbinarymangler"
fi
# Add pkg-create-dbgsym (edgy through zesty)
if ! ubuntu_dist_ge "$RELEASE" "artful"; then
BUILD_PKGS="$BUILD_PKGS pkg-create-dbgsym"
fi
fi fi
;; ;;
debian) debian)
if [ -z "$DEBOOTSTRAP_MIRROR" ]; then if [ -z "$DEBOOTSTRAP_MIRROR" ]; then
DEBOOTSTRAP_MIRROR="http://deb.debian.org/debian" DEBOOTSTRAP_MIRROR="http://httpredir.debian.org/debian"
fi fi
if [ -z "$COMPONENTS" ]; then if [ -z "$COMPONENTS" ]; then
COMPONENTS="main non-free non-free-firmware contrib" COMPONENTS="main non-free contrib"
fi fi
if [ -z "$SOURCES_PROPOSED_SUITE" ]; then if [ -z "$SOURCES_PROPOSED_SUITE" ]; then
SOURCES_PROPOSED_SUITE="RELEASE-proposed-updates" SOURCES_PROPOSED_SUITE="RELEASE-proposed-updates"
@ -693,11 +514,7 @@ debian)
# Debian only performs security updates # Debian only performs security updates
SKIP_UPDATES=1 SKIP_UPDATES=1
if [ -z "$SOURCES_SECURITY_SUITE" ]; then if [ -z "$SOURCES_SECURITY_SUITE" ]; then
if debian_dist_ge "$RELEASE" "bullseye"; then SOURCES_SECURITY_SUITE="RELEASE/updates"
SOURCES_SECURITY_SUITE="RELEASE-security"
else
SOURCES_SECURITY_SUITE="RELEASE/updates"
fi
fi fi
if [ -z "$SOURCES_SECURITY_URL" ]; then if [ -z "$SOURCES_SECURITY_URL" ]; then
SOURCES_SECURITY_URL="http://security.debian.org/" SOURCES_SECURITY_URL="http://security.debian.org/"
@ -713,7 +530,6 @@ debian)
fi fi
# Keep the chroot as minimal as possible # Keep the chroot as minimal as possible
BUILD_PKGS="--no-install-recommends $BUILD_PKGS" BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
SKIP_RECOMMENDS=1
;; ;;
*) *)
echo "Unknown --distro '$DISTRO': aborting" >&2 echo "Unknown --distro '$DISTRO': aborting" >&2
@ -730,7 +546,7 @@ if [ -n "$TARGET_ARCH" ]; then
echo "Unknown target architecture $TARGET_ARCH" >&2 echo "Unknown target architecture $TARGET_ARCH" >&2
exit 1 exit 1
fi fi
BUILD_PKGS="$BUILD_PKGS g++-$target_tuple pkg-config dpkg-cross libc-dev:$TARGET_ARCH" BUILD_PKGS="$BUILD_PKGS g++-$target_tuple pkg-config-$target_tuple dpkg-cross libc-dev:$TARGET_ARCH"
fi fi
debootstrap_opts="--components=$(echo $COMPONENTS | tr ' ' ,)" debootstrap_opts="--components=$(echo $COMPONENTS | tr ' ' ,)"
@ -768,12 +584,12 @@ DEBOOTSTRAP_COMMAND=debootstrap
if [ "$CHROOT_ARCH" != "$HOST_ARCH" ] ; then if [ "$CHROOT_ARCH" != "$HOST_ARCH" ] ; then
case "$CHROOT_ARCH-$HOST_ARCH" in case "$CHROOT_ARCH-$HOST_ARCH" in
# Sometimes we don't need qemu # Sometimes we don't need qemu
amd64-i386|arm64-armhf|armhf-arm64|i386-amd64|powerpc-ppc64|ppc64-powerpc) amd64-i386|amd64-lpia|armel-armhf|armhf-armel|arm64-armel|arm64-armhf|armel-arm64|armhf-arm64|i386-amd64|i386-lpia|lpia-i386|powerpc-ppc64|ppc64-powerpc|sparc-sparc64|sparc64-sparc)
;; ;;
# Sometimes we do # Sometimes we do
*) *)
DEBOOTSTRAP_COMMAND=debootstrap DEBOOTSTRAP_COMMAND=qemu-debootstrap
if ! which "qemu-x86_64-static"; then if ! which "$DEBOOTSTRAP_COMMAND"; then
sudo apt-get install qemu-user-static sudo apt-get install qemu-user-static
fi fi
;; ;;
@ -806,48 +622,26 @@ case "$SCHROOT_TYPE" in
fi fi
sudo btrfs subvolume create "${MNT}" sudo btrfs subvolume create "${MNT}"
;; ;;
"zfs-snapshot")
ZFS_DATASET="${ZFS_PARENT_DATASET}/${CHROOT_NAME}"
if sudo zfs list "${ZFS_DATASET}" >/dev/null 2>&1; then
echo "E: ZFS dataset ${ZFS_DATASET} already exists; aborting" >&2
exit 1
fi
sudo zfs create -p -o mountpoint=legacy "${ZFS_DATASET}"
# Mount
MNT=`mktemp -d -t schroot-XXXXXX`
sudo mount -t zfs "${ZFS_DATASET}" "${MNT}"
;;
"file") "file")
MNT=`mktemp -d -t schroot-XXXXXX` MNT=`mktemp -d -t schroot-XXXXXX`
esac esac
# Debian doesn't have overlayfs yet
case "$SCHROOT_TYPE" in case "$SCHROOT_TYPE" in
directory|file) directory|file)
if grep -q '\soverlay$' /proc/filesystems \ if grep -q '\soverlayfs$' /proc/filesystems \
|| /sbin/modprobe -q --dry-run overlay; then || /sbin/modprobe -q --dry-run overlayfs; then
OVERLAY_FS=overlay OVERLAY_FS=overlayfs
elif grep -q '\soverlayfs$' /proc/filesystems \ else
|| /sbin/modprobe -q --dry-run overlayfs; then OVERLAY_FS=aufs
OVERLAY_FS=overlayfs fi
else
OVERLAY_FS=aufs
fi
esac esac
# work around apt's GPG invocation that fails without root's .gnupg directory # work around apt's GPG invocation that fails without root's .gnupg directory
sudo mkdir -p -m 0700 "$MNT"/root/.gnupg sudo mkdir -p -m 0700 "$MNT"/root/.gnupg
# debootstrap the chroot # debootstrap the chroot
sudo ${proxy:+"http_proxy=${proxy}"} "$DEBOOTSTRAP_COMMAND" --arch="$CHROOT_ARCH" $variant_opt $debootstrap_opts "$RELEASE" "$MNT" "${DEBOOTSTRAP_MIRROR:-http://archive.ubuntu.com/ubuntu}" "$DEBOOTSTRAP_SCRIPT" sudo ${proxy:+"http_proxy=${proxy}"} "$DEBOOTSTRAP_COMMAND" --arch="$CHROOT_ARCH" $variant_opt $debootstrap_opts "$RELEASE" "$MNT" "${DEBOOTSTRAP_MIRROR:-http://archive.ubuntu.com/ubuntu}"
if [ $EATMYDATA -eq 1 ]; then
sudo mkdir -p "${MNT}/usr/local/libexec/mk-sbuild"
sudo ln -s /usr/bin/eatmydata "${MNT}/usr/local/libexec/mk-sbuild/dpkg"
echo 'Dir::Bin::dpkg "/usr/local/libexec/mk-sbuild/dpkg";' \
| sudo tee "${MNT}/etc/apt/apt.conf.d/00mk-sbuild-eatmydata" > /dev/null
fi
# Update the package sources # Update the package sources
TEMP_SOURCES=`mktemp -t sources-XXXXXX` TEMP_SOURCES=`mktemp -t sources-XXXXXX`
@ -892,13 +686,6 @@ EOM
fi fi
fi fi
if [ -z "$SKIP_PROPOSED" ]; then if [ -z "$SKIP_PROPOSED" ]; then
TEMP_PREFERENCES=`mktemp -t preferences-XXXXXX`
cat >> "$TEMP_PREFERENCES" <<EOM
# override for NotAutomatic: yes
Package: *
Pin: release a=*-proposed
Pin-Priority: 500
EOM
cat >> "$TEMP_SOURCES" <<EOM cat >> "$TEMP_SOURCES" <<EOM
deb ${MIRROR_ARCHS}${DEBOOTSTRAP_MIRROR} $SOURCES_PROPOSED_SUITE ${COMPONENTS} deb ${MIRROR_ARCHS}${DEBOOTSTRAP_MIRROR} $SOURCES_PROPOSED_SUITE ${COMPONENTS}
deb-src ${DEBOOTSTRAP_MIRROR} $SOURCES_PROPOSED_SUITE ${COMPONENTS} deb-src ${DEBOOTSTRAP_MIRROR} $SOURCES_PROPOSED_SUITE ${COMPONENTS}
@ -924,12 +711,9 @@ fi
cat "$TEMP_SOURCES" | sed -e "s|RELEASE|$RELEASE|g" | \ cat "$TEMP_SOURCES" | sed -e "s|RELEASE|$RELEASE|g" | \
sudo bash -c "cat > $MNT/etc/apt/sources.list" sudo bash -c "cat > $MNT/etc/apt/sources.list"
rm -f "$TEMP_SOURCES" rm -f "$TEMP_SOURCES"
if [ -n "$TEMP_PREFERENCES" ]; then
sudo mv "$TEMP_PREFERENCES" $MNT/etc/apt/preferences.d/proposed.pref
fi
# Copy the timezone (uncomment this if you want to use your local time zone) # Copy the timezone (comment this out if you want to leave the chroot at UTC)
#sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/ sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/
# Create a schroot entry for this chroot # Create a schroot entry for this chroot
TEMP_SCHROOTCONF=`mktemp -t schrootconf-XXXXXX` TEMP_SCHROOTCONF=`mktemp -t schrootconf-XXXXXX`
TEMPLATE_SCHROOTCONF=~/.mk-sbuild.schroot.conf TEMPLATE_SCHROOTCONF=~/.mk-sbuild.schroot.conf
@ -956,9 +740,9 @@ root-groups=$ADMIN_GROUPS
type=SCHROOT_TYPE type=SCHROOT_TYPE
profile=$SCHROOT_PROFILE profile=$SCHROOT_PROFILE
EOM EOM
if [ -n "$COMMAND_PREFIX" ]; then if [ $EATMYDATA -eq 1 ]; then
cat >> "$TEMP_SCHROOTCONF" <<EOM cat >> "$TEMP_SCHROOTCONF" <<EOM
command-prefix=${COMMAND_PREFIX} command-prefix=eatmydata
EOM EOM
fi fi
case "$SCHROOT_TYPE" in case "$SCHROOT_TYPE" in
@ -981,12 +765,6 @@ btrfs-source-subvolume=CHROOT_PATH
btrfs-snapshot-directory=CHROOT_SNAPSHOT_DIR btrfs-snapshot-directory=CHROOT_SNAPSHOT_DIR
EOM EOM
;; ;;
zfs-snapshot)
cat >> "${TEMP_SCHROOTCONF}" <<EOM
zfs-dataset=ZFS_DATASET
EOM
;;
esac esac
fi fi
if [ ! -z "$personality" ]; then if [ ! -z "$personality" ]; then
@ -1003,7 +781,6 @@ sed -e "s|CHROOT_NAME|$CHROOT_NAME|g" \
-e "s|SNAPSHOT_SIZE|$SNAPSHOT_SIZE|g" \ -e "s|SNAPSHOT_SIZE|$SNAPSHOT_SIZE|g" \
-e "s|SCHROOT_TYPE|$SCHROOT_TYPE|g" \ -e "s|SCHROOT_TYPE|$SCHROOT_TYPE|g" \
-e "s|CHROOT_SNAPSHOT_DIR|$CHROOT_SNAPSHOT_DIR|g" \ -e "s|CHROOT_SNAPSHOT_DIR|$CHROOT_SNAPSHOT_DIR|g" \
-e "s|ZFS_DATASET|$ZFS_DATASET|g" \
"$TEMP_SCHROOTCONF" \ "$TEMP_SCHROOTCONF" \
| sudo tee "/etc/schroot/chroot.d/sbuild-$CHROOT_NAME" > /dev/null | sudo tee "/etc/schroot/chroot.d/sbuild-$CHROOT_NAME" > /dev/null
rm -f "$TEMP_SCHROOTCONF" rm -f "$TEMP_SCHROOTCONF"
@ -1025,9 +802,7 @@ sudo chmod a+x "$MNT"/usr/sbin/policy-rc.d
# Create image finalization script # Create image finalization script
sudo bash -c "cat >> $MNT/finish.sh" <<EOM sudo bash -c "cat >> $MNT/finish.sh" <<EOM
#!/bin/bash #!/bin/bash
if [ "$DEBUG" = 1 ]; then #set -x
set -x
fi
set -e set -e
if [ -n "$proxy" ]; then if [ -n "$proxy" ]; then
mkdir -p /etc/apt/apt.conf.d/ mkdir -p /etc/apt/apt.conf.d/
@ -1038,35 +813,6 @@ EOF
fi fi
EOM EOM
if [ -n "$SKIP_RECOMMENDS" ]; then
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
mkdir -p /etc/apt/apt.conf.d/
cat > /etc/apt/apt.conf.d/99mk-sbuild-no-recommends <<EOF
// disable install recommends
APT::Install-Recommends "0";
EOF
EOM
fi
if [ "$USE_PKGBINARYMANGLER" = 1 ]; then
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
mkdir -p /etc/pkgbinarymangler/
cat > /etc/pkgbinarymangler/maintainermangler.conf <<EOF
# pkgmaintainermangler configuration file
# pkgmaintainermangler will do nothing unless enable is set to "true"
enable: true
# Configure what happens if /CurrentlyBuilding is present, but invalid
# (i. e. it does not contain a Package: field). If "ignore" (default),
# the file is ignored (i. e. the Maintainer field is mangled) and a
# warning is printed. If "fail" (or any other value), pkgmaintainermangler
# exits with an error, which causes a package build to fail.
invalid_currentlybuilding: ignore
EOF
EOM
fi
if [ -n "$TARGET_ARCH" ]; then if [ -n "$TARGET_ARCH" ]; then
sudo bash -c "cat >> $MNT/finish.sh" <<EOM sudo bash -c "cat >> $MNT/finish.sh" <<EOM
# Configure target architecture # Configure target architecture
@ -1085,7 +831,7 @@ apt-get update || true
echo set debconf/frontend Noninteractive | debconf-communicate echo set debconf/frontend Noninteractive | debconf-communicate
echo set debconf/priority critical | debconf-communicate echo set debconf/priority critical | debconf-communicate
# Install basic build tool set, trying to match buildd # Install basic build tool set, trying to match buildd
apt-get -y --force-yes -o Dpkg::Options::="--force-confold" install $BUILD_PKGS apt-get -y --force-yes install $BUILD_PKGS
# Set up expected /dev entries # Set up expected /dev entries
if [ ! -r /dev/stdin ]; then ln -s /proc/self/fd/0 /dev/stdin; fi if [ ! -r /dev/stdin ]; then ln -s /proc/self/fd/0 /dev/stdin; fi
if [ ! -r /dev/stdout ]; then ln -s /proc/self/fd/1 /dev/stdout; fi if [ ! -r /dev/stdout ]; then ln -s /proc/self/fd/1 /dev/stdout; fi
@ -1097,7 +843,7 @@ EOM
sudo chmod a+x "$MNT"/finish.sh sudo chmod a+x "$MNT"/finish.sh
case "$SCHROOT_TYPE" in case "$SCHROOT_TYPE" in
"lvm-snapshot"|"zfs-snapshot") "lvm-snapshot")
sudo umount "$MNT" sudo umount "$MNT"
rmdir "$MNT" rmdir "$MNT"
;; ;;
@ -1121,7 +867,7 @@ echo ""
echo " To CHANGE the golden image: sudo schroot -c source:${CHROOT_NAME} -u root" echo " To CHANGE the golden image: sudo schroot -c source:${CHROOT_NAME} -u root"
echo " To ENTER an image snapshot: schroot -c ${CHROOT_NAME}" echo " To ENTER an image snapshot: schroot -c ${CHROOT_NAME}"
echo " To BUILD within a snapshot: sbuild -A -d ${CHROOT_NAME} PACKAGE*.dsc" echo " To BUILD within a snapshot: sbuild -A -d ${CHROOT_NAME} PACKAGE*.dsc"
if [ -n "$TARGET_ARCH" ] && [ "$CHROOT_ARCH" != "$TARGET_ARCH" ] ; then if [ "$CHROOT_ARCH" != "$TARGET_ARCH" ] ; then
echo " To BUILD for ${TARGET_ARCH}: sbuild -A -d ${CHROOT_NAME} --host ${TARGET_ARCH} PACKAGE*.dsc" echo " To BUILD for ${TARGET_ARCH}: sbuild -A -d ${CHROOT_NAME} --host ${TARGET_ARCH} PACKAGE*.dsc"
fi fi
echo "" echo ""
pbuilder-dist
@ -1,4 +1,4 @@
#! /usr/bin/python3 #! /usr/bin/env python
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
# Copyright (C) 2007-2010, Siegfried-A. Gevatter <rainct@ubuntu.com>, # Copyright (C) 2007-2010, Siegfried-A. Gevatter <rainct@ubuntu.com>,
@ -29,29 +29,21 @@
# configurations. For example, a symlink called pbuilder-hardy will assume # configurations. For example, a symlink called pbuilder-hardy will assume
# that the target distribution is always meant to be Ubuntu Hardy. # that the target distribution is always meant to be Ubuntu Hardy.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import os import os
import os.path
import shutil
import subprocess
import sys import sys
from contextlib import suppress
import debian.deb822 import debian.deb822
from distro_info import DebianDistroInfo, DistroDataOutdated, UbuntuDistroInfo from distro_info import DebianDistroInfo, UbuntuDistroInfo, DistroDataOutdated
import ubuntutools.misc import ubuntutools.misc
import ubuntutools.version import ubuntutools.version
from ubuntutools import getLogger
from ubuntutools.config import UDTConfig from ubuntutools.config import UDTConfig
from ubuntutools.logger import Logger
from ubuntutools.question import YesNoQuestion from ubuntutools.question import YesNoQuestion
from ubuntutools import subprocess
Logger = getLogger()
class PbuilderDist: class PbuilderDist(object):
def __init__(self, builder): def __init__(self, builder):
# Base directory where pbuilder will put all the files it creates. # Base directory where pbuilder will put all the files it creates.
self.base = None self.base = None
@ -68,7 +60,6 @@ class PbuilderDist:
self.enable_security = True self.enable_security = True
self.enable_updates = True self.enable_updates = True
self.enable_proposed = True self.enable_proposed = True
self.enable_backports = False
# File where the log of the last operation will be saved. # File where the log of the last operation will be saved.
self.logfile = None self.logfile = None
@ -90,36 +81,31 @@ class PbuilderDist:
self.chroot_string = None self.chroot_string = None
# Authentication method # Authentication method
self.auth = "sudo" self.auth = 'sudo'
# Builder # Builder
self.builder = builder self.builder = builder
# Distro info self._debian_distros = DebianDistroInfo().all + \
self.debian_distro_info = DebianDistroInfo() ['stable', 'testing', 'unstable']
self.ubuntu_distro_info = UbuntuDistroInfo()
self._debian_distros = self.debian_distro_info.all + ["stable", "testing", "unstable"]
# Ensure that the used builder is installed # Ensure that the used builder is installed
paths = set(os.environ["PATH"].split(":")) paths = set(os.environ['PATH'].split(':'))
paths |= set(("/sbin", "/usr/sbin", "/usr/local/sbin")) paths |= set(('/sbin', '/usr/sbin', '/usr/local/sbin'))
if not any(os.path.exists(os.path.join(p, builder)) for p in paths): if not any(os.path.exists(os.path.join(p, builder)) for p in paths):
Logger.error('Could not find "%s".', builder) Logger.error('Could not find "%s".', builder)
sys.exit(1) sys.exit(1)
############################################################## ##############################################################
self.base = os.path.expanduser(os.environ.get("PBUILDFOLDER", "~/pbuilder/")) self.base = os.path.expanduser(os.environ.get('PBUILDFOLDER',
'~/pbuilder/'))
if "SUDO_USER" in os.environ: if 'SUDO_USER' in os.environ:
Logger.warning( Logger.warn('Running under sudo. '
"Running under sudo. " 'This is probably not what you want. '
"This is probably not what you want. " 'pbuilder-dist will use sudo itself, when necessary.')
"pbuilder-dist will use sudo itself, " if os.stat(os.environ['HOME']).st_uid != os.getuid():
"when necessary."
)
if os.stat(os.environ["HOME"]).st_uid != os.getuid():
Logger.error("You don't own $HOME") Logger.error("You don't own $HOME")
sys.exit(1) sys.exit(1)
@ -130,8 +116,8 @@ class PbuilderDist:
Logger.error('Cannot create base directory "%s"', self.base) Logger.error('Cannot create base directory "%s"', self.base)
sys.exit(1) sys.exit(1)
if "PBUILDAUTH" in os.environ: if 'PBUILDAUTH' in os.environ:
self.auth = os.environ["PBUILDAUTH"] self.auth = os.environ['PBUILDAUTH']
self.system_architecture = ubuntutools.misc.host_architecture() self.system_architecture = ubuntutools.misc.host_architecture()
self.system_distro = ubuntutools.misc.system_distribution() self.system_distro = ubuntutools.misc.system_distribution()
@ -141,7 +127,7 @@ class PbuilderDist:
self.target_distro = self.system_distro self.target_distro = self.system_distro
def set_target_distro(self, distro): def set_target_distro(self, distro):
"""PbuilderDist.set_target_distro(distro) -> None """ PbuilderDist.set_target_distro(distro) -> None
Check if the given target distribution name is correct, if it Check if the given target distribution name is correct, if it
isn't know to the system ask the user for confirmation before isn't know to the system ask the user for confirmation before
@ -152,17 +138,16 @@ class PbuilderDist:
Logger.error('"%s" is an invalid distribution codename.', distro) Logger.error('"%s" is an invalid distribution codename.', distro)
sys.exit(1) sys.exit(1)
if not os.path.isfile(os.path.join("/usr/share/debootstrap/scripts/", distro)): if not os.path.isfile(os.path.join('/usr/share/debootstrap/scripts/',
if os.path.isdir("/usr/share/debootstrap/scripts/"): distro)):
if os.path.isdir('/usr/share/debootstrap/scripts/'):
# Debian experimental doesn't have a debootstrap file but # Debian experimental doesn't have a debootstrap file but
# should work nevertheless. Ubuntu releases automatically use # should work nevertheless.
# the gutsy script as of debootstrap 1.0.128+nmu2ubuntu1.1. if distro not in self._debian_distros:
if distro not in (self._debian_distros + self.ubuntu_distro_info.all): question = ('Warning: Unknown distribution "%s". '
question = ( 'Do you want to continue' % distro)
f'Warning: Unknown distribution "{distro}". ' "Do you want to continue" answer = YesNoQuestion().ask(question, 'no')
) if answer == 'no':
answer = YesNoQuestion().ask(question, "no")
if answer == "no":
sys.exit(0) sys.exit(0)
else: else:
Logger.error('Please install package "debootstrap".') Logger.error('Please install package "debootstrap".')
@ -171,34 +156,33 @@ class PbuilderDist:
self.target_distro = distro self.target_distro = distro
def set_operation(self, operation): def set_operation(self, operation):
"""PbuilderDist.set_operation -> None """ PbuilderDist.set_operation -> None
Check if the given string is a valid pbuilder operation and Check if the given string is a valid pbuilder operation and
depending on this either save it into the appropiate variable depending on this either save it into the appropiate variable
or finalize pbuilder-dist's execution. or finalize pbuilder-dist's execution.
""" """
arguments = ("create", "update", "build", "clean", "login", "execute") arguments = ('create', 'update', 'build', 'clean', 'login', 'execute')
if operation not in arguments: if operation not in arguments:
if operation.endswith(".dsc"): if operation.endswith('.dsc'):
if os.path.isfile(operation): if os.path.isfile(operation):
self.operation = "build" self.operation = 'build'
return [operation] return [operation]
Logger.error('Could not find file "%s".', operation) else:
Logger.error('Could not find file "%s".', operation)
sys.exit(1)
else:
Logger.error('"%s" is not a recognized argument.\n'
'Please use one of these: %s.',
operation, ', '.join(arguments))
sys.exit(1) sys.exit(1)
else:
Logger.error( self.operation = operation
'"%s" is not a recognized argument.\nPlease use one of these: %s.', return []
operation,
", ".join(arguments),
)
sys.exit(1)
self.operation = operation
return []
def get_command(self, remaining_arguments=None): def get_command(self, remaining_arguments=None):
"""PbuilderDist.get_command -> string """ PbuilderDist.get_command -> string
Generate the pbuilder command which matches the given configuration Generate the pbuilder command which matches the given configuration
and return it as a string. and return it as a string.
@ -209,34 +193,30 @@ class PbuilderDist:
if self.build_architecture == self.system_architecture: if self.build_architecture == self.system_architecture:
self.chroot_string = self.target_distro self.chroot_string = self.target_distro
else: else:
self.chroot_string = self.target_distro + "-" + self.build_architecture self.chroot_string = (self.target_distro + '-'
+ self.build_architecture)
prefix = os.path.join(self.base, self.chroot_string) prefix = os.path.join(self.base, self.chroot_string)
if "--buildresult" not in remaining_arguments: if '--buildresult' not in remaining_arguments:
result = os.path.normpath(f"{prefix}_result/") result = os.path.normpath('%s_result/' % prefix)
else: else:
location_of_arg = remaining_arguments.index("--buildresult") location_of_arg = remaining_arguments.index('--buildresult')
result = os.path.normpath(remaining_arguments[location_of_arg + 1]) result = os.path.normpath(remaining_arguments[location_of_arg+1])
remaining_arguments.pop(location_of_arg + 1) remaining_arguments.pop(location_of_arg+1)
remaining_arguments.pop(location_of_arg) remaining_arguments.pop(location_of_arg)
if not self.logfile and self.operation != "login": if not self.logfile and self.operation != 'login':
if self.operation == "build": if self.operation == 'build':
dsc_files = [a for a in remaining_arguments if a.strip().endswith(".dsc")] dsc_files = [a for a in remaining_arguments
if a.strip().endswith('.dsc')]
assert len(dsc_files) == 1 assert len(dsc_files) == 1
dsc = debian.deb822.Dsc(open(dsc_files[0], encoding="utf-8")) dsc = debian.deb822.Dsc(open(dsc_files[0]))
version = ubuntutools.version.Version(dsc["Version"]) version = ubuntutools.version.Version(dsc['Version'])
name = ( name = (dsc['Source'] + '_' + version.strip_epoch() + '_' +
dsc["Source"] self.build_architecture + '.build')
+ "_"
+ version.strip_epoch()
+ "_"
+ self.build_architecture
+ ".build"
)
self.logfile = os.path.join(result, name) self.logfile = os.path.join(result, name)
else: else:
self.logfile = os.path.join(result, "last_operation.log") self.logfile = os.path.join(result, 'last_operation.log')
if not os.path.isdir(result): if not os.path.isdir(result):
try: try:
@ -246,91 +226,86 @@ class PbuilderDist:
sys.exit(1) sys.exit(1)
arguments = [ arguments = [
f"--{self.operation}", '--%s' % self.operation,
"--distribution", '--distribution', self.target_distro,
self.target_distro, '--buildresult', result,
"--buildresult",
result,
] ]
if self.operation == "update": if self.operation == 'update':
arguments += ["--override-config"] arguments += ['--override-config']
if self.builder == "pbuilder": if self.builder == 'pbuilder':
arguments += ["--basetgz", prefix + "-base.tgz"] arguments += ['--basetgz', prefix + '-base.tgz']
elif self.builder == "cowbuilder": elif self.builder == 'cowbuilder':
arguments += ["--basepath", prefix + "-base.cow"] arguments += ['--basepath', prefix + '-base.cow']
else: else:
Logger.error('Unrecognized builder "%s".', self.builder) Logger.error('Unrecognized builder "%s".', self.builder)
sys.exit(1) sys.exit(1)
if self.logfile: if self.logfile:
arguments += ["--logfile", self.logfile] arguments += ['--logfile', self.logfile]
if os.path.exists("/var/cache/archive/"): if os.path.exists('/var/cache/archive/'):
arguments += ["--bindmounts", "/var/cache/archive/"] arguments += ['--bindmounts', '/var/cache/archive/']
localrepo = '/var/cache/archive/' + self.target_distro
if os.path.exists(localrepo):
arguments += [
'--othermirror',
'deb file:///var/cache/archive/ %s/' % self.target_distro,
]
config = UDTConfig() config = UDTConfig()
if self.target_distro in self._debian_distros: if self.target_distro in self._debian_distros:
mirror = os.environ.get("MIRRORSITE", config.get_value("DEBIAN_MIRROR")) mirror = os.environ.get('MIRRORSITE',
components = "main" config.get_value('DEBIAN_MIRROR'))
components = 'main'
if self.extra_components: if self.extra_components:
components += " contrib non-free non-free-firmware" components += ' contrib non-free'
else: else:
mirror = os.environ.get("MIRRORSITE", config.get_value("UBUNTU_MIRROR")) mirror = os.environ.get('MIRRORSITE',
if self.build_architecture not in ("amd64", "i386"): config.get_value('UBUNTU_MIRROR'))
mirror = os.environ.get("MIRRORSITE", config.get_value("UBUNTU_PORTS_MIRROR")) if self.build_architecture not in ('amd64', 'i386'):
components = "main restricted" mirror = os.environ.get(
'MIRRORSITE', config.get_value('UBUNTU_PORTS_MIRROR'))
components = 'main restricted'
if self.extra_components: if self.extra_components:
components += " universe multiverse" components += ' universe multiverse'
arguments += ["--mirror", mirror] arguments += ['--mirror', mirror]
othermirrors = [] othermirrors = []
localrepo = f"/var/cache/archive/{self.target_distro}"
if os.path.exists(localrepo):
repo = f"deb file:///var/cache/archive/ {self.target_distro}/"
othermirrors.append(repo)
if self.target_distro in self._debian_distros: if self.target_distro in self._debian_distros:
debian_info = DebianDistroInfo()
try: try:
codename = self.debian_distro_info.codename( codename = debian_info.codename(self.target_distro,
self.target_distro, default=self.target_distro default=self.target_distro)
) except DistroDataOutdated, error:
except DistroDataOutdated as error: Logger.warn(error)
Logger.warning(error) if codename in (debian_info.devel(), 'experimental'):
if codename in (self.debian_distro_info.devel(), "experimental"):
self.enable_security = False self.enable_security = False
self.enable_updates = False self.enable_updates = False
self.enable_proposed = False self.enable_proposed = False
elif codename in (self.debian_distro_info.testing(), "testing"): elif codename in (debian_info.testing(), 'testing'):
self.enable_updates = False self.enable_updates = False
if self.enable_security: if self.enable_security:
pocket = "-security" othermirrors.append('deb %s %s/updates %s'
with suppress(ValueError): % (config.get_value('DEBSEC_MIRROR'),
# before bullseye (version 11) security suite is /updates self.target_distro, components))
if float(self.debian_distro_info.version(codename)) < 11.0:
pocket = "/updates"
othermirrors.append(
f"deb {config.get_value('DEBSEC_MIRROR')}"
f" {self.target_distro}{pocket} {components}"
)
if self.enable_updates: if self.enable_updates:
othermirrors.append(f"deb {mirror} {self.target_distro}-updates {components}") othermirrors.append('deb %s %s-updates %s'
% (mirror, self.target_distro, components))
if self.enable_proposed: if self.enable_proposed:
othermirrors.append( othermirrors.append('deb %s %s-proposed-updates %s'
f"deb {mirror} {self.target_distro}-proposed-updates {components}" % (mirror, self.target_distro, components))
)
if self.enable_backports:
othermirrors.append(f"deb {mirror} {self.target_distro}-backports {components}")
aptcache = os.path.join(self.base, "aptcache", "debian") aptcache = os.path.join(self.base, 'aptcache', 'debian')
else: else:
try: try:
dev_release = self.target_distro == self.ubuntu_distro_info.devel() dev_release = self.target_distro == UbuntuDistroInfo().devel()
except DistroDataOutdated as error: except DistroDataOutdated, error:
Logger.warning(error) Logger.warn(error)
dev_release = True dev_release = True
if dev_release: if dev_release:
@ -338,45 +313,46 @@ class PbuilderDist:
self.enable_updates = False self.enable_updates = False
if self.enable_security: if self.enable_security:
othermirrors.append(f"deb {mirror} {self.target_distro}-security {components}") othermirrors.append('deb %s %s-security %s'
% (mirror, self.target_distro, components))
if self.enable_updates: if self.enable_updates:
othermirrors.append(f"deb {mirror} {self.target_distro}-updates {components}") othermirrors.append('deb %s %s-updates %s'
% (mirror, self.target_distro, components))
if self.enable_proposed: if self.enable_proposed:
othermirrors.append(f"deb {mirror} {self.target_distro}-proposed {components}") othermirrors.append('deb %s %s-proposed %s'
% (mirror, self.target_distro, components))
aptcache = os.path.join(self.base, "aptcache", "ubuntu") aptcache = os.path.join(self.base, 'aptcache', 'ubuntu')
if "OTHERMIRROR" in os.environ: if 'OTHERMIRROR' in os.environ:
othermirrors += os.environ["OTHERMIRROR"].split("|") othermirrors += os.environ['OTHERMIRROR'].split('|')
if othermirrors: if othermirrors:
arguments += ["--othermirror", "|".join(othermirrors)] arguments += ['--othermirror', '|'.join(othermirrors)]
# Work around LP:#599695 # Work around LP:#599695
if ( if (ubuntutools.misc.system_distribution() == 'Debian'
ubuntutools.misc.system_distribution() == "Debian" and self.target_distro not in self._debian_distros):
and self.target_distro not in self._debian_distros if not os.path.exists(
): '/usr/share/keyrings/ubuntu-archive-keyring.gpg'):
if not os.path.exists("/usr/share/keyrings/ubuntu-archive-keyring.gpg"): Logger.error('ubuntu-keyring not installed')
Logger.error("ubuntu-keyring not installed")
sys.exit(1) sys.exit(1)
arguments += [ arguments += [
"--debootstrapopts", '--debootstrapopts',
"--keyring=/usr/share/keyrings/ubuntu-archive-keyring.gpg", '--keyring=/usr/share/keyrings/ubuntu-archive-keyring.gpg',
] ]
elif ( elif (ubuntutools.misc.system_distribution() == 'Ubuntu'
ubuntutools.misc.system_distribution() == "Ubuntu" and self.target_distro in self._debian_distros):
and self.target_distro in self._debian_distros if not os.path.exists(
): '/usr/share/keyrings/debian-archive-keyring.gpg'):
if not os.path.exists("/usr/share/keyrings/debian-archive-keyring.gpg"): Logger.error('debian-archive-keyring not installed')
Logger.error("debian-archive-keyring not installed")
sys.exit(1) sys.exit(1)
arguments += [ arguments += [
"--debootstrapopts", '--debootstrapopts',
"--keyring=/usr/share/keyrings/debian-archive-keyring.gpg", '--keyring=/usr/share/keyrings/debian-archive-keyring.gpg',
] ]
arguments += ["--aptcache", aptcache, "--components", components] arguments += ['--aptcache', aptcache, '--components', components]
if not os.path.isdir(aptcache): if not os.path.isdir(aptcache):
try: try:
@ -386,11 +362,13 @@ class PbuilderDist:
sys.exit(1) sys.exit(1)
if self.build_architecture != self.system_architecture: if self.build_architecture != self.system_architecture:
arguments += ["--debootstrapopts", "--arch=" + self.build_architecture] arguments += ['--debootstrapopts',
'--arch=' + self.build_architecture]
apt_conf_dir = os.path.join(self.base, f"etc/{self.target_distro}/apt.conf") apt_conf_dir = os.path.join(self.base,
'etc/%s/apt.conf' % self.target_distro)
if os.path.exists(apt_conf_dir): if os.path.exists(apt_conf_dir):
arguments += ["--aptconfdir", apt_conf_dir] arguments += ['--aptconfdir', apt_conf_dir]
# Append remaining arguments # Append remaining arguments
if remaining_arguments: if remaining_arguments:
@ -401,28 +379,28 @@ class PbuilderDist:
# With both common variable name schemes (BTS: #659060). # With both common variable name schemes (BTS: #659060).
return [ return [
self.auth, self.auth,
"HOME=" + os.path.expanduser("~"), 'HOME=' + os.path.expanduser('~'),
"ARCHITECTURE=" + self.build_architecture, 'ARCHITECTURE=' + self.build_architecture,
"DISTRIBUTION=" + self.target_distro, 'DISTRIBUTION=' + self.target_distro,
"ARCH=" + self.build_architecture, 'ARCH=' + self.build_architecture,
"DIST=" + self.target_distro, 'DIST=' + self.target_distro,
"DEB_BUILD_OPTIONS=" + os.environ.get("DEB_BUILD_OPTIONS", ""), 'DEB_BUILD_OPTIONS=' + os.environ.get('DEB_BUILD_OPTIONS', ''),
self.builder, self.builder,
] + arguments ] + arguments
def show_help(exit_code=0): def show_help(exit_code=0):
"""help() -> None """ help() -> None
Print a help message for pbuilder-dist, and exit with the given code. Print a help message for pbuilder-dist, and exit with the given code.
""" """
Logger.info("See man pbuilder-dist for more information.") print 'See man pbuilder-dist for more information.'
sys.exit(exit_code) sys.exit(exit_code)
def main(): def main():
"""main() -> None """ main() -> None
This is pbuilder-dist's main function. It creates a PbuilderDist This is pbuilder-dist's main function. It creates a PbuilderDist
object, modifies all necessary settings taking data from the object, modifies all necessary settings taking data from the
@ -430,122 +408,96 @@ def main():
the script and runs pbuilder itself or exists with an error message. the script and runs pbuilder itself or exists with an error message.
""" """
script_name = os.path.basename(sys.argv[0]) script_name = os.path.basename(sys.argv[0])
parts = script_name.split("-") parts = script_name.split('-')
# Copy arguments into another list for save manipulation # Copy arguments into another list for save manipulation
args = sys.argv[1:] args = sys.argv[1:]
if "-" in script_name and parts[0] not in ("pbuilder", "cowbuilder") or len(parts) > 3: if ('-' in script_name and parts[0] not in ('pbuilder', 'cowbuilder')
Logger.error('"%s" is not a valid name for a "pbuilder-dist" executable.', script_name) or len(parts) > 3):
Logger.error('"%s" is not a valid name for a "pbuilder-dist" '
'executable.', script_name)
sys.exit(1) sys.exit(1)
if len(args) < 1: if len(args) < 1:
Logger.error("Insufficient number of arguments.") Logger.error('Insufficient number of arguments.')
show_help(1) show_help(1)
if args[0] in ("-h", "--help", "help"): if args[0] in ('-h', '--help', 'help'):
show_help(0) show_help(0)
app = PbuilderDist(parts[0]) app = PbuilderDist(parts[0])
if len(parts) > 1 and parts[1] != "dist" and "." not in parts[1]: if len(parts) > 1 and parts[1] != 'dist' and '.' not in parts[1]:
app.set_target_distro(parts[1]) app.set_target_distro(parts[1])
else: else:
app.set_target_distro(args.pop(0)) app.set_target_distro(args.pop(0))
if len(parts) > 2: if len(parts) > 2:
requested_arch = parts[2] requested_arch = parts[2]
elif len(args) > 0: elif len(args) > 0 and args[0] in (
if shutil.which("arch-test") is not None: 'alpha', 'amd64', 'arm', 'armeb', 'armel', 'armhf', 'arm64',
arch_test = subprocess.run( 'i386', 'lpia', 'm68k', 'mips', 'mipsel', 'powerpc', 'ppc64',
["arch-test", args[0]], check=False, stdout=subprocess.DEVNULL 'ppc64el', 's390x', 'sh4', 'sh4eb', 'sparc', 'sparc64'):
) requested_arch = args.pop(0)
if arch_test.returncode == 0:
requested_arch = args.pop(0)
elif os.path.isdir("/usr/lib/arch-test") and args[0] in os.listdir(
"/usr/lib/arch-test/"
):
Logger.error(
'Architecture "%s" is not supported on your '
"currently running kernel. Consider installing "
"the qemu-user-static package to enable the use of "
"foreign architectures.",
args[0],
)
sys.exit(1)
else:
requested_arch = None
else:
Logger.error(
'Cannot determine if "%s" is a valid architecture. '
"Please install the arch-test package and retry.",
args[0],
)
sys.exit(1)
else: else:
requested_arch = None requested_arch = None
if requested_arch: if requested_arch:
app.build_architecture = requested_arch app.build_architecture = requested_arch
# For some foreign architectures we need to use qemu # For some foreign architectures we need to use qemu
if requested_arch != app.system_architecture and ( if (requested_arch != app.system_architecture
app.system_architecture, and (app.system_architecture, requested_arch) not in [
requested_arch, ('amd64', 'i386'), ('amd64', 'lpia'), ('arm', 'armel'),
) not in [ ('armel', 'arm'), ('armel', 'armhf'), ('armhf', 'armel'),
("amd64", "i386"), ('arm64', 'arm'), ('arm64', 'armhf'), ('arm64', 'armel'),
("arm64", "arm"), ('i386', 'lpia'), ('lpia', 'i386'), ('powerpc', 'ppc64'),
("arm64", "armhf"), ('ppc64', 'powerpc'), ('sparc', 'sparc64'),
("powerpc", "ppc64"), ('sparc64', 'sparc')]):
("ppc64", "powerpc"), args += ['--debootstrap', 'qemu-debootstrap']
]:
args += ["--debootstrap", "debootstrap"]
if "mainonly" in sys.argv or "--main-only" in sys.argv: if 'mainonly' in sys.argv or '--main-only' in sys.argv:
app.extra_components = False app.extra_components = False
if "mainonly" in sys.argv: if 'mainonly' in sys.argv:
args.remove("mainonly") args.remove('mainonly')
else: else:
args.remove("--main-only") args.remove('--main-only')
if "--release-only" in sys.argv: if '--release-only' in sys.argv:
args.remove("--release-only") args.remove('--release-only')
app.enable_security = False app.enable_security = False
app.enable_updates = False app.enable_updates = False
app.enable_proposed = False app.enable_proposed = False
elif "--security-only" in sys.argv: elif '--security-only' in sys.argv:
args.remove("--security-only") args.remove('--security-only')
app.enable_updates = False app.enable_updates = False
app.enable_proposed = False app.enable_proposed = False
elif "--updates-only" in sys.argv: elif '--updates-only' in sys.argv:
args.remove("--updates-only") args.remove('--updates-only')
app.enable_proposed = False app.enable_proposed = False
elif "--backports" in sys.argv:
args.remove("--backports")
app.enable_backports = True
if len(args) < 1: if len(args) < 1:
Logger.error("Insufficient number of arguments.") Logger.error('Insufficient number of arguments.')
show_help(1) show_help(1)
# Parse the operation # Parse the operation
args = app.set_operation(args.pop(0)) + args args = app.set_operation(args.pop(0)) + args
if app.operation == "build": if app.operation == 'build':
if len([a for a in args if a.strip().endswith(".dsc")]) != 1: if len([a for a in args if a.strip().endswith('.dsc')]) != 1:
msg = "You have to specify one .dsc file if you want to build." msg = 'You have to specify one .dsc file if you want to build.'
Logger.error(msg) Logger.error(msg)
sys.exit(1) sys.exit(1)
# Execute the pbuilder command # Execute the pbuilder command
if "--debug-echo" not in args: if not '--debug-echo' in args:
sys.exit(subprocess.call(app.get_command(args))) sys.exit(subprocess.call(app.get_command(args)))
else: else:
Logger.info(app.get_command([arg for arg in args if arg != "--debug-echo"])) print app.get_command([arg for arg in args if arg != '--debug-echo'])
if __name__ == '__main__':
if __name__ == "__main__":
try: try:
main() main()
except KeyboardInterrupt: except KeyboardInterrupt:
Logger.error("Manually aborted.") Logger.error('Manually aborted.')
sys.exit(1) sys.exit(1)
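To make the dispatch logic above concrete, a hedged usage sketch; the release, architecture, paths, and .dsc name are illustrative:

    # The target release comes from the first argument, the operation next
    pbuilder-dist noble create
    pbuilder-dist noble build foo_1.0-1.dsc
    # As the header comment notes, a symlink can pin the release (and arch)
    ln -s /usr/bin/pbuilder-dist ~/bin/pbuilder-noble-i386
    pbuilder-noble-i386 update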
pm-helper
@ -1,142 +0,0 @@
#!/usr/bin/python3
# Find the next thing to work on for proposed-migration
# Copyright (C) 2023 Canonical Ltd.
# Author: Steve Langasek <steve.langasek@ubuntu.com>
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License, version 3.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import lzma
import sys
import webbrowser
from argparse import ArgumentParser
import yaml
from launchpadlib.launchpad import Launchpad
from ubuntutools.utils import get_url
# proposed-migration is only concerned with the devel series; unlike other
# tools, don't make this configurable
excuses_url = "https://ubuntu-archive-team.ubuntu.com/proposed-migration/update_excuses.yaml.xz"
def get_proposed_version(excuses, package):
for k in excuses["sources"]:
if k["source"] == package:
return k.get("new-version")
return None
def claim_excuses_bug(launchpad, bug, package):
print(f"LP: #{bug.id}: {bug.title}")
ubuntu = launchpad.distributions["ubuntu"]
series = ubuntu.current_series.fullseriesname
for task in bug.bug_tasks:
# targeting to a series doesn't make the default task disappear,
# it just makes it useless
if task.bug_target_name == f"{package} ({series})":
our_task = task
break
if task.bug_target_name == f"{package} (Ubuntu)":
our_task = task
if our_task.assignee == launchpad.me:
print("Bug already assigned to you.")
return True
if our_task.assignee:
print(f"Currently assigned to {our_task.assignee.name}")
print("""Do you want to claim this bug? [yN] """, end="")
sys.stdout.flush()
response = sys.stdin.readline()
if response.strip().lower().startswith("y"):
our_task.assignee = launchpad.me
our_task.lp_save()
return True
return False
def create_excuses_bug(launchpad, package, version):
print("Will open a new bug")
bug = launchpad.bugs.createBug(
title=f"proposed-migration for {package} {version}",
tags=("update-excuse"),
target=f"https://api.launchpad.net/devel/ubuntu/+source/{package}",
description=f"{package} {version} is stuck in -proposed.",
)
task = bug.bug_tasks[0]
task.assignee = launchpad.me
task.lp_save()
print(f"Opening {bug.web_link} in browser")
webbrowser.open(bug.web_link)
return bug
def has_excuses_bugs(launchpad, package):
ubuntu = launchpad.distributions["ubuntu"]
pkg = ubuntu.getSourcePackage(name=package)
if not pkg:
raise ValueError(f"No such source package: {package}")
tasks = pkg.searchTasks(tags=["update-excuse"], order_by=["id"])
bugs = [task.bug for task in tasks]
if not bugs:
return False
if len(bugs) == 1:
print(f"There is 1 open update-excuse bug against {package}")
else:
print(f"There are {len(bugs)} open update-excuse bugs against {package}")
for bug in bugs:
if claim_excuses_bug(launchpad, bug, package):
return True
return True
def main():
parser = ArgumentParser()
parser.add_argument("-l", "--launchpad", dest="launchpad_instance", default="production")
parser.add_argument(
"-v", "--verbose", default=False, action="store_true", help="be more verbose"
)
parser.add_argument("package", nargs="?", help="act on this package only")
args = parser.parse_args()
args.launchpad = Launchpad.login_with("pm-helper", args.launchpad_instance, version="devel")
f = get_url(excuses_url, False)
with lzma.open(f) as lzma_f:
excuses = yaml.load(lzma_f, Loader=yaml.CSafeLoader)
if args.package:
try:
if not has_excuses_bugs(args.launchpad, args.package):
proposed_version = get_proposed_version(excuses, args.package)
if not proposed_version:
print(f"Package {args.package} not found in -proposed.")
sys.exit(1)
create_excuses_bug(args.launchpad, args.package, proposed_version)
except ValueError as e:
sys.stderr.write(f"{e}\n")
else:
pass # for now
if __name__ == "__main__":
sys.exit(main())
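A hedged example of invoking the helper above; the package name and the non-default Launchpad instance are illustrative:

    # Look for (or open) an update-excuse bug for a package stuck in -proposed
    pm-helper somepackage
    # Target a non-production Launchpad instance instead
    pm-helper -l staging somepackage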
pull-debian-ddebs
@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-debian-ddebs -- pull ddeb package files for debian
# Basic usage: pull-debian-ddebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="debian", pull="ddebs")
pull-debian-debdiff
@ -1,4 +1,4 @@
#!/usr/bin/python3 #!/usr/bin/python
# pull-debian-debdiff - find and download a specific version of a Debian # pull-debian-debdiff - find and download a specific version of a Debian
# package and its immediate parent to generate a debdiff. # package and its immediate parent to generate a debdiff.
# #
@ -17,112 +17,96 @@
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR # OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE. # PERFORMANCE OF THIS SOFTWARE.
# pylint: disable=invalid-name import optparse
# pylint: enable=invalid-name
import argparse
import sys import sys
import debian.debian_support
import debian.changelog import debian.changelog
from ubuntutools import getLogger
from ubuntutools.archive import DebianSourcePackage, DownloadError from ubuntutools.archive import DebianSourcePackage, DownloadError
from ubuntutools.config import UDTConfig from ubuntutools.config import UDTConfig
from ubuntutools.version import Version from ubuntutools.logger import Logger
Logger = getLogger()
def previous_version(package, version, distance): def previous_version(package, version, distance):
"Given an (extracted) package, determine the version distance versions ago" "Given an (extracted) package, determine the version distance versions ago"
upver = Version(version).upstream_version upver = debian.debian_support.Version(version).upstream_version
filename = f"{package}-{upver}/debian/changelog" filename = '%s-%s/debian/changelog' % (package, upver)
changelog_file = open(filename, "r", encoding="utf-8") changelog_file = open(filename, 'r')
changelog = debian.changelog.Changelog(changelog_file.read()) changelog = debian.changelog.Changelog(changelog_file.read())
changelog_file.close() changelog_file.close()
seen = 0 seen = 0
for entry in changelog: for entry in changelog:
if entry.distributions == "UNRELEASED": if entry.distributions == 'UNRELEASED':
continue continue
if seen == distance: if seen == distance:
return entry.version.full_version return entry.version.full_version
seen += 1 seen += 1
return False return False
def main(): def main():
parser = argparse.ArgumentParser(usage="%(prog)s [options] <package> <version> [distance]") parser = optparse.OptionParser('%prog [options] <package> <version> '
parser.add_argument( '[distance]')
"-f", parser.add_option('-f', '--fetch',
"--fetch", dest='fetch_only', default=False, action='store_true',
dest="fetch_only", help="Only fetch the source packages, don't diff.")
default=False, parser.add_option('-d', '--debian-mirror', metavar='DEBIAN_MIRROR',
action="store_true", dest='debian_mirror',
help="Only fetch the source packages, don't diff.", help='Preferred Debian mirror '
) '(default: http://httpredir.debian.org/debian)')
parser.add_argument( parser.add_option('-s', '--debsec-mirror', metavar='DEBSEC_MIRROR',
"-d", dest='debsec_mirror',
"--debian-mirror", help='Preferred Debian Security mirror '
metavar="DEBIAN_MIRROR", '(default: http://security.debian.org)')
dest="debian_mirror", parser.add_option('--no-conf',
help="Preferred Debian mirror (default: http://deb.debian.org/debian)", dest='no_conf', default=False, action='store_true',
) help="Don't read config files or environment variables")
parser.add_argument(
"-s",
"--debsec-mirror",
metavar="DEBSEC_MIRROR",
dest="debsec_mirror",
help="Preferred Debian Security mirror (default: http://security.debian.org)",
)
parser.add_argument(
"--no-conf",
dest="no_conf",
default=False,
action="store_true",
help="Don't read config files or environment variables",
)
parser.add_argument("package", help=argparse.SUPPRESS)
parser.add_argument("version", help=argparse.SUPPRESS)
parser.add_argument("distance", default=1, type=int, nargs="?", help=argparse.SUPPRESS)
args = parser.parse_args()
config = UDTConfig(args.no_conf) opts, args = parser.parse_args()
if args.debian_mirror is None: if len(args) < 2:
args.debian_mirror = config.get_value("DEBIAN_MIRROR") parser.error('Must specify package and version')
if args.debsec_mirror is None: elif len(args) > 3:
args.debsec_mirror = config.get_value("DEBSEC_MIRROR") parser.error('Too many arguments')
mirrors = [args.debsec_mirror, args.debian_mirror] package = args[0]
version = args[1]
distance = int(args[2]) if len(args) > 2 else 1
Logger.info("Downloading %s %s", args.package, args.version) config = UDTConfig(opts.no_conf)
if opts.debian_mirror is None:
opts.debian_mirror = config.get_value('DEBIAN_MIRROR')
if opts.debsec_mirror is None:
opts.debsec_mirror = config.get_value('DEBSEC_MIRROR')
mirrors = [opts.debsec_mirror, opts.debian_mirror]
newpkg = DebianSourcePackage(args.package, args.version, mirrors=mirrors) Logger.normal('Downloading %s %s', package, version)
newpkg = DebianSourcePackage(package, version, mirrors=mirrors)
try: try:
newpkg.pull() newpkg.pull()
except DownloadError as e: except DownloadError, e:
Logger.error("Failed to download: %s", str(e)) Logger.error('Failed to download: %s', str(e))
sys.exit(1) sys.exit(1)
newpkg.unpack() newpkg.unpack()
if args.fetch_only: if opts.fetch_only:
sys.exit(0) sys.exit(0)
oldversion = previous_version(args.package, args.version, args.distance) oldversion = previous_version(package, version, distance)
if not oldversion: if not oldversion:
Logger.error("No previous version could be found") Logger.error('No previous version could be found')
sys.exit(1) sys.exit(1)
Logger.info("Downloading %s %s", args.package, oldversion) Logger.normal('Downloading %s %s', package, oldversion)
oldpkg = DebianSourcePackage(args.package, oldversion, mirrors=mirrors) oldpkg = DebianSourcePackage(package, oldversion, mirrors=mirrors)
try: try:
oldpkg.pull() oldpkg.pull()
except DownloadError as e: except DownloadError, e:
Logger.error("Failed to download: %s", str(e)) Logger.error('Failed to download: %s', str(e))
sys.exit(1) sys.exit(1)
Logger.info("file://%s", oldpkg.debdiff(newpkg, diffstat=True)) oldpkg.unpack()
print 'file://' + oldpkg.debdiff(newpkg, diffstat=True)
if __name__ == '__main__':
if __name__ == "__main__":
try: try:
main() main()
except KeyboardInterrupt: except KeyboardInterrupt:
Logger.info("User abort.") Logger.normal('User abort.')
pull-debian-debs
@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-debian-debs -- pull deb package files for debian
# Basic usage: pull-debian-debs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="debian", pull="debs")
pull-debian-source
@ -1,14 +1,142 @@
#!/usr/bin/python3 #!/usr/bin/python
# #
# pull-debian-source -- pull source package files for debian # pull-debian-source -- pull a source package from Launchpad
# Basic usage: pull-debian-source <package name> [version|release] # Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
# Inspired by a tool of the same name by Nathan Handler.
# #
# See pull-pkg # Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# pylint: disable=invalid-name import json
# pylint: enable=invalid-name import optparse
import sys
import urllib2
from ubuntutools.pullpkg import PullPkg from distro_info import DebianDistroInfo, DistroDataOutdated
if __name__ == "__main__": from ubuntutools.archive import DebianSourcePackage, DownloadError, rmadison
PullPkg.main(distro="debian", pull="source") from ubuntutools.config import UDTConfig
from ubuntutools.logger import Logger
def is_suite(version):
"""If version could be considered to be a Debian suite, return the
canonical suite name. Otherwise None
"""
debian_info = DebianDistroInfo()
debian_releases = debian_info.all + ['experimental']
if '-' in version:
release, pocket = version.split('-', 1)
release = debian_info.codename(release, default=release)
if release in debian_releases:
if pocket in ('proposed-updates', 'p-u'):
return (release + '-proposed-updates')
elif pocket == 'security':
return (release + '-security')
else:
release = debian_info.codename(version, default=version)
if release in debian_releases:
return release
return None
def source_package_for(binary, release):
"""Query DDE to find the source package for a particular binary"""
try:
release = DebianDistroInfo().codename(release, default=release)
except DistroDataOutdated, e:
Logger.warn(e)
url = ('http://dde.debian.net/dde/q/udd/dist/d:debian/r:%s/p:%s/?t=json'
% (release, binary))
data = None
try:
data = json.load(urllib2.urlopen(url))['r']
except urllib2.URLError, e:
Logger.error('Unable to retrieve package information from DDE: '
'%s (%s)', url, str(e))
except ValueError, e:
Logger.error('Unable to parse JSON response from DDE: '
'%s (%s)', url, str(e))
if not data:
return None
return data[0]['source']
def main():
usage = 'Usage: %prog <package> [release|version]'
parser = optparse.OptionParser(usage)
parser.add_option('-d', '--download-only',
dest='download_only', default=False, action='store_true',
help='Do not extract the source package')
parser.add_option('-m', '--mirror', metavar='DEBIAN_MIRROR',
dest='debian_mirror',
help='Preferred Debian mirror (default: %s)'
% UDTConfig.defaults['DEBIAN_MIRROR'])
parser.add_option('-s', '--security-mirror', metavar='DEBSEC_MIRROR',
dest='debsec_mirror',
help='Preferred Debian Security mirror (default: %s)'
% UDTConfig.defaults['DEBSEC_MIRROR'])
parser.add_option('--no-conf',
dest='no_conf', default=False, action='store_true',
help="Don't read config files or environment variables")
(options, args) = parser.parse_args()
if not args:
parser.error('Must specify package name')
elif len(args) > 2:
parser.error('Too many arguments. '
'Must only specify package and (optionally) release.')
config = UDTConfig(options.no_conf)
if options.debian_mirror is None:
options.debian_mirror = config.get_value('DEBIAN_MIRROR')
if options.debsec_mirror is None:
options.debsec_mirror = config.get_value('DEBSEC_MIRROR')
package = args[0].lower()
version = args[1] if len(args) > 1 else 'unstable'
component = None
suite = is_suite(version)
if suite is not None:
line = list(rmadison('debian', package, suite, 'source'))
if not line:
source_package = source_package_for(package, suite)
if source_package != None and package != source_package:
package = source_package
line = list(rmadison('debian', package, suite, 'source'))
if not line:
Logger.error('Unable to find %s in Debian suite "%s".', package,
suite)
sys.exit(1)
line = line[-1]
version = line['version']
component = line['component']
Logger.normal('Downloading %s version %s', package, version)
srcpkg = DebianSourcePackage(package, version, component=component,
mirrors=[options.debian_mirror,
options.debsec_mirror])
try:
srcpkg.pull()
except DownloadError, e:
Logger.error('Failed to download: %s', str(e))
sys.exit(1)
if not options.download_only:
srcpkg.unpack()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
Logger.normal('User abort.')
pull-debian-udebs
@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-debian-udebs -- pull udeb package files for debian
# Basic usage: pull-debian-udebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="debian", pull="udebs")
pull-lp-ddebs
@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-lp-ddebs -- pull ddeb package files for ubuntu
# Basic usage: pull-lp-ddebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ubuntu", pull="ddebs")
pull-lp-debs
@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-lp-debs -- pull deb package files for ubuntu
# Basic usage: pull-lp-debs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ubuntu", pull="debs")
pull-lp-source
@ -1,14 +1,148 @@
#!/usr/bin/python3 #!/usr/bin/python
# #
# pull-lp-source -- pull source package files for ubuntu # pull-lp-source -- pull a source package from Launchpad
# Basic usage: pull-lp-source <package name> [version|release] # Basic usage: pull-lp-source <source package> [<release>]
# #
# See pull-pkg # Copyright (C) 2008, Iain Lane <iain@orangesquash.org.uk>,
# 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL for more details.
#
# ##################################################################
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg import json
import os
import sys
import urllib2
from optparse import OptionParser
if __name__ == "__main__": from distro_info import UbuntuDistroInfo, DistroDataOutdated
PullPkg.main(distro="ubuntu", pull="source")
from ubuntutools.archive import UbuntuSourcePackage, DownloadError
from ubuntutools.config import UDTConfig
from ubuntutools.lp.lpapicache import Distribution, Launchpad
from ubuntutools.lp.udtexceptions import (SeriesNotFoundException,
PackageNotFoundException,
PocketDoesNotExistError)
from ubuntutools.logger import Logger
from ubuntutools.misc import split_release_pocket
def source_package_for(binary, release):
"""Query DDE to find the source package for a particular binary
Should really do this with LP, but it's not possible LP: #597041
"""
url = ('http://dde.debian.net/dde/q/udd/dist/d:ubuntu/r:%s/p:%s/?t=json'
% (release, binary))
data = None
try:
data = json.load(urllib2.urlopen(url))['r']
except urllib2.URLError, e:
Logger.error('Unable to retrieve package information from DDE: '
'%s (%s)', url, str(e))
except ValueError, e:
Logger.error('Unable to parse JSON response from DDE: '
'%s (%s)', url, str(e))
if not data:
return None
return data[0]['source']
def main():
usage = "Usage: %prog <package> [release|version]"
opt_parser = OptionParser(usage)
opt_parser.add_option('-d', '--download-only',
dest='download_only', default=False,
action='store_true',
help="Do not extract the source package")
opt_parser.add_option('-m', '--mirror', metavar='UBUNTU_MIRROR',
dest='ubuntu_mirror',
help='Preferred Ubuntu mirror (default: Launchpad)')
opt_parser.add_option('--no-conf',
dest='no_conf', default=False, action='store_true',
help="Don't read config files or environment "
"variables")
(options, args) = opt_parser.parse_args()
if not args:
opt_parser.error("Must specify package name")
config = UDTConfig(options.no_conf)
if options.ubuntu_mirror is None:
options.ubuntu_mirror = config.get_value('UBUNTU_MIRROR')
# Login anonymously to LP
Launchpad.login_anonymously()
package = str(args[0]).lower()
ubuntu_info = UbuntuDistroInfo()
if len(args) > 1: # Custom distribution specified.
version = str(args[1])
else:
try:
version = os.getenv('DIST') or ubuntu_info.devel()
except DistroDataOutdated, e:
Logger.warn("%s\nOr specify a distribution.", e)
sys.exit(1)
component = None
# Release, not package version number:
release = None
pocket = None
try:
(release, pocket) = split_release_pocket(version, default=None)
except PocketDoesNotExistError, e:
pass
if release in ubuntu_info.all:
archive = Distribution('ubuntu').getArchive()
try:
spph = archive.getSourcePackage(package, release, pocket)
except SeriesNotFoundException, e:
Logger.error(str(e))
sys.exit(1)
except PackageNotFoundException, e:
source_package = source_package_for(package, release)
if source_package is not None and source_package != package:
try:
spph = archive.getSourcePackage(source_package, release,
pocket)
package = source_package
except PackageNotFoundException:
Logger.error(str(e))
sys.exit(1)
else:
Logger.error(str(e))
sys.exit(1)
version = spph.getVersion()
component = spph.getComponent()
Logger.normal('Downloading %s version %s', package, version)
srcpkg = UbuntuSourcePackage(package, version, component=component,
mirrors=[options.ubuntu_mirror])
try:
srcpkg.pull()
except DownloadError, e:
Logger.error('Failed to download: %s', str(e))
sys.exit(1)
if not options.download_only:
srcpkg.unpack()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
Logger.normal('User abort.')

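The pull-lp-source shown above on the 0.158 side is the old Python 2 implementation (urllib2, comma-style except clauses). For reference, a minimal Python 3 sketch of the DDE fallback performed by its source_package_for() helper; the dde.debian.net URL and the ['r'][0]['source'] JSON shape are taken from the script, while the function name and error handling here are illustrative:

# Python 3 sketch of the binary-to-source lookup via DDE (see
# source_package_for() above); returns None when nothing is found.
import json
import urllib.error
import urllib.request


def dde_source_package(binary, release):
    url = (f"http://dde.debian.net/dde/q/udd/dist/d:ubuntu/r:{release}"
           f"/p:{binary}/?t=json")
    try:
        with urllib.request.urlopen(url) as response:
            data = json.load(response)["r"]
    except (urllib.error.URLError, ValueError, KeyError):
        return None
    return data[0]["source"] if data else None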
View File

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-lp-udebs -- pull udeb package files for ubuntu
# Basic usage: pull-lp-udebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ubuntu", pull="udebs")

View File

@ -1,32 +0,0 @@
#!/usr/bin/python3
#
# pull-pkg -- pull package files for debian/ubuntu/uca/ppa
# Basic usage: pull-pkg -D distro -p type <package name> [version|release]
#
# Copyright (C) 2008, Iain Lane <iain@orangesquash.org.uk>,
# 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
# 2017-2018, Dan Streetman <ddstreet@canonical.com>
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL for more details.
#
# ##################################################################
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main()

View File

@ -1,15 +0,0 @@
#!/usr/bin/python3
#
# pull-ppa-ddebs -- pull ddeb package files for a Launchpad Personal Package Archive
# Basic usage: pull-ppa-ddebs <package name> <ppa:USER/NAME> [version|release]
# pull-ppa-ddebs --ppa USER/NAME <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ppa", pull="ddebs")

View File

@ -1,15 +0,0 @@
#!/usr/bin/python3
#
# pull-ppa-debs -- pull deb package files for a Launchpad Personal Package Archive
# Basic usage: pull-ppa-debs <package name> <ppa:USER/NAME> [version|release]
# pull-ppa-debs --ppa USER/NAME <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ppa", pull="debs")

View File

@ -1,15 +0,0 @@
#!/usr/bin/python3
#
# pull-ppa-source -- pull source package files for a Launchpad Personal Package Archive
# Basic usage: pull-ppa-source <package name> <ppa:USER/NAME> [version|release]
# pull-ppa-source --ppa USER/NAME <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ppa", pull="source")

View File

@ -1,15 +0,0 @@
#!/usr/bin/python3
#
# pull-ppa-udebs -- pull udeb package files for a Launchpad Personal Package Archive
# Basic usage: pull-ppa-udebs <package name> <ppa:USER/NAME> [version|release]
# pull-ppa-udebs --ppa USER/NAME <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ppa", pull="udebs")

59
pull-revu-source Executable file
View File

@ -0,0 +1,59 @@
#!/usr/bin/perl
# Script Name: pull-revu-source
# Author: Nathan Handler <nhandler@ubuntu.com>
# Usage: pull-revu-source <source package>
# Copyright (C) 2009 Nathan Handler <nhandler@ubuntu.com>
# Based on revupull in kubuntu-dev-tools,
# written by Harald Sitter <apachelogger@ubuntu.com>
# License: GNU General Public License
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# On Debian GNU/Linux systems, the complete text of the GNU General
# Public License can be found in the /usr/share/common-licenses/GPL-3 file.
use warnings;
use strict;
use File::Basename;
use Getopt::Long;
my $REVU = "revu.ubuntuwire.com";
my($package) = lc($ARGV[0]) || usage(2);
my($help)=0;
GetOptions('help' => \$help);
usage(0) if($help);
eval { require LWP::Simple; };
if ($@=~ m#^Can\'t locate LWP/Simple#) {
print(STDERR "Please install libwww-perl.\n");
exit(1);
}
use LWP::Simple;
dget(getURL());
sub getURL {
my($url) = "http://" . $REVU . "/dsc.py?url&package=" . $package;
my($page)=get($url);
die("Could Not Get $url") unless (defined $page);
return $page;
}
sub dget {
my($dsc) = @_;
exec("dget -xu $dsc");
}
sub usage {
my($exit) = @_;
my($name)=basename($0);
print("USAGE: $name [-h] <source package>\n");
exit($exit);
}

View File

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-uca-ddebs -- pull ddeb package files for ubuntu cloud archive
# Basic usage: pull-uca-ddebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="uca", pull="ddebs")

View File

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-uca-debs -- pull deb package files for ubuntu cloud archive
# Basic usage: pull-uca-debs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="uca", pull="debs")

View File

@ -1,14 +1,163 @@
#!/usr/bin/python3 #!/usr/bin/python
# #
# pull-uca-source -- pull source package files for ubuntu cloud archive # pull-uca-source -- pull a source package from Ubuntu Cloud Archive
# Basic usage: pull-uca-source <package name> [version|release] # Basic usage: pull-uca-source <source package> <openstack release> [version]
# #
# See pull-pkg # Copyright (C) 2008, Iain Lane <iain@orangesquash.org.uk>,
# 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
# 2016, Corey Bryant <corey.bryant@ubuntu.com>
# 2016, Dan Streetman <dan.streetman@canonical.com>
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL for more details.
#
# ##################################################################
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg import re
import json
import os
import sys
import urllib2
from optparse import OptionParser
if __name__ == "__main__": from distro_info import UbuntuDistroInfo, DistroDataOutdated
PullPkg.main(distro="uca", pull="source")
from ubuntutools.archive import UbuntuCloudArchiveSourcePackage, DownloadError
from ubuntutools.config import UDTConfig
from ubuntutools.lp.lpapicache import Distribution, Launchpad
from ubuntutools.lp.udtexceptions import (SeriesNotFoundException,
PackageNotFoundException,
PocketDoesNotExistError)
from ubuntutools.logger import Logger
from ubuntutools.misc import split_release_pocket
from lazr.restfulclient.errors import NotFound
from launchpadlib.launchpad import Launchpad as LP
def showOpenstackReleases(uca):
releases = []
for p in uca.ppas:
if re.match("\w*-staging", p.name):
releases.append(re.sub("-staging", "", p.name))
Logger.error("Openstack releases are:\n\t%s", ", ".join(releases))
def getSPPH(lp, archive, package, version=None, series=None, pocket=None, try_binary=True):
params = { 'exact_match': True, 'order_by_date': True, }
if pocket:
params['pocket'] = pocket
if series:
params['distro_series'] = series()
elif version:
params['version'] = version
Logger.normal("checking %s version %s pocket %s", package, version, pocket)
spphs = archive.getPublishedSources(source_name=package, **params)
if spphs:
return spphs[0]
if not try_binary:
return None
# Didn't find any, maybe the package is a binary package name
if series:
del params['distro_series']
archs = lp.load(series().architectures_collection_link).entries
params['distro_arch_series'] = archs[0]['self_link']
bpphs = archive.getPublishedBinaries(binary_name=package, **params)
if bpphs:
bpph_build = lp.load(bpphs[0].build_link)
source_package = bpph_build.source_package_name
return getSPPH(lp, archive, source_package, version, series, pocket,
try_binary=False)
return None
def main():
usage = "Usage: %prog <package> <openstack release> [version]"
opt_parser = OptionParser(usage)
opt_parser.add_option('-d', '--download-only',
dest='download_only', default=False,
action='store_true',
help="Do not extract the source package")
opt_parser.add_option('-m', '--mirror', metavar='OPENSTACK_MIRROR',
dest='openstack_mirror',
help='Preferred Openstack mirror (default: Launchpad)')
opt_parser.add_option('--no-conf',
dest='no_conf', default=False, action='store_true',
help="Don't read config files or environment "
"variables")
(options, args) = opt_parser.parse_args()
if len(args) < 2:
opt_parser.error("Must specify package name and openstack release")
config = UDTConfig(options.no_conf)
if options.openstack_mirror is None:
options.openstack_mirror = config.get_value('OPENSTACK_MIRROR')
mirrors = []
if options.openstack_mirror:
mirrors.append(options.openstack_mirror)
# Login anonymously to LP
Launchpad.login_anonymously()
lp = LP.login_anonymously("pull-uca-source", "production")
uca = lp.people("ubuntu-cloud-archive")
package = str(args[0]).lower()
release = str(args[1]).lower()
version = None
if len(args) > 2:
version = str(args[2])
pocket = None
try:
(release, pocket) = split_release_pocket(release, default=None)
except PocketDoesNotExistError, e:
pass
try:
archive = uca.getPPAByName(name="%s-staging" % release)
except NotFound, e:
Logger.error('Archive does not exist for Openstack release: %s',
release)
showOpenstackReleases(uca)
sys.exit(1)
spph = getSPPH(lp, archive, package, version, pocket=pocket)
if not spph:
Logger.error("Package %s in %s not found.", package, release)
sys.exit(1)
package = spph.source_package_name
version = spph.source_package_version
component = spph.component_name
Logger.normal('Downloading %s version %s component %s', package, version, component)
srcpkg = UbuntuCloudArchiveSourcePackage(release, package, version, component=component,
mirrors=mirrors)
try:
srcpkg.pull()
except DownloadError, e:
Logger.error('Failed to download: %s', str(e))
sys.exit(1)
if not options.download_only:
srcpkg.unpack()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
Logger.normal('User abort.')

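The 0.158 pull-uca-source above likewise predates Python 3; its showOpenstackReleases() helper discovers the available OpenStack releases by listing the ubuntu-cloud-archive team's "<release>-staging" PPAs. A short Python 3 sketch of that discovery, keeping the team name, the uca.ppas collection, and the staging naming convention from the script; the dict-style lp.people[...] access is an assumption about launchpadlib (the script uses the callable form lp.people(...)):

# Python 3 sketch mirroring showOpenstackReleases(): list the OpenStack
# releases that have a "<release>-staging" PPA under ubuntu-cloud-archive.
import re

from launchpadlib.launchpad import Launchpad


def openstack_releases():
    lp = Launchpad.login_anonymously("uca-release-list", "production")
    uca = lp.people["ubuntu-cloud-archive"]
    return sorted(re.sub(r"-staging$", "", ppa.name)
                  for ppa in uca.ppas
                  if re.match(r"\w+-staging$", ppa.name))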
View File

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-uca-udebs -- pull udeb package files for ubuntu cloud archive
# Basic usage: pull-uca-udebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="uca", pull="udebs")

View File

@ -1,6 +0,0 @@
[tool.black]
line-length = 99
[tool.isort]
line_length = 99
profile = "black"

View File

@ -1,4 +1,4 @@
#!/usr/bin/python3 #!/usr/bin/python
# #
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com> # Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
# #
@ -14,21 +14,21 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import argparse
import sys
from collections import defaultdict from collections import defaultdict
import optparse
import re
import sys
import apt import apt
from distro_info import UbuntuDistroInfo from distro_info import UbuntuDistroInfo
from ubuntutools import getLogger
from ubuntutools.config import UDTConfig from ubuntutools.config import UDTConfig
from ubuntutools.lp.lpapicache import Distribution, Launchpad from ubuntutools.lp.lpapicache import Launchpad, Distribution
from ubuntutools.lp.udtexceptions import PackageNotFoundException from ubuntutools.lp.udtexceptions import PackageNotFoundException
from ubuntutools.question import EditBugReport, YesNoQuestion, confirmation_prompt from ubuntutools.logger import Logger
from ubuntutools.rdepends import RDependsException, query_rdepends from ubuntutools.question import (YesNoQuestion, EditBugReport,
confirmation_prompt)
Logger = getLogger() from ubuntutools.rdepends import query_rdepends, RDependsException
class DestinationException(Exception): class DestinationException(Exception):
@ -38,14 +38,16 @@ class DestinationException(Exception):
def determine_destinations(source, destination): def determine_destinations(source, destination):
ubuntu_info = UbuntuDistroInfo() ubuntu_info = UbuntuDistroInfo()
if destination is None: if destination is None:
destination = ubuntu_info.lts() destination = ubuntu_info.stable()
if source not in ubuntu_info.all: if source not in ubuntu_info.all:
raise DestinationException(f"Source release {source} does not exist") raise DestinationException("Source release %s does not exist" % source)
if destination not in ubuntu_info.all: if destination not in ubuntu_info.all:
raise DestinationException(f"Destination release {destination} does not exist") raise DestinationException("Destination release %s does not exist"
% destination)
if destination not in ubuntu_info.supported(): if destination not in ubuntu_info.supported():
raise DestinationException(f"Destination release {destination} is not supported") raise DestinationException("Destination release %s is not supported"
% destination)
found = False found = False
destinations = [] destinations = []
@ -73,37 +75,35 @@ def determine_destinations(source, destination):
def disclaimer(): def disclaimer():
print( print ("Ubuntu's backports are not for fixing bugs in stable releases, "
"Ubuntu's backports are not for fixing bugs in stable releases, " "but for bringing new features to older, stable releases.")
"but for bringing new features to older, stable releases.\n" print ("See https://wiki.ubuntu.com/UbuntuBackports for the Ubuntu "
"See https://wiki.ubuntu.com/UbuntuBackports for the Ubuntu " "Backports policy and processes.")
"Backports policy and processes.\n" print ("See https://wiki.ubuntu.com/StableReleaseUpdates for the process "
"See https://wiki.ubuntu.com/StableReleaseUpdates for the process " "for fixing bugs in stable releases.")
"for fixing bugs in stable releases."
)
confirmation_prompt() confirmation_prompt()
def check_existing(package): def check_existing(package, destinations):
"""Search for possible existing bug reports""" """Search for possible existing bug reports"""
distro = Distribution("ubuntu") # The LP bug search is indexed, not substring:
srcpkg = distro.getSourcePackage(name=package.getPackageName()) query = re.findall(r'[a-z]+', package)
bugs = []
bugs = srcpkg.searchTasks( for release in destinations:
omit_duplicates=True, project = Launchpad.projects[release + '-backports']
search_text="[BPO]", bugs += project.searchTasks(omit_duplicates=True,
status=["Incomplete", "New", "Confirmed", "Triaged", "In Progress", "Fix Committed"], search_text=query,
) status=["Incomplete", "New", "Confirmed",
"Triaged", "In Progress",
"Fix Committed"])
if not bugs: if not bugs:
return return
Logger.info( Logger.normal("There are existing bug reports that look similar to your "
"There are existing bug reports that look similar to your " "request. Please check before continuing:")
"request. Please check before continuing:"
)
for bug in sorted([bug_task.bug for bug_task in bugs], key=lambda bug: bug.id): for bug in sorted(set(bug_task.bug for bug_task in bugs)):
Logger.info(" * LP: #%-7i: %s %s", bug.id, bug.title, bug.web_link) Logger.normal(" * LP: #%-7i: %s %s", bug.id, bug.title, bug.web_link)
confirmation_prompt() confirmation_prompt()
@ -113,9 +113,9 @@ def find_rdepends(releases, published_binaries):
# We want to display every published binary, even if it has no rdepends # We want to display every published binary, even if it has no rdepends
for binpkg in published_binaries: for binpkg in published_binaries:
intermediate[binpkg] # pylint: disable=pointless-statement intermediate[binpkg]
for arch in ("any", "source"): for arch in ('any', 'source'):
for release in releases: for release in releases:
for binpkg in published_binaries: for binpkg in published_binaries:
try: try:
@ -123,25 +123,27 @@ def find_rdepends(releases, published_binaries):
except RDependsException: except RDependsException:
# Not published? TODO: Check # Not published? TODO: Check
continue continue
for relationship, rdeps in raw_rdeps.items(): for relationship, rdeps in raw_rdeps.iteritems():
for rdep in rdeps: for rdep in rdeps:
# Ignore circular deps: # Ignore circular deps:
if rdep["Package"] in published_binaries: if rdep['Package'] in published_binaries:
continue continue
# arch==any queries return Reverse-Build-Deps: # arch==any queries return Reverse-Build-Deps:
if arch == "any" and rdep.get("Architectures", []) == ["source"]: if (arch == 'any' and rdep.get('Architectures', [])
== ['source']):
continue continue
intermediate[binpkg][rdep["Package"]].append((release, relationship)) intermediate[binpkg][rdep['Package']] \
.append((release, relationship))
output = [] output = []
for binpkg, rdeps in intermediate.items(): for binpkg, rdeps in intermediate.iteritems():
output += ["", binpkg, "-" * len(binpkg)] output += ['', binpkg, '-' * len(binpkg)]
for pkg, appearences in rdeps.items(): for pkg, appearences in rdeps.iteritems():
output += [f"* {pkg}"] output += ['* %s' % pkg]
for release, relationship in appearences: for release, relationship in appearences:
output += [f" [ ] {release} ({relationship})"] output += [' [ ] %s (%s)' % (release, relationship)]
found_any = sum(len(rdeps) for rdeps in intermediate.values()) found_any = sum(len(rdeps) for rdeps in intermediate.itervalues())
if found_any: if found_any:
output = [ output = [
"Reverse dependencies:", "Reverse dependencies:",
@ -154,8 +156,8 @@ def find_rdepends(releases, published_binaries):
"package currently in the release still works with the new " "package currently in the release still works with the new "
"%(package)s installed. " "%(package)s installed. "
"Reverse- Recommends, Suggests, and Enhances don't need to be " "Reverse- Recommends, Suggests, and Enhances don't need to be "
"tested, and are listed for completeness-sake.", "tested, and are listed for completeness-sake."
] + output ] + output
else: else:
output = ["No reverse dependencies"] output = ["No reverse dependencies"]
@ -163,164 +165,147 @@ def find_rdepends(releases, published_binaries):
def locate_package(package, distribution): def locate_package(package, distribution):
archive = Distribution("ubuntu").getArchive() archive = Distribution('ubuntu').getArchive()
try: for pass_ in ('source', 'binary'):
package_spph = archive.getSourcePackage(package, distribution) try:
return package_spph package_spph = archive.getSourcePackage(package, distribution)
except PackageNotFoundException as e: return package_spph
except PackageNotFoundException, e:
if pass_ == 'binary':
Logger.error(str(e))
sys.exit(1)
try: try:
apt_pkg = apt.Cache()[package] apt_pkg = apt.Cache()[package]
except KeyError: except KeyError:
Logger.error(str(e)) continue
sys.exit(1)
package = apt_pkg.candidate.source_name package = apt_pkg.candidate.source_name
Logger.info( Logger.normal("Binary package specified, considering its source "
"Binary package specified, considering its source package instead: %s", package "package instead: %s", package)
)
return None
def request_backport(package_spph, source, destinations): def request_backport(package_spph, source, destinations):
published_binaries = set() published_binaries = set()
for bpph in package_spph.getBinaries(): for bpph in package_spph.getBinaries():
published_binaries.add(bpph.getPackageName()) published_binaries.add(bpph.getPackageName())
if not published_binaries: if not published_binaries:
Logger.error( Logger.error("%s (%s) has no published binaries in %s. ",
"%s (%s) has no published binaries in %s. ", package_spph.getPackageName(), package_spph.getVersion(),
package_spph.getPackageName(), source)
package_spph.getVersion(), Logger.normal("Is it stuck in bin-NEW? It can't be backported until "
source, "the binaries have been accepted.")
)
Logger.info(
"Is it stuck in bin-NEW? It can't be backported until "
"the binaries have been accepted."
)
sys.exit(1) sys.exit(1)
testing = ["[Testing]", ""] testing = []
testing += ["You can test-build the backport in your PPA with "
"backportpackage:"]
testing += ["$ backportpackage -u ppa:<lp username>/<ppa name> "
"-s %s -d %s %s"
% (source, dest, package_spph.getPackageName())
for dest in destinations]
testing += [""]
for dest in destinations: for dest in destinations:
testing += [f" * {dest.capitalize()}:"] testing += ['* %s:' % dest]
testing += [" [ ] Package builds without modification"] testing += ["[ ] Package builds without modification"]
testing += [f" [ ] {binary} installs cleanly and runs" for binary in published_binaries] testing += ["[ ] %s installs cleanly and runs" % binary
for binary in published_binaries]
subst = { subst = {
"package": package_spph.getPackageName(), 'package': package_spph.getPackageName(),
"version": package_spph.getVersion(), 'version': package_spph.getVersion(),
"component": package_spph.getComponent(), 'component': package_spph.getComponent(),
"source": package_spph.getSeriesAndPocket(), 'source': package_spph.getSeriesAndPocket(),
"destinations": ", ".join(destinations), 'destinations': ', '.join(destinations),
} }
subject = "[BPO] %(package)s %(version)s to %(destinations)s" % subst subject = ("Please backport %(package)s %(version)s (%(component)s) "
body = ( "from %(source)s" % subst)
"\n".join( body = ('\n'.join(
[ [
"[Impact]", "Please backport %(package)s %(version)s (%(component)s) "
"", "from %(source)s to %(destinations)s.",
" * Justification for backporting the new version to the stable release.", "",
"", "Reason for the backport:",
"[Scope]", "========================",
"", ">>> Enter your reasoning here <<<",
" * List the Ubuntu release you will backport from," "",
" and the specific package version.", "Testing:",
"", "========",
" * List the Ubuntu release(s) you will backport to.", "Mark off items in the checklist [X] as you test them, "
"", "but please leave the checklist so that backporters can quickly "
"[Other Info]", "evaluate the state of testing.",
"", ""
" * Anything else you think is useful to include",
"",
] ]
+ testing + testing
+ [""] + [""]
+ find_rdepends(destinations, published_binaries) + find_rdepends(destinations, published_binaries)
+ [""] + [""]
) ) % subst)
% subst
)
editor = EditBugReport(subject, body) editor = EditBugReport(subject, body)
editor.edit() editor.edit()
subject, body = editor.get_report() subject, body = editor.get_report()
Logger.info("The final report is:\nSummary: %s\nDescription:\n%s\n", subject, body) Logger.normal('The final report is:\nSummary: %s\nDescription:\n%s\n',
subject, body)
if YesNoQuestion().ask("Request this backport", "yes") == "no": if YesNoQuestion().ask("Request this backport", "yes") == "no":
sys.exit(1) sys.exit(1)
distro = Distribution("ubuntu") targets = [Launchpad.projects['%s-backports' % destination]
pkgname = package_spph.getPackageName() for destination in destinations]
bug = Launchpad.bugs.createBug(title=subject, description=body,
target=targets[0])
for target in targets[1:]:
bug.addTask(target=target)
bug = Launchpad.bugs.createBug( Logger.normal("Backport request filed as %s", bug.web_link)
title=subject, description=body, target=distro.getSourcePackage(name=pkgname)
)
bug.subscribe(person=Launchpad.people["ubuntu-backporters"])
for dest in destinations:
series = distro.getSeries(dest)
try:
bug.addTask(target=series.getSourcePackage(name=pkgname))
except Exception: # pylint: disable=broad-except
break
Logger.info("Backport request filed as %s", bug.web_link)
def main(): def main():
parser = argparse.ArgumentParser(usage="%(prog)s [options] package") parser = optparse.OptionParser('%prog [options] package')
parser.add_argument( parser.add_option('-d', '--destination', metavar='DEST',
"-d", help='Backport to DEST release and necessary '
"--destination", 'intermediate releases '
metavar="DEST", '(default: current stable release)')
help="Backport to DEST release and necessary " parser.add_option('-s', '--source', metavar='SOURCE',
"intermediate releases " help='Backport from SOURCE release '
"(default: current LTS release)", '(default: current devel release)')
) parser.add_option('-l', '--lpinstance', metavar='INSTANCE', default=None,
parser.add_argument( help='Launchpad instance to connect to '
"-s", '(default: production).')
"--source", parser.add_option('--no-conf', action='store_true',
metavar="SOURCE", dest='no_conf', default=False,
help="Backport from SOURCE release (default: current devel release)", help="Don't read config files or environment variables")
) options, args = parser.parse_args()
parser.add_argument(
"-l",
"--lpinstance",
metavar="INSTANCE",
default=None,
help="Launchpad instance to connect to (default: production).",
)
parser.add_argument(
"--no-conf",
action="store_true",
dest="no_conf",
default=False,
help="Don't read config files or environment variables",
)
parser.add_argument("package", help=argparse.SUPPRESS)
args = parser.parse_args()
config = UDTConfig(args.no_conf) if len(args) != 1:
parser.error("One (and only one) package must be specified")
package = args[0]
if args.lpinstance is None: config = UDTConfig(options.no_conf)
args.lpinstance = config.get_value("LPINSTANCE")
Launchpad.login(args.lpinstance)
if args.source is None: if options.lpinstance is None:
args.source = Distribution("ubuntu").getDevelopmentSeries().name options.lpinstance = config.get_value('LPINSTANCE')
Launchpad.login(options.lpinstance)
if options.source is None:
options.source = Distribution('ubuntu').getDevelopmentSeries().name
try: try:
destinations = determine_destinations(args.source, args.destination) destinations = determine_destinations(options.source,
except DestinationException as e: options.destination)
except DestinationException, e:
Logger.error(str(e)) Logger.error(str(e))
sys.exit(1) sys.exit(1)
disclaimer() disclaimer()
package_spph = locate_package(args.package, args.source) check_existing(package, destinations)
check_existing(package_spph) package_spph = locate_package(package, options.source)
request_backport(package_spph, args.source, destinations) request_backport(package_spph, options.source, destinations)
if __name__ == "__main__": if __name__ == '__main__':
main() main()

View File

@ -1,4 +1,4 @@
#!/usr/bin/python3 #!/usr/bin/python
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
# (C) 2007 Canonical Ltd., Steve Kowalik # (C) 2007 Canonical Ltd., Steve Kowalik
@ -26,217 +26,195 @@
# #
# ################################################################## # ##################################################################
import argparse import optparse
import os import os
import sys import sys
from debian.changelog import Version
from distro_info import UbuntuDistroInfo from distro_info import UbuntuDistroInfo
from ubuntutools import getLogger
from ubuntutools.config import UDTConfig, ubu_email from ubuntutools.config import UDTConfig, ubu_email
from ubuntutools.lp import udtexceptions from ubuntutools.lp import udtexceptions
from ubuntutools.misc import require_utf8 from ubuntutools.misc import require_utf8
from ubuntutools.question import EditBugReport, confirmation_prompt from ubuntutools.question import confirmation_prompt, EditBugReport
from ubuntutools.version import Version
Logger = getLogger()
# #
# entry point # entry point
# #
def main(): def main():
# Our usage options. # Our usage options.
usage = "%(prog)s [options] <source package> [<target release> [base version]]" usage = ('Usage: %prog [options] '
parser = argparse.ArgumentParser(usage=usage) '<source package> [<target release> [base version]]')
parser = optparse.OptionParser(usage)
parser.add_argument( parser.add_option('-d', type='string',
"-d", dest="dist", default="unstable", help="Debian distribution to sync from." dest='dist', default='unstable',
) help='Debian distribution to sync from.')
parser.add_argument( parser.add_option('-k', type='string',
"-k", dest='keyid', default=None,
dest="keyid", help='GnuPG key ID to use for signing report '
default=None, '(only used when emailing the sync request).')
help="GnuPG key ID to use for signing report " parser.add_option('-n', action='store_true',
"(only used when emailing the sync request).", dest='newpkg', default=False,
) help='Whether package to sync is a new package in '
parser.add_argument( 'Ubuntu.')
"-n", parser.add_option('--email', action='store_true', default=False,
action="store_true", help='Use a PGP-signed email for filing the sync '
dest="newpkg", 'request, rather than the LP API.')
default=False, parser.add_option('--lp', dest='deprecated_lp_flag',
help="Whether package to sync is a new package in Ubuntu.", action='store_true', default=False,
) help=optparse.SUPPRESS_HELP)
parser.add_argument( parser.add_option('-l', '--lpinstance', metavar='INSTANCE',
"--email", dest='lpinstance', default=None,
action="store_true", help='Launchpad instance to connect to '
default=False, '(default: production).')
help="Use a PGP-signed email for filing the sync request, rather than the LP API.", parser.add_option('-s', action='store_true',
) dest='sponsorship', default=False,
parser.add_argument( help='Force sponsorship')
"--lp", parser.add_option('-C', action='store_true',
dest="deprecated_lp_flag", dest='missing_changelog_ok', default=False,
action="store_true", help='Allow changelog to be manually filled in '
default=False, 'when missing')
help=argparse.SUPPRESS, parser.add_option('-e', action='store_true',
) dest='ffe', default=False,
parser.add_argument( help='Use this after FeatureFreeze for non-bug fix '
"-l", 'syncs, changes default subscription to the '
"--lpinstance", 'appropriate release team.')
metavar="INSTANCE", parser.add_option('--no-conf', action='store_true',
dest="lpinstance", dest='no_conf', default=False,
default=None, help="Don't read config files or environment variables")
help="Launchpad instance to connect to (default: production).",
) (options, args) = parser.parse_args()
parser.add_argument(
"-s", action="store_true", dest="sponsorship", default=False, help="Force sponsorship" if not len(args):
) parser.print_help()
parser.add_argument( sys.exit(1)
"-C",
action="store_true",
dest="missing_changelog_ok",
default=False,
help="Allow changelog to be manually filled in when missing",
)
parser.add_argument(
"-e",
action="store_true",
dest="ffe",
default=False,
help="Use this after FeatureFreeze for non-bug fix "
"syncs, changes default subscription to the "
"appropriate release team.",
)
parser.add_argument(
"--no-conf",
action="store_true",
dest="no_conf",
default=False,
help="Don't read config files or environment variables",
)
parser.add_argument("source_package", help=argparse.SUPPRESS)
parser.add_argument("release", nargs="?", help=argparse.SUPPRESS)
parser.add_argument("base_version", nargs="?", type=Version, help=argparse.SUPPRESS)
args = parser.parse_args()
require_utf8() require_utf8()
config = UDTConfig(args.no_conf) config = UDTConfig(options.no_conf)
if args.deprecated_lp_flag: if options.deprecated_lp_flag:
Logger.info("The --lp flag is now default, ignored.") print "The --lp flag is now default, ignored."
if args.email: if options.email:
args.lpapi = False options.lpapi = False
else: else:
args.lpapi = config.get_value("USE_LPAPI", default=True, boolean=True) options.lpapi = config.get_value('USE_LPAPI', default=True,
if args.lpinstance is None: boolean=True)
args.lpinstance = config.get_value("LPINSTANCE") if options.lpinstance is None:
options.lpinstance = config.get_value('LPINSTANCE')
if args.keyid is None: if options.keyid is None:
args.keyid = config.get_value("KEYID") options.keyid = config.get_value('KEYID')
if not args.lpapi: if not options.lpapi:
if args.lpinstance == "production": if options.lpinstance == 'production':
bug_mail_domain = "bugs.launchpad.net" bug_mail_domain = 'bugs.launchpad.net'
elif args.lpinstance == "staging": elif options.lpinstance == 'staging':
bug_mail_domain = "bugs.staging.launchpad.net" bug_mail_domain = 'bugs.staging.launchpad.net'
else: else:
Logger.error("Error: Unknown launchpad instance: %s", args.lpinstance) print >> sys.stderr, ('Error: Unknown launchpad instance: %s'
% options.lpinstance)
sys.exit(1) sys.exit(1)
mailserver_host = config.get_value( mailserver_host = config.get_value('SMTP_SERVER',
"SMTP_SERVER", default=None, compat_keys=["UBUSMTP", "DEBSMTP"] default=None,
) compat_keys=['UBUSMTP', 'DEBSMTP'])
if not args.lpapi and not mailserver_host: if not options.lpapi and not mailserver_host:
try: try:
import DNS # pylint: disable=import-outside-toplevel import DNS
DNS.DiscoverNameServers() DNS.DiscoverNameServers()
mxlist = DNS.mxlookup(bug_mail_domain) mxlist = DNS.mxlookup(bug_mail_domain)
firstmx = mxlist[0] firstmx = mxlist[0]
mailserver_host = firstmx[1] mailserver_host = firstmx[1]
except ImportError: except ImportError:
Logger.error("Please install python-dns to support Launchpad mail server lookup.") print >> sys.stderr, ('Please install python-dns to support '
'Launchpad mail server lookup.')
sys.exit(1) sys.exit(1)
mailserver_port = config.get_value( mailserver_port = config.get_value('SMTP_PORT', default=25,
"SMTP_PORT", default=25, compat_keys=["UBUSMTP_PORT", "DEBSMTP_PORT"] compat_keys=['UBUSMTP_PORT',
) 'DEBSMTP_PORT'])
mailserver_user = config.get_value("SMTP_USER", compat_keys=["UBUSMTP_USER", "DEBSMTP_USER"]) mailserver_user = config.get_value('SMTP_USER',
mailserver_pass = config.get_value("SMTP_PASS", compat_keys=["UBUSMTP_PASS", "DEBSMTP_PASS"]) compat_keys=['UBUSMTP_USER',
'DEBSMTP_USER'])
mailserver_pass = config.get_value('SMTP_PASS',
compat_keys=['UBUSMTP_PASS',
'DEBSMTP_PASS'])
# import the needed requestsync module # import the needed requestsync module
# pylint: disable=import-outside-toplevel if options.lpapi:
if args.lpapi: from ubuntutools.requestsync.lp import (check_existing_reports,
get_debian_srcpkg,
get_ubuntu_srcpkg,
get_ubuntu_delta_changelog,
need_sponsorship, post_bug)
from ubuntutools.lp.lpapicache import Distribution, Launchpad from ubuntutools.lp.lpapicache import Distribution, Launchpad
from ubuntutools.requestsync.lp import (
check_existing_reports,
get_debian_srcpkg,
get_ubuntu_delta_changelog,
get_ubuntu_srcpkg,
need_sponsorship,
post_bug,
)
# See if we have LP credentials and exit if we don't - # See if we have LP credentials and exit if we don't -
# cannot continue in this case # cannot continue in this case
try: try:
# devel for changelogUrl() # devel for changelogUrl()
Launchpad.login(service=args.lpinstance, api_version="devel") Launchpad.login(service=options.lpinstance, api_version='devel')
except IOError: except IOError:
sys.exit(1) sys.exit(1)
else: else:
from ubuntutools.requestsync.mail import ( from ubuntutools.requestsync.mail import (check_existing_reports,
check_existing_reports, get_debian_srcpkg,
get_debian_srcpkg, get_ubuntu_srcpkg,
get_ubuntu_delta_changelog, get_ubuntu_delta_changelog,
get_ubuntu_srcpkg, mail_bug, need_sponsorship)
mail_bug, if not any(x in os.environ for x in ('UBUMAIL', 'DEBEMAIL', 'EMAIL')):
need_sponsorship, print >> sys.stderr, (
) 'E: The environment variable UBUMAIL, DEBEMAIL or EMAIL needs '
'to be set to let this script mail the sync request.')
if not any(x in os.environ for x in ("UBUMAIL", "DEBEMAIL", "EMAIL")):
Logger.error(
"The environment variable UBUMAIL, DEBEMAIL or EMAIL needs "
"to be set to let this script mail the sync request."
)
sys.exit(1) sys.exit(1)
newsource = args.newpkg newsource = options.newpkg
sponsorship = args.sponsorship sponsorship = options.sponsorship
distro = args.dist distro = options.dist
ffe = args.ffe ffe = options.ffe
lpapi = args.lpapi lpapi = options.lpapi
need_interaction = False need_interaction = False
srcpkg = args.source_package force_base_version = None
srcpkg = args[0]
if not args.release: if len(args) == 1:
if lpapi: if lpapi:
args.release = Distribution("ubuntu").getDevelopmentSeries().name release = Distribution('ubuntu').getDevelopmentSeries().name
else: else:
ubu_info = UbuntuDistroInfo() ubu_info = UbuntuDistroInfo()
args.release = ubu_info.devel() release = ubu_info.devel()
Logger.warning("Target release missing - assuming %s", args.release) print >> sys.stderr, 'W: Target release missing - assuming %s' % release
elif len(args) == 2:
release = args[1]
elif len(args) == 3:
release = args[1]
force_base_version = Version(args[2])
else:
print >> sys.stderr, 'E: Too many arguments.'
parser.print_help()
sys.exit(1)
# Get the current Ubuntu source package # Get the current Ubuntu source package
try: try:
ubuntu_srcpkg = get_ubuntu_srcpkg(srcpkg, args.release, "Proposed") ubuntu_srcpkg = get_ubuntu_srcpkg(srcpkg, release, 'Proposed')
ubuntu_version = Version(ubuntu_srcpkg.getVersion()) ubuntu_version = Version(ubuntu_srcpkg.getVersion())
ubuntu_component = ubuntu_srcpkg.getComponent() ubuntu_component = ubuntu_srcpkg.getComponent()
newsource = False # override the -n flag newsource = False # override the -n flag
except udtexceptions.PackageNotFoundException: except udtexceptions.PackageNotFoundException:
ubuntu_srcpkg = None ubuntu_srcpkg = None
ubuntu_version = Version("~") ubuntu_version = Version('~')
ubuntu_component = None # Set after getting the Debian info ubuntu_component = None # Set after getting the Debian info
if not newsource: if not newsource:
Logger.info("'%s' doesn't exist in 'Ubuntu %s'.", srcpkg, args.release) print ("'%s' doesn't exist in 'Ubuntu %s'.\n"
Logger.info("Do you want to sync a new package?") "Do you want to sync a new package?"
% (srcpkg, release))
confirmation_prompt() confirmation_prompt()
newsource = True newsource = True
except udtexceptions.SeriesNotFoundException as error: except udtexceptions.SeriesNotFoundException, error:
Logger.error(error) print >> sys.stderr, "E: %s" % error
sys.exit(1) sys.exit(1)
# Get the requested Debian source package # Get the requested Debian source package
@ -244,107 +222,100 @@ def main():
debian_srcpkg = get_debian_srcpkg(srcpkg, distro) debian_srcpkg = get_debian_srcpkg(srcpkg, distro)
debian_version = Version(debian_srcpkg.getVersion()) debian_version = Version(debian_srcpkg.getVersion())
debian_component = debian_srcpkg.getComponent() debian_component = debian_srcpkg.getComponent()
except udtexceptions.PackageNotFoundException as error: except udtexceptions.PackageNotFoundException, error:
Logger.error(error) print >> sys.stderr, "E: %s" % error
sys.exit(1) sys.exit(1)
except udtexceptions.SeriesNotFoundException as error: except udtexceptions.SeriesNotFoundException, error:
Logger.error(error) print >> sys.stderr, "E: %s" % error
sys.exit(1) sys.exit(1)
if ubuntu_component is None: if ubuntu_component is None:
if debian_component == "main": if debian_component == 'main':
ubuntu_component = "universe" ubuntu_component = 'universe'
else: else:
ubuntu_component = "multiverse" ubuntu_component = 'multiverse'
# Stop if Ubuntu has already the version from Debian or a newer version # Stop if Ubuntu has already the version from Debian or a newer version
if (ubuntu_version >= debian_version) and args.lpapi: if (ubuntu_version >= debian_version) and options.lpapi:
# try rmadison # try rmadison
import ubuntutools.requestsync.mail # pylint: disable=import-outside-toplevel import ubuntutools.requestsync.mail
try: try:
debian_srcpkg = ubuntutools.requestsync.mail.get_debian_srcpkg(srcpkg, distro) debian_srcpkg = ubuntutools.requestsync.mail.get_debian_srcpkg(
srcpkg, distro)
debian_version = Version(debian_srcpkg.getVersion()) debian_version = Version(debian_srcpkg.getVersion())
debian_component = debian_srcpkg.getComponent() debian_component = debian_srcpkg.getComponent()
except udtexceptions.PackageNotFoundException as error: except udtexceptions.PackageNotFoundException, error:
Logger.error(error) print >> sys.stderr, "E: %s" % error
sys.exit(1) sys.exit(1)
if ubuntu_version == debian_version: if ubuntu_version == debian_version:
Logger.error( print >> sys.stderr, ('E: The versions in Debian and Ubuntu are the '
"The versions in Debian and Ubuntu are the same already (%s). Aborting.", 'same already (%s). Aborting.'
ubuntu_version, % ubuntu_version)
)
sys.exit(1) sys.exit(1)
if ubuntu_version > debian_version: if ubuntu_version > debian_version:
Logger.error( print >> sys.stderr, ('E: The version in Ubuntu (%s) is newer than '
"The version in Ubuntu (%s) is newer than the version in Debian (%s). Aborting.", 'the version in Debian (%s). Aborting.'
ubuntu_version, % (ubuntu_version, debian_version))
debian_version,
)
sys.exit(1) sys.exit(1)
# -s flag not specified - check if we do need sponsorship # -s flag not specified - check if we do need sponsorship
if not sponsorship: if not sponsorship:
sponsorship = need_sponsorship(srcpkg, ubuntu_component, args.release) sponsorship = need_sponsorship(srcpkg, ubuntu_component, release)
if not sponsorship and not ffe: if not sponsorship and not ffe:
Logger.error( print >> sys.stderr, ('Consider using syncpackage(1) for syncs that '
"Consider using syncpackage(1) for syncs that " 'do not require feature freeze exceptions.')
"do not require feature freeze exceptions."
)
# Check for existing package reports # Check for existing package reports
if not newsource: if not newsource:
check_existing_reports(srcpkg) check_existing_reports(srcpkg)
# Generate bug report # Generate bug report
pkg_to_sync = ( pkg_to_sync = ('%s %s (%s) from Debian %s (%s)'
f"{srcpkg} {debian_version} ({ubuntu_component})" % (srcpkg, debian_version, ubuntu_component,
f" from Debian {distro} ({debian_component})" distro, debian_component))
) title = "Sync %s" % pkg_to_sync
title = f"Sync {pkg_to_sync}"
if ffe: if ffe:
title = "FFe: " + title title = "FFe: " + title
report = f"Please sync {pkg_to_sync}\n\n" report = "Please sync %s\n\n" % pkg_to_sync
if "ubuntu" in str(ubuntu_version): if 'ubuntu' in str(ubuntu_version):
need_interaction = True need_interaction = True
Logger.info("Changes have been made to the package in Ubuntu.") print ('Changes have been made to the package in Ubuntu.\n'
Logger.info("Please edit the report and give an explanation.") 'Please edit the report and give an explanation.\n'
Logger.info("Not saving the report file will abort the request.") 'Not saving the report file will abort the request.')
report += ( report += (u'Explanation of the Ubuntu delta and why it can be '
f"Explanation of the Ubuntu delta and why it can be dropped:\n" u'dropped:\n%s\n>>> ENTER_EXPLANATION_HERE <<<\n\n'
f"{get_ubuntu_delta_changelog(ubuntu_srcpkg)}\n>>> ENTER_EXPLANATION_HERE <<<\n\n" % get_ubuntu_delta_changelog(ubuntu_srcpkg))
)
if ffe: if ffe:
need_interaction = True need_interaction = True
Logger.info("To approve FeatureFreeze exception, you need to state") print ('To approve FeatureFreeze exception, you need to state\n'
Logger.info("the reason why you feel it is necessary.") 'the reason why you feel it is necessary.\n'
Logger.info("Not saving the report file will abort the request.") 'Not saving the report file will abort the request.')
report += "Explanation of FeatureFreeze exception:\n>>> ENTER_EXPLANATION_HERE <<<\n\n" report += ('Explanation of FeatureFreeze exception:\n'
'>>> ENTER_EXPLANATION_HERE <<<\n\n')
if need_interaction: if need_interaction:
confirmation_prompt() confirmation_prompt()
base_version = args.base_version or ubuntu_version base_version = force_base_version or ubuntu_version
if newsource: if newsource:
report += "All changelog entries:\n\n" report += 'All changelog entries:\n\n'
else: else:
report += f"Changelog entries since current {args.release} version {ubuntu_version}:\n\n" report += ('Changelog entries since current %s version %s:\n\n'
% (release, ubuntu_version))
changelog = debian_srcpkg.getChangelog(since_version=base_version) changelog = debian_srcpkg.getChangelog(since_version=base_version)
if not changelog: if not changelog:
if not args.missing_changelog_ok: if not options.missing_changelog_ok:
Logger.error( print >> sys.stderr, ("E: Did not retrieve any changelog entries. "
"Did not retrieve any changelog entries. " "Do you need to specify '-C'? "
"Do you need to specify '-C'? " "Was the package recently uploaded? (check "
"Was the package recently uploaded? (check " "http://packages.debian.org/changelogs/)")
"http://packages.debian.org/changelogs/)"
)
sys.exit(1) sys.exit(1)
else: else:
need_interaction = True need_interaction = True
@ -355,51 +326,37 @@ def main():
editor.edit(optional=not need_interaction) editor.edit(optional=not need_interaction)
title, report = editor.get_report() title, report = editor.get_report()
if "XXX FIXME" in report: if 'XXX FIXME' in report:
Logger.error( print >> sys.stderr, ("E: changelog boilerplate found in report, "
"changelog boilerplate found in report, " "please manually add changelog when using '-C'")
"please manually add changelog when using '-C'"
)
sys.exit(1) sys.exit(1)
# bug status and bug subscriber # bug status and bug subscriber
status = "confirmed" status = 'confirmed'
subscribe = "ubuntu-archive" subscribe = 'ubuntu-archive'
if sponsorship: if sponsorship:
status = "new" status = 'new'
subscribe = "ubuntu-sponsors" subscribe = 'ubuntu-sponsors'
if ffe: if ffe:
status = "new" status = 'new'
subscribe = "ubuntu-release" subscribe = 'ubuntu-release'
srcpkg = None if newsource else srcpkg srcpkg = not newsource and srcpkg or None
if lpapi: if lpapi:
# Map status to the values expected by LP API # Map status to the values expected by LP API
mapping = {"new": "New", "confirmed": "Confirmed"} mapping = {'new': 'New', 'confirmed': 'Confirmed'}
# Post sync request using LP API # Post sync request using LP API
post_bug(srcpkg, subscribe, mapping[status], title, report) post_bug(srcpkg, subscribe, mapping[status], title, report)
else: else:
email_from = ubu_email(export=False)[1] email_from = ubu_email(export=False)[1]
# Mail sync request # Mail sync request
mail_bug( mail_bug(srcpkg, subscribe, status, title, report, bug_mail_domain,
srcpkg, options.keyid, email_from, mailserver_host, mailserver_port,
subscribe, mailserver_user, mailserver_pass)
status,
title,
report,
bug_mail_domain,
args.keyid,
email_from,
mailserver_host,
mailserver_port,
mailserver_user,
mailserver_pass,
)
if __name__ == '__main__':
if __name__ == "__main__":
try: try:
main() main()
except KeyboardInterrupt: except KeyboardInterrupt:
Logger.error("User abort.") print "\nUser abort."
sys.exit(2) sys.exit(2)

View File

@ -1,8 +0,0 @@
python-debian
python-debianbts
dateutil
distro-info
httplib2
launchpadlib
requests
setuptools

10
reverse-build-depends Executable file
View File

@ -0,0 +1,10 @@
#!/bin/sh
cat >&2 <<EOF
reverse-build-depends has been replaced by reverse-depends -b
This script now wraps reverse-depends.
Please use it in the future.
EOF
exec "$(dirname "$0")"/reverse-depends -b "$@"

View File

@ -1,4 +1,4 @@
#!/usr/bin/python3 #!/usr/bin/python
# #
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com> # Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
# #
@ -14,239 +14,157 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# pylint: disable=invalid-name import optparse
# pylint: enable=invalid-name
import argparse
import sys import sys
from distro_info import DistroDataOutdated from distro_info import DistroDataOutdated
from ubuntutools import getLogger from ubuntutools.logger import Logger
from ubuntutools.misc import codename_to_distribution, system_distribution, vendor_to_distroinfo from ubuntutools.misc import (system_distribution, vendor_to_distroinfo,
from ubuntutools.rdepends import RDependsException, query_rdepends codename_to_distribution)
from ubuntutools.rdepends import query_rdepends, RDependsException
Logger = getLogger()
DEFAULT_MAX_DEPTH = 10  # We want to avoid any infinite loop...
def main(): def main():
system_distro_info = vendor_to_distroinfo(system_distribution())() system_distro_info = vendor_to_distroinfo(system_distribution())()
try: try:
default_release = system_distro_info.devel() default_release = system_distro_info.devel()
except DistroDataOutdated as e: except DistroDataOutdated, e:
Logger.warning(e) Logger.warn(e)
default_release = "unstable" default_release = 'unstable'
description = ( parser = optparse.OptionParser('%prog [options] package',
"List reverse-dependencies of package. " description="List reverse-dependencies of package. "
"If the package name is prefixed with src: then the " "If the package name is prefixed with src: then the "
"reverse-dependencies of all the binary packages that " "reverse-dependencies of all the binary packages that "
"the specified source package builds will be listed." "the specified source package builds will be listed.")
) parser.add_option('-r', '--release', metavar='RELEASE',
default=default_release,
help='Query dependencies in RELEASE. '
'Default: %s' % default_release)
parser.add_option('-R', '--without-recommends',
action='store_false', dest='recommends', default=True,
help='Only consider Depends relationships, '
'not Recommends')
parser.add_option('-s', '--with-suggests',
action='store_true', dest='suggests', default=False,
help='Also consider Suggests relationships')
parser.add_option('-b', '--build-depends',
action='store_const', dest='arch', const='source',
help='Query build dependencies (synonym for '
'--arch=source)')
parser.add_option('-a', '--arch', metavar='ARCH', default='any',
help='Query dependencies in ARCH. '
'Default: any')
parser.add_option('-c', '--component', metavar='COMPONENT',
action='append',
help='Only consider reverse-dependencies in COMPONENT. '
'Can be specified multiple times. Default: all')
parser.add_option('-l', '--list',
action='store_true', default=False,
help='Display a simple, machine-readable list')
parser.add_option('-u', '--service-url', metavar='URL',
dest='server', default=None,
help='Reverse Dependencies webservice URL. '
'Default: UbuntuWire')
parser = argparse.ArgumentParser(description=description) options, args = parser.parse_args()
parser.add_argument(
"-r",
"--release",
default=default_release,
help="Query dependencies in RELEASE. Default: %(default)s",
)
parser.add_argument(
"-R",
"--without-recommends",
action="store_false",
dest="recommends",
help="Only consider Depends relationships, not Recommends",
)
parser.add_argument(
"-s", "--with-suggests", action="store_true", help="Also consider Suggests relationships"
)
parser.add_argument(
"-b",
"--build-depends",
action="store_true",
help="Query build dependencies (synonym for --arch=source)",
)
parser.add_argument(
"-a", "--arch", default="any", help="Query dependencies in ARCH. Default: any"
)
parser.add_argument(
"-c",
"--component",
action="append",
help="Only consider reverse-dependencies in COMPONENT. "
"Can be specified multiple times. Default: all",
)
parser.add_argument(
"-l", "--list", action="store_true", help="Display a simple, machine-readable list"
)
parser.add_argument(
"-u",
"--service-url",
metavar="URL",
dest="server",
default=None,
help="Reverse Dependencies webservice URL. Default: UbuntuWire",
)
parser.add_argument(
"-x",
"--recursive",
action="store_true",
help="Consider to find reverse dependencies recursively.",
)
parser.add_argument(
"-d",
"--recursive-depth",
type=int,
default=DEFAULT_MAX_DEPTH,
help="If recusive, you can specify the depth.",
)
parser.add_argument("package")
options = parser.parse_args() if len(args) != 1:
parser.error("One (and only one) package must be specified")
package = args[0]
opts = {} opts = {}
if options.server is not None: if options.server is not None:
opts["server"] = options.server opts['server'] = options.server
# Convert unstable/testing aliases to codenames: # Convert unstable/testing aliases to codenames:
distribution = codename_to_distribution(options.release) distribution = codename_to_distribution(options.release)
if not distribution: if not distribution:
parser.error(f"Unknown release codename {options.release}") parser.error('Unknown release codename %s' % options.release)
distro_info = vendor_to_distroinfo(distribution)() distro_info = vendor_to_distroinfo(distribution)()
try: try:
options.release = distro_info.codename(options.release, default=options.release) options.release = distro_info.codename(options.release,
default=options.release)
except DistroDataOutdated: except DistroDataOutdated:
# We already logged a warning # We already printed a warning
pass pass
if options.build_depends: try:
options.arch = "source" data = query_rdepends(package, options.release, options.arch, **opts)
except RDependsException, e:
Logger.error(str(e))
sys.exit(1)
if options.arch == "source": if options.arch == 'source':
fields = [ fields = ['Reverse-Build-Depends', 'Reverse-Build-Depends-Indep']
"Reverse-Build-Depends",
"Reverse-Build-Depends-Indep",
"Reverse-Build-Depends-Arch",
"Reverse-Testsuite-Triggers",
]
else: else:
fields = ["Reverse-Depends"] fields = ['Reverse-Depends']
if options.recommends: if options.recommends:
fields.append("Reverse-Recommends") fields.append('Reverse-Recommends')
if options.with_suggests: if options.suggests:
fields.append("Reverse-Suggests") fields.append('Reverse-Suggests')
def build_results(package, result, fields, component, recursive): for field in data.keys():
try: if field not in fields:
data = query_rdepends(package, options.release, options.arch, **opts) del data[field]
except RDependsException as e:
Logger.error(str(e))
sys.exit(1)
if not data:
return
if fields:
data = {k: v for k, v in data.items() if k in fields}
if component:
data = {
k: [rdep for rdep in v if rdep["Component"] in component] for k, v in data.items()
}
data = {k: v for k, v in data.items() if v}
result[package] = data
if recursive > 0:
for rdeps in result[package].values():
for rdep in rdeps:
build_results(rdep["Package"], result, fields, component, recursive - 1)
result = {}
build_results(
options.package,
result,
fields,
options.component,
options.recursive and options.recursive_depth or 0,
)
if options.list:
display_consise(result)
else:
display_verbose(options.package, result)
def display_verbose(package, values):
if not values:
Logger.info("No reverse dependencies found")
return
def log_package(values, package, arch, dependency, visited, offset=0):
line = f"{' ' * offset}* {package}"
if all_archs and set(arch) != all_archs:
line += f" [{' '.join(sorted(arch))}]"
if dependency:
if len(line) < 30:
line += " " * (30 - len(line))
line += f" (for {dependency})"
Logger.info(line)
if package in visited:
return
visited = visited.copy()
visited.add(package)
data = values.get(package)
if data:
offset = offset + 1
for rdeps in data.values():
for rdep in rdeps:
log_package(
values,
rdep["Package"],
rdep.get("Architectures", all_archs),
rdep.get("Dependency"),
visited,
offset,
)
all_archs = set()
# This isn't accurate, but we make up for it by displaying what we found
for data in values.values():
for rdeps in data.values():
for rdep in rdeps:
if "Architectures" in rdep:
all_archs.update(rdep["Architectures"])
for field, rdeps in values[package].items():
Logger.info("%s", field)
Logger.info("%s", "=" * len(field))
rdeps.sort(key=lambda x: x["Package"])
for rdep in rdeps:
log_package(
values,
rdep["Package"],
rdep.get("Architectures", all_archs),
rdep.get("Dependency"),
{package},
)
Logger.info("")
if all_archs:
Logger.info(
"Packages without architectures listed are reverse-dependencies in: %s",
", ".join(sorted(list(all_archs))),
)
def display_consise(values):
result = set()
for data in values.values():
for rdeps in data.values():
for rdep in rdeps:
result.add(rdep["Package"])
Logger.info("\n".join(sorted(list(result))))
if __name__ == "__main__":
main()
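For illustration, a minimal sketch (package names invented, not taken from the script above) of the nested mapping that build_results() assembles and that display_consise() flattens into the machine-readable list:

# Hypothetical data shaped like query_rdepends() results, keyed by the queried package.
result = {
    "libfoo": {
        "Reverse-Depends": [
            {"Package": "foo-utils", "Component": "main", "Architectures": ["amd64", "arm64"]},
            {"Package": "bar", "Component": "universe", "Dependency": "libfoo (>= 1.0)"},
        ],
        "Reverse-Recommends": [
            {"Package": "baz", "Component": "main"},
        ],
    }
}

# The same flattening display_consise() performs: one unique package name per line.
packages = set()
for data in result.values():
    for rdeps in data.values():
        for rdep in rdeps:
            packages.add(rdep["Package"])
print("\n".join(sorted(packages)))  # bar, baz, foo-utils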

View File

@ -1,19 +0,0 @@
#!/bin/sh
set -eu
# Copyright 2023, Canonical Ltd.
# SPDX-License-Identifier: GPL-3.0
PYTHON_SCRIPTS=$(grep -l -r '^#! */usr/bin/python3$' .)
echo "Running black..."
black --check --diff . $PYTHON_SCRIPTS
echo "Running isort..."
isort --check-only --diff .
echo "Running flake8..."
flake8 --max-line-length=99 --ignore=E203,W503 . $PYTHON_SCRIPTS
echo "Running pylint..."
pylint $(find * -name '*.py') $PYTHON_SCRIPTS

View File

@ -1,81 +0,0 @@
#!/usr/bin/python3
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# Authors:
# Andy P. Whitcroft
# Christian Ehrhardt
# Chris Peterson <chris.peterson@canonical.com>
#
# Copyright (C) 2024 Canonical Ltd.
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Dumps a list of currently running tests in Autopkgtest"""
__example__ = """
Display first listed test running on amd64 hardware:
$ running-autopkgtests | grep amd64 | head -n1
R 0:01:40 systemd-upstream - focal amd64\
upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb',\
'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true',\
'UPSTREAM_PULL_REQUEST=23153',\
'GITHUB_STATUSES_URL=https://api.github.com/repos/\
systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
"""
import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from ubuntutools.running_autopkgtests import get_queued, get_running
def parse_args():
description = (
"Dumps a list of currently running and queued tests in Autopkgtest. "
"Pass --running to only see running tests, or --queued to only see "
"queued tests. Passing both will print both, which is the default behavior. "
)
parser = ArgumentParser(
prog="running-autopkgtests",
description=description,
epilog=f"example: {__example__}",
formatter_class=RawDescriptionHelpFormatter,
)
parser.add_argument(
"-r", "--running", action="store_true", help="Print runnning autopkgtests (default: true)"
)
parser.add_argument(
"-q", "--queued", action="store_true", help="Print queued autopkgtests (default: true)"
)
options = parser.parse_args()
# If neither flag was specified, default to showing both
if not options.running and not options.queued:
options.running = True
options.queued = True
return options
def main() -> int:
args = parse_args()
if args.running:
print(get_running())
if args.queued:
print(get_queued())
return 0
if __name__ == "__main__":
sys.exit(main())

View File

@ -1,4 +1,4 @@
#!/usr/bin/python3
#
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
#
@ -14,138 +14,130 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import argparse
import collections
import gzip
import json
import os
import time
import urllib.request
from ubuntutools import getLogger
from ubuntutools.lp.lpapicache import Distribution, Launchpad, PackageNotFoundException
Logger = getLogger()
DATA_URL = "http://qa.ubuntuwire.org/ubuntu-seeded-packages/seeded.json.gz"
def load_index(url):
"""Download a new copy of the image contents index, if necessary,
and read it.
"""
cachedir = os.path.expanduser("~/.cache/ubuntu-dev-tools")
seeded = os.path.join(cachedir, "seeded.json.gz")
if not os.path.isfile(seeded) or time.time() - os.path.getmtime(seeded) > 60 * 60 * 2:
if not os.path.isdir(cachedir):
os.makedirs(cachedir)
urllib.request.urlretrieve(url, seeded)
try:
with gzip.open(seeded, "r") as f:
return json.load(f)
except Exception as e:  # pylint: disable=broad-except
Logger.error(
"Unable to parse seed data: %s. Deleting cached data, please try again.", str(e)
)
os.unlink(seeded)
return None
def resolve_binaries(sources):
"""Return a dict of source:binaries for all binary packages built by
sources
"""
archive = Distribution("ubuntu").getArchive()
binaries = {}
for source in sources:
try:
spph = archive.getSourcePackage(source)
except PackageNotFoundException as e:
Logger.error(str(e))
continue
binaries[source] = sorted(set(bpph.getPackageName() for bpph in spph.getBinaries()))
return binaries
def present_on(appearences):
"""Format a list of (flavor, type) tuples into a human-readable string"""
present = collections.defaultdict(set)
for flavor, type_ in appearences:
present[flavor].add(type_)
for flavor, types in present.items():
if len(types) > 1:
types.discard("supported")
output = [f" {flavor}: {', '.join(sorted(types))}" for flavor, types in present.items()]
output.sort()
return "\n".join(output)
def output_binaries(index, binaries):
"""Print binaries found in index"""
for binary in binaries:
if binary in index:
Logger.info("%s is seeded in:", binary)
Logger.info(present_on(index[binary]))
else:
Logger.info("%s is not seeded (and may not exist).", binary)
def output_by_source(index, by_source):
"""Logger.Info(binaries found in index. Grouped by source""" '''Print binaries found in index. Grouped by source'''
for source, binaries in by_source.items():
seen = False
if not binaries:
Logger.info(
"Status unknown: No binary packages built by the latest "
"%s.\nTry again using -b and the expected binary packages.",
source,
)
continue
for binary in binaries:
if binary in index:
seen = True
Logger.info("%s (from %s) is seeded in:", binary, source)
Logger.info(present_on(index[binary]))
if not seen:
Logger.info("%s's binaries are not seeded.", source)
def main():
"""Query which images the specified packages are on"""
parser = argparse.ArgumentParser(usage="%(prog)s [options] package...")
parser.add_argument(
"-b",
"--binary",
default=False,
action="store_true",
help="Binary packages are being specified, not source packages (fast)",
)
parser.add_argument(
"-u",
"--data-url",
metavar="URL",
default=DATA_URL,
help="URL for the seeded packages index. Default: UbuntuWire",
)
parser.add_argument("packages", metavar="package", nargs="+", help=argparse.SUPPRESS)
args = parser.parse_args()
# Login anonymously to LP
Launchpad.login_anonymously()
index = load_index(args.data_url)
if args.binary:
output_binaries(index, args.packages)
else:
binaries = resolve_binaries(args.packages)
output_by_source(index, binaries)
if __name__ == "__main__":
main()
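A small sketch (flavor names invented) of the per-binary records that present_on() receives from the seeded.json index, and the collapsed flavor-to-types view it reports:

import collections

# Hypothetical entry from the seeded index: (flavor, image type) pairs for one binary.
appearences = [("ubuntu", "supported"), ("ubuntu", "daily"), ("kubuntu", "supported")]

# Mirrors present_on(): group by flavor and drop the generic "supported" tag
# whenever a more specific image type is also present for that flavor.
present = collections.defaultdict(set)
for flavor, type_ in appearences:
    present[flavor].add(type_)
for types in present.values():
    if len(types) > 1:
        types.discard("supported")
for flavor in sorted(present):
    print(f" {flavor}: {', '.join(sorted(present[flavor]))}")
# prints: " kubuntu: supported" and " ubuntu: daily"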

View File

@ -104,7 +104,7 @@ echo "In order to do packaging work, you'll need a minimal set of packages."
echo "Those, together with other packages which, though optional, have proven" echo "Those, together with other packages which, though optional, have proven"
echo "to be useful, will now be installed." echo "to be useful, will now be installed."
echo echo
sudo apt-get install ubuntu-dev-tools devscripts debhelper patchutils pbuilder build-essential sudo apt-get install ubuntu-dev-tools devscripts debhelper cdbs patchutils pbuilder build-essential
separator2 separator2
echo "Enabling the source repository" echo "Enabling the source repository"

163
setup.py
View File

@ -1,100 +1,75 @@
#!/usr/bin/python3
import glob
import pathlib
import re
from setuptools import setup
def get_debian_version() -> str:
"""Look what Debian version we have."""
changelog = pathlib.Path(__file__).parent / "debian" / "changelog"
with changelog.open("r", encoding="utf-8") as changelog_f:
head = changelog_f.readline()
match = re.compile(r".*\((.*)\).*").match(head)
if not match:
raise ValueError(f"Failed to extract Debian version from '{head}'.")
return match.group(1)
def make_pep440_compliant(version: str) -> str:
"""Convert the version into a PEP440 compliant version."""
public_version_re = re.compile(r"^([0-9][0-9.]*(?:(?:a|b|rc|.post|.dev)[0-9]+)*)\+?")
_, public, local = public_version_re.split(version, maxsplit=1)
if not local:
return version
sanitized_local = re.sub("[+~]+", ".", local).strip(".")
pep440_version = f"{public}+{sanitized_local}"
assert re.match("^[a-zA-Z0-9.]+$", sanitized_local), f"'{pep440_version}' not PEP440 compliant"
return pep440_version
scripts = [
"backportpackage",
"check-mir",
"check-symbols",
"dch-repeat",
"grab-merge",
"grep-merges",
"import-bug-from-debian",
"lp-bitesize",
"merge-changelog",
"mk-sbuild",
"pbuilder-dist",
"pbuilder-dist-simple",
"pm-helper",
"pull-pkg",
"pull-debian-debdiff",
"pull-debian-source",
"pull-debian-debs",
"pull-debian-ddebs",
"pull-debian-udebs",
"pull-lp-source",
"pull-lp-debs",
"pull-lp-ddebs",
"pull-lp-udebs",
"pull-ppa-source",
"pull-ppa-debs",
"pull-ppa-ddebs",
"pull-ppa-udebs",
"pull-uca-source",
"pull-uca-debs",
"pull-uca-ddebs",
"pull-uca-udebs",
"requestbackport",
"requestsync",
"reverse-depends",
"running-autopkgtests",
"seeded-in-ubuntu",
"setup-packaging-environment",
"sponsor-patch",
"submittodebian",
"syncpackage",
"ubuntu-build",
"ubuntu-iso",
"ubuntu-upload-permission",
"update-maintainer",
]
data_files = [
("share/bash-completion/completions", glob.glob("bash_completion/*")),
("share/man/man1", glob.glob("doc/*.1")),
("share/man/man5", glob.glob("doc/*.5")),
("share/ubuntu-dev-tools", ["enforced-editing-wrapper"]),
]
if __name__ == "__main__":
setup(
name="ubuntu-dev-tools",
version=make_pep440_compliant(get_debian_version()),
scripts=scripts,
packages=[
"ubuntutools",
"ubuntutools/lp",
"ubuntutools/requestsync",
"ubuntutools/sponsor_patch",
"ubuntutools/test",
],
data_files=data_files,
test_suite="ubuntutools.test",
)
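A quick sanity check (assuming setup.py from a checkout of the source tree is importable; the version strings are illustrative) of what the version mangling above produces for typical Debian versions:

from setup import make_pep440_compliant

# Versions without a local part pass through unchanged; '~' and '+' in the Debian
# suffix are collapsed into '.' so the result is a valid PEP 440 local version.
assert make_pep440_compliant("0.193") == "0.193"
assert make_pep440_compliant("0.193ubuntu1") == "0.193+ubuntu1"
assert make_pep440_compliant("0.193ubuntu1~bpo22.04.1") == "0.193+ubuntu1.bpo22.04.1"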

View File

@ -1,4 +1,4 @@
#!/usr/bin/python3
#
# Copyright (C) 2010-2011, Benjamin Drung <bdrung@ubuntu.com>
#
@ -14,155 +14,119 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import argparse
import logging
import os
import shutil
import sys
import tempfile
from ubuntutools import getLogger
from ubuntutools.builder import get_builder
from ubuntutools.config import UDTConfig
from ubuntutools.sponsor_patch.sponsor_patch import check_dependencies, sponsor_patch
Logger = getLogger()
def parse(script_name):
"""Parse the command line parameters."""
usage = (
"%(prog)s [options] <bug number>\n"
"One of --upload, --workdir, or --sponsor must be specified."
)
epilog = f"See {script_name}(1) for more info."
parser = argparse.ArgumentParser(usage=usage, epilog=epilog)
parser.add_argument(
"-b",
"--build",
dest="build",
help="Build the package with the specified builder.",
action="store_true",
)
parser.add_argument(
"-B", "--builder", dest="builder", help="Specify the package builder (default pbuilder)"
)
parser.add_argument(
"-e",
"--edit",
help="launch sub-shell to allow editing of the patch",
dest="edit",
action="store_true",
)
parser.add_argument(
"-k", "--key", dest="keyid", help="Specify the key ID to be used for signing."
)
parser.add_argument(
"-l",
"--lpinstance",
dest="lpinstance",
help="Launchpad instance to connect to (default: production)",
metavar="INSTANCE",
)
parser.add_argument(
"--no-conf",
dest="no_conf",
help="Don't read config files or environment variables.",
action="store_true",
)
parser.add_argument(
"-s",
"--sponsor",
help="sponsoring; equals -b -u ubuntu",
dest="sponsoring",
action="store_true",
)
parser.add_argument(
"-u", "--upload", dest="upload", help="Specify an upload destination (default none)."
)
parser.add_argument(
"-U",
"--update",
dest="update",
action="store_true",
help="Update the build environment before building.",
)
parser.add_argument(
"-v", "--verbose", help="print more information", dest="verbose", action="store_true"
)
parser.add_argument(
"-w",
"--workdir",
dest="workdir",
help="Specify a working directory (default is a "
"temporary directory, deleted afterwards).",
)
parser.add_argument("bug_number", type=int, help=argparse.SUPPRESS)
args = parser.parse_args()
if args.verbose:
Logger.setLevel(logging.DEBUG)
check_dependencies()
config = UDTConfig(args.no_conf)
if args.builder is None:
args.builder = config.get_value("BUILDER")
if args.lpinstance is None:
args.lpinstance = config.get_value("LPINSTANCE")
if not args.update:
args.update = config.get_value("UPDATE_BUILDER", boolean=True)
if args.workdir is None:
args.workdir = config.get_value("WORKDIR")
if args.keyid is None:
args.keyid = config.get_value("KEYID")
if args.sponsoring:
args.build = True
args.upload = "ubuntu"
return args
def main():
script_name = os.path.basename(sys.argv[0])
args = parse(script_name)
builder = get_builder(args.builder)
if not builder:
sys.exit(1)
if not args.upload and not args.workdir:
Logger.error("Please specify either a working directory or an upload target!")
sys.exit(1)
if args.workdir is None:
workdir = tempfile.mkdtemp(prefix=script_name + "-")
else:
workdir = args.workdir
try:
sponsor_patch(
args.bug_number,
args.build,
builder,
args.edit,
args.keyid,
args.lpinstance,
args.update,
args.upload,
workdir,
)
except KeyboardInterrupt:
Logger.error("User abort.")
sys.exit(2)
finally:
if args.workdir is None:
shutil.rmtree(workdir)
if __name__ == "__main__":
main()
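For reference, a minimal sketch (standalone, values depend on your local configuration) of the fallback pattern parse() uses above: explicit command-line options win, otherwise UDTConfig supplies defaults read from the devscripts/ubuntu-dev-tools config files and environment:

from ubuntutools.config import UDTConfig

config = UDTConfig(False)  # False: do read config files and the environment
builder = None  # e.g. the user did not pass -B/--builder on the command line
if builder is None:
    builder = config.get_value("BUILDER")
print(builder)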

View File

@ -1,4 +1,4 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# submittodebian - tool to submit patches to Debian's BTS
@ -22,36 +22,33 @@
#
# ##################################################################
"""Submit the Ubuntu changes in a package to Debian.
Run inside an unpacked Ubuntu source package.
"""
import argparse
import os
import re
import shutil
import sys
from subprocess import DEVNULL, PIPE, Popen, call, check_call, run
from tempfile import mkdtemp
from debian.changelog import Changelog
from distro_info import DistroDataOutdated, UbuntuDistroInfo
from ubuntutools import getLogger
from ubuntutools.config import ubu_email
from ubuntutools.question import EditFile, YesNoQuestion
from ubuntutools.update_maintainer import restore_maintainer, update_maintainer
Logger = getLogger()
def get_most_recent_debian_version(changelog):
for block in changelog:
version = block.version.full_version
if not re.search("(ubuntu|build)", version):
return version
return None
def get_bug_body(changelog):
@ -69,20 +66,19 @@ In Ubuntu, the attached patch was applied to achieve the following:
%s
Thanks for considering the patch.
""" % (
"\n".join(entry.changes())
)
return msg
def build_source_package():
if os.path.isdir(".bzr"):
cmd = ["bzr", "bd", "--builder=dpkg-buildpackage", "-S", "--", "-uc", "-us", "-nc"]
else:
cmd = ["dpkg-buildpackage", "-S", "-uc", "-us", "-nc"]
env = os.environ.copy()
# Unset DEBEMAIL in case there's an @ubuntu.com e-mail address
env.pop("DEBEMAIL", None)
check_call(cmd, env=env)
@ -93,35 +89,35 @@ def gen_debdiff(tmpdir, changelog):
newver = next(changelog_it).version
oldver = next(changelog_it).version
debdiff = os.path.join(tmpdir, f"{pkg}_{newver}.debdiff")
diff_cmd = ["bzr", "diff", "-r", "tag:" + str(oldver)]
if call(diff_cmd, stdout=DEVNULL, stderr=DEVNULL) == 1:
Logger.info("Extracting bzr diff between %s and %s", oldver, newver)
else:
if oldver.epoch is not None:
oldver = str(oldver)[str(oldver).index(":") + 1 :]
if newver.epoch is not None:
newver = str(newver)[str(newver).index(":") + 1 :]
olddsc = f"../{pkg}_{oldver}.dsc"
newdsc = f"../{pkg}_{newver}.dsc"
check_file(olddsc)
check_file(newdsc)
Logger.info("Generating debdiff between %s and %s", oldver, newver)
diff_cmd = ["debdiff", olddsc, newdsc]
with Popen(diff_cmd, stdout=PIPE, encoding="utf-8") as diff:
with open(debdiff, "w", encoding="utf-8") as debdiff_f:
run(
["filterdiff", "-x", "*changelog*"],
check=False,
stdin=diff.stdout,
stdout=debdiff_f,
encoding="utf-8",
)
return debdiff
@ -129,95 +125,88 @@ def gen_debdiff(tmpdir, changelog):
def check_file(fname, critical=True):
if os.path.exists(fname):
return fname
if not critical:
return False
Logger.info("Couldn't find «%s».\n", fname)
sys.exit(1)
def submit_bugreport(body, debdiff, deb_version, changelog):
try:
devel = UbuntuDistroInfo().devel()
except DistroDataOutdated as e:
Logger.info(str(e))
devel = ""
if os.path.dirname(sys.argv[0]).startswith("/usr/bin"):
editor_path = "/usr/share/ubuntu-dev-tools"
else:
editor_path = os.path.dirname(sys.argv[0])
env = dict(os.environ.items())
if "EDITOR" in env:
env["UDT_EDIT_WRAPPER_EDITOR"] = env["EDITOR"]
if "VISUAL" in env:
env["UDT_EDIT_WRAPPER_VISUAL"] = env["VISUAL"]
env["EDITOR"] = os.path.join(editor_path, "enforced-editing-wrapper")
env["VISUAL"] = os.path.join(editor_path, "enforced-editing-wrapper")
env["UDT_EDIT_WRAPPER_TEMPLATE_RE"] = ".*REPLACE THIS WITH ACTUAL INFORMATION.*"
env["UDT_EDIT_WRAPPER_FILE_DESCRIPTION"] = "bug report"
# In external mua mode, attachments are lost (Reportbug bug: #679907)
internal_mua = True
for cfgfile in ("/etc/reportbug.conf", "~/.reportbugrc"):
cfgfile = os.path.expanduser(cfgfile)
if not os.path.exists(cfgfile):
continue
with open(cfgfile, "r", encoding="utf-8") as f:
for line in f:
line = line.strip()
if line in ("gnus", "mutt", "nmh") or line.startswith("mua "):
internal_mua = False
break
cmd = (
"reportbug",
"--no-check-available",
"--no-check-installed",
"--pseudo-header",
"User: ubuntu-devel@lists.ubuntu.com",
"--pseudo-header",
f"Usertags: origin-ubuntu {devel} ubuntu-patch",
"--tag",
"patch",
"--bts",
"debian",
"--include",
body,
"--attach" if internal_mua else "--include",
debdiff,
"--package-version",
deb_version,
changelog.package,
)
check_call(cmd, env=env)
def check_reportbug_config():
reportbugrc_filename = os.path.expanduser("~/.reportbugrc")
if os.path.exists(reportbugrc_filename):
return
email = ubu_email()[1]
reportbugrc = f"""# Reportbug configuration generated by submittodebian(1)
# See reportbug.conf(5) for the configuration file format.
# Use Debian's reportbug SMTP Server:
# Note: it's limited to 5 connections per hour, and cannot CC you at submission
# time. See /usr/share/doc/reportbug/README.Users.gz for more details.
smtphost reportbug.debian.org:587
header "X-Debbugs-CC: {email}"
no-cc
# Use GMail's SMTP Server:
#smtphost smtp.googlemail.com:587
#smtpuser "<your address>@gmail.com"
#smtptls
"""
with open(reportbugrc_filename, "w", encoding="utf-8") as f:
f.write(reportbugrc)
Logger.info(
"""\
You have not configured reportbug. Assuming this is the first time you have
used it. Writing a ~/.reportbugrc that will use Debian's mail server, and CC
the bug to you at <%s>
@ -228,43 +217,40 @@ the bug to you at <%s>
If this is not correct, please exit now and edit ~/.reportbugrc or run
reportbug --configure for its configuration wizard.
""",
email,
reportbugrc.strip(),
)
if YesNoQuestion().ask("Continue submitting this bug", "yes") == "no":
sys.exit(1)
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.parse_args()
if not os.path.exists("/usr/bin/reportbug"):
Logger.error(
"This utility requires the «reportbug» package, which isn't currently installed."
)
sys.exit(1)
check_reportbug_config()
changelog_file = check_file("debian/changelog", critical=False) or check_file(
"../debian/changelog"
)
with open(changelog_file, encoding="utf-8") as f:
changelog = Changelog(f.read())
deb_version = get_most_recent_debian_version(changelog)
bug_body = get_bug_body(changelog)
tmpdir = mkdtemp()
body = os.path.join(tmpdir, "bug_body")
with open(body, "wb") as f:
f.write(bug_body.encode("utf-8"))
restore_maintainer("debian")
build_source_package()
update_maintainer("debian")
debdiff = gen_debdiff(tmpdir, changelog)
@ -272,13 +258,12 @@ def main():
# reverted in the most recent build
build_source_package()
EditFile(debdiff, "debdiff").edit(optional=True)
submit_bugreport(body, debdiff, deb_version, changelog)
os.unlink(body)
os.unlink(debdiff)
shutil.rmtree(tmpdir)
if __name__ == "__main__":
main()
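As an aside, a minimal sketch (file names invented; requires the debdiff and filterdiff tools from devscripts and patchutils) of the subprocess pipeline gen_debdiff() sets up above: debdiff output is streamed through filterdiff so changelog hunks are dropped from the generated debdiff:

from subprocess import PIPE, Popen, run

olddsc, newdsc = "../pkg_1.0-1.dsc", "../pkg_1.0-1ubuntu1.dsc"  # hypothetical paths
with Popen(["debdiff", olddsc, newdsc], stdout=PIPE, encoding="utf-8") as diff:
    with open("pkg_1.0-1ubuntu1.debdiff", "w", encoding="utf-8") as debdiff_f:
        # filterdiff reads the diff from stdin and writes everything except
        # hunks that touch a changelog file.
        run(["filterdiff", "-x", "*changelog*"], check=False, stdin=diff.stdout, stdout=debdiff_f)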

File diff suppressed because it is too large

View File

@ -0,0 +1 @@
upstream

View File

@ -0,0 +1 @@
7

Some files were not shown because too many files have changed in this diff