Compare commits


No commits in common. "main" and "0.163" have entirely different histories.
main...0.163

141 changed files with 6380 additions and 10062 deletions

.gitignore vendored (18 changed lines)

@@ -1,2 +1,16 @@
__pycache__
*.egg-info
/ubuntu_dev_tools.egg-info/
__pycache__/
*.pyc
/build/
/.pybuild/
/test-data/example_1.0-1.debian.tar.xz
/test-data/example_1.0-1.dsc
/test-data/example_1.0.orig.tar.gz
/debian/python-ubuntutools/
/debian/python3-ubuntutools/
/debian/ubuntu-dev-tools/
/debian/debhelper-build-stamp
/debian/files
/debian/*.debhelper
/debian/*.debhelper.log
/debian/*.substvars


@@ -1,65 +0,0 @@
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=apt_pkg
# Pickle collected data for later comparisons.
persistent=no
# Use all cpus, to speed up testing
jobs=0
[MESSAGES CONTROL]
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=fixme,locally-disabled,missing-docstring,useless-option-value,
# TODO: Fix all following disabled checks!
invalid-name,
consider-using-with,
too-many-arguments,
too-many-branches,
too-many-statements,
too-many-locals,
duplicate-code,
too-many-instance-attributes,
too-many-nested-blocks,
too-many-lines,
[REPORTS]
# Tells whether to display a full report or only the messages
reports=no
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=99
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
[BASIC]
# Allow variables called e, f, lp
good-names=i,j,k,ex,Run,_,e,f,lp,me,to
[IMPORTS]
# Force import order to recognize a module as part of a third party library.
known-third-party=debian

404main Executable file (182 lines)

@@ -0,0 +1,182 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2006-2007 (C) Pete Savage <petesavage@ubuntu.com>
# Copyright 2007 (C) Siegfried-A. Gevatter <rainct@ubuntu.com>
# Copyright 2009 (C) Canonical Ltd. (by Colin Watson <cjwatson@ubuntu.com>)
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL for more details.
#
# ##################################################################
#
# This script is used to check if a package and all its build
# dependencies are in main or not.
import sys
import apt_pkg
import apt
from ubuntutools import subprocess
def process_deps(cache, deps):
"""Takes a list of (build) dependencies and processes it."""
for basedep in [d.or_dependencies[0] for d in deps]:
if basedep.name not in packages and basedep.name != '':
# Check the (build) dependencies recursively
find_main(cache, basedep.name)
def get_package_version(cache, distro, pack):
if pack not in cache:
return None
for version in (cache[pack].candidate, cache[pack].installed):
if not version:
continue
for origin in version.origins:
if origin.archive == distro:
return version
return None
# Cache::CompTypeDeb isn't exposed via python-apt
def comp_type_deb(op):
ops = ("", "<=", ">=", "<<", ">>", "=", "!=")
if (op & 15) < 7:
return ops[op & 15]
return ""
def find_main(cache, pack):
"""Searches the dependencies and build dependencies of a package recursively
to determine if they are all in the 'main' component or not."""
global packages
if pack in packages:
return
# Retrieve information about the package
version = get_package_version(cache, distro, pack)
if not version:
packages[pack] = False
return
elif [origin for origin in version.origins if origin.component == 'main']:
packages[pack] = True
return
else:
if pack not in packages:
packages[pack] = False
# Retrieve package dependencies
process_deps(cache, version.dependencies)
# Retrieve package build dependencies. There's no handy
# attribute on version for this, so unfortunately we have to
# do a lot of messing about with apt.
deps = []
src_records = apt_pkg.SourceRecords()
got_src = False
while src_records.lookup(version.source_name):
if pack in src_records.binaries:
got_src = True
break
if got_src:
# pylint: disable=E1101
for _, all_deps in src_records.build_depends.iteritems():
# pylint: enable=E1101
for or_deps in all_deps:
base_deps = []
for (name, ver, op) in or_deps:
# pylint: disable=too-many-function-args
base_deps.append(apt.package.BaseDependency(name, op,
ver, False))
# pylint: enable=too-many-function-args
# pylint: disable=no-value-for-parameter
deps.append(apt.package.Dependency(base_deps))
# pylint: enable=no-value-for-parameter
process_deps(cache, deps)
def usage(exit_code):
print 'Usage: %s <package name> [<distribution>]' % sys.argv[0]
sys.exit(exit_code)
def main():
global packages, distro
# Check if the amount of arguments is correct
if len(sys.argv) > 1 and sys.argv[1] in ('help', '-h', '--help'):
usage(0)
if len(sys.argv) < 2 or len(sys.argv) > 3:
usage(1)
cache = apt.cache.Cache()
if len(sys.argv) == 3 and sys.argv[2]:
distro = sys.argv[2]
if not get_package_version(cache, distro, 'bash'):
print u'«%s» is not a valid distribution.' % distro
print('Remember that for 404main to work with a certain distribution '
'it must be in your /etc/apt/sources.list file.')
sys.exit(1)
else:
cmd = ['lsb_release', '-cs']
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
distro = process.stdout.read().strip('\n')
if not get_package_version(cache, distro, sys.argv[1]):
print(u"Can't find package «%s» in distribution «%s»." % (sys.argv[1], distro))
sys.exit(1)
print(u'Checking package «%s» in distribution «%s»...' % (sys.argv[1], distro))
find_main(cache, sys.argv[1])
# True if everything checked until the point is in main
all_in_main = True
for package in packages:
if not packages[package]:
if all_in_main:
print 'The following packages aren\'t in main:'
all_in_main = False
print ' ', package
if all_in_main:
print((u'Package «%s» and all its dependencies and build dependencies are in main.') %
sys.argv[1])
if __name__ == '__main__':
# Global variable to hold the status of all packages
packages = {}
# Global variable to hold the target distribution
distro = ''
try:
main()
except KeyboardInterrupt:
print 'Aborted.'
sys.exit(1)

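The 404main script above decides whether a package is in "main" by inspecting the origins of the candidate (or installed) version that python-apt returns. Below is a minimal sketch of that component lookup using the python3-apt bindings; it is an editor's illustration rather than code from either branch, it assumes python3-apt is installed, and the package name "bash" is only an example.

    #!/usr/bin/python3
    # Sketch: report which archive components provide a binary package,
    # mirroring the origin/component check that 404main performs.
    import apt

    def components_of(package_name):
        cache = apt.Cache()
        if package_name not in cache:
            return set()
        version = cache[package_name].candidate or cache[package_name].installed
        if version is None:
            return set()
        # Each origin records one source of this version (archive, component, site, ...).
        return {origin.component for origin in version.origins if origin.component}

    if __name__ == "__main__":
        print(sorted(components_of("bash")))  # "bash" is an illustrative package name

On an Ubuntu system with the archive configured, this prints something like ['main'] for a package published in main, which is the condition 404main tests for each dependency.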
backportpackage

@@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ##################################################################
#
@@ -18,232 +18,205 @@
#
# ##################################################################
import argparse
import glob
import optparse
import os
import shutil
import subprocess
import sys
import tempfile
from urllib.parse import quote
try:
import lsb_release
except ImportError:
lsb_release = None
from distro_info import DebianDistroInfo, UbuntuDistroInfo
import lsb_release
from httplib2 import Http, HttpLib2Error
from ubuntutools import getLogger
from ubuntutools.archive import DebianSourcePackage, DownloadError, UbuntuSourcePackage
from ubuntutools.builder import get_builder
from ubuntutools.archive import (SourcePackage, DebianSourcePackage,
UbuntuSourcePackage, DownloadError)
from ubuntutools.config import UDTConfig, ubu_email
from ubuntutools.lp.lpapicache import (
Distribution,
Launchpad,
PackageNotFoundException,
SeriesNotFoundException,
)
from ubuntutools.misc import codename_to_distribution, system_distribution, vendor_to_distroinfo
from ubuntutools.builder import get_builder
from ubuntutools.lp.lpapicache import (Launchpad, Distribution,
SeriesNotFoundException,
PackageNotFoundException)
from ubuntutools.logger import Logger
from ubuntutools.misc import (system_distribution, vendor_to_distroinfo,
codename_to_distribution)
from ubuntutools.question import YesNoQuestion
Logger = getLogger()
from ubuntutools import subprocess
def error(msg, *args):
Logger.error(msg, *args)
def error(msg):
Logger.error(msg)
sys.exit(1)
def check_call(cmd, *args, **kwargs):
Logger.debug(" ".join(cmd))
Logger.command(cmd)
ret = subprocess.call(cmd, *args, **kwargs)
if ret != 0:
error("%s returned %d.", cmd[0], ret)
error('%s returned %d.' % (cmd[0], ret))
def parse(argv):
usage = "%(prog)s [options] <source package name or .dsc URL/file>"
parser = argparse.ArgumentParser(usage=usage)
parser.add_argument(
"-d",
"--destination",
metavar="DEST",
dest="dest_releases",
default=[],
action="append",
help="Backport to DEST release (default: current release)",
)
parser.add_argument(
"-s",
"--source",
metavar="SOURCE",
dest="source_release",
help="Backport from SOURCE release (default: devel release)",
)
parser.add_argument(
"-S",
"--suffix",
metavar="SUFFIX",
help="Suffix to append to version number (default: ~ppa1 when uploading to a PPA)",
)
parser.add_argument(
"-e",
"--message",
metavar="MESSAGE",
default="No-change",
help='Changelog message to use instead of "No-change" '
"(default: No-change backport to DEST.)",
)
parser.add_argument(
"-b",
"--build",
default=False,
action="store_true",
help="Build the package before uploading (default: %(default)s)",
)
parser.add_argument(
"-B",
"--builder",
metavar="BUILDER",
help="Specify the package builder (default: pbuilder)",
)
parser.add_argument(
"-U",
"--update",
default=False,
action="store_true",
help="Update the build environment before attempting to build",
)
parser.add_argument("-u", "--upload", metavar="UPLOAD", help="Specify an upload destination")
parser.add_argument(
"-k", "--key", dest="keyid", help="Specify the key ID to be used for signing."
)
parser.add_argument(
"--dont-sign", dest="keyid", action="store_false", help="Do not sign the upload."
)
parser.add_argument(
"-y",
"--yes",
dest="prompt",
default=True,
action="store_false",
help="Do not prompt before uploading to a PPA",
)
parser.add_argument(
"-v", "--version", metavar="VERSION", help="Package version to backport (or verify)"
)
parser.add_argument(
"-w",
"--workdir",
metavar="WORKDIR",
help="Specify a working directory (default: temporary dir)",
)
parser.add_argument(
"-r",
"--release-pocket",
default=False,
action="store_true",
help="Target the release pocket in the .changes file. "
"Necessary (and default) for uploads to PPAs",
)
parser.add_argument(
"-c", "--close", metavar="BUG", help="Bug to close in the changelog entry."
)
parser.add_argument(
"-m", "--mirror", metavar="URL", help="Preferred mirror (default: Launchpad)"
)
parser.add_argument(
"-l",
"--lpinstance",
metavar="INSTANCE",
help="Launchpad instance to connect to (default: production)",
)
parser.add_argument(
"--no-conf",
default=False,
action="store_true",
help="Don't read config files or environment variables",
)
parser.add_argument("package_or_dsc", help=argparse.SUPPRESS)
args = parser.parse_args(argv)
config = UDTConfig(args.no_conf)
if args.builder is None:
args.builder = config.get_value("BUILDER")
if not args.update:
args.update = config.get_value("UPDATE_BUILDER", boolean=True)
if args.workdir is None:
args.workdir = config.get_value("WORKDIR")
if args.lpinstance is None:
args.lpinstance = config.get_value("LPINSTANCE")
if args.upload is None:
args.upload = config.get_value("UPLOAD")
if args.keyid is None:
args.keyid = config.get_value("KEYID")
if not args.upload and not args.workdir:
parser.error("Please specify either a working dir or an upload target!")
if args.upload and args.upload.startswith("ppa:"):
args.release_pocket = True
return args, config
def check_program_exists(name, package=None):
paths = set(os.environ['PATH'].split(':'))
paths |= set(('/sbin', '/usr/sbin', '/usr/local/sbin'))
if not any(os.path.exists(os.path.join(p, name)) for p in paths):
Logger.error('Could not find "%s". Please install the package "%s" '
'to use this functionality.',
name, package or name)
sys.exit(1)
def find_release_package(mirror, workdir, package, version, source_release, config):
def parse(args):
usage = 'Usage: %prog [options] <source package name or .dsc URL/file>'
parser = optparse.OptionParser(usage)
parser.add_option('-d', '--destination',
metavar='DEST',
dest='dest_releases',
default=[],
action='append',
help='Backport to DEST release '
'(default: current release)')
parser.add_option('-s', '--source',
metavar='SOURCE',
dest='source_release',
help='Backport from SOURCE release '
'(default: devel release)')
parser.add_option('-S', '--suffix',
metavar='SUFFIX',
help='Suffix to append to version number '
'(default: ~ppa1 when uploading to a PPA)')
parser.add_option('-b', '--build',
default=False,
action='store_true',
help='Build the package before uploading '
'(default: %default)')
parser.add_option('-B', '--builder',
metavar='BUILDER',
help='Specify the package builder (default: pbuilder)')
parser.add_option('-U', '--update',
default=False,
action='store_true',
help='Update the build environment before '
'attempting to build')
parser.add_option('-u', '--upload',
metavar='UPLOAD',
help='Specify an upload destination')
parser.add_option("-k", "--key",
dest='keyid',
help="Specify the key ID to be used for signing.")
parser.add_option('--dont-sign',
dest='keyid', action='store_false',
help='Do not sign the upload.')
parser.add_option('-y', '--yes',
dest='prompt',
default=True,
action='store_false',
help='Do not prompt before uploading to a PPA')
parser.add_option('-v', '--version',
metavar='VERSION',
help='Package version to backport (or verify)')
parser.add_option('-w', '--workdir',
metavar='WORKDIR',
help='Specify a working directory '
'(default: temporary dir)')
parser.add_option('-r', '--release-pocket',
default=False,
action='store_true',
help='Target the release pocket in the .changes file. '
'Necessary (and default) for uploads to PPAs')
parser.add_option('-c', '--close',
metavar='BUG',
help='Bug to close in the changelog entry.')
parser.add_option('-m', '--mirror',
metavar='URL',
help='Preferred mirror (default: Launchpad)')
parser.add_option('-l', '--lpinstance',
metavar='INSTANCE',
help='Launchpad instance to connect to '
'(default: production)')
parser.add_option('--no-conf',
default=False,
action='store_true',
help="Don't read config files or environment variables")
opts, args = parser.parse_args(args)
if len(args) != 1:
parser.error('You must specify a single source package or a .dsc '
'URL/path.')
config = UDTConfig(opts.no_conf)
if opts.builder is None:
opts.builder = config.get_value('BUILDER')
if not opts.update:
opts.update = config.get_value('UPDATE_BUILDER', boolean=True)
if opts.workdir is None:
opts.workdir = config.get_value('WORKDIR')
if opts.lpinstance is None:
opts.lpinstance = config.get_value('LPINSTANCE')
if opts.upload is None:
opts.upload = config.get_value('UPLOAD')
if opts.keyid is None:
opts.keyid = config.get_value('KEYID')
if not opts.upload and not opts.workdir:
parser.error('Please specify either a working dir or an upload target!')
if opts.upload and opts.upload.startswith('ppa:'):
opts.release_pocket = True
if opts.upload:
check_program_exists('dput')
return opts, args, config
def find_release_package(mirror, workdir, package, version, source_release,
config):
srcpkg = None
if source_release:
distribution = codename_to_distribution(source_release)
if not distribution:
error("Unknown release codename %s", source_release)
error('Unknown release codename %s' % source_release)
info = vendor_to_distroinfo(distribution)()
source_release = info.codename(source_release, default=source_release)
else:
distribution = system_distribution()
mirrors = [mirror] if mirror else []
mirrors.append(config.get_value(f"{distribution.upper()}_MIRROR"))
mirrors.append(config.get_value('%s_MIRROR' % distribution.upper()))
if not version:
archive = Distribution(distribution.lower()).getArchive()
try:
spph = archive.getSourcePackage(package, source_release)
except (SeriesNotFoundException, PackageNotFoundException) as e:
error("%s", str(e))
error(str(e))
version = spph.getVersion()
if distribution == "Debian":
srcpkg = DebianSourcePackage(package, version, workdir=workdir, mirrors=mirrors)
elif distribution == "Ubuntu":
srcpkg = UbuntuSourcePackage(package, version, workdir=workdir, mirrors=mirrors)
if distribution == 'Debian':
srcpkg = DebianSourcePackage(package,
version,
workdir=workdir,
mirrors=mirrors)
elif distribution == 'Ubuntu':
srcpkg = UbuntuSourcePackage(package,
version,
workdir=workdir,
mirrors=mirrors)
return srcpkg
def find_package(mirror, workdir, package, version, source_release, config):
"Returns the SourcePackage"
if package.endswith(".dsc"):
# Here we are using UbuntuSourcePackage just because we don't have any
# "general" class that is safely instantiable (as SourcePackage is an
# abstract class). None of the distribution-specific details within
# UbuntuSourcePackage is relevant for this use case.
return UbuntuSourcePackage(
version=version, dscfile=package, workdir=workdir, mirrors=(mirror,)
)
if package.endswith('.dsc'):
return SourcePackage(version=version, dscfile=package,
workdir=workdir, mirrors=(mirror,))
if not source_release and not version:
info = vendor_to_distroinfo(system_distribution())
source_release = info().devel()
srcpkg = find_release_package(mirror, workdir, package, version, source_release, config)
srcpkg = find_release_package(mirror, workdir, package, version,
source_release, config)
if version and srcpkg.version != version:
error(
"Requested backport of version %s but version of %s in %s is %s",
version,
package,
source_release,
srcpkg.version,
)
error('Requested backport of version %s but version of %s in %s is %s'
% (version, package, source_release, srcpkg.version))
return srcpkg
@@ -251,27 +224,15 @@ def find_package(mirror, workdir, package, version, source_release, config):
def get_backport_version(version, suffix, upload, release):
distribution = codename_to_distribution(release)
if not distribution:
error("Unknown release codename %s", release)
if distribution == "Debian":
debian_distro_info = DebianDistroInfo()
debian_codenames = debian_distro_info.supported()
if release in debian_codenames:
release_version = debian_distro_info.version(release)
if not release_version:
error("Can't find the release version for %s", release)
backport_version = f"{version}~bpo{release_version}+1"
else:
error("%s is not a supported release (%s)", release, debian_codenames)
elif distribution == "Ubuntu":
series = Distribution(distribution.lower()).getSeries(name_or_version=release)
error('Unknown release codename %s' % release)
series = Distribution(distribution.lower()).\
getSeries(name_or_version=release)
backport_version = f"{version}~bpo{series.version}.1"
else:
error("Unknown distribution «%s» for release «%s»", distribution, release)
backport_version = version + ('~%s%s.1' % (distribution.lower(), series.version))
if suffix is not None:
backport_version += suffix
elif upload and upload.startswith("ppa:"):
backport_version += "~ppa1"
elif upload and upload.startswith('ppa:'):
backport_version += '~ppa1'
return backport_version
@@ -279,25 +240,26 @@ def get_old_version(source, release):
try:
distribution = codename_to_distribution(release)
archive = Distribution(distribution.lower()).getArchive()
pkg = archive.getSourcePackage(
source, release, ("Release", "Security", "Updates", "Proposed", "Backports")
)
pkg = archive.getSourcePackage(source,
release,
('Release', 'Security', 'Updates',
'Proposed', 'Backports'))
return pkg.getVersion()
except (SeriesNotFoundException, PackageNotFoundException):
pass
return None
def get_backport_dist(release, release_pocket):
if release_pocket:
return release
return f"{release}-backports"
else:
return '%s-backports' % release
def do_build(workdir, dsc, release, builder, update):
builder = get_builder(builder)
if not builder:
return None
return
if update:
if 0 != builder.update(release):
@@ -305,121 +267,101 @@ def do_build(workdir, dsc, release, builder, update):
# builder.build is going to chdir to buildresult:
workdir = os.path.realpath(workdir)
return builder.build(os.path.join(workdir, dsc), release, os.path.join(workdir, "buildresult"))
return builder.build(os.path.join(workdir, dsc),
release,
os.path.join(workdir, "buildresult"))
def do_upload(workdir, package, bp_version, changes, upload, prompt):
print(f"Please check {package} {bp_version} in file://{workdir} carefully!")
if prompt or upload == "ubuntu":
question = f"Do you want to upload the package to {upload}"
print('Please check %s %s in file://%s carefully!' % (package, bp_version, workdir))
if prompt or upload == 'ubuntu':
question = 'Do you want to upload the package to %s' % upload
answer = YesNoQuestion().ask(question, "yes")
if answer == "no":
return
check_call(["dput", upload, changes], cwd=workdir)
check_call(['dput', upload, changes], cwd=workdir)
def orig_needed(upload, workdir, pkg):
"""Avoid a -sa if possible"""
if not upload or not upload.startswith("ppa:"):
'''Avoid a -sa if possible'''
if not upload or not upload.startswith('ppa:'):
return True
ppa = upload.split(":", 1)[1]
user, ppa = ppa.split("/", 1)
ppa = upload.split(':', 1)[1]
user, ppa = ppa.split('/', 1)
version = pkg.version.upstream_version
http = Http()
for filename in glob.glob(os.path.join(workdir, f"{pkg.source}_{version}.orig*")):
url = (
f"https://launchpad.net/~{quote(user)}/+archive/{quote(ppa)}/+sourcefiles"
f"/{quote(pkg.source)}/{quote(pkg.version.full_version)}"
f"/{quote(os.path.basename(filename))}"
)
h = Http()
for filename in glob.glob(os.path.join(workdir, '%s_%s.orig*' % (pkg.source, version))):
url = ('https://launchpad.net/~%s/+archive/%s/+files/%s'
% (user, ppa, filename))
try:
headers = http.request(url, "HEAD")[0]
if headers.status != 200 or not headers["content-location"].startswith(
"https://launchpadlibrarian.net"
):
headers, body = h.request(url, 'HEAD')
if (headers.status != 200 or
not headers['content-location'].startswith('https://launchpadlibrarian.net')):
return True
except HttpLib2Error as e:
Logger.debug(e)
Logger.info(e)
return True
return False
def do_backport(
workdir,
pkg,
suffix,
message,
close,
release,
release_pocket,
build,
builder,
update,
upload,
keyid,
prompt,
):
dirname = f"{pkg.source}-{release}"
def do_backport(workdir, pkg, suffix, close, release, release_pocket, build,
builder, update, upload, keyid, prompt):
dirname = '%s-%s' % (pkg.source, release)
srcdir = os.path.join(workdir, dirname)
if os.path.exists(srcdir):
question = f"Working directory {srcdir} already exists. Delete it?"
if YesNoQuestion().ask(question, "no") == "no":
question = 'Working directory %s already exists. Delete it?' % srcdir
if YesNoQuestion().ask(question, 'no') == 'no':
sys.exit(1)
shutil.rmtree(srcdir)
pkg.unpack(dirname)
bp_version = get_backport_version(pkg.version.full_version, suffix, upload, release)
bp_version = get_backport_version(pkg.version.full_version, suffix,
upload, release)
old_version = get_old_version(pkg.source, release)
bp_dist = get_backport_dist(release, release_pocket)
changelog = f"{message} backport to {release}."
changelog = 'No-change backport to %s' % (release,)
if close:
changelog += f" (LP: #{close})"
check_call(
[
"dch",
"--force-bad-version",
"--force-distribution",
"--preserve",
"--newversion",
bp_version,
"--distribution",
bp_dist,
changelog,
],
cwd=srcdir,
)
changelog += ' (LP: #%s)' % (close,)
check_call(['dch',
'--force-bad-version',
'--force-distribution',
'--preserve',
'--newversion', bp_version,
'--distribution', bp_dist,
changelog],
cwd=srcdir)
cmd = ["debuild", "--no-lintian", "-S", "-nc", "-uc", "-us"]
cmd = ['debuild', '--no-lintian', '-S', '-nc', '-uc', '-us']
if orig_needed(upload, workdir, pkg):
cmd.append("-sa")
cmd.append('-sa')
else:
cmd.append("-sd")
cmd.append('-sd')
if old_version:
cmd.append(f"-v{old_version}")
cmd.append('-v%s' % old_version)
env = os.environ.copy()
# An ubuntu.com e-mail address would make dpkg-buildpackage fail if there
# wasn't an Ubuntu maintainer for an ubuntu-versioned package. LP: #1007042
env.pop("DEBEMAIL", None)
env.pop('DEBEMAIL', None)
check_call(cmd, cwd=srcdir, env=env)
fn_base = pkg.source + "_" + bp_version.split(":", 1)[-1]
changes = fn_base + "_source.changes"
fn_base = pkg.source + '_' + bp_version.split(':', 1)[-1]
changes = fn_base + '_source.changes'
if build:
if 0 != do_build(workdir, fn_base + ".dsc", release, builder, update):
if 0 != do_build(workdir, fn_base + '.dsc', release, builder, update):
sys.exit(1)
# None: sign with the default signature. False: don't sign
if keyid is not False:
cmd = ["debsign"]
cmd = ['debsign']
if keyid:
cmd.append("-k" + keyid)
cmd.append('-k' + keyid)
cmd.append(changes)
check_call(cmd, cwd=workdir)
if upload:
@@ -428,68 +370,56 @@ def do_backport(
shutil.rmtree(srcdir)
def main(argv):
def main(args):
ubu_email()
args, config = parse(argv[1:])
opts, (package_or_dsc,), config = parse(args[1:])
Launchpad.login_anonymously(service=args.lpinstance)
Launchpad.login_anonymously(service=opts.lpinstance)
if not args.dest_releases:
if lsb_release:
distinfo = lsb_release.get_distro_information()
try:
current_distro = distinfo["ID"]
except KeyError:
error("No destination release specified and unable to guess yours.")
else:
err, current_distro = subprocess.getstatusoutput("lsb_release --id --short")
if err:
error("Could not run lsb_release to retrieve distribution")
if not opts.dest_releases:
distinfo = lsb_release.get_distro_information()
try:
opts.dest_releases = [distinfo['CODENAME']]
except KeyError:
error('No destination release specified and unable to guess yours.')
if current_distro == "Ubuntu":
args.dest_releases = [UbuntuDistroInfo().lts()]
elif current_distro == "Debian":
args.dest_releases = [DebianDistroInfo().stable()]
else:
error("Unknown distribution %s, can't guess target release", current_distro)
if args.workdir:
workdir = os.path.expanduser(args.workdir)
if opts.workdir:
workdir = os.path.expanduser(opts.workdir)
else:
workdir = tempfile.mkdtemp(prefix="backportpackage-")
workdir = tempfile.mkdtemp(prefix='backportpackage-')
if not os.path.exists(workdir):
os.makedirs(workdir)
try:
pkg = find_package(
args.mirror, workdir, args.package_or_dsc, args.version, args.source_release, config
)
pkg = find_package(opts.mirror,
workdir,
package_or_dsc,
opts.version,
opts.source_release,
config)
pkg.pull()
for release in args.dest_releases:
do_backport(
workdir,
pkg,
args.suffix,
args.message,
args.close,
release,
args.release_pocket,
args.build,
args.builder,
args.update,
args.upload,
args.keyid,
args.prompt,
)
for release in opts.dest_releases:
do_backport(workdir,
pkg,
opts.suffix,
opts.close,
release,
opts.release_pocket,
opts.build,
opts.builder,
opts.update,
opts.upload,
opts.keyid,
opts.prompt)
except DownloadError as e:
error("%s", str(e))
error(str(e))
finally:
if not args.workdir:
if not opts.workdir:
shutil.rmtree(workdir)
if __name__ == "__main__":
if __name__ == '__main__':
sys.exit(main(sys.argv))


@@ -36,7 +36,7 @@ _pbuilder-dist()
for distro in $(ubuntu-distro-info --all; debian-distro-info --all) stable testing unstable; do
for builder in pbuilder cowbuilder; do
echo "$builder-$distro"
for arch in i386 amd64 armhf; do
for arch in i386 amd64 armel armhf; do
echo "$builder-$distro-$arch"
done
done

bitesize Executable file (93 lines)

@@ -0,0 +1,93 @@
#!/usr/bin/python
"""Add 'bitesize' tag to bugs and add a comment."""
# Copyright (c) 2011 Canonical Ltd.
#
# bitesize is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any
# later version.
#
# bitesize is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with bitesize; see the file COPYING. If not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
# Authors:
# Daniel Holbach <daniel.holbach@canonical.com>
import sys
from optparse import OptionParser
from launchpadlib.launchpad import Launchpad
from launchpadlib.errors import HTTPError
from ubuntutools.config import UDTConfig
from ubuntutools.logger import Logger
def error_out(msg):
Logger.error(msg)
sys.exit(1)
def save_entry(entry):
try:
entry.lp_save()
except HTTPError, error:
error_out(error.content)
def tag_bug(bug):
bug.tags = bug.tags + ['bitesize'] # LP: #254901 workaround
save_entry(bug)
def main():
usage = "Usage: %prog <bug number>"
opt_parser = OptionParser(usage)
opt_parser.add_option("-l", "--lpinstance", metavar="INSTANCE",
help="Launchpad instance to connect to "
"(default: production)",
dest="lpinstance", default=None)
opt_parser.add_option("--no-conf",
help="Don't read config files or "
"environment variables.",
dest="no_conf", default=False, action="store_true")
(options, args) = opt_parser.parse_args()
config = UDTConfig(options.no_conf)
if options.lpinstance is None:
options.lpinstance = config.get_value("LPINSTANCE")
if len(args) < 1:
opt_parser.error("Need at least one bug number.")
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
if launchpad is None:
error_out("Couldn't authenticate to Launchpad.")
# check that the new main bug isn't a duplicate
try:
bug = launchpad.bugs[args[0]]
except HTTPError, error:
if error.response.status == 401:
error_out("Don't have enough permissions to access bug %s. %s" %
(args[0], error.content))
else:
raise
if 'bitesize' in bug.tags:
error_out("Bug is already marked as 'bitesize'.")
bug.newMessage(content="I'm marking this bug as 'bitesize' as it looks "
"like an issue that is easy to fix and suitable "
"for newcomers in Ubuntu development. If you need "
"any help with fixing it, talk to me about it.")
bug.subscribe(person=launchpad.me)
tag_bug(launchpad.bugs[bug.id]) # fresh bug object, LP: #336866 workaround
if __name__ == '__main__':
main()

check-mir (141 changed lines)

@@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/python
#
# Check components of build dependencies and warn about universe/multiverse
# ones, for a package destined for main/restricted
@@ -21,116 +21,71 @@
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from __future__ import print_function
"""Check if any of a package's build or binary dependencies are in universe or multiverse.
Run this inside an unpacked source package
"""
import argparse
import os.path
import sys
import optparse
import os.path
import apt
def check_support(apt_cache, pkgname, alt=False):
"""Check if pkgname is in main or restricted.
'''Check if pkgname is in main or restricted.
This prints messages if a package is not in main/restricted, or only
partially (i. e. source in main, but binary in universe).
"""
'''
if alt:
prefix = " ... alternative " + pkgname
prefix = ' ... alternative ' + pkgname
else:
prefix = " * " + pkgname
prefix = ' * ' + pkgname
prov_packages = apt_cache.get_providing_packages(pkgname)
if pkgname in apt_cache:
try:
pkg = apt_cache[pkgname]
# If this is a virtual package, iterate through the binary packages that
# provide this, and ensure they are all in Main. Source packages in and of
# themselves cannot provide virtual packages, only binary packages can.
elif len(prov_packages) > 0:
supported, unsupported = [], []
for pkg in prov_packages:
candidate = pkg.candidate
if candidate:
section = candidate.section
if section.startswith("universe") or section.startswith("multiverse"):
unsupported.append(pkg.name)
else:
supported.append(pkg.name)
if len(supported) > 0:
msg = "is a virtual package, which is provided by the following "
msg += "candidates in Main: " + " ".join(supported)
print(prefix, msg)
elif len(unsupported) > 0:
msg = "is a virtual package, but is only provided by the "
msg += "following non-Main candidates: " + " ".join(unsupported)
print(prefix, msg, file=sys.stderr)
return False
else:
msg = "is a virtual package that exists but is not provided by "
msg += "package currently in the archive. Proceed with caution."
print(prefix, msg, file=sys.stderr)
return False
else:
print(prefix, "does not exist", file=sys.stderr)
except KeyError:
print(prefix, 'does not exist (pure virtual?)', file=sys.stderr)
return False
section = pkg.candidate.section
if section.startswith("universe") or section.startswith("multiverse"):
if section.startswith('universe') or section.startswith('multiverse'):
# check if the source package is in main and thus will only need binary
# promotion
source_records = apt.apt_pkg.SourceRecords()
if not source_records.lookup(pkg.candidate.source_name):
print("ERROR: Cannot lookup source package for", pkg.name, file=sys.stderr)
print(prefix, "package is in", section.split("/")[0])
print('ERROR: Cannot lookup source package for', pkg.name,
file=sys.stderr)
print(prefix, 'package is in', section.split('/')[0])
return False
src = apt.apt_pkg.TagSection(source_records.record)
if src["Section"].startswith("universe") or src["Section"].startswith("multiverse"):
print(prefix, "binary and source package is in", section.split("/")[0])
if (src['Section'].startswith('universe') or
src['Section'].startswith('multiverse')):
print(prefix, 'binary and source package is in',
section.split('/')[0])
return False
print(
prefix,
"is in",
section.split("/")[0] + ", but its source",
pkg.candidate.source_name,
"is already in main; file an ubuntu-archive bug for "
"promoting the current preferred alternative",
)
return True
else:
print(prefix, 'is in', section.split('/')[0] + ', but its source',
pkg.candidate.source_name,
'is already in main; file an ubuntu-archive bug for '
'promoting the current preferred alternative')
return True
if alt:
print(prefix, "is already in main; consider preferring it")
print(prefix, 'is already in main; consider preferring it')
return True
def check_build_dependencies(apt_cache, control):
print("Checking support status of build dependencies...")
print('Checking support status of build dependencies...')
any_unsupported = False
for field in ("Build-Depends", "Build-Depends-Indep"):
for field in ('Build-Depends', 'Build-Depends-Indep'):
if field not in control.section:
continue
for or_group in apt.apt_pkg.parse_src_depends(control.section[field]):
pkgname = or_group[0][0]
# debhelper-compat is expected to be a build dependency of every
# package, so it is a red herring to display it in this report.
# (src:debhelper is in Ubuntu Main anyway)
if pkgname == "debhelper-compat":
continue
if not check_support(apt_cache, pkgname):
# check non-preferred alternatives
for altpkg in or_group[1:]:
@@ -145,19 +100,20 @@ def check_build_dependencies(apt_cache, control):
def check_binary_dependencies(apt_cache, control):
any_unsupported = False
print("\nChecking support status of binary dependencies...")
print('\nChecking support status of binary dependencies...')
while True:
try:
next(control)
control.next()
except StopIteration:
break
for field in ("Depends", "Pre-Depends", "Recommends"):
for field in ('Depends', 'Pre-Depends', 'Recommends'):
if field not in control.section:
continue
for or_group in apt.apt_pkg.parse_src_depends(control.section[field]):
for or_group in apt.apt_pkg.parse_src_depends(
control.section[field]):
pkgname = or_group[0][0]
if pkgname.startswith("$"):
if pkgname.startswith('$'):
continue
if not check_support(apt_cache, pkgname):
# check non-preferred alternatives
@@ -171,33 +127,32 @@ def check_binary_dependencies(apt_cache, control):
def main():
parser = argparse.ArgumentParser(description=__doc__)
description = "Check if any of a package's build or binary " + \
"dependencies are in universe or multiverse. " + \
"Run this inside an unpacked source package"
parser = optparse.OptionParser(description=description)
parser.parse_args()
apt_cache = apt.Cache()
if not os.path.exists("debian/control"):
print(
"debian/control not found. You need to run this tool in a source package directory",
file=sys.stderr,
)
if not os.path.exists('debian/control'):
print('debian/control not found. You need to run this tool in a '
'source package directory', file=sys.stderr)
sys.exit(1)
# get build dependencies from debian/control
control = apt.apt_pkg.TagFile(open("debian/control", encoding="utf-8"))
next(control)
control = apt.apt_pkg.TagFile(open('debian/control'))
control.next()
unsupported_build_deps = check_build_dependencies(apt_cache, control)
unsupported_binary_deps = check_binary_dependencies(apt_cache, control)
if unsupported_build_deps or unsupported_binary_deps:
print(
"\nPlease check https://wiki.ubuntu.com/MainInclusionProcess if "
"this source package needs to get into in main/restricted, or "
"reconsider if the package really needs above dependencies."
)
print('\nPlease check https://wiki.ubuntu.com/MainInclusionProcess if '
'this source package needs to get into in main/restricted, or '
'reconsider if the package really needs above dependencies.')
else:
print("All dependencies are supported in main or restricted.")
print('All dependencies are supported in main or restricted.')
if __name__ == "__main__":
if __name__ == '__main__':
main()

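check-mir, shown above, feeds the Build-Depends fields of debian/control through apt.apt_pkg.parse_src_depends() and looks at the first (preferred) alternative of every OR-group. The following sketch isolates just that parsing step; it is an editor's illustration that assumes it is run from an unpacked source tree containing a debian/control file.

    #!/usr/bin/python3
    # Sketch: list the preferred alternative of every Build-Depends OR-group,
    # the same items check-mir passes to check_support().
    import apt_pkg

    apt_pkg.init()  # load apt configuration before using the parsers
    with open("debian/control", encoding="utf-8") as control_file:
        source_stanza = next(apt_pkg.TagFile(control_file))  # first stanza = source package
        field = "Build-Depends"
        build_depends = source_stanza[field] if field in source_stanza else ""

    for or_group in apt_pkg.parse_src_depends(build_depends):
        preferred = or_group[0]  # first alternative of the OR-group
        print(preferred[0])      # package name; version and relation follow in the tuple

Each or_group is a list of (name, version, relation) tuples, which is why check-mir reads or_group[0][0] to get the preferred package name.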
debian/.gitignore vendored (1 changed line)

@@ -1 +0,0 @@
files

debian/changelog vendored (780 changed lines)

@@ -1,769 +1,3 @@
ubuntu-dev-tools (0.206) unstable; urgency=medium
[ Dan Bungert ]
* mk-sbuild: enable pkgmaintainermangler
[ Shengjing Zhu ]
* import-bug-from-debian: package option is overridden and not used
[ Fernando Bravo Hernández ]
* Parsing arch parameter to getBinaryPackage() (LP: #2081861)
[ Simon Quigley ]
* Read ~/.devscripts in a more robust way, to ideally pick up multi-line
variables (Closes: #725418).
* mk-sbuild: default to using UTC for schroots (LP: #2097159).
* syncpackage: s/syncblacklist/syncblocklist/g
* syncpackage: Cache the sync blocklist in-memory, so it's not fetched
multiple times when syncing more than one package.
* syncpackage: Catch exceptions cleanly, simply skipping to the next
package (erring on the side of caution) if there is an error doing the
download (LP: #1943286).
-- Simon Quigley <tsimonq2@debian.org> Tue, 04 Mar 2025 13:43:15 -0600
ubuntu-dev-tools (0.205) unstable; urgency=medium
* [syncpackage] When syncing multiple packages, if one of the packages is in
the sync blocklist, do not exit, simply continue.
* [syncpackage] Do not use exit(1) on an error or exception unless it
applies to all packages, instead return None so we can continue to the
next package.
* [syncpackage] Add support for -y or --yes, noted that it should be used
with care.
* Update Standards-Version to 4.7.2, no changes needed.
-- Simon Quigley <tsimonq2@debian.org> Sat, 01 Mar 2025 11:29:54 -0600
ubuntu-dev-tools (0.204) unstable; urgency=medium
[ Simon Quigley ]
* Update Standards-Version to 4.7.1, no changes needed.
* Add several Lintian overrides related to .pyc files.
* Add my name to the copyright file.
* Rename bitesize to lp-bitesize (Closes: #1076224).
* Add a manpage for running-autopkgtests.
* Add a large warning at the top of mk-sbuild encouraging the use of the
unshare backend. This is to provide ample warning to users.
* Remove mail line from default ~/.sbuildrc, to resolve the undeclared
dependency on sendmail (Closes: #1074632).
[ Julien Plissonneau Duquène ]
* Fix reverse-depends -b crash on packages that b-d on themselves
(Closes: #1087760).
-- Simon Quigley <tsimonq2@debian.org> Mon, 24 Feb 2025 19:54:39 -0600
ubuntu-dev-tools (0.203) unstable; urgency=medium
[ Steve Langasek ]
* ubuntu-build: handle TOCTOU issue with the "can be retried" value on
builds.
* Recommend sbuild over pbuilder. sbuild is the tool recommended by
Ubuntu developers whose behavior most closely approximates Launchpad
builds.
[ Florent 'Skia' Jacquet ]
* import-bug-from-debian: handle multipart message (Closes: #969510)
[ Benjamin Drung ]
* import-bug-from-debian: add type hints
* Bump Standards-Version to 4.7.0
* Bump year and add missing files to copyright
* setup.py: add pm-helper
* Format code with black and isort
* Address several issues pointed out by Pylint
* Depend on python3-yaml for pm-helper
-- Benjamin Drung <bdrung@debian.org> Sat, 02 Nov 2024 18:19:24 +0100
ubuntu-dev-tools (0.202) unstable; urgency=medium
[ Steve Langasek ]
* ubuntu-build: support --batch with no package names to retry all
* ubuntu-build: in batch mode, print a count of packages retried
* ubuntu-build: make the --arch option top-level.
This gets rid of the fugly --arch2 option
* ubuntu-build: support retrying builds in other states that failed-to-build
* ubuntu-build: Handling of proposed vs release pocket default for ppas
* ubuntu-build: update manpage
[ Chris Peterson ]
* Replace Depends on python3-launchpadlib with Depends on
python3-launchpadlib-desktop (LP: #2049217)
-- Simon Quigley <tsimonq2@ubuntu.com> Fri, 12 Apr 2024 23:33:14 -0500
ubuntu-dev-tools (0.201) unstable; urgency=medium
* running-autopkgtests: fix packaging to make the script available
(LP: #2055466)
-- Chris Peterson <chris.peterson@canonical.com> Thu, 29 Feb 2024 11:09:14 -0800
ubuntu-dev-tools (0.200) unstable; urgency=medium
[ Gianfranco Costamagna ]
* Team upload
[ Chris Peterson ]
* Add support to see currently running autopkgtests (running-autopkgtests)
* running-autopkgtests: use f-strings
[ Athos Ribeiro ]
* syncpackage: log LP authentication errors before halting.
[ Ying-Chun Liu (PaulLiu) ]
* Drop qemu-debootstrap
qemu-debootstrap is deprecated for a while. In newer qemu release
the command is totally removed. We can use debootstrap directly.
Signed-off-by: Ying-Chun Liu (PaulLiu) <paulliu@debian.org>
[ Logan Rosen ]
* Don't rely on debootstrap for validating Ubuntu distro
-- Gianfranco Costamagna <locutusofborg@debian.org> Thu, 15 Feb 2024 17:53:48 +0100
ubuntu-dev-tools (0.199) unstable; urgency=medium
[ Simon Quigley ]
* Add my name to Uploaders.
[ Steve Langasek ]
* Introduce a pm-helper tool.
-- Simon Quigley <tsimonq2@debian.org> Mon, 29 Jan 2024 10:03:22 -0600
ubuntu-dev-tools (0.198) unstable; urgency=medium
* In check-mir, ignore debhelper-compat when checking the build
dependencies. This is expected to be a build dependency of all packages,
so warning about it in any way is surely a red herring.
* Add proper support for virtual packages in check-mir, basing the
determination solely off of binary packages. This is not expected to be a
typical case.
-- Simon Quigley <tsimonq2@debian.org> Wed, 10 Jan 2024 20:04:02 -0600
ubuntu-dev-tools (0.197) unstable; urgency=medium
* Update the manpage for syncpackage to reflect the ability to sync
multiple packages at once.
* When using pull-*-source to grab a package which already has a defined
Vcs- field, display the exact same warning message `apt source` does.
-- Simon Quigley <tsimonq2@debian.org> Tue, 03 Oct 2023 14:01:25 -0500
ubuntu-dev-tools (0.196) unstable; urgency=medium
* Allow the user to sync multiple packages at one time (LP: #1756748).
-- Simon Quigley <tsimonq2@debian.org> Fri, 04 Aug 2023 14:37:59 -0500
ubuntu-dev-tools (0.195) unstable; urgency=medium
* Add support for the non-free-firmware components in all tools already
referencing non-free.
-- Simon Quigley <tsimonq2@debian.org> Wed, 26 Jul 2023 13:03:31 -0500
ubuntu-dev-tools (0.194) unstable; urgency=medium
[ Gianfranco Costamagna ]
* ubuntu-build: For some reasons, now you need to be authenticated before
trying to use the "PersonTeam" class features.
Do it at the begin instead of replicating the same code inside the
tool itself.
[ Steve Langasek ]
* Remove references to deprecated
http://people.canonical.com/~ubuntu-archive.
* Remove references to architectures not supported in any active
Ubuntu release.
* Remove references to ftpmaster.internal. When this name is resolvable
but firewalled, syncpackage hangs; and these are tools for developers,
not for running in an automated context in the DCs where
ftpmaster.internal is reachable.
* Excise all references to cdbs (including in test cases)
* Set apt preferences for the -proposed pocket in mk-sbuild so that
it works as expected for lunar and forward.
[ Robie Basak ]
* ubuntutools/misc: swap iter_content for raw stream with "Accept-Encoding:
identity" to fix .diff.gz downloads (LP: #2025748).
[ Vladimir Petko ]
* Fix a typo introduced in the last upload that made mk-sbuild fail
unconditionally. LP: #2017177.
-- Gianfranco Costamagna <locutusofborg@debian.org> Sat, 08 Jul 2023 08:42:05 +0200
ubuntu-dev-tools (0.193) unstable; urgency=medium
* Don't run linters at build time, or in autopkgtests. (Closes: #1031436).
-- Stefano Rivera <stefanor@debian.org> Sat, 25 Feb 2023 13:19:56 -0400
ubuntu-dev-tools (0.192) unstable; urgency=medium
[ Benjamin Drung ]
* sponsor-patch:
+ Ignore exit code 1 of debdiff call.
+ Use --skip-patches instead of --no-preparation with dpkg-source -x.
* Demote bzr/brz from Recommends to Suggests, as nowadays git is the way.
Closes: #940531
* Use PEP440 compliant version in setup.py (LP: #1991606)
* Fix issues found by flake8 on the Python scripts
* Check Python scripts with flake8 again
* Format Python code with black and run black during package build
* Sort Python imports with isort and run isort during package build
* Replace deprecated optparse with argparse
* requestbackport: Remove useless loop from locate_package
* reverse-depends: Restore field titles format
* test: Fix deprecated return value for test case
* Fix all errors and warnings found by pylint and implement most refactorings
and conventions. Run pylint during package build again.
* Bump Standards-Version to 4.6.2
* Drop unneeded X-Python3-Version from d/control
[ Masahiro Yamada ]
* mk-sbuild:
+ Handle the new location of the Debian bullseye security archive.
Closes: #1001832; LP: #1955116
[ Mattia Rizzolo ]
* requestbackport:
+ Apply patch from Krytarik Raido and Unit 193 to update the template and
workflow after the new Ubuntu Backport process has been established.
LP: #1959115
-- Benjamin Drung <bdrung@debian.org> Wed, 01 Feb 2023 12:45:15 +0100
ubuntu-dev-tools (0.191) unstable; urgency=medium
[ Dan Streetman ]
* lpapicache:
+ Make sure that login() actually logins and doesn't use cached credentials.
* ubuntu-build:
+ Fix crash caused by a change in lpapicache that changed the default
operation mode from authenticated to anonymous. LP: #1984113
[ Stefano Rivera ]
* backportpackage:
+ Add support for lsb-release-minimal, which doesn't have a Python module.
Thanks to Gioele Barabucci for the patch. Closes: #1020901; LP: #1991828
[ Mattia Rizzolo ]
* ubuntutools/archive.py:
+ Fix operation of SourcePackage._source_urls() (as used, for example, in
SourcePackage.pull() called by backportpackage) to also work when the
class is instantiated with a URL as .dsc. Fixes regression from v0.184.
Thanks to Unit 193 for the initial patch.
-- Mattia Rizzolo <mattia@debian.org> Tue, 11 Oct 2022 13:56:03 +0200
ubuntu-dev-tools (0.190) unstable; urgency=medium
[ Dimitri John Ledkov ]
* mk-sbuild:
+ For ubuntu, fix the debootstrap script to "gutsy", so to allow using
mk-sbuild for newer releases without requiring a newer debootstrap.
[ Gianfranco Costamagna ]
* pbuilder-dist: fix typo kernal/kernel
[ Benjamin Drung ]
* Add missing files to debian/copyright
* Bump Standards-Version to 4.6.1
-- Benjamin Drung <bdrung@debian.org> Thu, 16 Jun 2022 10:55:17 +0200
ubuntu-dev-tools (0.189) unstable; urgency=medium
[ Heinrich Schuchardt ]
* mk-sbuild: don't require pkg-config-<target>. LP: #1966881.
[ Tobias Heider ]
* mk-sbuild: document SCHROOT_TYPE zfs in the manpage.
-- Mattia Rizzolo <mattia@debian.org> Mon, 04 Apr 2022 15:03:31 +0200
ubuntu-dev-tools (0.188) unstable; urgency=medium
[ Mattia Rizzolo ]
* archive.py:
+ Support Python 3.6 by calling functools.lru_cache() as a function, and
avoid using @functools.cached_property (both new in Python 3.8).
[ Graham Inggs ]
* lpapicache.py:
+ Use collections.abc.Callable instead of the long deprecated
collections.Callable. LP: #1959541
-- Mattia Rizzolo <mattia@debian.org> Mon, 07 Feb 2022 16:30:07 +0100
ubuntu-dev-tools (0.187) unstable; urgency=medium
[ Paride Legovini ]
* mk-sbuild:
+ Add support for zfs-snapshot schroots. LP: #1945349
[ Mattia Rizzolo ]
* mk-sbuild:
+ Apply patch from Peter Pentchev to avoid a broken log message.
Closes: #968316
* backportpackage:
+ Support backporting to Debian releases. Closes: #776442; LP: #974132
+ Fix the guessing algorithm for the target release:
- for Debian: pick the current stable release.
- for Ubuntu: pick the current LTS release.
[ Unit 193 ]
* backportpackage:
+ Change the generated Ubuntu version following the new policy from the
Backporters team.
[ Dan Streetman ]
* misc:
+ Refactor download progress bar code.
+ Save files that have Content-Encoding correctly,
such as the changes file from upload queue packages.
* pullpkg:
+ Extract source packages pulled from upload queue.
* hugdaylist:
+ Remove long unused and non-working script.
-- Mattia Rizzolo <mattia@debian.org> Sun, 05 Dec 2021 15:58:15 +0100
ubuntu-dev-tools (0.186) unstable; urgency=medium
* Replace nose with pytest (see: #997758).
-- Stefano Rivera <stefanor@debian.org> Sun, 24 Oct 2021 16:10:44 -0700
ubuntu-dev-tools (0.185) unstable; urgency=medium
[ Alex Murray ]
* ubuntutools/archive.py:
+ Fix crash due to PersonalPackageArchiveSourcePackage() returning the
wrong object when requesting a download url. LP: #1938659
[ Krytarik Raido ]
* merge-changelog: Fix setting of newlines.
[ Dan Streetman ]
* misc: download to tmp file, to avoid leftover 0-size file on error
* misc: handle ConnectionError as NotFoundError
* archive: use proper component source packages sometimes have different
component than their bpphs, so use the correct component when downloading
binaries (LP: #1943819)
* misc: fix flake8 complaints
[ Stefano Rivera ]
* Bump Standards-Version to 4.6.0, no changes needed.
-- Stefano Rivera <stefanor@debian.org> Fri, 17 Sep 2021 15:53:02 -0700
ubuntu-dev-tools (0.184) experimental; urgency=medium
[ Dan Streetman ]
* Drop never packaged ubuntu-archive-assistant.
* Add support for downloading from private PPAs:
+ ubuntutools/misc:
- Refactor to use Pathlib and f-strings.
- Refactor to use requests instead of urllib (for the earier auth)
+ ubuntutools/archive:
- Refactor to use Pathlib.
- Add support for the special URLs of private PPAs.
* Don't use existing file without verifying their checksum.
* tests: recreate the test package files on demand.
* Remove no longer used dependencies on python3-termcolor and python3-yaml
[ Mattia Rizzolo ]
* pbuilder-dist: use shutil.which instead of
distutils.spawn.find_executable() to save a dependency. LP: #1936697
* d/control:
+ Drop redundant Recommends that are already in Depends.
+ Bump debhelper compat level to 13.
[ Marco Trevisan (Treviño) ]
* mk-sbuild:
+ Enable debugging in the finish.sh script if --debug is used.
+ Add support to configure ccache for each schroot.
-- Mattia Rizzolo <mattia@debian.org> Sat, 17 Jul 2021 17:31:19 +0200
ubuntu-dev-tools (0.183) unstable; urgency=medium
[ Dan Streetman ]
* pbuilder-dist: include missing import
-- Stefano Rivera <stefanor@debian.org> Tue, 08 Jun 2021 10:09:11 -0400
ubuntu-dev-tools (0.182) unstable; urgency=medium
[ Dan Streetman ]
* syncpackage, ubuntutools/archive.py:
Don't save dsc file to disk until requested with pull()
(LP: #1928946)
* syncpackage:
Don't login to LP if using --simulate
* d/t/control: Add minimum flake8 version
The --extend-exclude parameter is first available in flake8 3.8.0
* ubuntutools/archive.py: Fix flake8 test failure
* d/rules, d/control: Override build tests to use flake8 and nosetests3
[ Stefano Rivera ]
* Respect nocheck in DEB_BUILD_OPTIONS, again.
-- Stefano Rivera <stefanor@debian.org> Sun, 06 Jun 2021 19:52:18 -0400
ubuntu-dev-tools (0.181) unstable; urgency=medium
[ Logan Rosen ]
* Fix a couple of remaining issues from the py2→py3 move.
[ Krytarik Raido ]
* Fix typo in the logging configuration.
[ Dan Streetman ]
* pbuilder: Handle debian change from /updates to -security. LP: #1916633
Starting in bullseye, the security suite is -security instead of /updates.
* backportpackage: Don't use SourcePackage() directly. Closes: #983854
As the warning from 2010 says, don't use this class directly.
[ Balint Reczey ]
* mk-sbuild:
+ Use eatmydata only with the dpkg command.
Eatmydata wrapping the build as well could break tests.
Thanks to Julian Andres Klode for suggesting this solution
+ Use eatmydata by default.
Since only the dpkg is wrapped in eatmydata it should be the safe and
fast default. Eatmydata is widely used around apt thus it should be a
serious bug if a package can't be installed with eatmydata in use.
[ Marco Trevisan (Treviño) ]
* doc/mk-sbuild.1: Add documentation for --debootstrap-proxy and
DEBOOTSTRAP_PROXY. LP: #1926166
-- Mattia Rizzolo <mattia@debian.org> Sun, 02 May 2021 19:56:48 +0200
ubuntu-dev-tools (0.180) unstable; urgency=medium
* Drop coverage in the autopkgtest, as python3-nose-cov is not in Debian.
-- Mattia Rizzolo <mattia@debian.org> Fri, 19 Feb 2021 12:12:33 +0100
ubuntu-dev-tools (0.179) unstable; urgency=medium
[ Stefano Rivera ]
* archive.py: Evaluate the filter() fixing Debian source history queries
LP: #1913330
[ Dan Streetman ]
* allow running tests using tox
* add autopkgtests to run tests
* simplify/combine archive download functions
* add support for private ppa by logging into lp
* improve support for pull-uca-*
* fix logging/printing output to stdout/stderr
-- Dan Streetman <ddstreet@canonical.com> Mon, 01 Feb 2021 11:59:03 -0500
ubuntu-dev-tools (0.178) unstable; urgency=medium
[ Dan Streetman ]
* pullpkg: also catch and deal with InvalidPullValueError. LP: #1908770
[ Mattia Rizzolo ]
* d/control: Bump Standards-Version to 4.5.1, no changes needed.
* ubuntu-archive-assistant/mir: Fix a SyntaxWarning.
* backportpackage:
+ Add a -e/--message option to change the default "No-change"
in "No-change backport to DIST".
Thanks to Unit 193 for the initial patch.
[ You-Sheng Yang ]
* Add a dependency on tzdata, used by mk-sbuild.
[ Logan Rosen ]
* import-bug-from-debian:
+ Limit bug description length to 50k chars to support Launchpad's limits.
LP: #1193941
[ Dimitri John Ledkov ]
* pullpkg.py: fix --mirror option parsing.
* config.py: add UBUNTU_INTERNAL_MIRROR option, for launchpad internal
mirror.
* archive.py: use Regular, Ports, and Internal mirrors by default. Thus
enabling pull-lp-debs to work with ports architectures, and inside
launchpad builds too.
[ Michael R. Crusoe ]
* pbuilder-dist:
+ Use `arch-test` to determine whether the current system can run binaries
of the requested architecture, instead of hardcoding an ever-growing
list of whether something requires qemu or not. Add the "arch-test"
package to Recommends to that effect.
-- Dimitri John Ledkov <xnox@ubuntu.com> Mon, 25 Jan 2021 23:28:24 +0000
ubuntu-dev-tools (0.177) unstable; urgency=medium
[ Dan Streetman ]
* Verify checksums for downloaded binary files
* pullpkg: support pulling from Ubuntu upload queues
[ Mattia Rizzolo ]
* ubuntu-build:
+ Add support for riscv64.
* syncpackge:
+ Fix the new flake8 E741. Closes: #963310
[ Bryce Harrington ]
* update-maintainer:
+ Try to recurse upwards to find a valid debian directory. LP: #1885233
-- Mattia Rizzolo <mattia@debian.org> Sun, 28 Jun 2020 15:52:27 +0200
ubuntu-dev-tools (0.176) unstable; urgency=medium
[ Debian Janitor ]
* Fix day-of-week for changelog entry 0.66.
[ Mattia Rizzolo ]
* pbuilder-dist:
+ Add support for riscv64. LP: #1859277
* d/control: Bump Standards-Version to 4.5.0, no changes needed.
[ Colin Watson ]
* Use +sourcefiles URLs where possible. LP: #1860456
[ Dan Streetman ]
* submittodebian:
+ Open file in binary mode before writing utf-8 encoded bytes. LP: #1863119
* ubuntu-upload-permission:
+ Explicitly sort packagesets by name. LP: #1862372
* pullpkg:
+ For "-p list", show bpph arch for files built for 'all' arch.
* archive.py:
+ If using local file, avoid error trying to copy file to itself.
+ Allow pull_binaries() to accept arch=None.
* lpapicache:
+ Remove SPPH _have_all_binaries flag, as there are cases where it yields
unexpected results.
+ Remove fallback_arch from getBinaries.
+ Allow getBinaries(arch=None) to get all archs. LP: #1862286
-- Mattia Rizzolo <mattia@debian.org> Sun, 23 Feb 2020 13:03:21 +0100
ubuntu-dev-tools (0.175) unstable; urgency=medium
[ Mattia Rizzolo ]
* Trust the installed debian-keyring when checking validity of dsc
signatures.
* requestbackport:
+ Error out nicely when a tracking project doesn't exist. LP: #1852901
* d/control: Bump Standards-Version to 4.4.1, no changes needed.
[ Stefano Rivera ]
* merge-changelog: rewrite the changelog handling to use python3-debian.
[ Dan Streetman ]
* tests/pylint.conf: use jobs=0 to speed up tests.
* submittodebian: use a context manager while opening a file.
* d/control: add dependency on python3-lazr.restfulclient.
* Big refactor/rewrite of the whole archive.py module, together with a
restructuring of all the pull-pkg-* commands.
* Unify the logging using the standard python logging module, and remove the
local ubuntutools.logger module.
-- Mattia Rizzolo <mattia@debian.org> Sun, 01 Dec 2019 19:36:23 +0100
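The logging unification mentioned above is visible later in this diff (for example in the grep-merges changes, which import getLogger from ubuntutools); a minimal sketch of the pattern, with an illustrative message:
    # Sketch of the unified logging pattern replacing ubuntutools.logger.
    from ubuntutools import getLogger

    Logger = getLogger()
    Logger.info("downloading %s", "example_1.0-1.dsc")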
ubuntu-dev-tools (0.174) unstable; urgency=medium
[ Stefano Rivera ]
* reverse-depends:
+ Support reverse test dependencies as well. LP: #1843614
* ubuntutools.misc:
+ Replace Popen() calls with check_output(). Closes: #940040
+ Use a context manager to open file, to be sure to close them.
[ Dan Streetman ]
* Update setup.py to also use python3.
* reverse-depends:
+ Move from optparse to argparse.
+ Rename the typoed --recursive-deph to --recursive-depth.
+ Use list comprehensions to simplify del-during-iteration functions.
* import-bug-from-debian:
+ Migrate to argparse.
+ Add --verbose option.
+ Actually make --dry-run do a dry run.
+ Handle multiple bug numbers in the command line.
+ Correctly get the bug summary.
-- Mattia Rizzolo <mattia@debian.org> Thu, 26 Sep 2019 11:05:53 +0200
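A hedged sketch of the Popen()-to-check_output() and context-manager changes listed above (the command and filename are only examples, not the actual calls ubuntutools.misc makes):
    import subprocess

    # check_output() replaces a manual Popen()/communicate() pair
    output = subprocess.check_output(
        ["lsb_release", "--short", "--codename"], encoding="utf-8"
    )

    # a context manager guarantees the file is closed
    with open("codename.txt", "w", encoding="utf-8") as f:  # hypothetical filename
        f.write(output)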
ubuntu-dev-tools (0.173) unstable; urgency=medium
[ Stefano Rivera ]
* pull-debian-debdiff:
+ Don't unpack the older source package, it will often use the same
directory as the newer one, and break.
* merge-changelog:
+ Use ubuntutools.version.Version, to support Python 3.
* Drop 404main, it's been totally broken for years.
* Port all the Python scripts to Python 3, and remove Python 2 support.
Closes: #938740, LP: #1099537
[ Dan Streetman ]
* pull-pkg:
+ Use ubuntutools.version.Version which has strip_epoch() instead
of debian.debian_support.Version.
* Have ubuntu-dev-tools depend on the matching version of python3-ubuntutools.
[ Scott Kitterman ]
* Update requestsync to python3. Closes: #927147
[ Mattia Rizzolo ]
* Explicitly require Python3 >= 3.6.
-- Mattia Rizzolo <mattia@debian.org> Tue, 10 Sep 2019 15:35:06 +0200
ubuntu-dev-tools (0.172) unstable; urgency=medium
[ Mattia Rizzolo ]
* autopkgtest: Add an allow-stderr restriction, as things log to stderr.
[ Stefano Rivera ]
* Build-Depend on pylint (>= 2, the Python 3 version), it replaced pylint3.
* Explicitly declare python dependencies in the python library modules (the
setup.py doesn't provide these) and ubuntu-dev-tools itself.
* dh_python2 doesn't generate a python dependency in ubuntu-dev-tools, now
that it's scripts-only, so manually declare one.
* Install pull-debian-source as python 3. It was ported, and doesn't work
under python 2 any more. LP: #1841127
* Use dh_install to split the build into multiple binary packages.
-- Mattia Rizzolo <mattia@debian.org> Wed, 04 Sep 2019 08:44:51 +0200
ubuntu-dev-tools (0.171) unstable; urgency=medium
* Add an autopkgtest running the package tests.
Currently it runs the tests directly over the sources, instead of the
installed package.
* ubuntutools/archive: Disable dsc signature verification for non-Debian.
Ubuntu doesn't have a unified keyring of developers like Debian has, so
it is not feasible to check for the dsc signatures.
-- Mattia Rizzolo <mattia@debian.org> Mon, 12 Aug 2019 13:42:31 +0200
ubuntu-dev-tools (0.170) unstable; urgency=medium
[ Robie Basak ]
* pull-debian-source:
+ Add a new --no-verify-signature option, to download a source
package without checking its signature.
+ Port to Python 3. LP: #1700846
[ Mattia Rizzolo ]
* d/control:
+ Bump debhelper compat level to 12.
* reverse-depends:
+ prevent crash when specifying a specific architecture. Closes: #933018
* ubuntutools/archive:
+ Default to checking signatures while pulling a .dsc.
-- Mattia Rizzolo <mattia@debian.org> Mon, 05 Aug 2019 13:28:23 +0200
ubuntu-dev-tools (0.169) unstable; urgency=medium
[ Colin Watson ]
* mk-sbuild:
+ Set personality=linux32 by default on armel and armhf as well.
[ Sahid Orentino Ferdjaoui ]
* reverse-depends:
+ New ability to find reverse-depends recursively, and print a tree.
[ Mattia Rizzolo ]
* d/control:
+ Add myself to uploaders.
+ Bump Standards-Version to 4.4.0, no changes needed.
-- Mattia Rizzolo <mattia@debian.org> Sat, 20 Jul 2019 11:18:00 +0200
ubuntu-dev-tools (0.168) eoan; urgency=medium
* grep-merges: flake8-clean.
-- Steve Langasek <steve.langasek@ubuntu.com> Tue, 07 May 2019 21:26:05 -0700
ubuntu-dev-tools (0.167) eoan; urgency=medium
[ Colin Watson ]
* syncpackage:
+ Support wildcards in sync-blacklist (LP: #1807992).
[ Steve Langasek ]
* grep-merges:
+ support grepping by team name (full name match) now that MoM exposes
this
-- Steve Langasek <steve.langasek@ubuntu.com> Tue, 07 May 2019 18:53:46 -0700
ubuntu-dev-tools (0.166) unstable; urgency=medium
* Team upload.
[ Felipe Reyes ]
* pbuilder-dist:
+ Fix handling of --othermirror when a local archive is found. LP: #1314076
[ Jelmer Vernooij ]
* Recommend Bazaar (bzr) or Breezy (brz); the latter provides a
command-line compatible interface.
[ Mathieu Trudel-Lapierre ]
* Add a new "ubuntu-archive-assistant" tool for
proposed-migration / mir review. More information on:
https://lists.ubuntu.com/archives/ubuntu-devel/2018-September/040492.html
The tool is not currently installed; see Launchpad bug #1799568.
[ Benjamin Drung ]
* ubuntutools/test: Introduce get_source_files helper function.
* Update pylint and flake8 unittests.
* Fix invalid escape sequences '\(' or '\)'. Closes: #911689
[ Mattia Rizzolo ]
* Add missing dependencies on sensible-utils (thanks lintian!).
* wrap-and-sort -ast.
* Bump Standards-Version to 4.2.1, no changes needed.
* Use the new debhelper-compat(=11) notation and drop d/compat.
* Clarify package descriptions for Python libraries.
Thanks to Ben Finney for the patch. Closes: #804198, #804199
* Add a recommends on ubuntu-keyring | ubuntu-archive-keyring.
Closes: #838254
* mk-sbuild: disable recommends also within the chroot.
Thanks to Steve Beattie for the patch. LP: #1268684
-- Mattia Rizzolo <mattia@debian.org> Tue, 23 Oct 2018 22:08:04 +0200
ubuntu-dev-tools (0.165) unstable; urgency=medium
* Team upload.
* Bump debhelper compat level to 11.
* Fix FTBFS due to newest tar being pickier about argument order.
Closes: #897478
-- Mattia Rizzolo <mattia@debian.org> Thu, 10 May 2018 10:40:49 +0200
ubuntu-dev-tools (0.164) unstable; urgency=medium
* mk-sbuild: Initialise ubuntu_dist_ge vars so unknown releases work.
-- Adam Conrad <adconrad@ubuntu.com> Tue, 24 Apr 2018 05:24:43 -0600
ubuntu-dev-tools (0.163) unstable; urgency=medium
* mk-sbuild: Add ubuntu_dist_ge and use it to set BUILD_PKGS for Ubuntu.
@ -784,7 +18,6 @@ ubuntu-dev-tools (0.162) unstable; urgency=medium
[ Dimitri John Ledkov ]
* mk-sbuild: add support for 'overlay' in favor of older 'overlayfs'.
Closes: 799267
[ Scott Kitterman ]
* pbuilder-dist: add a --backports option to make it easier to build for
@ -2941,7 +2174,7 @@ ubuntu-dev-tools (0.66) jaunty; urgency=low
[ Jonathan Davies ]
* Added grab-merge from merges.ubuntu.com (LP: #155098).
-- Jonathan Davies <jpds@ubuntu.com> Mon, 09 Mar 2009 17:01:19 +0000
-- Jonathan Davies <jpds@ubuntu.com> Thu, 09 Mar 2009 17:01:19 +0000
ubuntu-dev-tools (0.65) jaunty; urgency=low
@ -3768,10 +3001,10 @@ ubuntu-dev-tools (0.25) hardy; urgency=low
didn't work (LP: #175183)
- added support for --http-proxy, honours now $http_proxy or $HTTP_PROXY
- removed $COMPONENTS_LINE from pbuilder call, data is crippled in the
pbuilder chroot. Instead of this behaviour add
$BASE_DIR/etc/$DISTRIBUTION/apt.conf/ directory and install a sane
sources.list, depending on the releases of Ubuntu and add --aptconfdir to
pbuilder call (LP: #175183)
pbuilder chroot.
Instead of this behaviour add $BASE_DIR/etc/$DISTRIBUTION/apt.conf/
directory and install a sane sources.list, depending on the releases of Ubuntu
and add --aptconfdir to pbuilder call (LP: #175183)
- add support for gksudo|kdesudo|sudo depending on $DESKTOP_SESSION.
or if $PBUILDAUTH is set to something else, it will be used instead of
sudo|gksudo|kdesudo (LP: #172943)
@ -4019,8 +3252,7 @@ ubuntu-dev-tools (0.11) gutsy; urgency=low
[ Siegfried-Angel Gevatter Pujals (RainCT) ]
* Added a manpage for suspicious-source.
* Fixed a bug in pbuilder-dist (it needed ftp.debian.org in sources.list to
work with Debian).
* Fixed a bug in pbuilder-dist (it needed ftp.debian.org in sources.list to work with Debian).
-- Daniel Holbach <daniel.holbach@ubuntu.com> Mon, 24 Sep 2007 09:39:24 +0200

3
debian/clean vendored
View File

@ -1 +1,2 @@
*.egg-info/
*.egg-info/*
test-data/example_*

1
debian/compat vendored Normal file
View File

@ -0,0 +1 @@
10

184
debian/control vendored
View File

@ -2,36 +2,38 @@ Source: ubuntu-dev-tools
Section: devel
Priority: optional
Maintainer: Ubuntu Developers <ubuntu-dev-tools@packages.debian.org>
Uploaders:
Benjamin Drung <bdrung@debian.org>,
Stefano Rivera <stefanor@debian.org>,
Mattia Rizzolo <mattia@debian.org>,
Simon Quigley <tsimonq2@debian.org>,
Build-Depends:
black <!nocheck>,
dctrl-tools,
debhelper-compat (= 13),
devscripts (>= 2.11.0~),
dh-make,
dh-python,
distro-info (>= 0.2~),
flake8,
isort <!nocheck>,
lsb-release,
pylint <!nocheck>,
python3-all,
python3-apt,
python3-dateutil,
python3-debian,
python3-debianbts,
python3-distro-info,
python3-httplib2,
python3-launchpadlib-desktop,
python3-pytest,
python3-requests <!nocheck>,
python3-setuptools,
python3-yaml <!nocheck>,
Standards-Version: 4.7.2
Uploaders: Benjamin Drung <bdrung@debian.org>,
Stefano Rivera <stefanor@debian.org>
Build-Depends: dctrl-tools,
debhelper (>= 10),
devscripts (>= 2.11.0~),
dh-python,
distro-info (>= 0.2~),
libwww-perl,
lsb-release,
pylint,
pylint3,
python-all (>= 2.6.5-13~),
python-apt (>= 0.7.93~),
python-debian (>= 0.1.20~),
python-distro-info (>= 0.4~),
python-flake8,
python-httplib2,
python-launchpadlib (>= 1.5.7),
python-mock,
python-setuptools,
python-soappy,
python-unittest2,
python3-all,
python3-apt,
python3-debian,
python3-distro-info,
python3-flake8,
python3-httplib2,
python3-launchpadlib,
python3-mock,
python3-setuptools
Standards-Version: 4.1.4
Rules-Requires-Root: no
Vcs-Git: https://git.launchpad.net/ubuntu-dev-tools
Vcs-Browser: https://git.launchpad.net/ubuntu-dev-tools
@ -39,54 +41,50 @@ Homepage: https://launchpad.net/ubuntu-dev-tools
Package: ubuntu-dev-tools
Architecture: all
Depends:
binutils,
dctrl-tools,
devscripts (>= 2.11.0~),
diffstat,
distro-info (>= 0.2~),
dpkg-dev,
dput,
lsb-release,
python3,
python3-apt,
python3-debian,
python3-debianbts,
python3-distro-info,
python3-httplib2,
python3-launchpadlib-desktop,
python3-lazr.restfulclient,
python3-ubuntutools (= ${binary:Version}),
python3-yaml,
sensible-utils,
sudo,
tzdata,
${misc:Depends},
${perl:Depends},
Recommends:
arch-test,
ca-certificates,
debian-archive-keyring,
debian-keyring,
debootstrap,
genisoimage,
lintian,
patch,
sbuild | pbuilder | cowbuilder,
python3-dns,
quilt,
reportbug (>= 3.39ubuntu1),
ubuntu-keyring | ubuntu-archive-keyring,
Suggests:
bzr | brz,
bzr-builddeb | brz-debian,
qemu-user-static,
Depends: binutils,
dctrl-tools,
devscripts (>= 2.11.0~),
diffstat,
distro-info (>= 0.2~),
dpkg-dev,
lsb-release,
python-apt (>= 0.7.93~),
python-debian (>= 0.1.20~),
python-distro-info (>= 0.4~),
python-httplib2,
python-launchpadlib (>= 1.5.7),
python-lazr.restfulclient,
python-ubuntutools,
sensible-utils,
sudo,
${misc:Depends},
${perl:Depends},
${python:Depends}
Recommends: bzr,
bzr-builddeb,
ca-certificates,
debian-archive-keyring,
debian-keyring,
debootstrap,
dput,
genisoimage,
libwww-perl,
lintian,
patch,
pbuilder | cowbuilder | sbuild,
python-dns,
python-soappy,
quilt,
reportbug (>= 3.39ubuntu1)
Suggests: python-simplejson | python (>= 2.7), qemu-user-static
Description: useful tools for Ubuntu developers
This is a collection of useful tools that Ubuntu developers use to make their
packaging work a lot easier.
.
Such tools include:
.
- 404main - used to check what components a package's deps are in, for
doing a main inclusion report for example.
- backportpackage - helper to test package backports
- bitesize - add the 'bitesize' tag to a bug and comment that you are
willing to help fix it.
@ -96,6 +94,7 @@ Description: useful tools for Ubuntu developers
- dch-repeat - used to repeat a change log into an older release.
- grab-merge - grabs a merge from merges.ubuntu.com easily.
- grep-merges - search for pending merges from Debian.
- hugdaylist - compile HugDay lists from bug list URLs.
- import-bug-from-debian - copy a bug from the Debian BTS to Launchpad
- merge-changelog - manually merges two Debian changelogs with the same base
version.
@ -107,19 +106,12 @@ Description: useful tools for Ubuntu developers
a Debian package and its immediate parent to generate a debdiff.
- pull-debian-source - downloads the latest source package available in
Debian of a package.
- pull-lp-source - downloads source package from Launchpad.
- pull-lp-debs - downloads debs package(s) from Launchpad.
- pull-lp-ddebs - downloads dbgsym/ddebs package(s) from Launchpad.
- pull-lp-udebs - downloads udebs package(s) from Launchpad.
- pull-debian-* - same as pull-lp-* but for Debian packages.
- pull-uca-* - same as pull-lp-* but for Ubuntu Cloud Archive packages.
- pull-pkg - common script that provides above pull-* functionality.
- pull-lp-source - downloads latest source package from Launchpad.
- pull-revu-source - downloads the latest source package from REVU
- requestbackport - file a backporting request.
- requestsync - files a sync request with Debian changelog and rationale.
- reverse-depends - find the reverse dependencies (or build dependencies) of
a package.
- running-autopkgtests - lists the currently running and/or queued
autopkgtests on the Ubuntu autopkgtest infrastructure
- seeded-in-ubuntu - query if a package is safe to upload during a freeze.
- setup-packaging-environment - assistant to get an Ubuntu installation
ready for packaging work.
@ -134,22 +126,24 @@ Description: useful tools for Ubuntu developers
package.
- update-maintainer - script to update maintainer field in ubuntu packages.
Package: python3-ubuntutools
Package: python-ubuntutools
Architecture: all
Section: python
Depends:
python3-dateutil,
python3-debian,
python3-distro-info,
python3-httplib2,
python3-launchpadlib-desktop,
python3-lazr.restfulclient,
python3-requests,
sensible-utils,
${misc:Depends},
${python3:Depends},
Description: useful APIs for Ubuntu developer tools — Python 3 library
Depends: ${misc:Depends}, ${python:Depends}
Breaks: ubuntu-dev-tools (<< 0.154)
Replaces: ubuntu-dev-tools (<< 0.154)
Description: useful library of APIs for Ubuntu developer tools (Python 2)
This package ships a collection of APIs, helpers and wrappers used to
develop useful utilities for Ubuntu developers.
.
This package installs the library for Python 3.
Python 2 variant.
Package: python3-ubuntutools
Architecture: all
Section: python
Depends: ${misc:Depends}, ${python3:Depends}
Description: useful library of APIs for Ubuntu developer tools
This package ships a collection of APIs, helpers and wrappers used to
develop useful utilities for Ubuntu developers.
.
Python 3 variant.

65
debian/copyright vendored
View File

@ -3,30 +3,26 @@ Upstream-Name: Ubuntu Developer Tools
Upstream-Contact: Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>
Source: https://launchpad.net/ubuntu-dev-tools
Files: backportpackage
Files: *
backportpackage
bash_completion/pbuilder-dist
check-symbols
debian/*
doc/backportpackage.1
doc/check-symbols.1
doc/requestsync.1
doc/ubuntu-iso.1
doc/running-autopkgtests.1
GPL-2
README.updates
requestsync
setup.py
TODO
ubuntu-iso
ubuntutools/requestsync/*.py
ubuntutools/requestsync/lp.py
ubuntutools/requestsync/mail.py
Copyright: 2007, Albert Damen <albrt@gmx.net>
2010-2024, Benjamin Drung <bdrung@ubuntu.com>
2007-2023, Canonical Ltd.
2010, Benjamin Drung <bdrung@ubuntu.com>
2007-2010, Canonical Ltd.
2006-2007, Daniel Holbach <daniel.holbach@ubuntu.com>
2010, Evan Broder <evan@ebroder.net>
2006-2007, Luke Yelavich <themuso@ubuntu.com>
2009-2010, Michael Bienia <geser@ubuntu.com>
2024-2025, Simon Quigley <tsimonq2@debian.org>
2010-2011, Stefano Rivera <stefanor@ubuntu.com>
2008, Stephan Hermann <sh@sourcecode.de>
2007, Steve Kowalik <stevenk@ubuntu.com>
@ -43,7 +39,9 @@ License: GPL-2
On Debian systems, the complete text of the GNU General Public License
version 2 can be found in the /usr/share/common-licenses/GPL-2 file.
Files: doc/import-bug-from-debian.1
Files: 404main
doc/404main.1
doc/import-bug-from-debian.1
doc/pbuilder-dist-simple.1
doc/pbuilder-dist.1
doc/submittodebian.1
@ -74,28 +72,20 @@ License: GPL-2+
On Debian systems, the complete text of the GNU General Public License
version 2 can be found in the /usr/share/common-licenses/GPL-2 file.
Files: doc/lp-bitesize.1
doc/check-mir.1
Files: doc/bitesize.1
doc/grab-merge.1
doc/hugdaylist.1
doc/merge-changelog.1
doc/pm-helper.1
doc/setup-packaging-environment.1
doc/syncpackage.1
lp-bitesize
check-mir
GPL-3
bitesize
grab-merge
hugdaylist
merge-changelog
pm-helper
pyproject.toml
run-linters
running-autopkgtests
setup-packaging-environment
syncpackage
ubuntutools/running_autopkgtests.py
ubuntutools/utils.py
Copyright: 2010-2024, Benjamin Drung <bdrung@ubuntu.com>
2007-2024, Canonical Ltd.
Copyright: 2010, Benjamin Drung <bdrung@ubuntu.com>
2007-2011, Canonical Ltd.
2008, Jonathan Patrick Davies <jpds@ubuntu.com>
2008-2010, Martin Pitt <martin.pitt@canonical.com>
2009, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
@ -117,23 +107,17 @@ Files: dch-repeat
doc/dch-repeat.1
doc/grep-merges.1
doc/mk-sbuild.1
doc/pull-pkg.1
doc/pull-lp-source.1
doc/pull-revu-source.1
doc/ubuntu-build.1
grep-merges
mk-sbuild
pull-pkg
pull-*debs
pull-*-source
requirements.txt
test-requirements.txt
tox.ini
pull-lp-source
pull-revu-source
ubuntu-build
ubuntutools/__init__.py
ubuntutools/lp/__init__.py
ubuntutools/lp/libsupport.py
ubuntutools/lp/lpapicache.py
ubuntutools/lp/udtexceptions.py
ubuntutools/misc.py
ubuntutools/pullpkg.py
Copyright: 2007-2010, Canonical Ltd.
2008-2009, Iain Lane <iain@orangesquash.org.uk>
2006, John Dong <jdong@ubuntu.com>
@ -159,6 +143,7 @@ License: GPL-3+
version 3 can be found in the /usr/share/common-licenses/GPL-3 file.
Files: doc/pull-debian-debdiff.1
doc/pull-debian-source.1
doc/requestbackport.1
doc/reverse-depends.1
doc/seeded-in-ubuntu.1
@ -168,10 +153,12 @@ Files: doc/pull-debian-debdiff.1
doc/update-maintainer.1
enforced-editing-wrapper
pull-debian-debdiff
pull-debian-source
requestbackport
reverse-depends
seeded-in-ubuntu
sponsor-patch
test-data/*
ubuntu-upload-permission
ubuntutools/archive.py
ubuntutools/builder.py
@ -181,15 +168,11 @@ Files: doc/pull-debian-debdiff.1
ubuntutools/sponsor_patch/*
ubuntutools/test/*
ubuntutools/update_maintainer.py
ubuntutools/version.py
update-maintainer
.pylintrc
Copyright: 2009-2024, Benjamin Drung <bdrung@ubuntu.com>
Copyright: 2009-2011, Benjamin Drung <bdrung@ubuntu.com>
2010, Evan Broder <evan@ebroder.net>
2008, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
2010-2011, Stefano Rivera <stefanor@ubuntu.com>
2017-2021, Dan Streetman <ddstreet@canonical.com>
2024, Canonical Ltd.
License: ISC
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above

1
debian/gbp.conf vendored
View File

@ -4,6 +4,7 @@ debian-branch = master
sign-tags = True
[dch]
id-length = 7
meta = True
auto = True
full = True

View File

@ -1 +0,0 @@
/usr/lib/python3.*

18
debian/rules vendored
View File

@ -1,14 +1,12 @@
#!/usr/bin/make -f
override_dh_auto_clean:
dh_auto_clean
rm -f .coverage
rm -rf .tox
override_dh_auto_test:
ifeq (,$(filter nocheck,$(DEB_BUILD_OPTIONS)))
python3 -m pytest -v ubuntutools
endif
export PYBUILD_NAME=ubuntutools
%:
dh $@ --with python3 --buildsystem=pybuild
dh $@ --with python2,python3 --buildsystem=pybuild
override_dh_install:
dh_install
mkdir -p debian/ubuntu-dev-tools/usr
mv debian/python-ubuntutools/usr/bin debian/ubuntu-dev-tools/usr/
mv debian/python-ubuntutools/usr/share debian/ubuntu-dev-tools/usr/

View File

@ -1,3 +0,0 @@
# pyc files are machine-generated; they're expected to have long lines and have unstated copyright
source: file-without-copyright-information *.pyc [debian/copyright]
source: very-long-line-length-in-source-file * > 512 [*.pyc:*]

View File

@ -1,7 +0,0 @@
Test-Command: python3 -m pytest -v ubuntutools
Depends:
dh-make,
python3-pytest,
python3-setuptools,
@,
Restrictions: allow-stderr

View File

@ -1,2 +0,0 @@
/usr/bin
/usr/share

29
doc/404main.1 Normal file
View File

@ -0,0 +1,29 @@
.TH 404main 1 "February 17, 2008" "ubuntu-dev-tools"
.SH NAME
404main \- check if all build dependencies of a package are in main
.SH SYNOPSIS
\fB404main\fP <\fIpackage name\fP> [<\fIdistribution\fP>]
.SH DESCRIPTION
\fB404main\fP is a script that can be used to check if a package and
all its build dependencies are in Ubuntu's main component or not.
.SH CAVEATS
\fB404main\fP will take the dependencies and build dependencies of the
packages from the first distribution listed in your
/etc/apt/sources.list file.
.PP
Also, because of this the <\fIdistribution\fP> option is NOT trustworthy; if
the dependencies changed YOU WILL GET INCORRECT RESULTS.
.SH SEE ALSO
.BR apt-cache (8)
.SH AUTHORS
\fB404main\fP was written by Pete Savage <petesavage@ubuntu.com> and
this manpage by Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>.
.PP
Both are released under the GNU General Public License, version 2 or
later.

View File

@ -1,21 +1,21 @@
.TH lp-bitesize "1" "May 9 2010" "ubuntu-dev-tools"
.TH bitesize "1" "May 9 2010" "ubuntu-dev-tools"
.SH NAME
lp-bitesize \- Add \fBbitesize\fR tag to bugs and add a comment.
bitesize \- Add \fBbitesize\fR tag to bugs and add a comment.
.SH SYNOPSIS
.B lp-bitesize \fR<\fIbug number\fR>
.B bitesize \fR<\fIbug number\fR>
.br
.B lp-bitesize \-\-help
.B bitesize \-\-help
.SH DESCRIPTION
\fBlp-bitesize\fR adds a bitesize tag to the bug, if it's not there yet. It
\fBbitesize\fR adds a bitesize tag to the bug, if it's not there yet. It
also adds a comment to the bug indicating that you are willing to help with
fixing it.
It checks for permission to operate on a given bug first,
then performs the required tasks on Launchpad.
.SH OPTIONS
Listed below are the command line options for \fBlp-bitesize\fR:
Listed below are the command line options for \fBbitesize\fR:
.TP
.BR \-h ", " \-\-help
Display a help message and exit.
@ -48,7 +48,7 @@ The default value for \fB--lpinstance\fR.
.BR ubuntu\-dev\-tools (5)
.SH AUTHORS
\fBlp-bitesize\fR and this manual page were written by Daniel Holbach
\fBbitesize\fR and this manual page were written by Daniel Holbach
<daniel.holbach@canonical.com>.
.PP
Both are released under the terms of the GNU General Public License, version 3.
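For context, a hedged launchpadlib sketch of what the tool described above does (the bug number and comment text are placeholders; this is not the script's exact code):
    from launchpadlib.launchpad import Launchpad

    lp = Launchpad.login_with("ubuntu-dev-tools", "production")
    bug = lp.bugs[1]  # placeholder bug number
    if "bitesize" not in bug.tags:
        bug.tags = bug.tags + ["bitesize"]  # tags must be reassigned, not mutated
        bug.lp_save()
    bug.newMessage(content="I am marking this bug as 'bitesize'.")  # placeholder comment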

26
doc/hugdaylist.1 Normal file
View File

@ -0,0 +1,26 @@
.TH HUGDAYLIST "1" "August 27, 2008" "ubuntu-dev-tools"
.SH NAME
hugdaylist \- produce MoinMoin wiki formatted tables based on a Launchpad bug list
.SH SYNOPSIS
.B hugdaylist [\fB\-n\fP|\fB\-\-number <NUMBER>\fP] \fBlaunchpad-buglist-url\fP
.SH DESCRIPTION
\fBhugdaylist\fP produces MoinMoin wiki formatted tables based on a
Launchpad bug list.
.SH OPTIONS
.TP
\fB\-\-number=<NUMBER>\fP
This option allows you to specify the number of entries to output.
.TP
\fBlaunchpad-buglist-url\fP
Required; this is a URL pointing to a Launchpad bug list.
.SH AUTHOR
\fBhugdaylist\fP has been written by Canonical Ltd., Daniel Holbach
<daniel.holbach@canonical.com> and Jonathan Patrick Davies <jpds@ubuntu.com>.
This manual page was written by Ryan Kavanagh <ryanakca@kubuntu.org>.
.PP
Both are released under the GNU General Public License, version 3.

View File

@ -64,15 +64,6 @@ Disable checking gpg signatures of downloaded Release files by using
debootstrap's \fB\-\-no\-check\-gpg\fR option. See \fBdebootstrap\fR (8)
for more details.
.TP
.B \-\-debootstrap\-proxy\fR=\fIPROXY
Use \fIPROXY\fR as apt proxy.
.TP
.B \-\-eatmydata
Install and use eatmydata (default)
.TP
.B \-\-skip\-eatmydata
Don't install and use eatmydata
.TP
.B \-\-distro\fR=\fIDISTRO
Enable distro-specific logic.
When not provided, the distribution is determined from \fIrelease\fR.
@ -83,31 +74,10 @@ Specify a volume group, and subsequently use a default \fBSCHROOT_TYPE\fR of
"\fBlvm-snapshot\fR" rather than "\fBdirectory\fR" (via overlayfs or
aufs) mounts.
.TP
.B \-\-zfs-dataset=\fIDATASET
Specify a zfs dataset, and subsequently use a default \fBSCHROOT_TYPE\fR of
"\fBzfs-snapshot\fR" rather than "\fBdirectory\fR" (via overlayfs or
aufs) mounts.
.TP
.B \-\-type\fR=\fISHROOT_TYPE
Specify a \fBSCHROOT_TYPE\fR. Supported values are "\fBdirectory\fR"
(default if \fB\-\-vg\fR not specified), "\fBlvm-snapshot\fR" (default
if \fB\-\-vg\fR specified), "\fBbtrfs-snapshot\fR", "\fBzfs-snapshot\fR"
and "\fBfile\fR".
.TP
.B \-\-ccache
Enable usage of \fBccache\fR by default. See \fBccache\fR (1) for
more details.
.TP
.B \-\-ccache-dir=\fIPATH
Use \fBPATH\fR as schroot ccache directory. This directory can be
safely shared by multiple schroots, but they will all use the same
\fBCCACHE_MAXSIZE\fR.
Defaults to /var/cache/ccache-sbuild.
See \fBccache\fR (1) for more details.
.TP
.B \-\-ccache-size=\fISIZE
Sets \fBSIZE\fR as the schroot \fBCCACHE_DIR\fR max-size used by ccache.
See \fBccache\fR (1) for more details.
if \fB\-\-vg\fR specified), "\fBbtrfs-snapshot\fR", and "\fBfile\fR".
.SH ENVIRONMENT VARIABLES
.TP
@ -150,14 +120,6 @@ Keyring file to use for checking gpg signatures of retrieved release files
Disable gpg verification of retrieved release files (same as
\fB\-\-debootstrap\-no\-check\-gpg\fR)
.TP
.B DEBOOTSTRAP_PROXY
Proxy to use for apt. (same as
\fB\-\-debootstrap\-proxy\fR)
.TP
.B EATMYDATA
Enable or disable eatmydata usage, see \fB\-\-eatmydata\fR
and \fB\-\-skip\-eatmydata\fR
.TP
.B SOURCE_CHROOTS_DIR
Use \fBSOURCE_CHROOTS_DIR\fR as home of schroot source directories.
(default \fB/var/lib/schroot/chroots\fR)
@ -169,18 +131,6 @@ Use \fBSOURCE_CHROOTS_TGZ\fR as home of schroot source tarballs.
.B CHROOT_SNAPSHOT_DIR
Use \fBCHROOT_SNAPSHOT_DIR\fR as home of mounted btrfs snapshots.
(default \fB/var/lib/schroot/snapshots\fR)
.TP
.B CCACHE
Enable \fBccache\fR (1) by default.
(defaults to \fB0\fR)
.TP
.B CCACHE_DIR
Use \fBCCACHE_DIR\fR as the \fBccache\fR (1) directory.
(default \fB/var/cache/ccache-sbuild\fR)
.TP
.B CCACHE_SIZE
Use \fBCCACHE_SIZE\fR as the \fBccache\fR (1) max-size.
(defaults to \fB4G\fR)
.SH FILES

View File

@ -20,7 +20,7 @@ like for example \fBpbuilder\-feisty\fP, \fBpbuilder\-sid\fP, \fBpbuilder\-gutsy
.PP
The same applies to \fBcowbuilder\-dist\fP, which uses cowbuilder. The main
difference between both is that pbuilder compresses the created chroot as a
tarball, thus using less disc space but needing to uncompress (and possibly
a tarball, thus using less disc space but needing to uncompress (and possibly
compress) its contents again on each run, and cowbuilder doesn't do this.
.SH USAGE
@ -38,7 +38,7 @@ This optional parameter will attempt to construct a chroot in a foreign
architecture.
For some architecture pairs (e.g. i386 on an amd64 install), the chroot
will be created natively.
For others (e.g. arm64 on an amd64 install), qemu\-user\-static will be
For others (e.g. armel on an i386 install), qemu\-user\-static will be
used.
Note that some combinations (e.g. amd64 on an i386 install) require
special separate kernel handling, and may break in unexpected ways.

View File

@ -1,44 +0,0 @@
.\" Copyright (C) 2023, Canonical Ltd.
.\"
.\" This program is free software; you can redistribute it and/or
.\" modify it under the terms of the GNU General Public License, version 3.
.\"
.\" This program is distributed in the hope that it will be useful,
.\" but WITHOUT ANY WARRANTY; without even the implied warranty of
.\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
.\" General Public License for more details.
.\"
.\" You should have received a copy of the GNU General Public License
.\" along with this program. If not, see <http://www.gnu.org/licenses/>.
.TH pm\-helper 1 "June 2023" ubuntu\-dev\-tools
.SH NAME
pm\-helper \- helper to guide a developer through proposed\-migration work
.SH SYNOPSIS
.B pm\-helper \fR[\fIoptions\fR] [\fIpackage\fR]
.SH DESCRIPTION
Claim a package from proposed\-migration to work on and get additional
information (such as the state of the package in Debian) that may be helpful
in unblocking it.
.PP
This tool is incomplete and under development.
.SH OPTIONS
.TP
.B \-l \fIINSTANCE\fR, \fB\-\-launchpad\fR=\fIINSTANCE\fR
Use the specified instance of Launchpad (e.g. "staging"), instead of
the default of "production".
.TP
.B \-v\fR, \fB--verbose\fR
be more verbose
.TP
\fB\-h\fR, \fB\-\-help\fR
Display a help message and exit
.SH AUTHORS
\fBpm\-helper\fR and this manpage were written by Steve Langasek
<steve.langasek@ubuntu.com>.
.PP
Both are released under the GPLv3 license.

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

89
doc/pull-debian-source.1 Normal file
View File

@ -0,0 +1,89 @@
.\" Copyright (C) 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
.\"
.\" Permission to use, copy, modify, and/or distribute this software for any
.\" purpose with or without fee is hereby granted, provided that the above
.\" copyright notice and this permission notice appear in all copies.
.\"
.\" THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
.\" REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
.\" AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
.\" INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
.\" LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
.\" OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
.\" PERFORMANCE OF THIS SOFTWARE.
.TH PULL\-DEBIAN\-SOURCE "1" "22 January 2011" "ubuntu\-dev\-tools"
.SH NAME
pull\-debian\-source \- download and extract a source package from Debian
.SH SYNOPSIS
.B pull\-debian\-source \fR[\fIoptions\fR] <\fIsource package\fR>
[\fIrelease\fR|\fIversion\fR]
.SH DESCRIPTION
\fBpull\-debian\-source\fR downloads and extracts the specified
\fIversion\fR of \fIsource package\fR, or the latest version in the
specified Debian \fIrelease\fR.
.P
\fBpull\-debian\-source\fR will try the preferred mirror, default
mirror, security mirror, and fall back to \fBLaunchpad\fR or
\fBsnapshot.debian.org\fR, in search of the requested version.
.SH OPTIONS
.TP
.I source package
The source package to download from Debian.
.TP
.I release
The release to download the source package from. Defaults to
\fBunstable\fR.
.TP
.I version
The specific version of the package to download.
.TP
.BR \-d ", " \-\-download\-only
Do not extract the source package.
.TP
.B \-m \fIDEBIAN_MIRROR\fR, \fB\-\-mirror\fR=\fIDEBIAN_MIRROR\fR
Use the specified mirror.
Should be in the form \fBhttp://ftp.debian.org/debian\fR.
If the package isn't found on this mirror, \fBpull\-debian\-source\fR
will fall back to the default mirror.
.TP
.B \-s \fIDEBSEC_MIRROR\fR, \fB\-\-security\-mirror\fR=\fIDEBSEC_MIRROR\fR
Use the specified mirror.
Should be in the form \fBhttp://security.debian.org\fR.
If the package isn't found on this mirror, \fBpull\-debian\-source\fR
will fall back to the default mirror.
.TP
.B \-\-no\-conf
Do not read any configuration files, or configuration from environment
variables.
.TP
.BR \-h ", " \-\-help
Display the usage instructions and exit.
.SH ENVIRONMENT
All of the \fBCONFIGURATION VARIABLES\fR below are also supported as
environment variables.
Variables in the environment take precedence over those in configuration
files.
.SH CONFIGURATION VARIABLES
The following variables can be set in the environment or in
.BR ubuntu\-dev\-tools (5)
configuration files.
In each case, the script\-specific variable takes precedence over the
package\-wide variable.
.TP
.BR PULL_DEBIAN_SOURCE_DEBIAN_MIRROR ", " UBUNTUTOOLS_DEBIAN_MIRROR
The default value for \fB\-\-mirror\fR.
.TP
.BR PULL_DEBIAN_SOURCE_DEBSEC_MIRROR ", " UBUNTUTOOLS_DEBSEC_MIRROR
The default value for \fB\-\-security\-mirror\fR.
.SH SEE ALSO
.BR dget (1),
.BR pull\-debian\-debdiff (1),
.BR pull\-lp\-source (1),
.BR ubuntu\-dev\-tools (5)

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

79
doc/pull-lp-source.1 Normal file
View File

@ -0,0 +1,79 @@
.TH PULL\-LP\-SOURCE "1" "4 August 2008" "ubuntu-dev-tools"
.SH NAME
pull\-lp\-source \- download a source package from Launchpad
.SH SYNOPSIS
.B pull\-lp\-source \fR[\fIoptions\fR]\fB \fBsource package\fR
[\fIrelease\fR|\fIversion\fR]
.SH DESCRIPTION
\fBpull\-lp\-source\fR downloads and extracts the specified
\fIversion\fR of <\fBsource package\fR> from Launchpad, or the latest
version of the specified \fIrelease\fR.
To request a version from a particular pocket, say
\fIrelease\fB\-\fIpocket\fR (with a magic \fB\-release\fR for only the
release pocket).
If no \fIversion\fR or \fIrelease\fR is specified, the latest version in
the development release will be downloaded.
.SH OPTIONS
Listed below are the command line options for pull\-lp\-source:
.TP
.B source package
This is the source package that you would like to be downloaded from Launchpad.
.TP
.B version
This is the version of the source package to be downloaded.
.TP
.B release
This is the release that you would like the source package to be downloaded from.
This value defaults to the current development release.
.TP
.BR \-h ", " \-\-help
Display a help message and exit.
.TP
.BR \-d ", " \-\-download\-only
Do not extract the source package.
.TP
.B \-m \fIUBUNTU_MIRROR\fR, \fB\-\-mirror\fR=\fIUBUNTU_MIRROR\fR
Use the specified Ubuntu mirror.
Should be in the form \fBhttp://archive.ubuntu.com/ubuntu\fR.
If the package isn't found on this mirror, \fBpull\-lp\-source\fR will
fall back to Launchpad, as its name implies.
.TP
.B \-\-no\-conf
Do not read any configuration files, or configuration from environment
variables.
.SH ENVIRONMENT
All of the \fBCONFIGURATION VARIABLES\fR below are also supported as
environment variables.
Variables in the environment take precedence over those in configuration
files.
.TP
.B
DIST
Specifies the default target.
.SH CONFIGURATION VARIABLES
The following variables can be set in the environment or in
.BR ubuntu\-dev\-tools (5)
configuration files.
In each case, the script\-specific variable takes precedence over the
package\-wide variable.
.TP
.BR PULL_LP_SOURCE_UBUNTU_MIRROR ", " UBUNTUTOOLS_UBUNTU_MIRROR
The default value for \fB\-\-mirror\fR.
.SH SEE ALSO
.BR dget (1),
.BR pull\-debian\-source (1),
.BR pull\-debian\-debdiff (1),
.BR ubuntu\-dev\-tools (5)
.SH AUTHOR
.PP
\fBpull\-lp\-source\fR and this manual page were written by Iain Lane
<iain@orangesquash.org.uk>.
Both are released under the GNU General Public License, version 3 or later.

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1,147 +0,0 @@
.TH PULL\-PKG "1" "28 August 2017" "ubuntu-dev-tools"
.SH NAME
pull\-pkg \- download a package for Debian, Ubuntu, UCA, or a PPA
.SH SYNOPSIS
.B pull\-pkg \fR[\fIoptions\fR]\fR <\fIpackage name\fR>
[\fIrelease\fR|\fIversion\fR]
.SH DESCRIPTION
\fBpull\-pkg\fR downloads the specified \fIversion\fR of
<\fIpackage name\fR>, or the latest version from the
specified \fIrelease\fR. To request a version from
a particular pocket say \fIrelease\fB\-\fIpocket\fR (with a magic
\fB\-release\fR for only the release pocket). If no \fIpocket\fR is
specified, all pockets will be searched except -backports.
If no \fIversion\fR or \fIrelease\fR is specified, the latest version in
the development release will be downloaded.
There are convenience scripts that set pull type and distribution
appropriately: these are
\fBpull\-lp\-source\fR, \fBpull\-lp\-debs\fR, \fBpull\-lp\-ddebs\fR,
and \fBpull\-lp\-udebs\fR, which all pull Ubuntu packages;
\fBpull\-debian\-source\fR, \fBpull\-debian\-debs\fR, \fBpull\-debian\-ddebs\fR,
and \fBpull\-debian\-udebs\fR, which all pull Debian packages;
\fBpull\-uca\-source\fR, \fBpull\-uca\-debs\fR, \fBpull\-uca\-ddebs\fR,
and \fBpull\-uca\-udebs\fR, which all pull Ubuntu Cloud Archive packages;
and \fBpull\-ppa\-source\fR, \fBpull\-ppa\-debs\fR, \fBpull\-ppa\-ddebs\fR,
and \fBpull\-ppa\-udebs\fR, which all pull from a specified Personal Package
Archive on Launchpad. Each script pulls the file type in its name, i.e.
\fIsource\fR, \fIdebs\fR, \fIddebs\fR, or \fIudebs\fR.
.SH OPTIONS
Listed below are the command line options for pull\-pkg:
.TP
.I package name
This is the name of the package to download.
You can use either the source package name, or binary package name.
.TP
.I version
This is the version of the package to download.
.TP
.I release
This is the release to download from.
For debian, you can use either the release name like \fBjessie\fR
or \fBsid\fR, or you can use the special release names \fBunstable\fR,
\fBstable\fR, or \fBtesting\fR.
For ubuntu, you can use either the release name like \fBxenial\fR
or the release-pocket like \fBxenial-proposed\fR.
For ubuntu cloud archive (uca) you can use either the uca release
name like \fBmitaka\fR or the ubuntu and uca release names like
\fBtrusty-mitaka\fR. Defaults to the current development release.
.TP
.BR \-h ", " \-\-help
Display a help message and exit.
.TP
.BR \-v ", " \-\-verbose
Be verbose about what is being done.
.TP
.BR \-d ", " \-\-download\-only
Do not extract the source package (applies only to source packages).
.TP
.B \-m \fIMIRROR\fR, \fB\-\-mirror\fR=\fIMIRROR\fR
Use the specified mirror server.
Should be in the form \fBhttp://archive.ubuntu.com/ubuntu\fR or
\fBhttp://deb.debian.org/debian\fR. If not specified or if the
package is not found on the specified mirror, this will fall
back to the default mirror(s) and/or mirror(s) from environment
variables, and then will fall back to Launchpad or Debian Snapshot.
This can be specified multiple times to try multiple mirrors.
.TP
.B \-\-no\-conf
Do not use mirrors from the default configuration, or from
any environment variables.
.TP
.B \-a \fIARCH\fR, \fB\-\-arch\fR=\fIARCH\fR
Get binary packages from the \fIARCH\fR architecture.
Defaults to the local architecture, if it can be detected.
.TP
.B \-p \fIPULL\fR, \fB\-\-pull\fR=\fIPULL\fR
What to pull: \fBsource\fR, \fBdebs\fR, \fBddebs\fR, \fBudebs\fR,
or \fBlist\fR. The \fBlist\fR action only lists all of a package's
source and binary files, but does not actually download any.
Defaults to \fBsource\fR.
.TP
.B \-D \fIDISTRO\fR, \fB\-\-distro\fR=\fIDISTRO\fR
Pull from: \fBdebian\fR, \fBuca\fR, \fBubuntu\fR, or a \fBppa\fR.
\fBlp\fR can be used instead of \fBubuntu\fR.
Any string containing \fBcloud\fR can be used instead of \fBuca\fR.
If pulling from a ppa, you must specify the PPA. Defaults to \fBubuntu\fR.
.TP
.B \-\-ppa\fR=ppa:\fIUSER/NAME\fR
Applies only when \fBdistro\fR is \fIppa\fR. Can be provided either as
a value to the \fB\-\-ppa\fR option parameter, or as a plain option
(like \fIrelease\fR or \fIversion\fR). When specified as a plain option,
the form must be \fBppa:USER/NAME\fR; when specified as a value to the
\fB\-\-ppa\fR option parameter, the leading \fBppa:\fR is optional.
.SH ENVIRONMENT
All of the \fBCONFIGURATION VARIABLES\fR below are also supported as
environment variables.
Variables in the environment take precedence over those in configuration
files.
.SH CONFIGURATION VARIABLES
The following variables can be set in the environment or in
.BR ubuntu\-dev\-tools (5)
configuration files.
In each case, the script\-specific variable takes precedence over the
package\-wide variable.
.TP
.BR UBUNTUTOOLS_UBUNTU_MIRROR
The default mirror.
.TP
.BR PULL_PKG_UBUNTU_MIRROR
The default mirror when using the \fBpull\-pkg\fR script.
.TP
.BR PULL_[LP|DEBIAN|PPA|UCA]_[SOURCE|DEBS|DDEBS|UDEBS]_MIRROR
The default mirror when using the associated script.
.SH SEE ALSO
.BR dget (1),
.BR pull\-lp\-source (1),
.BR pull\-lp\-debs (1),
.BR pull\-lp\-ddebs (1),
.BR pull\-lp\-udebs (1),
.BR pull\-debian\-source (1),
.BR pull\-debian\-debs (1),
.BR pull\-debian\-ddebs (1),
.BR pull\-debian\-udebs (1),
.BR pull\-ppa\-source (1),
.BR pull\-ppa\-debs (1),
.BR pull\-ppa\-ddebs (1),
.BR pull\-ppa\-udebs (1),
.BR pull\-uca\-source (1),
.BR pull\-uca\-debs (1),
.BR pull\-uca\-ddebs (1),
.BR pull\-uca\-udebs (1),
.BR pull\-debian\-debdiff (1),
.BR ubuntu\-dev\-tools (5)
.SH AUTHOR
.PP
\fBpull\-pkg\fR was written by Dan Streetman <ddstreet@canonical.com>,
based on the original \fBpull\-lp\-source\fR; it and this manual page
were written by Iain Lane <iain@orangesquash.org.uk>.
All are released under the GNU General Public License, version 3 or later.
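As a rough sketch (not a verbatim copy of the scripts in the tree), each convenience script listed above is essentially a thin wrapper that calls pull-pkg's implementation with a fixed distro and pull type, e.g. for pull-lp-source:
    # Sketch only, assuming the ubuntutools.pullpkg module referenced
    # elsewhere in this diff; the exact wrapper contents may differ.
    from ubuntutools.pullpkg import PullPkg

    if __name__ == "__main__":
        PullPkg.main(distro="ubuntu", pull="source")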

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

27
doc/pull-revu-source.1 Normal file
View File

@ -0,0 +1,27 @@
.TH PULL\-REVU\-SOURCE "1" "30 August 2009" "ubuntu-dev-tools"
.SH NAME
pull\-revu\-source \- download a source package from REVU
.SH SYNOPSIS
.B pull\-revu\-source \fR[\fB\-h\fR]\fB <\fBsource package\fR>
.SH DESCRIPTION
\fBpull\-revu\-source\fR downloads and extracts the latest version of
<\fBsource package\fR> from REVU.
.SH OPTIONS
Listed below are the command line options for pull\-revu\-source:
.TP
.B \-h, \-\-help
Display the usage instructions and exit.
.TP
.B <source package>
This is the source package that you would like to be downloaded from REVU.
.SH AUTHOR
.PP
\fBpull\-revu\-source\fR and this manual page were written by Nathan Handler
<nhandler@ubuntu.com>. \fBpull\-revu\-source\fR is based on \fBrevupull\fR in
\fBkubuntu\-dev\-tools\fR, written by Harald Sitter <apachelogger@ubuntu.com>.
Both are released under the GNU General Public License, version 3 or later.

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -1 +0,0 @@
pull-pkg.1

View File

@ -0,0 +1,13 @@
.TH REVERSE-BUILD-DEPENDS "1" "June 2012" "ubuntu-dev-tools"
.SH NAME
reverse-build-depends \- find packages that depend on a specific package to
build (reverse build depends)
.SH SYNOPSIS
.TP
.B reverse-build-depends \fR[\fIoptions\fR] \fIpackage
.SH DESCRIPTION
\fBreverse-build-depends\fR has been replaced by \fBreverse-depends \-b\fR.
This script now wraps \fBreverse-depends\fR.
Please use it in the future.
.SH SEE ALSO
.BR reverse-depends (1)

View File

@ -1,15 +0,0 @@
.TH running\-autopkgtests "1" "18 January 2024" "ubuntu-dev-tools"
.SH NAME
running\-autopkgtests \- dumps a list of currently running autopkgtests
.SH SYNOPSIS
.B running\-autopkgtests
.SH DESCRIPTION
Dumps a list of currently running and queued tests in Autopkgtest.
Pass --running to only see running tests, or --queued to only see
queued tests. Passing both will print both, which is the default behavior.
.SH AUTHOR
.B running\-autopkgtests
was written by Chris Peterson <chris.peterson@canonical.com>.

View File

@ -6,13 +6,7 @@
\fBsetup-packaging-environment\fR
.SH DESCRIPTION
\fBsetup-packaging-environment\fR aims to make it more straightforward for new
contributors to get their Ubuntu installation ready for packaging work. It
ensures that all four components from Ubuntu's official repositories are enabled
along with their corresponding source repositories. It also installs a minimal
set of packages needed for Ubuntu packaging work (ubuntu-dev-tools, devscripts,
debhelper, patchutils, pbuilder, and build-essential). Finally, it assists
in defining the DEBEMAIL and DEBFULLNAME environment variables.
\fBsetup-packaging-environment\fR aims to make it more straightforward for new contributors to get their Ubuntu installation ready for packaging work. It ensures that all four components from Ubuntu's official repositories are enabled along with their corresponding source repositories. It also installs a minimal set of packages needed for Ubuntu packaging work (ubuntu-dev-tools, devscripts, debhelper, cdbs, patchutils, pbuilder, and build-essential). Finally, it assists in defining the DEBEMAIL and DEBFULLNAME environment variables.
.SH AUTHORS
\fBsetup-packaging-environment\fR was written by Siegfried-A. Gevatter <rainct@ubuntu.com>.

View File

@ -4,11 +4,11 @@ syncpackage \- copy source packages from Debian to Ubuntu
.\"
.SH SYNOPSIS
.B syncpackage
[\fIoptions\fR] \fI<.dsc URL/path or package name(s)>\fR
[\fIoptions\fR] \fI<.dsc URL/path or package name>\fR
.\"
.SH DESCRIPTION
\fBsyncpackage\fR causes one or more source package(s) to be copied from Debian
to Ubuntu.
\fBsyncpackage\fR causes a source package to be copied from Debian to
Ubuntu.
.PP
\fBsyncpackage\fR allows you to upload files with the same checksums of the
Debian ones, as the common script used by Ubuntu archive administrators does,
@ -58,7 +58,7 @@ Display more progress information.
\fB\-F\fR, \fB\-\-fakesync\fR
Perform a fakesync, to work around a tarball mismatch between Debian and
Ubuntu.
This option ignores blocklisting, and performs a local sync.
This option ignores blacklisting, and performs a local sync.
It implies \fB\-\-no\-lp\fR, and will leave a signed \fB.changes\fR file
for you to upload.
.TP

View File

@ -1,14 +1,9 @@
.TH UBUNTU-BUILD "1" "Mar 2024" "ubuntu-dev-tools"
.TH UBUNTU-BUILD "1" "June 2010" "ubuntu-dev-tools"
.SH NAME
ubuntu-build \- command-line interface to Launchpad build operations
.SH SYNOPSIS
.nf
\fBubuntu-build\fR <srcpackage> <release> <operation>
\fBubuntu-build\fR --batch [--retry] [--rescore \fIPRIORITY\fR] [--arch \fIARCH\fR [...]]
[--series \fISERIES\fR] [--state \fIBUILD-STATE\fR]
[-A \fIARCHIVE\fR] [pkg]...
.fi
.B ubuntu-build <srcpackage> <release> <operation>
.SH DESCRIPTION
\fBubuntu-build\fR provides a command line interface to the Launchpad build
@ -42,8 +37,8 @@ operations.
.IP
\fB\-a\fR ARCHITECTURE, \fB\-\-arch\fR=\fIARCHITECTURE\fR
Rebuild or rescore a specific architecture. Valid
architectures are:
armhf, arm64, amd64, i386, powerpc, ppc64el, riscv64, s390x.
architectures include: amd64, sparc, powerpc, i386,
armel, armhf, arm64, ia64, lpia, hppa, ppc64el, s390x.
.TP
Batch processing:
.IP
@ -63,16 +58,14 @@ Retry builds (give\-back).
\fB\-\-rescore\fR=\fIPRIORITY\fR
Rescore builds to <priority>.
.IP
\fB\-\-arch\fR=\fIARCHITECTURE\fR
\fB\-\-arch2\fR=\fIARCHITECTURE\fR
Affect only 'architecture' (can be used several
times). Valid architectures are:
arm64, amd64, i386, powerpc, ppc64el, riscv64, s390x.
.IP
\fB\-A=\fIARCHIVE\fR
Act on the named archive (ppa) instead of on the main Ubuntu archive.
times). Valid architectures are: amd64, sparc,
powerpc, i386, armel, armhf, arm64, ia64, lpia, hppa.
.SH AUTHORS
\fBubuntu-build\fR was written by Martin Pitt <martin.pitt@canonical.com>, and
this manual page was written by Jonathan Patrick Davies <jpds@ubuntu.com>.
.PP
Both are released under the terms of the GNU General Public License, version 3.
Both are released under the terms of the GNU General Public License, version 3
or (at your option) any later version.

View File

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/python
#
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
#
@ -22,10 +22,7 @@
# UDT_EDIT_WRAPPER_TEMPLATE_RE: An extra boilerplate-detecting regex.
# UDT_EDIT_WRAPPER_FILE_DESCRIPTION: The type of file being edited.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import argparse
import optparse
import os
import re
@ -33,30 +30,33 @@ from ubuntutools.question import EditFile
def main():
parser = argparse.ArgumentParser(usage="%(prog)s [options] filename")
parser.add_argument("filename", help=argparse.SUPPRESS)
args = parser.parse_args()
if not os.path.isfile(args.filename):
parser.error(f"File {args.filename} does not exist")
parser = optparse.OptionParser('%prog [options] filename')
options, args = parser.parse_args()
if "UDT_EDIT_WRAPPER_EDITOR" in os.environ:
os.environ["EDITOR"] = os.environ["UDT_EDIT_WRAPPER_EDITOR"]
else:
del os.environ["EDITOR"]
if len(args) != 1:
parser.error('A filename must be specified')
body = args[0]
if not os.path.isfile(body):
parser.error('File %s does not exist' % body)
if "UDT_EDIT_WRAPPER_VISUAL" in os.environ:
os.environ["VISUAL"] = os.environ["UDT_EDIT_WRAPPER_VISUAL"]
if 'UDT_EDIT_WRAPPER_EDITOR' in os.environ:
os.environ['EDITOR'] = os.environ['UDT_EDIT_WRAPPER_EDITOR']
else:
del os.environ["VISUAL"]
del os.environ['EDITOR']
if 'UDT_EDIT_WRAPPER_VISUAL' in os.environ:
os.environ['VISUAL'] = os.environ['UDT_EDIT_WRAPPER_VISUAL']
else:
del os.environ['VISUAL']
placeholders = []
if "UDT_EDIT_WRAPPER_TEMPLATE_RE" in os.environ:
placeholders.append(re.compile(os.environ["UDT_EDIT_WRAPPER_TEMPLATE_RE"]))
if 'UDT_EDIT_WRAPPER_TEMPLATE_RE' in os.environ:
placeholders.append(re.compile(
os.environ['UDT_EDIT_WRAPPER_TEMPLATE_RE']))
description = os.environ.get("UDT_EDIT_WRAPPER_FILE_DESCRIPTION", "file")
description = os.environ.get('UDT_EDIT_WRAPPER_FILE_DESCRIPTION', 'file')
EditFile(args.filename, description, placeholders).edit()
EditFile(body, description, placeholders).edit()
if __name__ == "__main__":
if __name__ == '__main__':
main()

View File

@ -1,4 +1,4 @@
#! /usr/bin/python3
#! /usr/bin/python
#
# grep-merges - search for pending merges from Debian
#
@ -19,70 +19,57 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import argparse
import json
import optparse
import sys
import json
from httplib2 import Http, HttpLib2Error
import ubuntutools.misc
from ubuntutools import getLogger
Logger = getLogger()
def main():
parser = argparse.ArgumentParser(
usage="%(prog)s [options] [string]",
description="List pending merges from Debian matching string",
)
parser.add_argument("string", nargs="?", help=argparse.SUPPRESS)
args = parser.parse_args()
parser = optparse.OptionParser(
usage='%prog [options] [string]',
description='List pending merges from Debian matching string')
args = parser.parse_args()[1]
if len(args) > 1:
parser.error('Too many arguments')
elif len(args) == 1:
match = args[0]
else:
match = None
ubuntutools.misc.require_utf8()
for component in (
"main",
"main-manual",
"restricted",
"restricted-manual",
"universe",
"universe-manual",
"multiverse",
"multiverse-manual",
):
url = f"https://merges.ubuntu.com/{component}.json"
for component in ('main', 'main-manual',
'restricted', 'restricted-manual',
'universe', 'universe-manual',
'multiverse', 'multiverse-manual'):
url = 'https://merges.ubuntu.com/%s.json' % component
try:
headers, page = Http().request(url)
except HttpLib2Error as e:
Logger.exception(e)
except HttpLib2Error, e:
print >> sys.stderr, str(e)
sys.exit(1)
if headers.status != 200:
Logger.error("%s: %s %s", url, headers.status, headers.reason)
print >> sys.stderr, "%s: %s %s" % (url, headers.status,
headers.reason)
sys.exit(1)
for merge in json.loads(page):
package = merge["source_package"]
author, uploader = "", ""
if merge.get("user"):
author = merge["user"]
if merge.get("uploader"):
uploader = f"({merge['uploader']})"
teams = merge.get("teams", [])
pretty_uploader = f"{author} {uploader}"
if (
args.string is None
or args.string in package
or args.string in author
or args.string in uploader
or args.string in teams
):
Logger.info("%s\t%s", package, pretty_uploader)
package = merge['source_package']
author, uploader = '', ''
if merge.get('user'):
author = merge['user']
if merge.get('uploader'):
uploader = '(%s)' % merge['uploader']
pretty_uploader = u'{} {}'.format(author, uploader)
if match is None or match in package or match in author or match in uploader:
print '%s\t%s' % (package.encode("utf-8"), pretty_uploader.encode("utf-8"))
if __name__ == "__main__":
if __name__ == '__main__':
main()

139
hugdaylist Executable file
View File

@ -0,0 +1,139 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Canonical Ltd., Daniel Holbach
# Copyright (C) 2008 Jonathan Patrick Davies <jpds@ubuntu.com>
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL-3 for more details.
#
# ##################################################################
#
#
# hugdaylist - produces MoinMoin wiki formatted tables based on a Launchpad bug
# list.
#
# hugdaylist <url>
# - produces lists like https://wiki.ubuntu.com/UbuntuBugDay/20070912?action=raw
#
# hugdaylist -n <howmany> <url>
# - will only list <howmany> URLs.
import sys
from optparse import OptionParser
from launchpadlib.launchpad import Launchpad
from ubuntutools.lp.libsupport import translate_web_api
def check_args():
howmany = -1
url = ""
# Our usage options.
usage = "usage: %prog [-n <number>] launchpad-buglist-url"
opt_parser = OptionParser(usage)
# Options - namely just the number of bugs to output.
opt_parser.add_option("-n", "--number", type="int",
dest="number", help="Number of entries to output.")
# Parse arguments.
(options, args) = opt_parser.parse_args()
# Check if we want a number other than the default.
howmany = options.number
# Check that we have an URL.
if not args:
print >> sys.stderr, "An URL pointing to a Launchpad bug list is " \
"required."
opt_parser.print_help()
sys.exit(1)
else:
url = args[0]
return (howmany, url)
def filter_unsolved(task):
# TODO: don't use this filter here, only check status and assignee of
# the given task
# Filter out special types of bugs:
# - https://wiki.ubuntu.com/Bugs/HowToTriage#Special%20types%20of%20bugs
# this is expensive, parse name out of self_link instead?
subscriptions = set(s.person.name for s in task.bug.subscriptions)
if (task.status != "Fix Committed" and
(not task.assignee or task.assignee.name in ['motu', 'desktop-bugs']) and
'ubuntu-sponsors' not in subscriptions and
'ubuntu-archive' not in subscriptions):
return True
return False
def main():
(howmany, url) = check_args()
if len(url.split("?", 1)) == 2:
# search options not supported, because there is no mapping web ui
# options <-> API options
print >> sys.stderr, "Options in url are not supported, url: %s" % url
sys.exit(1)
launchpad = None
try:
launchpad = Launchpad.login_with("ubuntu-dev-tools", 'production')
except IOError, error:
print error
sys.exit(1)
api_url = translate_web_api(url, launchpad)
try:
product = launchpad.load(api_url)
except Exception, error:
response = getattr(error, "response", {})
if response.get("status", None) == "404":
print >> sys.stderr, ("The URL at '%s' does not appear to be a "
"valid url to a product") % url
sys.exit(1)
else:
raise
bug_list = [b for b in product.searchTasks() if filter_unsolved(b)]
if not bug_list:
print "Bug list of %s is empty." % url
sys.exit(0)
if howmany == -1:
howmany = len(bug_list)
print """
## ||<rowbgcolor="#CCFFCC"> This task is done || somebody || ||
## ||<rowbgcolor="#FFFFCC"> This task is assigned || somebody || <status> ||
## ||<rowbgcolor="#FFEBBB"> This task isn't || ... || ||
## ||<rowbgcolor="#FFCCCC"> This task is blocked on something || somebody || <explanation> ||
|| Bug || Subject || Triager ||"""
for i in list(bug_list)[:howmany]:
bug = i.bug
print '||<rowbgcolor="#FFEBBB"> [%s %s] || %s || ||' % \
(bug.web_link, bug.id, bug.title)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print >> sys.stderr, "Aborted."
sys.exit(1)

View File

@ -1,4 +1,5 @@
#!/usr/bin/python3
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright © 2009 James Westby <james.westby@ubuntu.com>,
# 2010, 2011 Stefano Rivera <stefanor@ubuntu.com>
@ -21,66 +22,67 @@
#
# ##################################################################
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import argparse
import logging
from optparse import OptionParser, SUPPRESS_HELP
import re
import sys
import webbrowser
from collections.abc import Iterable
from email.message import EmailMessage
import debianbts
from launchpadlib.launchpad import Launchpad
from ubuntutools import getLogger
from ubuntutools.config import UDTConfig
from ubuntutools.logger import Logger
Logger = getLogger()
ATTACHMENT_MAX_SIZE = 2000
try:
import SOAPpy
except ImportError:
Logger.error("Please install 'python-soappy' in order to use this utility.")
sys.exit(1)
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
"-b",
"--browserless",
action="store_true",
help="Don't open the bug in the browser at the end",
)
parser.add_argument(
"-l",
"--lpinstance",
metavar="INSTANCE",
help="LP instance to connect to (default: production)",
)
parser.add_argument(
"-v", "--verbose", action="store_true", help="Print info about the bug being imported"
)
parser.add_argument(
"-n",
"--dry-run",
action="store_true",
help="Don't actually open a bug (also sets verbose)",
)
parser.add_argument(
"-p", "--package", help="Launchpad package to file bug against (default: Same as Debian)"
)
parser.add_argument(
"--no-conf", action="store_true", help="Don't read config files or environment variables."
)
parser.add_argument("bugs", nargs="+", help="Bug number(s) or URL(s)")
return parser.parse_args()
def get_bug_numbers(bug_list: Iterable[str]) -> list[int]:
def main():
bug_re = re.compile(r"bug=(\d+)")
url = 'http://bugs.debian.org/cgi-bin/soap.cgi'
namespace = 'Debbugs/SOAP'
debbugs = SOAPpy.SOAPProxy(url, namespace)
# debug
# debbugs.config.dumpSOAPOut = 1
# debbugs.config.dumpSOAPIn = 1
parser = OptionParser(usage="%prog [option] bug ...")
parser.add_option("-b", "--browserless",
help="Don't open the bug in the browser at the end",
dest="browserless", action="store_true")
parser.add_option("-l", "--lpinstance", metavar="INSTANCE",
help="Launchpad instance to connect to "
"(default: production)",
dest="lpinstance", default=None)
parser.add_option("-n", "--dry-run",
help=SUPPRESS_HELP,
dest="lpinstance", action="store_const", const="staging")
parser.add_option("-p", "--package", metavar="PACKAGE",
help="Launchpad package to file bug against "
"(default: Same as Debian)",
dest="package", default=None)
parser.add_option("--no-conf", dest="no_conf", default=False,
help="Don't read config files or environment variables.",
action="store_true")
(options, args) = parser.parse_args()
config = UDTConfig(options.no_conf)
if options.lpinstance is None:
options.lpinstance = config.get_value("LPINSTANCE")
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
debian = launchpad.distributions['debian']
ubuntu = launchpad.distributions['ubuntu']
lp_debbugs = launchpad.bug_trackers.getByName(name='debbugs')
bug_nums = []
for bug_num in bug_list:
for bug_num in args:
if bug_num.startswith("http"):
# bug URL
match = bug_re.search(bug_num)
@@ -89,168 +91,49 @@ def get_bug_numbers(bug_list: Iterable[str]) -> list[int]:
sys.exit(1)
bug_num = match.groups()[0]
bug_num = bug_num.lstrip("#")
bug_nums.append(int(bug_num))
bug_num = int(bug_num)
bug_nums.append(bug_num)
return bug_nums
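For clarity, get_bug_numbers() accepts plain numbers, '#'-prefixed numbers, and bug URLs containing a 'bug=' query parameter; a short sketch with arbitrary, made-up bug numbers:

# Sketch: the input forms get_bug_numbers() accepts (bug numbers are made up).
examples = [
    "https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1000001",
    "#1000002",
    "1000003",
]
print(get_bug_numbers(examples))   # -> [1000001, 1000002, 1000003]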
bugs = debbugs.get_status(*bug_nums)
def walk_multipart_message(message: EmailMessage) -> tuple[str, list[tuple[int, EmailMessage]]]:
summary = ""
attachments = []
i = 1
for part in message.walk():
content_type = part.get_content_type()
if content_type.startswith("multipart/"):
# we're already iterating on multipart items
# let's just skip the multipart extra metadata
continue
if content_type == "application/pgp-signature":
# we're not interested in importing pgp signatures
continue
if part.is_attachment():
attachments.append((i, part))
elif content_type.startswith("image/"):
# images here are not attachments, they are inline, but Launchpad can't handle that,
# so let's add them as attachments
summary += f"Message part #{i}\n"
summary += f"[inline image '{part.get_filename()}']\n\n"
attachments.append((i, part))
elif content_type.startswith("text/html"):
summary += f"Message part #{i}\n"
summary += "[inline html]\n\n"
attachments.append((i, part))
elif content_type == "text/plain":
summary += f"Message part #{i}\n"
summary += part.get_content() + "\n"
else:
raise RuntimeError(
f"""Unknown message part
Your Debian bug is too weird to be imported in Launchpad, sorry.
You can fix that by patching this script in ubuntu-dev-tools.
Faulty message part:
{part}"""
)
i += 1
return summary, attachments
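As a rough illustration of what the walk above produces, here is a hedged sketch that feeds walk_multipart_message() a synthetic multipart mail instead of a real Debian bug log (all names and contents are invented):

from email.message import EmailMessage

msg = EmailMessage()
msg.set_content("Package: hello\nVersion: 1.0-1\n\nIt crashes on start.")
msg.add_attachment(b"\x89PNG...", maintype="image", subtype="png",
                   filename="screenshot.png")   # marked as an attachment

summary, attachments = walk_multipart_message(msg)
print(summary)        # "Message part #1" followed by the plain-text report
for index, part in attachments:
    print(index, part.get_filename())   # 2 screenshot.png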
def process_bugs(
bugs: Iterable[debianbts.Bugreport],
launchpad: Launchpad,
package: str,
dry_run: bool = True,
browserless: bool = False,
) -> bool:
debian = launchpad.distributions["debian"]
ubuntu = launchpad.distributions["ubuntu"]
lp_debbugs = launchpad.bug_trackers.getByName(name="debbugs")
err = False
for bug in bugs:
ubupackage = bug.source
if package:
ubupackage = package
bug_num = bug.bug_num
subject = bug.subject
log = debianbts.get_bug_log(bug_num)
message = log[0]["message"]
assert isinstance(message, EmailMessage)
attachments: list[tuple[int, EmailMessage]] = []
if message.is_multipart():
summary, attachments = walk_multipart_message(message)
else:
summary = str(message.get_payload())
target = ubuntu.getSourcePackage(name=ubupackage)
if target is None:
Logger.error(
"Source package '%s' is not in Ubuntu. Please specify "
"the destination source package with --package",
ubupackage,
)
err = True
continue
description = f"Imported from Debian bug http://bugs.debian.org/{bug_num}:\n\n{summary}"
# LP limits descriptions to 50K chars
description = (description[:49994] + " [...]") if len(description) > 50000 else description
Logger.debug("Target: %s", target)
Logger.debug("Subject: %s", subject)
Logger.debug("Description: ")
Logger.debug(description)
for i, attachment in attachments:
Logger.debug("Attachment #%s (%s)", i, attachment.get_filename() or "inline")
Logger.debug("Content:")
if attachment.get_content_type() == "text/plain":
content = attachment.get_content()
if len(content) > ATTACHMENT_MAX_SIZE:
content = (
content[:ATTACHMENT_MAX_SIZE]
+ f" [attachment cropped after {ATTACHMENT_MAX_SIZE} characters...]"
)
Logger.debug(content)
else:
Logger.debug("[data]")
if dry_run:
Logger.info("Dry-Run: not creating Ubuntu bug.")
continue
u_bug = launchpad.bugs.createBug(target=target, title=subject, description=description)
for i, attachment in attachments:
name = f"#{i}-{attachment.get_filename() or "inline"}"
content = attachment.get_content()
if isinstance(content, str):
# Launchpad only wants bytes
content = content.encode()
u_bug.addAttachment(
filename=name,
data=content,
comment=f"Imported from Debian bug http://bugs.debian.org/{bug_num}",
)
d_sp = debian.getSourcePackage(name=package)
if d_sp is None and package:
d_sp = debian.getSourcePackage(name=package)
d_task = u_bug.addTask(target=d_sp)
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
d_task.bug_watch = d_watch
d_task.lp_save()
Logger.info("Opened %s", u_bug.web_link)
if not browserless:
webbrowser.open(u_bug.web_link)
return err
def main() -> None:
options = parse_args()
config = UDTConfig(options.no_conf)
if options.lpinstance is None:
options.lpinstance = config.get_value("LPINSTANCE")
if options.dry_run:
launchpad = Launchpad.login_anonymously("ubuntu-dev-tools")
options.verbose = True
else:
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
if options.verbose:
Logger.setLevel(logging.DEBUG)
bugs = debianbts.get_status(get_bug_numbers(options.bugs))
if len(bug_nums) > 1:
bugs = bugs[0]
if not bugs:
Logger.error("Cannot find any of the listed bugs")
sys.exit(1)
if process_bugs(bugs, launchpad, options.package, options.dry_run, options.browserless):
sys.exit(1)
for bug in bugs:
bug = bug.value
ubupackage = package = bug.source
if options.package:
ubupackage = options.package
bug_num = bug.bug_num
subject = bug.subject
log = debbugs.get_bug_log(bug_num)
summary = log[0][0]
target = ubuntu.getSourcePackage(name=ubupackage)
if target is None:
Logger.error("Source package '%s' is not in Ubuntu. Please specify "
"the destination source package with --package",
ubupackage)
sys.exit(1)
u_bug = launchpad.bugs.createBug(
target=target, title=subject,
description='Imported from Debian bug http://bugs.debian.org/%d:\n\n%s' %
(bug_num, summary))
d_sp = debian.getSourcePackage(name=package)
if d_sp is None and options.package:
d_sp = debian.getSourcePackage(name=options.package)
d_task = u_bug.addTask(target=d_sp)
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
d_task.bug_watch = d_watch
d_task.lp_save()
Logger.normal("Opened %s", u_bug.web_link)
if not options.browserless:
webbrowser.open(u_bug.web_link)
if __name__ == "__main__":
if __name__ == '__main__':
main()

View File

@@ -1,105 +0,0 @@
#!/usr/bin/python3
"""Add 'bitesize' tag to bugs and add a comment."""
# Copyright (c) 2011 Canonical Ltd.
#
# bitesize is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any
# later version.
#
# bitesize is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with bitesize; see the file COPYING. If not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
# Authors:
# Daniel Holbach <daniel.holbach@canonical.com>
import argparse
import sys
from launchpadlib.errors import HTTPError
from launchpadlib.launchpad import Launchpad
from ubuntutools import getLogger
from ubuntutools.config import UDTConfig
Logger = getLogger()
def error_out(msg, *args):
Logger.error(msg, *args)
sys.exit(1)
def save_entry(entry):
try:
entry.lp_save()
except HTTPError as error:
error_out("%s", error.content)
def tag_bug(bug):
bug.tags = bug.tags + ["bitesize"] # LP: #254901 workaround
save_entry(bug)
def main():
parser = argparse.ArgumentParser(usage="%(prog)s [options] <bug number>")
parser.add_argument(
"-l",
"--lpinstance",
metavar="INSTANCE",
help="Launchpad instance to connect to (default: production)",
dest="lpinstance",
default=None,
)
parser.add_argument(
"--no-conf",
help="Don't read config files or environment variables.",
dest="no_conf",
default=False,
action="store_true",
)
parser.add_argument("bug_number", help=argparse.SUPPRESS)
args = parser.parse_args()
config = UDTConfig(args.no_conf)
if args.lpinstance is None:
args.lpinstance = config.get_value("LPINSTANCE")
launchpad = Launchpad.login_with("ubuntu-dev-tools", args.lpinstance)
if launchpad is None:
error_out("Couldn't authenticate to Launchpad.")
# check that the new main bug isn't a duplicate
try:
bug = launchpad.bugs[args.bug_number]
except HTTPError as error:
if error.response.status == 401:
error_out(
"Don't have enough permissions to access bug %s. %s",
args.bug_number,
error.content,
)
else:
raise
if "bitesize" in bug.tags:
error_out("Bug is already marked as 'bitesize'.")
bug.newMessage(
content="I'm marking this bug as 'bitesize' as it looks "
"like an issue that is easy to fix and suitable "
"for newcomers in Ubuntu development. If you need "
"any help with fixing it, talk to me about it."
)
bug.subscribe(person=launchpad.me)
tag_bug(launchpad.bugs[bug.id]) # fresh bug object, LP: #336866 workaround
if __name__ == "__main__":
main()

View File

@@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright © 2008 Canonical Ltd.
# Author: Scott James Remnant <scott at ubuntu.com>.
@@ -18,67 +18,256 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import re
import sys
from debian.changelog import Changelog
from ubuntutools import getLogger
Logger = getLogger()
def usage(exit_code=1):
Logger.info(
"""Usage: merge-changelog <left changelog> <right changelog>
print '''Usage: merge-changelog <left changelog> <right changelog>
merge-changelog takes two changelogs that once shared a common source,
merges them back together, and prints the merged result to stdout. This
is useful if you need to manually merge an Ubuntu package with a new
Debian release of the package.
"""
)
'''
sys.exit(exit_code)
########################################################################
# Changelog Management
########################################################################
# Regular expression for top of debian/changelog
CL_RE = re.compile(r'^(\w[-+0-9a-z.]*) \(([^\(\) \t]+)\)((\s+[-0-9a-z]+)+)\;',
re.IGNORECASE)
def merge_changelog(left_changelog, right_changelog):
"""Merge a changelog file."""
with open(left_changelog, encoding="utf-8") as f:
left_cl = Changelog(f)
with open(right_changelog, encoding="utf-8") as f:
right_cl = Changelog(f)
left_cl = read_changelog(left_changelog)
right_cl = read_changelog(right_changelog)
left_versions = set(left_cl.versions)
right_versions = set(right_cl.versions)
left_blocks = iter(left_cl)
right_blocks = iter(right_cl)
for right_ver, right_text in right_cl:
while len(left_cl) and left_cl[0][0] > right_ver:
(left_ver, left_text) = left_cl.pop(0)
print left_text
clist = sorted(left_versions | right_versions, reverse=True)
remaining = len(clist)
for version in clist:
remaining -= 1
if version in left_versions:
block = next(left_blocks)
if version in right_versions:
next(right_blocks)
else:
block = next(right_blocks)
while len(left_cl) and left_cl[0][0] == right_ver:
(left_ver, left_text) = left_cl.pop(0)
assert block.version == version
print right_text
Logger.info("%s%s", str(block).strip(), "\n" if remaining else "")
for _, left_text in left_cl:
print left_text
return False
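A small worked example of the interleaving (hypothetical version numbers): if the left changelog carries 1.0-1ubuntu1 and 1.0-1 while the right one carries 1.1-1 and 1.0-1, the merged output is 1.1-1, then 1.0-1ubuntu1, then 1.0-1, each block taken from whichever side has it. The ordering step can be sketched with python-debian as follows, assuming two changelog files exist at the hypothetical paths used here:

from debian.changelog import Changelog

with open("left/debian/changelog", encoding="utf-8") as f:
    left = Changelog(f)
with open("right/debian/changelog", encoding="utf-8") as f:
    right = Changelog(f)

left_versions, right_versions = set(left.versions), set(right.versions)
# Newest first, exactly the order the merged changelog is printed in.
for version in sorted(left_versions | right_versions, reverse=True):
    side = "left" if version in left_versions else "right"
    print(version, "<-", side)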
def read_changelog(filename):
"""Return a parsed changelog file."""
entries = []
changelog_file = open(filename)
try:
(ver, text) = (None, "")
for line in changelog_file:
match = CL_RE.search(line)
if match:
try:
ver = Version(match.group(2))
except ValueError:
ver = None
text += line
elif line.startswith(" -- "):
if ver is None:
ver = Version("0")
text += line
entries.append((ver, text))
(ver, text) = (None, "")
elif len(line.strip()) or ver is not None:
text += line
finally:
changelog_file.close()
if len(text):
entries.append((ver, text))
return entries
########################################################################
# Version parsing code
########################################################################
# Regular expressions make validating things easy
VALID_EPOCH = re.compile(r'^[0-9]+$')
VALID_UPSTREAM = re.compile(r'^[A-Za-z0-9+:.~-]*$')
VALID_REVISION = re.compile(r'^[A-Za-z0-9+.~]+$')
# Character comparison table for upstream and revision components
CMP_TABLE = "~ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz+-.:"
class Version(object):
"""Debian version number.
This class is designed to be reasonably transparent and allow you
to write code like:
| s.version >= '1.100-1'
The comparison will be done according to Debian rules, so '1.2' will
compare lower.
Properties:
epoch Epoch
upstream Upstream version
revision Debian/local revision
"""
def __init__(self, ver):
"""Parse a string or number into the three components."""
self.epoch = 0
self.upstream = None
self.revision = None
ver = str(ver)
if not len(ver):
raise ValueError
# Epoch is component before first colon
idx = ver.find(":")
if idx != -1:
self.epoch = ver[:idx]
if not len(self.epoch):
raise ValueError
if not VALID_EPOCH.search(self.epoch):
raise ValueError
ver = ver[idx+1:]
# Revision is component after last hyphen
idx = ver.rfind("-")
if idx != -1:
self.revision = ver[idx+1:]
if not len(self.revision):
raise ValueError
if not VALID_REVISION.search(self.revision):
raise ValueError
ver = ver[:idx]
# Remaining component is upstream
self.upstream = ver
if not len(self.upstream):
raise ValueError
if not VALID_UPSTREAM.search(self.upstream):
raise ValueError
self.epoch = int(self.epoch)
def get_without_epoch(self):
"""Return the version without the epoch."""
string = self.upstream
if self.revision is not None:
string += "-%s" % (self.revision,)
return string
without_epoch = property(get_without_epoch)
def __str__(self):
"""Return the class as a string for printing."""
string = ""
if self.epoch > 0:
string += "%d:" % (self.epoch,)
string += self.upstream
if self.revision is not None:
string += "-%s" % (self.revision,)
return string
def __repr__(self):
"""Return a debugging representation of the object."""
return "<%s epoch: %d, upstream: %r, revision: %r>" \
% (self.__class__.__name__, self.epoch,
self.upstream, self.revision)
def __cmp__(self, other):
"""Compare two Version classes."""
other = Version(other)
result = cmp(self.epoch, other.epoch)
if result != 0:
return result
result = deb_cmp(self.upstream, other.upstream)
if result != 0:
return result
result = deb_cmp(self.revision or "", other.revision or "")
if result != 0:
return result
return 0
def strcut(string, idx, accept):
"""Cut characters from string that are entirely in accept."""
ret = ""
while idx < len(string) and string[idx] in accept:
ret += string[idx]
idx += 1
return (ret, idx)
def deb_order(string, idx):
"""Return the comparison order of two characters."""
if idx >= len(string):
return 0
elif string[idx] == "~":
return -1
else:
return CMP_TABLE.index(string[idx])
def deb_cmp_str(x, y):
"""Compare two strings in a deb version."""
idx = 0
while (idx < len(x)) or (idx < len(y)):
result = deb_order(x, idx) - deb_order(y, idx)
if result < 0:
return -1
elif result > 0:
return 1
idx += 1
return 0
def deb_cmp(x, y):
"""Implement the string comparison outlined by Debian policy."""
x_idx = y_idx = 0
while x_idx < len(x) or y_idx < len(y):
# Compare strings
(x_str, x_idx) = strcut(x, x_idx, CMP_TABLE)
(y_str, y_idx) = strcut(y, y_idx, CMP_TABLE)
result = deb_cmp_str(x_str, y_str)
if result != 0:
return result
# Compare numbers
(x_str, x_idx) = strcut(x, x_idx, "0123456789")
(y_str, y_idx) = strcut(y, y_idx, "0123456789")
result = cmp(int(x_str or "0"), int(y_str or "0"))
if result != 0:
return result
return 0
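The rules implemented above (the '~' character sorts before everything, including the end of the string, and runs of digits compare numerically) are the standard Debian ones. A quick, standalone way to sanity-check them is python-debian's Version class; this sketch is not part of the script itself:

from debian.debian_support import Version

assert Version("1.2") < Version("1.100-1")   # digit runs compare numerically
assert Version("1.0~rc1") < Version("1.0")   # '~' sorts before end-of-string
assert Version("2:0.9") > Version("1.5")     # a higher epoch wins outright
print("ordering checks passed")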
def main():
if len(sys.argv) > 1 and sys.argv[1] in ("-h", "--help"):
if len(sys.argv) > 1 and sys.argv[1] in ('-h', '--help'):
usage(0)
if len(sys.argv) != 3:
usage(1)
@@ -90,5 +279,5 @@ def main():
sys.exit(0)
if __name__ == "__main__":
if __name__ == '__main__':
main()

331
mk-sbuild
View File

@@ -26,7 +26,7 @@
# ##################################################################
#
# This script creates chroots designed to be used in a snapshot mode
# (with LVM, btrfs, zfs, overlay, or aufs) with schroot and sbuild.
# (with LVM, btrfs, overlay, or aufs) with schroot and sbuild.
# Much love to "man sbuild-setup", https://wiki.ubuntu.com/PbuilderHowto,
# and https://help.ubuntu.com/community/SbuildLVMHowto.
#
@@ -40,18 +40,15 @@ SOURCE_CHROOTS_DIR="/var/lib/schroot/chroots"
SOURCE_CHROOTS_TGZ="/var/lib/schroot/tarballs"
CHROOT_SNAPSHOT_DIR="/var/lib/schroot/snapshots"
SCHROOT_PROFILE="sbuild"
CCACHE_DIR="/var/cache/ccache-sbuild"
CCACHE_SIZE="4G"
function usage()
{
echo "Usage: $0 [OPTIONS] Release"
echo "Usage: $0 [OPTIONS] Release" >&2
echo "Options:"
echo " --arch=ARCH What architecture to select"
echo " --name=NAME Base name for the schroot (arch is appended)"
echo " --personality=PERSONALITY What personality to use (defaults to match --arch)"
echo " --vg=VG use LVM snapshots, with group VG"
echo " --zfs-dataset=DATASET use ZFS snapshots, with parent dataset DATASET"
echo " --debug Turn on script debugging"
echo " --skip-updates Do not include -updates pocket in sources.list"
echo " --skip-security Do not include -security pocket in sources.list"
@@ -65,21 +62,14 @@ function usage()
echo " --debootstrap-keyring=KEYRING"
echo " Use KEYRING to check signatures of retrieved Release files"
echo " --debootstrap-no-check-gpg Disables checking gpg signatures of retrieved Release files"
echo " --skip-eatmydata Don't install and use eatmydata"
echo " --eatmydata Install and use eatmydata (default)"
echo " --ccache Install configure and use ccache as default"
echo " --ccache-dir=PATH Sets the CCACHE_DIR to PATH"
echo " (can be shared between all schroots, defaults to ${CCACHE_DIR})"
echo " --ccache-size=SIZE Sets the ccache max-size to SIZE"
echo " (shared by each CCACHE_DIR, defaults to ${CCACHE_SIZE})"
echo " --eatmydata Install and use eatmydata"
echo " --distro=DISTRO Install specific distro:"
echo " 'ubuntu' or 'debian' "
echo " (defaults to determining from release name)"
echo " --target=ARCH Target architecture for cross-building"
echo " --type=SCHROOT_TYPE Define the schroot type:"
echo " 'directory' (default), 'file', or 'btrfs-snapshot'."
echo " 'directory'(default), 'file', or 'btrfs-snapshot'"
echo " 'lvm-snapshot' is selected via --vg"
echo " 'zfs-snapshot' is selected via --zfs-dataset"
echo ""
echo "Configuration (via ~/.mk-sbuild.rc)"
echo " LV_SIZE Size of source LVs (default ${LV_SIZE})"
@@ -99,12 +89,7 @@ function usage()
echo " DEBOOTSTRAP_PROXY Apt proxy (same as --debootstrap-proxy)"
echo " DEBOOTSTRAP_KEYRING GPG keyring (same as --debootstrap-keyring)"
echo " DEBOOTSTRAP_NO_CHECK_GPG Disable GPG verification (same as --debootstrap-no-check-gpg)"
echo " EATMYDATA Enable or disable eatmydata usage, see --eatmydata and --skip-eatmydata"
echo " CCACHE Enable --ccache"
echo " CCACHE_DIR Path for ccache (can be shared between all schroots, "
echo " same as --ccache-dir, default ${CCACHE_DIR})"
echo " CCACHE_SIZE Sets the ccache max-size (shared by each CCACHE_DIR, "
echo " same as --ccache-size, default ${CCACHE_SIZE})"
echo " EATMYDATA Enable --eatmydata"
echo " TEMPLATE_SOURCES A template for sources.list"
echo " TEMPLATE_SCHROOTCONF A template for schroot.conf stanza"
if [ -z "$1" ]; then
@@ -117,62 +102,28 @@ function usage()
if [ -z "$1" ]; then
usage
fi
supported_options=(
help
debug
skip-updates
skip-security
skip-proposed
skip-eatmydata
ccache
arch:
name:
source-template:
debootstrap-mirror:
debootstrap-include:
debootstrap-exclude:
debootstrap-opts:
debootstrap-proxy:
debootstrap-no-check-gpg
debootstrap-keyring:
personality:
distro:
vg:
zfs-dataset:
type:
target:
ccache-dir:
ccache-size:
)
OPTS=$(getopt -o 'h' --long "$(IFS=, && echo "${supported_options[*]}")" -- "$@")
OPTS=`getopt -o 'h' --long "help,debug,skip-updates,skip-security,skip-proposed,eatmydata,arch:,name:,source-template:,debootstrap-mirror:,debootstrap-include:,debootstrap-exclude:,debootstrap-opts:,debootstrap-proxy:,debootstrap-no-check-gpg,debootstrap-keyring:,personality:,distro:,vg:,type:,target:" -- "$@"`
eval set -- "$OPTS"
VG=""
DISTRO=""
COMMAND_PREFIX=""
name=""
proxy="_unset_"
DEBOOTSTRAP_NO_CHECK_GPG=0
EATMYDATA=1
CCACHE=0
USE_PKGBINARYMANGLER=0
EATMYDATA=0
while :; do
case "$1" in
--debug)
DEBUG=1
set -x
shift
;;
--arch)
CHROOT_ARCH="$2"
case $2 in
armhf|i386)
if [ -z "$personality" ]; then
personality="linux32"
fi
;;
esac
if [ "$2" = "i386" ] || [ "$2" = "lpia" ] && [ -z "$personality" ];
then
personality="linux32"
fi
shift 2
;;
--personality)
@@ -232,12 +183,8 @@ while :; do
DEBOOTSTRAP_NO_CHECK_GPG=1
shift
;;
--skip-eatmydata)
EATMYDATA=0
shift
;;
--ccache)
CCACHE=1
--eatmydata)
EATMYDATA=1
shift
;;
--distro)
@@ -248,10 +195,6 @@ while :; do
VG="$2"
shift 2
;;
--zfs-dataset)
ZFS_PARENT_DATASET="$2"
shift 2
;;
--type)
SCHROOT_TYPE="$2"
shift 2
@@ -260,14 +203,6 @@ while :; do
TARGET_ARCH="$2"
shift 2
;;
--ccache-dir)
CCACHE_DIR="$2"
shift 2
;;
--ccache-size)
CCACHE_SIZE="$2"
shift 2
;;
--)
shift
break
@@ -304,27 +239,11 @@ if [ ! -w /var/lib/sbuild ]; then
# Prepare a usable default .sbuildrc
if [ ! -e ~/.sbuildrc ]; then
cat > ~/.sbuildrc <<EOM
# *** THIS COMMAND IS DEPRECATED ***
#
# In sbuild 0.87.0 and later, the unshare backend is available. This is
# expected to become the default in a future release.
#
# This is the new preferred way of building Debian packages, making the manual
# creation of schroots no longer necessary. To retain the default behavior,
# you may remove this comment block and continue.
#
# To test the unshare backend while retaining the default settings, run sbuild
# with --chroot-mode=unshare like this:
# $ sbuild --chroot-mode=unshare --dist=unstable hello
#
# To switch to the unshare backend by default (recommended), uncomment the
# following lines and delete the rest of the file (with the exception of the
# last two lines):
#\$chroot_mode = 'unshare';
#\$unshare_mmdebstrap_keep_tarball = 1;
# *** VERIFY AND UPDATE \$mailto and \$maintainer_name BELOW ***
# Mail address where logs are sent to (mandatory, no default!)
\$mailto = '$USER';
# Name to use as override in .changes files for the Maintainer: field
#\$maintainer_name='$USER <$USER@localhost>';
@@ -411,46 +330,27 @@ elif [ -z "$DISTRO" ]; then
exit 1
fi
# By default DEBOOTSTRAP_SCRIPT must match RELEASE
DEBOOTSTRAP_SCRIPT="$RELEASE"
dist_ge() {
local releases="$($3-distro-info --all)"
local left=999
local right=0
local seq=1
for i in $releases; do
if [ $1 = $i ]; then
local left=$seq
break
fi
seq=$((seq+1))
done
seq=1
for i in $releases; do
if [ $2 = $i ]; then
local right=$seq
break
fi
seq=$((seq+1))
done
[ $left -ge $right ] && return 0 || return 1
}
ubuntu_dist_ge () {
dist_ge $1 $2 ubuntu
}
debian_dist_ge () {
dist_ge $1 $2 debian
}
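dist_ge() above simply asks whether the first release is at least as new as the second, using the oldest-to-newest ordering that ubuntu-distro-info and debian-distro-info print (unknown names fall back to a permissive default). A rough Python equivalent, for illustration only and without that fallback:

from distro_info import UbuntuDistroInfo

def dist_ge(release_a, release_b):
    releases = UbuntuDistroInfo().all   # codenames, oldest first
    return releases.index(release_a) >= releases.index(release_b)

print(dist_ge("noble", "jammy"))   # True: noble is newer than jammy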
if [ "$DISTRO" = "ubuntu" ]; then
# On Ubuntu, set DEBOOTSTRAP_SCRIPT to gutsy to allow building new RELEASES without new debootstrap
DEBOOTSTRAP_SCRIPT=gutsy
ubuntu_dist_ge() {
local releases="$(ubuntu-distro-info --all)"
local seq=1
for i in $releases; do
if [ $1 = $i ]; then
local left=$seq
break
fi
seq=$((seq+1))
done
seq=1
for i in $releases; do
if [ $2 = $i ]; then
local right=$seq
break
fi
seq=$((seq+1))
done
[ $left -ge $right ] && return 0 || return 1
}
fi
# By default, name the schroot the same as the release
@@ -497,58 +397,10 @@ if [ $EATMYDATA -eq 1 ]; then
esac
fi
if [ $CCACHE -eq 1 ]; then
if [ -z "$CCACHE_DIR" ] || [[ "$(dirname "$CCACHE_DIR")" == '/' ]]; then
echo "Invalid ccache dir: ${CCACHE_DIR}" >&2
exit 1
fi
# We can safely use a global cache path, in such case changing size applies
# to all the schroots
setup_script="$CCACHE_DIR"/mk-sbuild-setup
if [ -d "$CCACHE_DIR" ]; then
echo "Reusing $CCACHE_DIR as CCACHE_DIR, will be configured to use max-size=${CCACHE_SIZE}"
rm -f "$setup_script"
else
echo "Configuring $CCACHE_DIR as CCACHE_DIR with max-size=${CCACHE_SIZE}"
sudo install --group=sbuild --mode=2775 -d "$CCACHE_DIR"
fi
if [ ! -x "$setup_script" ]; then
cat <<END | sudo tee "$setup_script" 1>/dev/null
#!/bin/sh
export CCACHE_DIR="$CCACHE_DIR"
export CCACHE_MAXSIZE="${CCACHE_SIZE}"
export CCACHE_UMASK=002
export CCACHE_COMPRESS=1
unset CCACHE_HARDLINK
export CCACHE_NOHARDLINK=1
export PATH="/usr/lib/ccache:\$PATH"
exec "\$@"
END
sudo chmod a+rx "$setup_script"
fi
if ! sudo grep -qs "$CCACHE_DIR" /etc/schroot/sbuild/fstab; then
# This acts on host configuration, but there is no other way to handle
# this, however it won't affect anything
cat <<END | sudo tee -a /etc/schroot/sbuild/fstab 1>/dev/null
${CCACHE_DIR} ${CCACHE_DIR} none rw,bind 0 0
END
fi
DEBOOTSTRAP_INCLUDE="${DEBOOTSTRAP_INCLUDE:+$DEBOOTSTRAP_INCLUDE,}ccache"
BUILD_PKGS="$BUILD_PKGS ccache"
COMMAND_PREFIX="${COMMAND_PREFIX:+$COMMAND_PREFIX,}$setup_script"
fi
if [ -z "$SCHROOT_TYPE" ]; then
# To build the LV, we need to know which volume group to use
if [ -n "$VG" ]; then
SCHROOT_TYPE="lvm-snapshot"
# To build the ZFS dataset, we need to know which parent to use
elif [ -n "$ZFS_PARENT_DATASET" ]; then
SCHROOT_TYPE="zfs-snapshot"
else
SCHROOT_TYPE="directory"
fi
@@ -595,7 +447,7 @@ case "$SCHROOT_TYPE" in
# Set up some variables for use in the paths and names
CHROOT_PATH="${SOURCE_CHROOTS_TGZ}/${CHROOT_NAME}.tgz"
;;
"btrfs-snapshot" | "zfs-snapshot")
"btrfs-snapshot")
if [ ! -d "${SOURCE_CHROOTS_DIR}" ]; then
sudo mkdir -p "${SOURCE_CHROOTS_DIR}"
fi
@@ -612,8 +464,8 @@ esac
# Is the specified release known to debootstrap?
variant_opt="--variant=buildd"
if [ ! -r "/usr/share/debootstrap/scripts/$DEBOOTSTRAP_SCRIPT" ]; then
echo "Specified release ($DEBOOTSTRAP_SCRIPT) not known to debootstrap" >&2
if [ ! -r "/usr/share/debootstrap/scripts/$RELEASE" ]; then
echo "Specified release ($RELEASE) not known to debootstrap" >&2
exit 1
fi
@@ -668,11 +520,9 @@ ubuntu)
if ubuntu_dist_ge "$RELEASE" "edgy"; then
# Add pkgbinarymangler (edgy and later)
BUILD_PKGS="$BUILD_PKGS pkgbinarymangler"
USE_PKGBINARYMANGLER=1
# Disable recommends for a smaller chroot (gutsy and later only)
if ubuntu_dist_ge "$RELEASE" "gutsy"; then
BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
SKIP_RECOMMENDS=1
fi
# Add pkg-create-dbgsym (edgy through zesty)
if ! ubuntu_dist_ge "$RELEASE" "artful"; then
@@ -685,7 +535,7 @@ debian)
DEBOOTSTRAP_MIRROR="http://deb.debian.org/debian"
fi
if [ -z "$COMPONENTS" ]; then
COMPONENTS="main non-free non-free-firmware contrib"
COMPONENTS="main non-free contrib"
fi
if [ -z "$SOURCES_PROPOSED_SUITE" ]; then
SOURCES_PROPOSED_SUITE="RELEASE-proposed-updates"
@@ -693,11 +543,7 @@ debian)
# Debian only performs security updates
SKIP_UPDATES=1
if [ -z "$SOURCES_SECURITY_SUITE" ]; then
if debian_dist_ge "$RELEASE" "bullseye"; then
SOURCES_SECURITY_SUITE="RELEASE-security"
else
SOURCES_SECURITY_SUITE="RELEASE/updates"
fi
SOURCES_SECURITY_SUITE="RELEASE/updates"
fi
if [ -z "$SOURCES_SECURITY_URL" ]; then
SOURCES_SECURITY_URL="http://security.debian.org/"
@@ -713,7 +559,6 @@ debian)
fi
# Keep the chroot as minimal as possible
BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
SKIP_RECOMMENDS=1
;;
*)
echo "Unknown --distro '$DISTRO': aborting" >&2
@@ -730,7 +575,7 @@ if [ -n "$TARGET_ARCH" ]; then
echo "Unknown target architecture $TARGET_ARCH" >&2
exit 1
fi
BUILD_PKGS="$BUILD_PKGS g++-$target_tuple pkg-config dpkg-cross libc-dev:$TARGET_ARCH"
BUILD_PKGS="$BUILD_PKGS g++-$target_tuple pkg-config-$target_tuple dpkg-cross libc-dev:$TARGET_ARCH"
fi
debootstrap_opts="--components=$(echo $COMPONENTS | tr ' ' ,)"
@@ -768,12 +613,12 @@ DEBOOTSTRAP_COMMAND=debootstrap
if [ "$CHROOT_ARCH" != "$HOST_ARCH" ] ; then
case "$CHROOT_ARCH-$HOST_ARCH" in
# Sometimes we don't need qemu
amd64-i386|arm64-armhf|armhf-arm64|i386-amd64|powerpc-ppc64|ppc64-powerpc)
amd64-i386|amd64-lpia|armel-armhf|armhf-armel|arm64-armel|arm64-armhf|armel-arm64|armhf-arm64|i386-amd64|i386-lpia|lpia-i386|powerpc-ppc64|ppc64-powerpc|sparc-sparc64|sparc64-sparc)
;;
# Sometimes we do
*)
DEBOOTSTRAP_COMMAND=debootstrap
if ! which "qemu-x86_64-static"; then
DEBOOTSTRAP_COMMAND=qemu-debootstrap
if ! which "$DEBOOTSTRAP_COMMAND"; then
sudo apt-get install qemu-user-static
fi
;;
@@ -806,19 +651,6 @@ case "$SCHROOT_TYPE" in
fi
sudo btrfs subvolume create "${MNT}"
;;
"zfs-snapshot")
ZFS_DATASET="${ZFS_PARENT_DATASET}/${CHROOT_NAME}"
if sudo zfs list "${ZFS_DATASET}" >/dev/null 2>&1; then
echo "E: ZFS dataset ${ZFS_DATASET} already exists; aborting" >&2
exit 1
fi
sudo zfs create -p -o mountpoint=legacy "${ZFS_DATASET}"
# Mount
MNT=`mktemp -d -t schroot-XXXXXX`
sudo mount -t zfs "${ZFS_DATASET}" "${MNT}"
;;
"file")
MNT=`mktemp -d -t schroot-XXXXXX`
esac
@@ -840,14 +672,7 @@ esac
sudo mkdir -p -m 0700 "$MNT"/root/.gnupg
# debootstrap the chroot
sudo ${proxy:+"http_proxy=${proxy}"} "$DEBOOTSTRAP_COMMAND" --arch="$CHROOT_ARCH" $variant_opt $debootstrap_opts "$RELEASE" "$MNT" "${DEBOOTSTRAP_MIRROR:-http://archive.ubuntu.com/ubuntu}" "$DEBOOTSTRAP_SCRIPT"
if [ $EATMYDATA -eq 1 ]; then
sudo mkdir -p "${MNT}/usr/local/libexec/mk-sbuild"
sudo ln -s /usr/bin/eatmydata "${MNT}/usr/local/libexec/mk-sbuild/dpkg"
echo 'Dir::Bin::dpkg "/usr/local/libexec/mk-sbuild/dpkg";' \
| sudo tee "${MNT}/etc/apt/apt.conf.d/00mk-sbuild-eatmydata" > /dev/null
fi
sudo ${proxy:+"http_proxy=${proxy}"} "$DEBOOTSTRAP_COMMAND" --arch="$CHROOT_ARCH" $variant_opt $debootstrap_opts "$RELEASE" "$MNT" "${DEBOOTSTRAP_MIRROR:-http://archive.ubuntu.com/ubuntu}"
# Update the package sources
TEMP_SOURCES=`mktemp -t sources-XXXXXX`
@@ -892,13 +717,6 @@ EOM
fi
fi
if [ -z "$SKIP_PROPOSED" ]; then
TEMP_PREFERENCES=`mktemp -t preferences-XXXXXX`
cat >> "$TEMP_PREFERENCES" <<EOM
# override for NotAutomatic: yes
Package: *
Pin: release a=*-proposed
Pin-Priority: 500
EOM
cat >> "$TEMP_SOURCES" <<EOM
deb ${MIRROR_ARCHS}${DEBOOTSTRAP_MIRROR} $SOURCES_PROPOSED_SUITE ${COMPONENTS}
deb-src ${DEBOOTSTRAP_MIRROR} $SOURCES_PROPOSED_SUITE ${COMPONENTS}
@@ -924,12 +742,9 @@ fi
cat "$TEMP_SOURCES" | sed -e "s|RELEASE|$RELEASE|g" | \
sudo bash -c "cat > $MNT/etc/apt/sources.list"
rm -f "$TEMP_SOURCES"
if [ -n "$TEMP_PREFERENCES" ]; then
sudo mv "$TEMP_PREFERENCES" $MNT/etc/apt/preferences.d/proposed.pref
fi
# Copy the timezone (uncomment this if you want to use your local time zone)
#sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/
# Copy the timezone (comment this out if you want to leave the chroot at UTC)
sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/
# Create a schroot entry for this chroot
TEMP_SCHROOTCONF=`mktemp -t schrootconf-XXXXXX`
TEMPLATE_SCHROOTCONF=~/.mk-sbuild.schroot.conf
@@ -956,9 +771,9 @@ root-groups=$ADMIN_GROUPS
type=SCHROOT_TYPE
profile=$SCHROOT_PROFILE
EOM
if [ -n "$COMMAND_PREFIX" ]; then
if [ $EATMYDATA -eq 1 ]; then
cat >> "$TEMP_SCHROOTCONF" <<EOM
command-prefix=${COMMAND_PREFIX}
command-prefix=eatmydata
EOM
fi
case "$SCHROOT_TYPE" in
@@ -981,12 +796,6 @@ btrfs-source-subvolume=CHROOT_PATH
btrfs-snapshot-directory=CHROOT_SNAPSHOT_DIR
EOM
;;
zfs-snapshot)
cat >> "${TEMP_SCHROOTCONF}" <<EOM
zfs-dataset=ZFS_DATASET
EOM
;;
esac
fi
if [ ! -z "$personality" ]; then
@@ -1003,7 +812,6 @@ sed -e "s|CHROOT_NAME|$CHROOT_NAME|g" \
-e "s|SNAPSHOT_SIZE|$SNAPSHOT_SIZE|g" \
-e "s|SCHROOT_TYPE|$SCHROOT_TYPE|g" \
-e "s|CHROOT_SNAPSHOT_DIR|$CHROOT_SNAPSHOT_DIR|g" \
-e "s|ZFS_DATASET|$ZFS_DATASET|g" \
"$TEMP_SCHROOTCONF" \
| sudo tee "/etc/schroot/chroot.d/sbuild-$CHROOT_NAME" > /dev/null
rm -f "$TEMP_SCHROOTCONF"
@@ -1025,9 +833,7 @@ sudo chmod a+x "$MNT"/usr/sbin/policy-rc.d
# Create image finalization script
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
#!/bin/bash
if [ "$DEBUG" = 1 ]; then
set -x
fi
#set -x
set -e
if [ -n "$proxy" ]; then
mkdir -p /etc/apt/apt.conf.d/
@@ -1038,35 +844,6 @@ EOF
fi
EOM
if [ -n "$SKIP_RECOMMENDS" ]; then
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
mkdir -p /etc/apt/apt.conf.d/
cat > /etc/apt/apt.conf.d/99mk-sbuild-no-recommends <<EOF
// disable install recommends
APT::Install-Recommends "0";
EOF
EOM
fi
if [ "$USE_PKGBINARYMANGLER" = 1 ]; then
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
mkdir -p /etc/pkgbinarymangler/
cat > /etc/pkgbinarymangler/maintainermangler.conf <<EOF
# pkgmaintainermangler configuration file
# pkgmaintainermangler will do nothing unless enable is set to "true"
enable: true
# Configure what happens if /CurrentlyBuilding is present, but invalid
# (i. e. it does not contain a Package: field). If "ignore" (default),
# the file is ignored (i. e. the Maintainer field is mangled) and a
# warning is printed. If "fail" (or any other value), pkgmaintainermangler
# exits with an error, which causes a package build to fail.
invalid_currentlybuilding: ignore
EOF
EOM
fi
if [ -n "$TARGET_ARCH" ]; then
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
# Configure target architecture
@@ -1085,7 +862,7 @@ apt-get update || true
echo set debconf/frontend Noninteractive | debconf-communicate
echo set debconf/priority critical | debconf-communicate
# Install basic build tool set, trying to match buildd
apt-get -y --force-yes -o Dpkg::Options::="--force-confold" install $BUILD_PKGS
apt-get -y --force-yes install $BUILD_PKGS
# Set up expected /dev entries
if [ ! -r /dev/stdin ]; then ln -s /proc/self/fd/0 /dev/stdin; fi
if [ ! -r /dev/stdout ]; then ln -s /proc/self/fd/1 /dev/stdout; fi
@@ -1097,7 +874,7 @@ EOM
sudo chmod a+x "$MNT"/finish.sh
case "$SCHROOT_TYPE" in
"lvm-snapshot"|"zfs-snapshot")
"lvm-snapshot")
sudo umount "$MNT"
rmdir "$MNT"
;;
@@ -1121,7 +898,7 @@ echo ""
echo " To CHANGE the golden image: sudo schroot -c source:${CHROOT_NAME} -u root"
echo " To ENTER an image snapshot: schroot -c ${CHROOT_NAME}"
echo " To BUILD within a snapshot: sbuild -A -d ${CHROOT_NAME} PACKAGE*.dsc"
if [ -n "$TARGET_ARCH" ] && [ "$CHROOT_ARCH" != "$TARGET_ARCH" ] ; then
if [ "$CHROOT_ARCH" != "$TARGET_ARCH" ] ; then
echo " To BUILD for ${TARGET_ARCH}: sbuild -A -d ${CHROOT_NAME} --host ${TARGET_ARCH} PACKAGE*.dsc"
fi
echo ""

View File

@@ -1,4 +1,4 @@
#! /usr/bin/python3
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2010, Siegfried-A. Gevatter <rainct@ubuntu.com>,
@@ -29,29 +21,21 @@
# configurations. For example, a symlink called pbuilder-hardy will assume
# that the target distribution is always meant to be Ubuntu Hardy.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
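The symlink convention from the header comment boils down to name parsing: main() splits the executable name on '-' into builder, optional distribution, and optional architecture. A standalone sketch of that convention (example names only, slightly simplified):

def split_name(script_name):
    parts = script_name.split("-")
    builder = parts[0]                       # "pbuilder" or "cowbuilder"
    distro = parts[1] if len(parts) > 1 and parts[1] != "dist" else None
    arch = parts[2] if len(parts) > 2 else None
    return builder, distro, arch

for name in ("pbuilder-dist", "pbuilder-hardy", "cowbuilder-sid-i386"):
    print(name, "->", split_name(name))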
import os
import os.path
import shutil
import subprocess
import sys
from contextlib import suppress
import debian.deb822
from distro_info import DebianDistroInfo, DistroDataOutdated, UbuntuDistroInfo
from distro_info import DebianDistroInfo, UbuntuDistroInfo, DistroDataOutdated
import ubuntutools.misc
import ubuntutools.version
from ubuntutools import getLogger
from ubuntutools.config import UDTConfig
from ubuntutools.logger import Logger
from ubuntutools.question import YesNoQuestion
Logger = getLogger()
from ubuntutools import subprocess
class PbuilderDist:
class PbuilderDist(object):
def __init__(self, builder):
# Base directory where pbuilder will put all the files it creates.
self.base = None
@@ -90,36 +82,31 @@ class PbuilderDist:
self.chroot_string = None
# Authentication method
self.auth = "sudo"
self.auth = 'sudo'
# Builder
self.builder = builder
# Distro info
self.debian_distro_info = DebianDistroInfo()
self.ubuntu_distro_info = UbuntuDistroInfo()
self._debian_distros = self.debian_distro_info.all + ["stable", "testing", "unstable"]
self._debian_distros = DebianDistroInfo().all + \
['stable', 'testing', 'unstable']
# Ensure that the used builder is installed
paths = set(os.environ["PATH"].split(":"))
paths |= set(("/sbin", "/usr/sbin", "/usr/local/sbin"))
paths = set(os.environ['PATH'].split(':'))
paths |= set(('/sbin', '/usr/sbin', '/usr/local/sbin'))
if not any(os.path.exists(os.path.join(p, builder)) for p in paths):
Logger.error('Could not find "%s".', builder)
sys.exit(1)
##############################################################
self.base = os.path.expanduser(os.environ.get("PBUILDFOLDER", "~/pbuilder/"))
self.base = os.path.expanduser(os.environ.get('PBUILDFOLDER',
'~/pbuilder/'))
if "SUDO_USER" in os.environ:
Logger.warning(
"Running under sudo. "
"This is probably not what you want. "
"pbuilder-dist will use sudo itself, "
"when necessary."
)
if os.stat(os.environ["HOME"]).st_uid != os.getuid():
if 'SUDO_USER' in os.environ:
Logger.warn('Running under sudo. '
'This is probably not what you want. '
'pbuilder-dist will use sudo itself, when necessary.')
if os.stat(os.environ['HOME']).st_uid != os.getuid():
Logger.error("You don't own $HOME")
sys.exit(1)
@@ -130,8 +117,8 @@ class PbuilderDist:
Logger.error('Cannot create base directory "%s"', self.base)
sys.exit(1)
if "PBUILDAUTH" in os.environ:
self.auth = os.environ["PBUILDAUTH"]
if 'PBUILDAUTH' in os.environ:
self.auth = os.environ['PBUILDAUTH']
self.system_architecture = ubuntutools.misc.host_architecture()
self.system_distro = ubuntutools.misc.system_distribution()
@@ -141,7 +128,7 @@ class PbuilderDist:
self.target_distro = self.system_distro
def set_target_distro(self, distro):
"""PbuilderDist.set_target_distro(distro) -> None
""" PbuilderDist.set_target_distro(distro) -> None
Check if the given target distribution name is correct; if it
isn't known to the system, ask the user for confirmation before
@@ -152,17 +139,16 @@ class PbuilderDist:
Logger.error('"%s" is an invalid distribution codename.', distro)
sys.exit(1)
if not os.path.isfile(os.path.join("/usr/share/debootstrap/scripts/", distro)):
if os.path.isdir("/usr/share/debootstrap/scripts/"):
if not os.path.isfile(os.path.join('/usr/share/debootstrap/scripts/',
distro)):
if os.path.isdir('/usr/share/debootstrap/scripts/'):
# Debian experimental doesn't have a debootstrap file but
# should work nevertheless. Ubuntu releases automatically use
# the gutsy script as of debootstrap 1.0.128+nmu2ubuntu1.1.
if distro not in (self._debian_distros + self.ubuntu_distro_info.all):
question = (
f'Warning: Unknown distribution "{distro}". ' "Do you want to continue"
)
answer = YesNoQuestion().ask(question, "no")
if answer == "no":
# should work nevertheless.
if distro not in self._debian_distros:
question = ('Warning: Unknown distribution "%s". '
'Do you want to continue' % distro)
answer = YesNoQuestion().ask(question, 'no')
if answer == 'no':
sys.exit(0)
else:
Logger.error('Please install package "debootstrap".')
@@ -171,34 +157,33 @@ class PbuilderDist:
self.target_distro = distro
def set_operation(self, operation):
"""PbuilderDist.set_operation -> None
""" PbuilderDist.set_operation -> None
Check if the given string is a valid pbuilder operation and
depending on this either save it into the appropriate variable
or finalize pbuilder-dist's execution.
"""
arguments = ("create", "update", "build", "clean", "login", "execute")
arguments = ('create', 'update', 'build', 'clean', 'login', 'execute')
if operation not in arguments:
if operation.endswith(".dsc"):
if operation.endswith('.dsc'):
if os.path.isfile(operation):
self.operation = "build"
self.operation = 'build'
return [operation]
Logger.error('Could not find file "%s".', operation)
else:
Logger.error('Could not find file "%s".', operation)
sys.exit(1)
else:
Logger.error('"%s" is not a recognized argument.\n'
'Please use one of these: %s.',
operation, ', '.join(arguments))
sys.exit(1)
Logger.error(
'"%s" is not a recognized argument.\nPlease use one of these: %s.',
operation,
", ".join(arguments),
)
sys.exit(1)
self.operation = operation
return []
else:
self.operation = operation
return []
def get_command(self, remaining_arguments=None):
"""PbuilderDist.get_command -> string
""" PbuilderDist.get_command -> string
Generate the pbuilder command which matches the given configuration
and return it as a string.
@@ -209,34 +194,30 @@ class PbuilderDist:
if self.build_architecture == self.system_architecture:
self.chroot_string = self.target_distro
else:
self.chroot_string = self.target_distro + "-" + self.build_architecture
self.chroot_string = (self.target_distro + '-'
+ self.build_architecture)
prefix = os.path.join(self.base, self.chroot_string)
if "--buildresult" not in remaining_arguments:
result = os.path.normpath(f"{prefix}_result/")
if '--buildresult' not in remaining_arguments:
result = os.path.normpath('%s_result/' % prefix)
else:
location_of_arg = remaining_arguments.index("--buildresult")
result = os.path.normpath(remaining_arguments[location_of_arg + 1])
remaining_arguments.pop(location_of_arg + 1)
location_of_arg = remaining_arguments.index('--buildresult')
result = os.path.normpath(remaining_arguments[location_of_arg+1])
remaining_arguments.pop(location_of_arg+1)
remaining_arguments.pop(location_of_arg)
if not self.logfile and self.operation != "login":
if self.operation == "build":
dsc_files = [a for a in remaining_arguments if a.strip().endswith(".dsc")]
if not self.logfile and self.operation != 'login':
if self.operation == 'build':
dsc_files = [a for a in remaining_arguments
if a.strip().endswith('.dsc')]
assert len(dsc_files) == 1
dsc = debian.deb822.Dsc(open(dsc_files[0], encoding="utf-8"))
version = ubuntutools.version.Version(dsc["Version"])
name = (
dsc["Source"]
+ "_"
+ version.strip_epoch()
+ "_"
+ self.build_architecture
+ ".build"
)
dsc = debian.deb822.Dsc(open(dsc_files[0]))
version = ubuntutools.version.Version(dsc['Version'])
name = (dsc['Source'] + '_' + version.strip_epoch() + '_' +
self.build_architecture + '.build')
self.logfile = os.path.join(result, name)
else:
self.logfile = os.path.join(result, "last_operation.log")
self.logfile = os.path.join(result, 'last_operation.log')
if not os.path.isdir(result):
try:
@@ -246,91 +227,89 @@ class PbuilderDist:
sys.exit(1)
arguments = [
f"--{self.operation}",
"--distribution",
self.target_distro,
"--buildresult",
result,
'--%s' % self.operation,
'--distribution', self.target_distro,
'--buildresult', result,
]
if self.operation == "update":
arguments += ["--override-config"]
if self.operation == 'update':
arguments += ['--override-config']
if self.builder == "pbuilder":
arguments += ["--basetgz", prefix + "-base.tgz"]
elif self.builder == "cowbuilder":
arguments += ["--basepath", prefix + "-base.cow"]
if self.builder == 'pbuilder':
arguments += ['--basetgz', prefix + '-base.tgz']
elif self.builder == 'cowbuilder':
arguments += ['--basepath', prefix + '-base.cow']
else:
Logger.error('Unrecognized builder "%s".', self.builder)
sys.exit(1)
if self.logfile:
arguments += ["--logfile", self.logfile]
arguments += ['--logfile', self.logfile]
if os.path.exists("/var/cache/archive/"):
arguments += ["--bindmounts", "/var/cache/archive/"]
if os.path.exists('/var/cache/archive/'):
arguments += ['--bindmounts', '/var/cache/archive/']
localrepo = '/var/cache/archive/' + self.target_distro
if os.path.exists(localrepo):
arguments += [
'--othermirror',
'deb file:///var/cache/archive/ %s/' % self.target_distro,
]
config = UDTConfig()
if self.target_distro in self._debian_distros:
mirror = os.environ.get("MIRRORSITE", config.get_value("DEBIAN_MIRROR"))
components = "main"
mirror = os.environ.get('MIRRORSITE',
config.get_value('DEBIAN_MIRROR'))
components = 'main'
if self.extra_components:
components += " contrib non-free non-free-firmware"
components += ' contrib non-free'
else:
mirror = os.environ.get("MIRRORSITE", config.get_value("UBUNTU_MIRROR"))
if self.build_architecture not in ("amd64", "i386"):
mirror = os.environ.get("MIRRORSITE", config.get_value("UBUNTU_PORTS_MIRROR"))
components = "main restricted"
mirror = os.environ.get('MIRRORSITE',
config.get_value('UBUNTU_MIRROR'))
if self.build_architecture not in ('amd64', 'i386'):
mirror = os.environ.get(
'MIRRORSITE', config.get_value('UBUNTU_PORTS_MIRROR'))
components = 'main restricted'
if self.extra_components:
components += " universe multiverse"
components += ' universe multiverse'
arguments += ["--mirror", mirror]
arguments += ['--mirror', mirror]
othermirrors = []
localrepo = f"/var/cache/archive/{self.target_distro}"
if os.path.exists(localrepo):
repo = f"deb file:///var/cache/archive/ {self.target_distro}/"
othermirrors.append(repo)
if self.target_distro in self._debian_distros:
debian_info = DebianDistroInfo()
try:
codename = self.debian_distro_info.codename(
self.target_distro, default=self.target_distro
)
except DistroDataOutdated as error:
Logger.warning(error)
if codename in (self.debian_distro_info.devel(), "experimental"):
codename = debian_info.codename(self.target_distro,
default=self.target_distro)
except DistroDataOutdated, error:
Logger.warn(error)
if codename in (debian_info.devel(), 'experimental'):
self.enable_security = False
self.enable_updates = False
self.enable_proposed = False
elif codename in (self.debian_distro_info.testing(), "testing"):
elif codename in (debian_info.testing(), 'testing'):
self.enable_updates = False
if self.enable_security:
pocket = "-security"
with suppress(ValueError):
# before bullseye (version 11) security suite is /updates
if float(self.debian_distro_info.version(codename)) < 11.0:
pocket = "/updates"
othermirrors.append(
f"deb {config.get_value('DEBSEC_MIRROR')}"
f" {self.target_distro}{pocket} {components}"
)
othermirrors.append('deb %s %s/updates %s'
% (config.get_value('DEBSEC_MIRROR'),
self.target_distro, components))
if self.enable_updates:
othermirrors.append(f"deb {mirror} {self.target_distro}-updates {components}")
othermirrors.append('deb %s %s-updates %s'
% (mirror, self.target_distro, components))
if self.enable_proposed:
othermirrors.append(
f"deb {mirror} {self.target_distro}-proposed-updates {components}"
)
othermirrors.append('deb %s %s-proposed-updates %s'
% (mirror, self.target_distro, components))
if self.enable_backports:
othermirrors.append(f"deb {mirror} {self.target_distro}-backports {components}")
othermirrors.append('deb %s %s-backports %s'
% (mirror, self.target_distro, components))
aptcache = os.path.join(self.base, "aptcache", "debian")
aptcache = os.path.join(self.base, 'aptcache', 'debian')
else:
try:
dev_release = self.target_distro == self.ubuntu_distro_info.devel()
except DistroDataOutdated as error:
Logger.warning(error)
dev_release = self.target_distro == UbuntuDistroInfo().devel()
except DistroDataOutdated, error:
Logger.warn(error)
dev_release = True
if dev_release:
@@ -338,45 +317,46 @@ class PbuilderDist:
self.enable_updates = False
if self.enable_security:
othermirrors.append(f"deb {mirror} {self.target_distro}-security {components}")
othermirrors.append('deb %s %s-security %s'
% (mirror, self.target_distro, components))
if self.enable_updates:
othermirrors.append(f"deb {mirror} {self.target_distro}-updates {components}")
othermirrors.append('deb %s %s-updates %s'
% (mirror, self.target_distro, components))
if self.enable_proposed:
othermirrors.append(f"deb {mirror} {self.target_distro}-proposed {components}")
othermirrors.append('deb %s %s-proposed %s'
% (mirror, self.target_distro, components))
aptcache = os.path.join(self.base, "aptcache", "ubuntu")
aptcache = os.path.join(self.base, 'aptcache', 'ubuntu')
if "OTHERMIRROR" in os.environ:
othermirrors += os.environ["OTHERMIRROR"].split("|")
if 'OTHERMIRROR' in os.environ:
othermirrors += os.environ['OTHERMIRROR'].split('|')
if othermirrors:
arguments += ["--othermirror", "|".join(othermirrors)]
arguments += ['--othermirror', '|'.join(othermirrors)]
# Work around LP:#599695
if (
ubuntutools.misc.system_distribution() == "Debian"
and self.target_distro not in self._debian_distros
):
if not os.path.exists("/usr/share/keyrings/ubuntu-archive-keyring.gpg"):
Logger.error("ubuntu-keyring not installed")
if (ubuntutools.misc.system_distribution() == 'Debian'
and self.target_distro not in self._debian_distros):
if not os.path.exists(
'/usr/share/keyrings/ubuntu-archive-keyring.gpg'):
Logger.error('ubuntu-keyring not installed')
sys.exit(1)
arguments += [
"--debootstrapopts",
"--keyring=/usr/share/keyrings/ubuntu-archive-keyring.gpg",
'--debootstrapopts',
'--keyring=/usr/share/keyrings/ubuntu-archive-keyring.gpg',
]
elif (
ubuntutools.misc.system_distribution() == "Ubuntu"
and self.target_distro in self._debian_distros
):
if not os.path.exists("/usr/share/keyrings/debian-archive-keyring.gpg"):
Logger.error("debian-archive-keyring not installed")
elif (ubuntutools.misc.system_distribution() == 'Ubuntu'
and self.target_distro in self._debian_distros):
if not os.path.exists(
'/usr/share/keyrings/debian-archive-keyring.gpg'):
Logger.error('debian-archive-keyring not installed')
sys.exit(1)
arguments += [
"--debootstrapopts",
"--keyring=/usr/share/keyrings/debian-archive-keyring.gpg",
'--debootstrapopts',
'--keyring=/usr/share/keyrings/debian-archive-keyring.gpg',
]
arguments += ["--aptcache", aptcache, "--components", components]
arguments += ['--aptcache', aptcache, '--components', components]
if not os.path.isdir(aptcache):
try:
@@ -386,11 +366,13 @@ class PbuilderDist:
sys.exit(1)
if self.build_architecture != self.system_architecture:
arguments += ["--debootstrapopts", "--arch=" + self.build_architecture]
arguments += ['--debootstrapopts',
'--arch=' + self.build_architecture]
apt_conf_dir = os.path.join(self.base, f"etc/{self.target_distro}/apt.conf")
apt_conf_dir = os.path.join(self.base,
'etc/%s/apt.conf' % self.target_distro)
if os.path.exists(apt_conf_dir):
arguments += ["--aptconfdir", apt_conf_dir]
arguments += ['--aptconfdir', apt_conf_dir]
# Append remaining arguments
if remaining_arguments:
@@ -401,28 +383,28 @@ class PbuilderDist:
# With both common variable name schemes (BTS: #659060).
return [
self.auth,
"HOME=" + os.path.expanduser("~"),
"ARCHITECTURE=" + self.build_architecture,
"DISTRIBUTION=" + self.target_distro,
"ARCH=" + self.build_architecture,
"DIST=" + self.target_distro,
"DEB_BUILD_OPTIONS=" + os.environ.get("DEB_BUILD_OPTIONS", ""),
'HOME=' + os.path.expanduser('~'),
'ARCHITECTURE=' + self.build_architecture,
'DISTRIBUTION=' + self.target_distro,
'ARCH=' + self.build_architecture,
'DIST=' + self.target_distro,
'DEB_BUILD_OPTIONS=' + os.environ.get('DEB_BUILD_OPTIONS', ''),
self.builder,
] + arguments
def show_help(exit_code=0):
"""help() -> None
""" help() -> None
Print a help message for pbuilder-dist, and exit with the given code.
"""
Logger.info("See man pbuilder-dist for more information.")
print 'See man pbuilder-dist for more information.'
sys.exit(exit_code)
def main():
"""main() -> None
""" main() -> None
This is pbuilder-dist's main function. It creates a PbuilderDist
object, modifies all necessary settings taking data from the
@@ -430,122 +412,100 @@ def main():
the script and runs pbuilder itself or exits with an error message.
"""
script_name = os.path.basename(sys.argv[0])
parts = script_name.split("-")
parts = script_name.split('-')
# Copy arguments into another list for safe manipulation
args = sys.argv[1:]
if "-" in script_name and parts[0] not in ("pbuilder", "cowbuilder") or len(parts) > 3:
Logger.error('"%s" is not a valid name for a "pbuilder-dist" executable.', script_name)
if ('-' in script_name and parts[0] not in ('pbuilder', 'cowbuilder')
or len(parts) > 3):
Logger.error('"%s" is not a valid name for a "pbuilder-dist" '
'executable.', script_name)
sys.exit(1)
if len(args) < 1:
Logger.error("Insufficient number of arguments.")
Logger.error('Insufficient number of arguments.')
show_help(1)
if args[0] in ("-h", "--help", "help"):
if args[0] in ('-h', '--help', 'help'):
show_help(0)
app = PbuilderDist(parts[0])
if len(parts) > 1 and parts[1] != "dist" and "." not in parts[1]:
if len(parts) > 1 and parts[1] != 'dist' and '.' not in parts[1]:
app.set_target_distro(parts[1])
else:
app.set_target_distro(args.pop(0))
if len(parts) > 2:
requested_arch = parts[2]
elif len(args) > 0:
if shutil.which("arch-test") is not None:
arch_test = subprocess.run(
["arch-test", args[0]], check=False, stdout=subprocess.DEVNULL
)
if arch_test.returncode == 0:
requested_arch = args.pop(0)
elif os.path.isdir("/usr/lib/arch-test") and args[0] in os.listdir(
"/usr/lib/arch-test/"
):
Logger.error(
'Architecture "%s" is not supported on your '
"currently running kernel. Consider installing "
"the qemu-user-static package to enable the use of "
"foreign architectures.",
args[0],
)
sys.exit(1)
else:
requested_arch = None
else:
Logger.error(
'Cannot determine if "%s" is a valid architecture. '
"Please install the arch-test package and retry.",
args[0],
)
sys.exit(1)
elif len(args) > 0 and args[0] in (
'alpha', 'amd64', 'arm', 'armeb', 'armel', 'armhf', 'arm64',
'i386', 'lpia', 'm68k', 'mips', 'mipsel', 'powerpc', 'ppc64',
'ppc64el', 's390x', 'sh4', 'sh4eb', 'sparc', 'sparc64'):
requested_arch = args.pop(0)
else:
requested_arch = None
if requested_arch:
app.build_architecture = requested_arch
# For some foreign architectures we need to use qemu
if requested_arch != app.system_architecture and (
app.system_architecture,
requested_arch,
) not in [
("amd64", "i386"),
("arm64", "arm"),
("arm64", "armhf"),
("powerpc", "ppc64"),
("ppc64", "powerpc"),
]:
args += ["--debootstrap", "debootstrap"]
if (requested_arch != app.system_architecture
and (app.system_architecture, requested_arch) not in [
('amd64', 'i386'), ('amd64', 'lpia'), ('arm', 'armel'),
('armel', 'arm'), ('armel', 'armhf'), ('armhf', 'armel'),
('arm64', 'arm'), ('arm64', 'armhf'), ('arm64', 'armel'),
('i386', 'lpia'), ('lpia', 'i386'), ('powerpc', 'ppc64'),
('ppc64', 'powerpc'), ('sparc', 'sparc64'),
('sparc64', 'sparc')]):
args += ['--debootstrap', 'qemu-debootstrap']
if "mainonly" in sys.argv or "--main-only" in sys.argv:
if 'mainonly' in sys.argv or '--main-only' in sys.argv:
app.extra_components = False
if "mainonly" in sys.argv:
args.remove("mainonly")
if 'mainonly' in sys.argv:
args.remove('mainonly')
else:
args.remove("--main-only")
args.remove('--main-only')
if "--release-only" in sys.argv:
args.remove("--release-only")
if '--release-only' in sys.argv:
args.remove('--release-only')
app.enable_security = False
app.enable_updates = False
app.enable_proposed = False
elif "--security-only" in sys.argv:
args.remove("--security-only")
elif '--security-only' in sys.argv:
args.remove('--security-only')
app.enable_updates = False
app.enable_proposed = False
elif "--updates-only" in sys.argv:
args.remove("--updates-only")
elif '--updates-only' in sys.argv:
args.remove('--updates-only')
app.enable_proposed = False
elif "--backports" in sys.argv:
args.remove("--backports")
elif '--backports' in sys.argv:
args.remove('--backports')
app.enable_backports = True
if len(args) < 1:
Logger.error("Insufficient number of arguments.")
Logger.error('Insufficient number of arguments.')
show_help(1)
# Parse the operation
args = app.set_operation(args.pop(0)) + args
if app.operation == "build":
if len([a for a in args if a.strip().endswith(".dsc")]) != 1:
msg = "You have to specify one .dsc file if you want to build."
if app.operation == 'build':
if len([a for a in args if a.strip().endswith('.dsc')]) != 1:
msg = 'You have to specify one .dsc file if you want to build.'
Logger.error(msg)
sys.exit(1)
# Execute the pbuilder command
if "--debug-echo" not in args:
if '--debug-echo' not in args:
sys.exit(subprocess.call(app.get_command(args)))
else:
Logger.info(app.get_command([arg for arg in args if arg != "--debug-echo"]))
print app.get_command([arg for arg in args if arg != '--debug-echo'])
if __name__ == "__main__":
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
Logger.error("Manually aborted.")
Logger.error('Manually aborted.')
sys.exit(1)
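For orientation, a minimal sketch of the invocation-name convention that the checks above enforce: <builder>[-<distribution>[-<architecture>]]. The helper name and the example symlink below are illustrative, not taken from pbuilder-dist itself.

def split_invocation_name(script_name):
    # Rough mirror of the validation above: the prefix must be pbuilder or
    # cowbuilder, with at most two extra parts (distribution, architecture).
    parts = script_name.split("-")
    if parts[0] not in ("pbuilder", "cowbuilder") or len(parts) > 3:
        raise ValueError(f'"{script_name}" is not a valid pbuilder-dist name')
    distro = parts[1] if len(parts) > 1 and parts[1] != "dist" else None
    arch = parts[2] if len(parts) > 2 else None
    return parts[0], distro, arch

# e.g. a symlink named "pbuilder-jammy-armhf" would request a pbuilder
# chroot for the (example) "jammy" distribution on the "armhf" architecture:
assert split_invocation_name("pbuilder-jammy-armhf") == ("pbuilder", "jammy", "armhf")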

142
pm-helper

@ -1,142 +0,0 @@
#!/usr/bin/python3
# Find the next thing to work on for proposed-migration
# Copyright (C) 2023 Canonical Ltd.
# Author: Steve Langasek <steve.langasek@ubuntu.com>
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License, version 3.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import lzma
import sys
import webbrowser
from argparse import ArgumentParser
import yaml
from launchpadlib.launchpad import Launchpad
from ubuntutools.utils import get_url
# proposed-migration is only concerned with the devel series; unlike other
# tools, don't make this configurable
excuses_url = "https://ubuntu-archive-team.ubuntu.com/proposed-migration/update_excuses.yaml.xz"
def get_proposed_version(excuses, package):
for k in excuses["sources"]:
if k["source"] == package:
return k.get("new-version")
return None
def claim_excuses_bug(launchpad, bug, package):
print(f"LP: #{bug.id}: {bug.title}")
ubuntu = launchpad.distributions["ubuntu"]
series = ubuntu.current_series.fullseriesname
for task in bug.bug_tasks:
# targeting to a series doesn't make the default task disappear,
# it just makes it useless
if task.bug_target_name == f"{package} ({series})":
our_task = task
break
if task.bug_target_name == f"{package} (Ubuntu)":
our_task = task
if our_task.assignee == launchpad.me:
print("Bug already assigned to you.")
return True
if our_task.assignee:
print(f"Currently assigned to {our_task.assignee.name}")
print("""Do you want to claim this bug? [yN] """, end="")
sys.stdout.flush()
response = sys.stdin.readline()
if response.strip().lower().startswith("y"):
our_task.assignee = launchpad.me
our_task.lp_save()
return True
return False
def create_excuses_bug(launchpad, package, version):
print("Will open a new bug")
bug = launchpad.bugs.createBug(
title=f"proposed-migration for {package} {version}",
tags=("update-excuse"),
target=f"https://api.launchpad.net/devel/ubuntu/+source/{package}",
description=f"{package} {version} is stuck in -proposed.",
)
task = bug.bug_tasks[0]
task.assignee = launchpad.me
task.lp_save()
print(f"Opening {bug.web_link} in browser")
webbrowser.open(bug.web_link)
return bug
def has_excuses_bugs(launchpad, package):
ubuntu = launchpad.distributions["ubuntu"]
pkg = ubuntu.getSourcePackage(name=package)
if not pkg:
raise ValueError(f"No such source package: {package}")
tasks = pkg.searchTasks(tags=["update-excuse"], order_by=["id"])
bugs = [task.bug for task in tasks]
if not bugs:
return False
if len(bugs) == 1:
print(f"There is 1 open update-excuse bug against {package}")
else:
print(f"There are {len(bugs)} open update-excuse bugs against {package}")
for bug in bugs:
if claim_excuses_bug(launchpad, bug, package):
return True
return True
def main():
parser = ArgumentParser()
parser.add_argument("-l", "--launchpad", dest="launchpad_instance", default="production")
parser.add_argument(
"-v", "--verbose", default=False, action="store_true", help="be more verbose"
)
parser.add_argument("package", nargs="?", help="act on this package only")
args = parser.parse_args()
args.launchpad = Launchpad.login_with("pm-helper", args.launchpad_instance, version="devel")
f = get_url(excuses_url, False)
with lzma.open(f) as lzma_f:
excuses = yaml.load(lzma_f, Loader=yaml.CSafeLoader)
if args.package:
try:
if not has_excuses_bugs(args.launchpad, args.package):
proposed_version = get_proposed_version(excuses, args.package)
if not proposed_version:
print(f"Package {args.package} not found in -proposed.")
sys.exit(1)
create_excuses_bug(args.launchpad, args.package, proposed_version)
except ValueError as e:
sys.stderr.write(f"{e}\n")
else:
pass # for now
if __name__ == "__main__":
sys.exit(main())
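For context, a minimal illustration of the excuses data shape that get_proposed_version() above walks. Only the "sources", "source" and "new-version" keys come from the code; the package names and versions are invented.

# Invented stand-in for the parsed update_excuses.yaml document.
excuses = {
    "sources": [
        {"source": "hello", "new-version": "2.10-3ubuntu1"},
        {"source": "glibc", "new-version": "2.38-1ubuntu2"},
    ]
}

def get_proposed_version(excuses, package):
    for k in excuses["sources"]:
        if k["source"] == package:
            return k.get("new-version")
    return None

assert get_proposed_version(excuses, "hello") == "2.10-3ubuntu1"
assert get_proposed_version(excuses, "missing") is None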

pull-debian-ddebs

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-debian-ddebs -- pull ddeb package files for debian
# Basic usage: pull-debian-ddebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="debian", pull="ddebs")

pull-debian-debdiff

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/python
# pull-debian-debdiff - find and download a specific version of a Debian
# package and its immediate parent to generate a debdiff.
#
@ -17,32 +17,27 @@
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import argparse
import optparse
import sys
import debian.debian_support
import debian.changelog
from ubuntutools import getLogger
from ubuntutools.archive import DebianSourcePackage, DownloadError
from ubuntutools.config import UDTConfig
from ubuntutools.version import Version
Logger = getLogger()
from ubuntutools.logger import Logger
def previous_version(package, version, distance):
"Given an (extracted) package, determine the version distance versions ago"
upver = Version(version).upstream_version
filename = f"{package}-{upver}/debian/changelog"
changelog_file = open(filename, "r", encoding="utf-8")
upver = debian.debian_support.Version(version).upstream_version
filename = '%s-%s/debian/changelog' % (package, upver)
changelog_file = open(filename, 'r')
changelog = debian.changelog.Changelog(changelog_file.read())
changelog_file.close()
seen = 0
for entry in changelog:
if entry.distributions == "UNRELEASED":
if entry.distributions == 'UNRELEASED':
continue
if seen == distance:
return entry.version.full_version
@ -51,78 +46,70 @@ def previous_version(package, version, distance):
def main():
parser = argparse.ArgumentParser(usage="%(prog)s [options] <package> <version> [distance]")
parser.add_argument(
"-f",
"--fetch",
dest="fetch_only",
default=False,
action="store_true",
help="Only fetch the source packages, don't diff.",
)
parser.add_argument(
"-d",
"--debian-mirror",
metavar="DEBIAN_MIRROR",
dest="debian_mirror",
help="Preferred Debian mirror (default: http://deb.debian.org/debian)",
)
parser.add_argument(
"-s",
"--debsec-mirror",
metavar="DEBSEC_MIRROR",
dest="debsec_mirror",
help="Preferred Debian Security mirror (default: http://security.debian.org)",
)
parser.add_argument(
"--no-conf",
dest="no_conf",
default=False,
action="store_true",
help="Don't read config files or environment variables",
)
parser.add_argument("package", help=argparse.SUPPRESS)
parser.add_argument("version", help=argparse.SUPPRESS)
parser.add_argument("distance", default=1, type=int, nargs="?", help=argparse.SUPPRESS)
args = parser.parse_args()
parser = optparse.OptionParser('%prog [options] <package> <version> '
'[distance]')
parser.add_option('-f', '--fetch',
dest='fetch_only', default=False, action='store_true',
help="Only fetch the source packages, don't diff.")
parser.add_option('-d', '--debian-mirror', metavar='DEBIAN_MIRROR',
dest='debian_mirror',
help='Preferred Debian mirror '
'(default: http://deb.debian.org/debian)')
parser.add_option('-s', '--debsec-mirror', metavar='DEBSEC_MIRROR',
dest='debsec_mirror',
help='Preferred Debian Security mirror '
'(default: http://security.debian.org)')
parser.add_option('--no-conf',
dest='no_conf', default=False, action='store_true',
help="Don't read config files or environment variables")
config = UDTConfig(args.no_conf)
if args.debian_mirror is None:
args.debian_mirror = config.get_value("DEBIAN_MIRROR")
if args.debsec_mirror is None:
args.debsec_mirror = config.get_value("DEBSEC_MIRROR")
mirrors = [args.debsec_mirror, args.debian_mirror]
opts, args = parser.parse_args()
if len(args) < 2:
parser.error('Must specify package and version')
elif len(args) > 3:
parser.error('Too many arguments')
package = args[0]
version = args[1]
distance = int(args[2]) if len(args) > 2 else 1
Logger.info("Downloading %s %s", args.package, args.version)
config = UDTConfig(opts.no_conf)
if opts.debian_mirror is None:
opts.debian_mirror = config.get_value('DEBIAN_MIRROR')
if opts.debsec_mirror is None:
opts.debsec_mirror = config.get_value('DEBSEC_MIRROR')
mirrors = [opts.debsec_mirror, opts.debian_mirror]
newpkg = DebianSourcePackage(args.package, args.version, mirrors=mirrors)
Logger.normal('Downloading %s %s', package, version)
newpkg = DebianSourcePackage(package, version, mirrors=mirrors)
try:
newpkg.pull()
except DownloadError as e:
Logger.error("Failed to download: %s", str(e))
except DownloadError, e:
Logger.error('Failed to download: %s', str(e))
sys.exit(1)
newpkg.unpack()
if args.fetch_only:
if opts.fetch_only:
sys.exit(0)
oldversion = previous_version(args.package, args.version, args.distance)
oldversion = previous_version(package, version, distance)
if not oldversion:
Logger.error("No previous version could be found")
Logger.error('No previous version could be found')
sys.exit(1)
Logger.info("Downloading %s %s", args.package, oldversion)
Logger.normal('Downloading %s %s', package, oldversion)
oldpkg = DebianSourcePackage(args.package, oldversion, mirrors=mirrors)
oldpkg = DebianSourcePackage(package, oldversion, mirrors=mirrors)
try:
oldpkg.pull()
except DownloadError as e:
Logger.error("Failed to download: %s", str(e))
except DownloadError, e:
Logger.error('Failed to download: %s', str(e))
sys.exit(1)
Logger.info("file://%s", oldpkg.debdiff(newpkg, diffstat=True))
oldpkg.unpack()
print 'file://' + oldpkg.debdiff(newpkg, diffstat=True)
if __name__ == "__main__":
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
Logger.info("User abort.")
Logger.normal('User abort.')

pull-debian-debs

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-debian-debs -- pull deb package files for debian
# Basic usage: pull-debian-debs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="debian", pull="debs")

pull-debian-source

@ -1,14 +1,143 @@
#!/usr/bin/python3
#!/usr/bin/python
#
# pull-debian-source -- pull source package files for debian
# Basic usage: pull-debian-source <package name> [version|release]
# pull-debian-source -- pull a source package from Launchpad
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
# Inspired by a tool of the same name by Nathan Handler.
#
# See pull-pkg
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import json
import optparse
import sys
import urllib2
from ubuntutools.pullpkg import PullPkg
from distro_info import DebianDistroInfo, DistroDataOutdated
if __name__ == "__main__":
PullPkg.main(distro="debian", pull="source")
from ubuntutools.archive import DebianSourcePackage, DownloadError, rmadison
from ubuntutools.config import UDTConfig
from ubuntutools.logger import Logger
def is_suite(version):
"""If version could be considered to be a Debian suite, return the
canonical suite name. Otherwise None
"""
debian_info = DebianDistroInfo()
debian_releases = debian_info.all + ['experimental']
if '-' in version:
release, pocket = version.split('-', 1)
release = debian_info.codename(release, default=release)
if release in debian_releases:
if pocket in ('proposed-updates', 'p-u'):
return (release + '-proposed-updates')
elif pocket == 'security':
return (release + '-security')
else:
release = debian_info.codename(version, default=version)
if release in debian_releases:
return release
return None
def source_package_for(binary, release):
"""Query DDE to find the source package for a particular binary"""
try:
release = DebianDistroInfo().codename(release, default=release)
except DistroDataOutdated, e:
Logger.warn(e)
url = ('http://dde.debian.net/dde/q/udd/dist/d:debian/r:%s/p:%s/?t=json'
% (release, binary))
data = None
try:
data = json.load(urllib2.urlopen(url))['r']
except urllib2.URLError, e:
Logger.error('Unable to retrieve package information from DDE: '
'%s (%s)', url, str(e))
except ValueError, e:
Logger.error('Unable to parse JSON response from DDE: '
'%s (%s)', url, str(e))
if not data:
return None
return data[0]['source']
def main():
usage = 'Usage: %prog <package> [release|version]'
parser = optparse.OptionParser(usage)
parser.add_option('-d', '--download-only',
dest='download_only', default=False, action='store_true',
help='Do not extract the source package')
parser.add_option('-m', '--mirror', metavar='DEBIAN_MIRROR',
dest='debian_mirror',
help='Preferred Debian mirror (default: %s)'
% UDTConfig.defaults['DEBIAN_MIRROR'])
parser.add_option('-s', '--security-mirror', metavar='DEBSEC_MIRROR',
dest='debsec_mirror',
help='Preferred Debian Security mirror (default: %s)'
% UDTConfig.defaults['DEBSEC_MIRROR'])
parser.add_option('--no-conf',
dest='no_conf', default=False, action='store_true',
help="Don't read config files or environment variables")
(options, args) = parser.parse_args()
if not args:
parser.error('Must specify package name')
elif len(args) > 2:
parser.error('Too many arguments. '
'Must only specify package and (optionally) release.')
config = UDTConfig(options.no_conf)
if options.debian_mirror is None:
options.debian_mirror = config.get_value('DEBIAN_MIRROR')
if options.debsec_mirror is None:
options.debsec_mirror = config.get_value('DEBSEC_MIRROR')
package = args[0].lower()
version = args[1] if len(args) > 1 else 'unstable'
component = None
suite = is_suite(version)
if suite is not None:
line = list(rmadison('debian', package, suite, 'source'))
if not line:
source_package = source_package_for(package, suite)
if source_package is not None and package != source_package:
package = source_package
line = list(rmadison('debian', package, suite, 'source'))
if not line:
Logger.error('Unable to find %s in Debian suite "%s".', package,
suite)
sys.exit(1)
line = line[-1]
version = line['version']
component = line['component']
Logger.normal('Downloading %s version %s', package, version)
srcpkg = DebianSourcePackage(package, version, component=component,
mirrors=[options.debian_mirror,
options.debsec_mirror])
try:
srcpkg.pull()
except DownloadError, e:
Logger.error('Failed to download: %s', str(e))
sys.exit(1)
if not options.download_only:
srcpkg.unpack()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
Logger.normal('User abort.')
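As a rough illustration of the DDE response that source_package_for() above indexes into: only the "r" and "source" keys are taken from the code; the payload values are invented.

# Invented sample payload in the shape the code above expects from the
# dde.debian.net ?t=json query.
example_response = {
    "r": [
        {"source": "foo", "package": "libfoo1", "version": "1.0-1"},
    ]
}
assert example_response["r"][0]["source"] == "foo"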

pull-debian-udebs

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-debian-udebs -- pull udeb package files for debian
# Basic usage: pull-debian-udebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="debian", pull="udebs")

pull-lp-ddebs

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-lp-ddebs -- pull ddeb package files for ubuntu
# Basic usage: pull-lp-ddebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ubuntu", pull="ddebs")

pull-lp-debs

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-lp-debs -- pull deb package files for ubuntu
# Basic usage: pull-lp-debs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ubuntu", pull="debs")

pull-lp-source

@ -1,14 +1,149 @@
#!/usr/bin/python3
#!/usr/bin/python
#
# pull-lp-source -- pull source package files for ubuntu
# Basic usage: pull-lp-source <package name> [version|release]
# pull-lp-source -- pull a source package from Launchpad
# Basic usage: pull-lp-source <source package> [<release>]
#
# See pull-pkg
# Copyright (C) 2008, Iain Lane <iain@orangesquash.org.uk>,
# 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL for more details.
#
# ##################################################################
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
import json
import os
import sys
import urllib2
from optparse import OptionParser
if __name__ == "__main__":
PullPkg.main(distro="ubuntu", pull="source")
from distro_info import UbuntuDistroInfo, DistroDataOutdated
from ubuntutools.archive import UbuntuSourcePackage, DownloadError
from ubuntutools.config import UDTConfig
from ubuntutools.lp.lpapicache import Distribution, Launchpad
from ubuntutools.lp.udtexceptions import (SeriesNotFoundException,
PackageNotFoundException,
PocketDoesNotExistError)
from ubuntutools.logger import Logger
from ubuntutools.misc import split_release_pocket
def source_package_for(binary, release):
"""Query DDE to find the source package for a particular binary
Should really do this with LP, but it's not possible LP: #597041
"""
url = ('http://dde.debian.net/dde/q/udd/dist/d:ubuntu/r:%s/p:%s/?t=json'
% (release, binary))
data = None
try:
data = json.load(urllib2.urlopen(url))['r']
except urllib2.URLError, e:
Logger.error('Unable to retrieve package information from DDE: '
'%s (%s)', url, str(e))
except ValueError, e:
Logger.error('Unable to parse JSON response from DDE: '
'%s (%s)', url, str(e))
if not data:
return None
return data[0]['source']
def main():
usage = "Usage: %prog <package> [release|version]"
opt_parser = OptionParser(usage)
opt_parser.add_option('-d', '--download-only',
dest='download_only', default=False,
action='store_true',
help="Do not extract the source package")
opt_parser.add_option('-m', '--mirror', metavar='UBUNTU_MIRROR',
dest='ubuntu_mirror',
help='Preferred Ubuntu mirror (default: Launchpad)')
opt_parser.add_option('--no-conf',
dest='no_conf', default=False, action='store_true',
help="Don't read config files or environment "
"variables")
(options, args) = opt_parser.parse_args()
if not args:
opt_parser.error("Must specify package name")
config = UDTConfig(options.no_conf)
if options.ubuntu_mirror is None:
options.ubuntu_mirror = config.get_value('UBUNTU_MIRROR')
# Login anonymously to LP
Launchpad.login_anonymously()
package = str(args[0]).lower()
ubuntu_info = UbuntuDistroInfo()
if len(args) > 1: # Custom distribution specified.
version = str(args[1])
else:
try:
version = os.getenv('DIST') or ubuntu_info.devel()
except DistroDataOutdated, e:
Logger.warn("%s\nOr specify a distribution.", e)
sys.exit(1)
component = None
# Release, not package version number:
release = None
pocket = None
try:
(release, pocket) = split_release_pocket(version, default=None)
except PocketDoesNotExistError, e:
pass
if release in ubuntu_info.all:
archive = Distribution('ubuntu').getArchive()
try:
spph = archive.getSourcePackage(package, release, pocket)
except SeriesNotFoundException, e:
Logger.error(str(e))
sys.exit(1)
except PackageNotFoundException, e:
source_package = source_package_for(package, release)
if source_package is not None and source_package != package:
try:
spph = archive.getSourcePackage(source_package, release,
pocket)
package = source_package
except PackageNotFoundException:
Logger.error(str(e))
sys.exit(1)
else:
Logger.error(str(e))
sys.exit(1)
version = spph.getVersion()
component = spph.getComponent()
Logger.normal('Downloading %s version %s', package, version)
srcpkg = UbuntuSourcePackage(package, version, component=component,
mirrors=[options.ubuntu_mirror])
try:
srcpkg.pull()
except DownloadError, e:
Logger.error('Failed to download: %s', str(e))
sys.exit(1)
if not options.download_only:
srcpkg.unpack()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
Logger.normal('User abort.')

pull-lp-udebs

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-lp-udebs -- pull udeb package files for ubuntu
# Basic usage: pull-lp-udebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ubuntu", pull="udebs")

pull-pkg

@ -1,32 +0,0 @@
#!/usr/bin/python3
#
# pull-pkg -- pull package files for debian/ubuntu/uca/ppa
# Basic usage: pull-pkg -D distro -p type <package name> [version|release]
#
# Copyright (C) 2008, Iain Lane <iain@orangesquash.org.uk>,
# 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
# 2017-2018, Dan Streetman <ddstreet@canonical.com>
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL for more details.
#
# ##################################################################
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main()

pull-ppa-ddebs

@ -1,15 +0,0 @@
#!/usr/bin/python3
#
# pull-ppa-ddebs -- pull ddeb package files for a Launchpad Personal Package Archive
# Basic usage: pull-ppa-ddebs <package name> <ppa:USER/NAME> [version|release]
# pull-ppa-ddebs --ppa USER/NAME <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ppa", pull="ddebs")

pull-ppa-debs

@ -1,15 +0,0 @@
#!/usr/bin/python3
#
# pull-ppa-debs -- pull deb package files for a Launchpad Personal Package Archive
# Basic usage: pull-ppa-debs <package name> <ppa:USER/NAME> [version|release]
# pull-ppa-debs --ppa USER/NAME <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ppa", pull="debs")

pull-ppa-source

@ -1,15 +0,0 @@
#!/usr/bin/python3
#
# pull-ppa-source -- pull source package files for a Launchpad Personal Package Archive
# Basic usage: pull-ppa-source <package name> <ppa:USER/NAME> [version|release]
# pull-ppa-source --ppa USER/NAME <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ppa", pull="source")

pull-ppa-udebs

@ -1,15 +0,0 @@
#!/usr/bin/python3
#
# pull-ppa-udebs -- pull udeb package files for a Launchpad Personal Package Archive
# Basic usage: pull-ppa-udebs <package name> <ppa:USER/NAME> [version|release]
# pull-ppa-udebs --ppa USER/NAME <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="ppa", pull="udebs")

59
pull-revu-source Executable file

@ -0,0 +1,59 @@
#!/usr/bin/perl
# Script Name: pull-revu-source
# Author: Nathan Handler <nhandler@ubuntu.com>
# Usage: pull-revu-source <source package>
# Copyright (C) 2009 Nathan Handler <nhandler@ubuntu.com>
# Based on revupull in kubuntu-dev-tools,
# written by Harald Sitter <apachelogger@ubuntu.com>
# License: GNU General Public License
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# On Debian GNU/Linux systems, the complete text of the GNU General
# Public License can be found in the /usr/share/common-licenses/GPL-3 file.
use warnings;
use strict;
use File::Basename;
use Getopt::Long;
my $REVU = "revu.ubuntuwire.com";
my($package) = lc($ARGV[0]) || usage(2);
my($help)=0;
GetOptions('help' => \$help);
usage(0) if($help);
eval { require LWP::Simple; };
if ($@=~ m#^Can\'t locate LWP/Simple#) {
print(STDERR "Please install libwww-perl.\n");
exit(1);
}
use LWP::Simple;
dget(getURL());
sub getURL {
my($url) = "http://" . $REVU . "/dsc.py?url&package=" . $package;
my($page)=get($url);
die("Could Not Get $url") unless (defined $page);
return $page;
}
sub dget {
my($dsc) = @_;
exec("dget -xu $dsc");
}
sub usage {
my($exit) = @_;
my($name)=basename($0);
print("USAGE: $name [-h] <source package>\n");
exit($exit);
}

pull-uca-ddebs

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-uca-ddebs -- pull ddeb package files for ubuntu cloud archive
# Basic usage: pull-uca-ddebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="uca", pull="ddebs")

pull-uca-debs

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-uca-debs -- pull deb package files for ubuntu cloud archive
# Basic usage: pull-uca-debs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="uca", pull="debs")

pull-uca-source

@ -1,14 +1,157 @@
#!/usr/bin/python3
#!/usr/bin/python
#
# pull-uca-source -- pull source package files for ubuntu cloud archive
# Basic usage: pull-uca-source <package name> [version|release]
# pull-uca-source -- pull a source package from Ubuntu Cloud Archive
# Basic usage: pull-uca-source <source package> <openstack release> [version]
#
# See pull-pkg
# Copyright (C) 2008, Iain Lane <iain@orangesquash.org.uk>,
# 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
# 2016, Corey Bryant <corey.bryant@ubuntu.com>
# 2016, Dan Streetman <dan.streetman@canonical.com>
#
# ##################################################################
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See file /usr/share/common-licenses/GPL for more details.
#
# ##################################################################
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
import re
import sys
from optparse import OptionParser
if __name__ == "__main__":
PullPkg.main(distro="uca", pull="source")
from ubuntutools.archive import UbuntuCloudArchiveSourcePackage, DownloadError
from ubuntutools.config import UDTConfig
from ubuntutools.lp.lpapicache import Launchpad
from ubuntutools.lp.udtexceptions import PocketDoesNotExistError
from ubuntutools.logger import Logger
from ubuntutools.misc import split_release_pocket
from lazr.restfulclient.errors import NotFound
from launchpadlib.launchpad import Launchpad as LP
def showOpenstackReleases(uca):
releases = []
for p in uca.ppas:
if re.match(r"\w*-staging", p.name):
releases.append(re.sub("-staging", "", p.name))
Logger.error("Openstack releases are:\n\t%s", ", ".join(releases))
def getSPPH(lp, archive, package, version=None, series=None, pocket=None, try_binary=True):
params = {'exact_match': True, 'order_by_date': True}
if pocket:
params['pocket'] = pocket
if series:
params['distro_series'] = series()
elif version:
params['version'] = version
Logger.normal("checking %s version %s pocket %s", package, version, pocket)
spphs = archive.getPublishedSources(source_name=package, **params)
if spphs:
return spphs[0]
if not try_binary:
return None
# Didn't find any, maybe the package is a binary package name
if series:
del params['distro_series']
archs = lp.load(series().architectures_collection_link).entries
params['distro_arch_series'] = archs[0]['self_link']
bpphs = archive.getPublishedBinaries(binary_name=package, **params)
if bpphs:
bpph_build = lp.load(bpphs[0].build_link)
source_package = bpph_build.source_package_name
return getSPPH(lp, archive, source_package, version, series, pocket,
try_binary=False)
return None
def main():
usage = "Usage: %prog <package> <openstack release> [version]"
opt_parser = OptionParser(usage)
opt_parser.add_option('-d', '--download-only',
dest='download_only', default=False,
action='store_true',
help="Do not extract the source package")
opt_parser.add_option('-m', '--mirror', metavar='OPENSTACK_MIRROR',
dest='openstack_mirror',
help='Preferred Openstack mirror (default: Launchpad)')
opt_parser.add_option('--no-conf',
dest='no_conf', default=False, action='store_true',
help="Don't read config files or environment "
"variables")
(options, args) = opt_parser.parse_args()
if len(args) < 2:
opt_parser.error("Must specify package name and openstack release")
config = UDTConfig(options.no_conf)
if options.openstack_mirror is None:
options.openstack_mirror = config.get_value('OPENSTACK_MIRROR')
mirrors = []
if options.openstack_mirror:
mirrors.append(options.openstack_mirror)
# Login anonymously to LP
Launchpad.login_anonymously()
lp = LP.login_anonymously("pull-uca-source", "production")
uca = lp.people("ubuntu-cloud-archive")
package = str(args[0]).lower()
release = str(args[1]).lower()
version = None
if len(args) > 2:
version = str(args[2])
pocket = None
try:
(release, pocket) = split_release_pocket(release, default=None)
except PocketDoesNotExistError, e:
pass
try:
archive = uca.getPPAByName(name="%s-staging" % release)
except NotFound, e:
Logger.error('Archive does not exist for Openstack release: %s',
release)
showOpenstackReleases(uca)
sys.exit(1)
spph = getSPPH(lp, archive, package, version, pocket=pocket)
if not spph:
Logger.error("Package %s in %s not found.", package, release)
sys.exit(1)
package = spph.source_package_name
version = spph.source_package_version
component = spph.component_name
Logger.normal('Downloading %s version %s component %s', package, version, component)
srcpkg = UbuntuCloudArchiveSourcePackage(release, package, version, component=component,
mirrors=mirrors)
try:
srcpkg.pull()
except DownloadError, e:
Logger.error('Failed to download: %s', str(e))
sys.exit(1)
if not options.download_only:
srcpkg.unpack()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
Logger.normal('User abort.')
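As a small illustration of how showOpenstackReleases() above derives release names from the cloud-archive staging PPAs; the PPA names are invented.

import re

# Invented PPA names in the pattern the code above matches against.
ppa_names = ["mitaka-staging", "newton-staging", "tools"]
releases = [re.sub("-staging", "", name)
            for name in ppa_names if re.match(r"\w*-staging", name)]
assert releases == ["mitaka", "newton"]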

pull-uca-udebs

@ -1,14 +0,0 @@
#!/usr/bin/python3
#
# pull-uca-udebs -- pull udeb package files for ubuntu cloud archive
# Basic usage: pull-uca-udebs <package name> [version|release]
#
# See pull-pkg
# pylint: disable=invalid-name
# pylint: enable=invalid-name
from ubuntutools.pullpkg import PullPkg
if __name__ == "__main__":
PullPkg.main(distro="uca", pull="udebs")

pyproject.toml

@ -1,6 +0,0 @@
[tool.black]
line-length = 99
[tool.isort]
line_length = 99
profile = "black"

requestbackport

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/python
#
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
#
@ -14,21 +14,21 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import argparse
import sys
from collections import defaultdict
import optparse
import re
import sys
import apt
from distro_info import UbuntuDistroInfo
from ubuntutools import getLogger
from ubuntutools.config import UDTConfig
from ubuntutools.lp.lpapicache import Distribution, Launchpad
from ubuntutools.lp.lpapicache import Launchpad, Distribution
from ubuntutools.lp.udtexceptions import PackageNotFoundException
from ubuntutools.question import EditBugReport, YesNoQuestion, confirmation_prompt
from ubuntutools.rdepends import RDependsException, query_rdepends
Logger = getLogger()
from ubuntutools.logger import Logger
from ubuntutools.question import (YesNoQuestion, EditBugReport,
confirmation_prompt)
from ubuntutools.rdepends import query_rdepends, RDependsException
class DestinationException(Exception):
@ -38,14 +38,16 @@ class DestinationException(Exception):
def determine_destinations(source, destination):
ubuntu_info = UbuntuDistroInfo()
if destination is None:
destination = ubuntu_info.lts()
destination = ubuntu_info.stable()
if source not in ubuntu_info.all:
raise DestinationException(f"Source release {source} does not exist")
raise DestinationException("Source release %s does not exist" % source)
if destination not in ubuntu_info.all:
raise DestinationException(f"Destination release {destination} does not exist")
raise DestinationException("Destination release %s does not exist"
% destination)
if destination not in ubuntu_info.supported():
raise DestinationException(f"Destination release {destination} is not supported")
raise DestinationException("Destination release %s is not supported"
% destination)
found = False
destinations = []
@ -73,37 +75,35 @@ def determine_destinations(source, destination):
def disclaimer():
print(
"Ubuntu's backports are not for fixing bugs in stable releases, "
"but for bringing new features to older, stable releases.\n"
"See https://wiki.ubuntu.com/UbuntuBackports for the Ubuntu "
"Backports policy and processes.\n"
"See https://wiki.ubuntu.com/StableReleaseUpdates for the process "
"for fixing bugs in stable releases."
)
print("Ubuntu's backports are not for fixing bugs in stable releases, "
"but for bringing new features to older, stable releases.\n"
"See https://wiki.ubuntu.com/UbuntuBackports for the Ubuntu "
"Backports policy and processes.\n"
"See https://wiki.ubuntu.com/StableReleaseUpdates for the process "
"for fixing bugs in stable releases.")
confirmation_prompt()
def check_existing(package):
def check_existing(package, destinations):
"""Search for possible existing bug reports"""
distro = Distribution("ubuntu")
srcpkg = distro.getSourcePackage(name=package.getPackageName())
bugs = srcpkg.searchTasks(
omit_duplicates=True,
search_text="[BPO]",
status=["Incomplete", "New", "Confirmed", "Triaged", "In Progress", "Fix Committed"],
)
# The LP bug search is indexed, not substring:
query = re.findall(r'[a-z]+', package)
bugs = []
for release in destinations:
project = Launchpad.projects[release + '-backports']
bugs += project.searchTasks(omit_duplicates=True,
search_text=query,
status=["Incomplete", "New", "Confirmed",
"Triaged", "In Progress",
"Fix Committed"])
if not bugs:
return
Logger.info(
"There are existing bug reports that look similar to your "
"request. Please check before continuing:"
)
Logger.normal("There are existing bug reports that look similar to your "
"request. Please check before continuing:")
for bug in sorted([bug_task.bug for bug_task in bugs], key=lambda bug: bug.id):
Logger.info(" * LP: #%-7i: %s %s", bug.id, bug.title, bug.web_link)
for bug in sorted(set(bug_task.bug for bug_task in bugs)):
Logger.normal(" * LP: #%-7i: %s %s", bug.id, bug.title, bug.web_link)
confirmation_prompt()
@ -113,9 +113,9 @@ def find_rdepends(releases, published_binaries):
# We want to display every published binary, even if it has no rdepends
for binpkg in published_binaries:
intermediate[binpkg] # pylint: disable=pointless-statement
intermediate[binpkg]
for arch in ("any", "source"):
for arch in ('any', 'source'):
for release in releases:
for binpkg in published_binaries:
try:
@ -123,25 +123,25 @@ def find_rdepends(releases, published_binaries):
except RDependsException:
# Not published? TODO: Check
continue
for relationship, rdeps in raw_rdeps.items():
for relationship, rdeps in raw_rdeps.iteritems():
for rdep in rdeps:
# Ignore circular deps:
if rdep["Package"] in published_binaries:
if rdep['Package'] in published_binaries:
continue
# arch==any queries return Reverse-Build-Deps:
if arch == "any" and rdep.get("Architectures", []) == ["source"]:
if arch == 'any' and rdep.get('Architectures', []) == ['source']:
continue
intermediate[binpkg][rdep["Package"]].append((release, relationship))
intermediate[binpkg][rdep['Package']].append((release, relationship))
output = []
for binpkg, rdeps in intermediate.items():
output += ["", binpkg, "-" * len(binpkg)]
for pkg, appearences in rdeps.items():
output += [f"* {pkg}"]
for binpkg, rdeps in intermediate.iteritems():
output += ['', binpkg, '-' * len(binpkg)]
for pkg, appearences in rdeps.iteritems():
output += ['* %s' % pkg]
for release, relationship in appearences:
output += [f" [ ] {release} ({relationship})"]
output += [' [ ] %s (%s)' % (release, relationship)]
found_any = sum(len(rdeps) for rdeps in intermediate.values())
found_any = sum(len(rdeps) for rdeps in intermediate.itervalues())
if found_any:
output = [
"Reverse dependencies:",
@ -154,8 +154,8 @@ def find_rdepends(releases, published_binaries):
"package currently in the release still works with the new "
"%(package)s installed. "
"Reverse- Recommends, Suggests, and Enhances don't need to be "
"tested, and are listed for completeness-sake.",
] + output
"tested, and are listed for completeness-sake."
] + output
else:
output = ["No reverse dependencies"]
@ -163,164 +163,146 @@ def find_rdepends(releases, published_binaries):
def locate_package(package, distribution):
archive = Distribution("ubuntu").getArchive()
try:
package_spph = archive.getSourcePackage(package, distribution)
return package_spph
except PackageNotFoundException as e:
archive = Distribution('ubuntu').getArchive()
for pass_ in ('source', 'binary'):
try:
package_spph = archive.getSourcePackage(package, distribution)
return package_spph
except PackageNotFoundException, e:
if pass_ == 'binary':
Logger.error(str(e))
sys.exit(1)
try:
apt_pkg = apt.Cache()[package]
except KeyError:
Logger.error(str(e))
sys.exit(1)
continue
package = apt_pkg.candidate.source_name
Logger.info(
"Binary package specified, considering its source package instead: %s", package
)
return None
Logger.normal("Binary package specified, considering its source "
"package instead: %s", package)
def request_backport(package_spph, source, destinations):
published_binaries = set()
for bpph in package_spph.getBinaries():
published_binaries.add(bpph.getPackageName())
if not published_binaries:
Logger.error(
"%s (%s) has no published binaries in %s. ",
package_spph.getPackageName(),
package_spph.getVersion(),
source,
)
Logger.info(
"Is it stuck in bin-NEW? It can't be backported until "
"the binaries have been accepted."
)
Logger.error("%s (%s) has no published binaries in %s. ",
package_spph.getPackageName(), package_spph.getVersion(),
source)
Logger.normal("Is it stuck in bin-NEW? It can't be backported until "
"the binaries have been accepted.")
sys.exit(1)
testing = ["[Testing]", ""]
testing = []
testing += ["You can test-build the backport in your PPA with "
"backportpackage:"]
testing += ["$ backportpackage -u ppa:<lp username>/<ppa name> "
"-s %s -d %s %s"
% (source, dest, package_spph.getPackageName())
for dest in destinations]
testing += [""]
for dest in destinations:
testing += [f" * {dest.capitalize()}:"]
testing += [" [ ] Package builds without modification"]
testing += [f" [ ] {binary} installs cleanly and runs" for binary in published_binaries]
testing += ['* %s:' % dest]
testing += ["[ ] Package builds without modification"]
testing += ["[ ] %s installs cleanly and runs" % binary
for binary in published_binaries]
subst = {
"package": package_spph.getPackageName(),
"version": package_spph.getVersion(),
"component": package_spph.getComponent(),
"source": package_spph.getSeriesAndPocket(),
"destinations": ", ".join(destinations),
'package': package_spph.getPackageName(),
'version': package_spph.getVersion(),
'component': package_spph.getComponent(),
'source': package_spph.getSeriesAndPocket(),
'destinations': ', '.join(destinations),
}
subject = "[BPO] %(package)s %(version)s to %(destinations)s" % subst
body = (
"\n".join(
subject = ("Please backport %(package)s %(version)s (%(component)s) "
"from %(source)s" % subst)
body = ('\n'.join(
[
"[Impact]",
"",
" * Justification for backporting the new version to the stable release.",
"",
"[Scope]",
"",
" * List the Ubuntu release you will backport from,"
" and the specific package version.",
"",
" * List the Ubuntu release(s) you will backport to.",
"",
"[Other Info]",
"",
" * Anything else you think is useful to include",
"",
"Please backport %(package)s %(version)s (%(component)s) "
"from %(source)s to %(destinations)s.",
"",
"Reason for the backport:",
"========================",
">>> Enter your reasoning here <<<",
"",
"Testing:",
"========",
"Mark off items in the checklist [X] as you test them, "
"but please leave the checklist so that backporters can quickly "
"evaluate the state of testing.",
""
]
+ testing
+ [""]
+ find_rdepends(destinations, published_binaries)
+ [""]
)
% subst
)
+ [""]) % subst)
editor = EditBugReport(subject, body)
editor.edit()
subject, body = editor.get_report()
Logger.info("The final report is:\nSummary: %s\nDescription:\n%s\n", subject, body)
Logger.normal('The final report is:\nSummary: %s\nDescription:\n%s\n',
subject, body)
if YesNoQuestion().ask("Request this backport", "yes") == "no":
sys.exit(1)
distro = Distribution("ubuntu")
pkgname = package_spph.getPackageName()
targets = [Launchpad.projects['%s-backports' % destination]
for destination in destinations]
bug = Launchpad.bugs.createBug(title=subject, description=body,
target=targets[0])
for target in targets[1:]:
bug.addTask(target=target)
bug = Launchpad.bugs.createBug(
title=subject, description=body, target=distro.getSourcePackage(name=pkgname)
)
bug.subscribe(person=Launchpad.people["ubuntu-backporters"])
for dest in destinations:
series = distro.getSeries(dest)
try:
bug.addTask(target=series.getSourcePackage(name=pkgname))
except Exception: # pylint: disable=broad-except
break
Logger.info("Backport request filed as %s", bug.web_link)
Logger.normal("Backport request filed as %s", bug.web_link)
def main():
parser = argparse.ArgumentParser(usage="%(prog)s [options] package")
parser.add_argument(
"-d",
"--destination",
metavar="DEST",
help="Backport to DEST release and necessary "
"intermediate releases "
"(default: current LTS release)",
)
parser.add_argument(
"-s",
"--source",
metavar="SOURCE",
help="Backport from SOURCE release (default: current devel release)",
)
parser.add_argument(
"-l",
"--lpinstance",
metavar="INSTANCE",
default=None,
help="Launchpad instance to connect to (default: production).",
)
parser.add_argument(
"--no-conf",
action="store_true",
dest="no_conf",
default=False,
help="Don't read config files or environment variables",
)
parser.add_argument("package", help=argparse.SUPPRESS)
args = parser.parse_args()
parser = optparse.OptionParser('%prog [options] package')
parser.add_option('-d', '--destination', metavar='DEST',
help='Backport to DEST release and necessary '
'intermediate releases '
'(default: current stable release)')
parser.add_option('-s', '--source', metavar='SOURCE',
help='Backport from SOURCE release '
'(default: current devel release)')
parser.add_option('-l', '--lpinstance', metavar='INSTANCE', default=None,
help='Launchpad instance to connect to '
'(default: production).')
parser.add_option('--no-conf', action='store_true',
dest='no_conf', default=False,
help="Don't read config files or environment variables")
options, args = parser.parse_args()
config = UDTConfig(args.no_conf)
if len(args) != 1:
parser.error("One (and only one) package must be specified")
package = args[0]
if args.lpinstance is None:
args.lpinstance = config.get_value("LPINSTANCE")
Launchpad.login(args.lpinstance)
config = UDTConfig(options.no_conf)
if args.source is None:
args.source = Distribution("ubuntu").getDevelopmentSeries().name
if options.lpinstance is None:
options.lpinstance = config.get_value('LPINSTANCE')
Launchpad.login(options.lpinstance)
if options.source is None:
options.source = Distribution('ubuntu').getDevelopmentSeries().name
try:
destinations = determine_destinations(args.source, args.destination)
except DestinationException as e:
destinations = determine_destinations(options.source,
options.destination)
except DestinationException, e:
Logger.error(str(e))
sys.exit(1)
disclaimer()
package_spph = locate_package(args.package, args.source)
check_existing(package, destinations)
check_existing(package_spph)
request_backport(package_spph, args.source, destinations)
package_spph = locate_package(package, options.source)
request_backport(package_spph, options.source, destinations)
if __name__ == "__main__":
if __name__ == '__main__':
main()
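For illustration, the kind of reverse-dependency checklist that find_rdepends() earlier in this file assembles; the package, release and relationship names below are invented, and the exact indentation is approximate.

# Invented example of the checklist lines built by find_rdepends().
example_output = [
    "",
    "libexample1",
    "-----------",
    "* example-client",
    "  [ ] jammy (Depends)",
    "  [ ] focal (Build-Depends)",
]
print("\n".join(example_output))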

requestsync

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (C) 2007 Canonical Ltd., Steve Kowalik
@ -26,20 +26,17 @@
#
# ##################################################################
import argparse
import optparse
import os
import sys
from debian.changelog import Version
from distro_info import UbuntuDistroInfo
from ubuntutools import getLogger
from ubuntutools.config import UDTConfig, ubu_email
from ubuntutools.lp import udtexceptions
from ubuntutools.misc import require_utf8
from ubuntutools.question import EditBugReport, confirmation_prompt
from ubuntutools.version import Version
Logger = getLogger()
from ubuntutools.question import confirmation_prompt, EditBugReport
#
# entry point
@ -48,195 +45,176 @@ Logger = getLogger()
def main():
# Our usage options.
usage = "%(prog)s [options] <source package> [<target release> [base version]]"
parser = argparse.ArgumentParser(usage=usage)
usage = ('Usage: %prog [options] '
'<source package> [<target release> [base version]]')
parser = optparse.OptionParser(usage)
parser.add_argument(
"-d", dest="dist", default="unstable", help="Debian distribution to sync from."
)
parser.add_argument(
"-k",
dest="keyid",
default=None,
help="GnuPG key ID to use for signing report "
"(only used when emailing the sync request).",
)
parser.add_argument(
"-n",
action="store_true",
dest="newpkg",
default=False,
help="Whether package to sync is a new package in Ubuntu.",
)
parser.add_argument(
"--email",
action="store_true",
default=False,
help="Use a PGP-signed email for filing the sync request, rather than the LP API.",
)
parser.add_argument(
"--lp",
dest="deprecated_lp_flag",
action="store_true",
default=False,
help=argparse.SUPPRESS,
)
parser.add_argument(
"-l",
"--lpinstance",
metavar="INSTANCE",
dest="lpinstance",
default=None,
help="Launchpad instance to connect to (default: production).",
)
parser.add_argument(
"-s", action="store_true", dest="sponsorship", default=False, help="Force sponsorship"
)
parser.add_argument(
"-C",
action="store_true",
dest="missing_changelog_ok",
default=False,
help="Allow changelog to be manually filled in when missing",
)
parser.add_argument(
"-e",
action="store_true",
dest="ffe",
default=False,
help="Use this after FeatureFreeze for non-bug fix "
"syncs, changes default subscription to the "
"appropriate release team.",
)
parser.add_argument(
"--no-conf",
action="store_true",
dest="no_conf",
default=False,
help="Don't read config files or environment variables",
)
parser.add_argument("source_package", help=argparse.SUPPRESS)
parser.add_argument("release", nargs="?", help=argparse.SUPPRESS)
parser.add_argument("base_version", nargs="?", type=Version, help=argparse.SUPPRESS)
args = parser.parse_args()
parser.add_option('-d', type='string',
dest='dist', default='unstable',
help='Debian distribution to sync from.')
parser.add_option('-k', type='string',
dest='keyid', default=None,
help='GnuPG key ID to use for signing report '
'(only used when emailing the sync request).')
parser.add_option('-n', action='store_true',
dest='newpkg', default=False,
help='Whether package to sync is a new package in '
'Ubuntu.')
parser.add_option('--email', action='store_true', default=False,
help='Use a PGP-signed email for filing the sync '
'request, rather than the LP API.')
parser.add_option('--lp', dest='deprecated_lp_flag',
action='store_true', default=False,
help=optparse.SUPPRESS_HELP)
parser.add_option('-l', '--lpinstance', metavar='INSTANCE',
dest='lpinstance', default=None,
help='Launchpad instance to connect to '
'(default: production).')
parser.add_option('-s', action='store_true',
dest='sponsorship', default=False,
help='Force sponsorship')
parser.add_option('-C', action='store_true',
dest='missing_changelog_ok', default=False,
help='Allow changelog to be manually filled in '
'when missing')
parser.add_option('-e', action='store_true',
dest='ffe', default=False,
help='Use this after FeatureFreeze for non-bug fix '
'syncs, changes default subscription to the '
'appropriate release team.')
parser.add_option('--no-conf', action='store_true',
dest='no_conf', default=False,
help="Don't read config files or environment variables")
(options, args) = parser.parse_args()
if not len(args):
parser.print_help()
sys.exit(1)
require_utf8()
config = UDTConfig(args.no_conf)
config = UDTConfig(options.no_conf)
if args.deprecated_lp_flag:
Logger.info("The --lp flag is now default, ignored.")
if args.email:
args.lpapi = False
if options.deprecated_lp_flag:
print "The --lp flag is now default, ignored."
if options.email:
options.lpapi = False
else:
args.lpapi = config.get_value("USE_LPAPI", default=True, boolean=True)
if args.lpinstance is None:
args.lpinstance = config.get_value("LPINSTANCE")
options.lpapi = config.get_value('USE_LPAPI', default=True,
boolean=True)
if options.lpinstance is None:
options.lpinstance = config.get_value('LPINSTANCE')
if args.keyid is None:
args.keyid = config.get_value("KEYID")
if options.keyid is None:
options.keyid = config.get_value('KEYID')
if not args.lpapi:
if args.lpinstance == "production":
bug_mail_domain = "bugs.launchpad.net"
elif args.lpinstance == "staging":
bug_mail_domain = "bugs.staging.launchpad.net"
if not options.lpapi:
if options.lpinstance == 'production':
bug_mail_domain = 'bugs.launchpad.net'
elif options.lpinstance == 'staging':
bug_mail_domain = 'bugs.staging.launchpad.net'
else:
Logger.error("Error: Unknown launchpad instance: %s", args.lpinstance)
print >> sys.stderr, ('Error: Unknown launchpad instance: %s'
% options.lpinstance)
sys.exit(1)
mailserver_host = config.get_value(
"SMTP_SERVER", default=None, compat_keys=["UBUSMTP", "DEBSMTP"]
)
if not args.lpapi and not mailserver_host:
mailserver_host = config.get_value('SMTP_SERVER',
default=None,
compat_keys=['UBUSMTP', 'DEBSMTP'])
if not options.lpapi and not mailserver_host:
try:
import DNS # pylint: disable=import-outside-toplevel
import DNS
DNS.DiscoverNameServers()
mxlist = DNS.mxlookup(bug_mail_domain)
firstmx = mxlist[0]
mailserver_host = firstmx[1]
except ImportError:
Logger.error("Please install python-dns to support Launchpad mail server lookup.")
print >> sys.stderr, ('Please install python-dns to support '
'Launchpad mail server lookup.')
sys.exit(1)
mailserver_port = config.get_value(
"SMTP_PORT", default=25, compat_keys=["UBUSMTP_PORT", "DEBSMTP_PORT"]
)
mailserver_user = config.get_value("SMTP_USER", compat_keys=["UBUSMTP_USER", "DEBSMTP_USER"])
mailserver_pass = config.get_value("SMTP_PASS", compat_keys=["UBUSMTP_PASS", "DEBSMTP_PASS"])
mailserver_port = config.get_value('SMTP_PORT', default=25,
compat_keys=['UBUSMTP_PORT',
'DEBSMTP_PORT'])
mailserver_user = config.get_value('SMTP_USER',
compat_keys=['UBUSMTP_USER',
'DEBSMTP_USER'])
mailserver_pass = config.get_value('SMTP_PASS',
compat_keys=['UBUSMTP_PASS',
'DEBSMTP_PASS'])
# import the needed requestsync module
# pylint: disable=import-outside-toplevel
if args.lpapi:
if options.lpapi:
from ubuntutools.requestsync.lp import (check_existing_reports,
get_debian_srcpkg,
get_ubuntu_srcpkg,
get_ubuntu_delta_changelog,
need_sponsorship, post_bug)
from ubuntutools.lp.lpapicache import Distribution, Launchpad
from ubuntutools.requestsync.lp import (
check_existing_reports,
get_debian_srcpkg,
get_ubuntu_delta_changelog,
get_ubuntu_srcpkg,
need_sponsorship,
post_bug,
)
# See if we have LP credentials and exit if we don't -
# cannot continue in this case
try:
# devel for changelogUrl()
Launchpad.login(service=args.lpinstance, api_version="devel")
Launchpad.login(service=options.lpinstance, api_version='devel')
except IOError:
sys.exit(1)
else:
from ubuntutools.requestsync.mail import (
check_existing_reports,
get_debian_srcpkg,
get_ubuntu_delta_changelog,
get_ubuntu_srcpkg,
mail_bug,
need_sponsorship,
)
if not any(x in os.environ for x in ("UBUMAIL", "DEBEMAIL", "EMAIL")):
Logger.error(
"The environment variable UBUMAIL, DEBEMAIL or EMAIL needs "
"to be set to let this script mail the sync request."
)
from ubuntutools.requestsync.mail import (check_existing_reports,
get_debian_srcpkg,
get_ubuntu_srcpkg,
get_ubuntu_delta_changelog,
mail_bug, need_sponsorship)
if not any(x in os.environ for x in ('UBUMAIL', 'DEBEMAIL', 'EMAIL')):
print >> sys.stderr, (
'E: The environment variable UBUMAIL, DEBEMAIL or EMAIL needs '
'to be set to let this script mail the sync request.')
sys.exit(1)
newsource = args.newpkg
sponsorship = args.sponsorship
distro = args.dist
ffe = args.ffe
lpapi = args.lpapi
newsource = options.newpkg
sponsorship = options.sponsorship
distro = options.dist
ffe = options.ffe
lpapi = options.lpapi
need_interaction = False
srcpkg = args.source_package
force_base_version = None
srcpkg = args[0]
if not args.release:
if len(args) == 1:
if lpapi:
args.release = Distribution("ubuntu").getDevelopmentSeries().name
release = Distribution('ubuntu').getDevelopmentSeries().name
else:
ubu_info = UbuntuDistroInfo()
args.release = ubu_info.devel()
Logger.warning("Target release missing - assuming %s", args.release)
release = ubu_info.devel()
print >> sys.stderr, 'W: Target release missing - assuming %s' % release
elif len(args) == 2:
release = args[1]
elif len(args) == 3:
release = args[1]
force_base_version = Version(args[2])
else:
print >> sys.stderr, 'E: Too many arguments.'
parser.print_help()
sys.exit(1)
# Get the current Ubuntu source package
try:
ubuntu_srcpkg = get_ubuntu_srcpkg(srcpkg, args.release, "Proposed")
ubuntu_srcpkg = get_ubuntu_srcpkg(srcpkg, release, 'Proposed')
ubuntu_version = Version(ubuntu_srcpkg.getVersion())
ubuntu_component = ubuntu_srcpkg.getComponent()
newsource = False # override the -n flag
except udtexceptions.PackageNotFoundException:
ubuntu_srcpkg = None
ubuntu_version = Version("~")
ubuntu_version = Version('~')
ubuntu_component = None # Set after getting the Debian info
if not newsource:
Logger.info("'%s' doesn't exist in 'Ubuntu %s'.", srcpkg, args.release)
Logger.info("Do you want to sync a new package?")
print("'%s' doesn't exist in 'Ubuntu %s'.\nDo you want to sync a new package?" %
(srcpkg, release))
confirmation_prompt()
newsource = True
except udtexceptions.SeriesNotFoundException as error:
Logger.error(error)
except udtexceptions.SeriesNotFoundException, error:
print >> sys.stderr, "E: %s" % error
sys.exit(1)
# Get the requested Debian source package
@ -244,107 +222,98 @@ def main():
debian_srcpkg = get_debian_srcpkg(srcpkg, distro)
debian_version = Version(debian_srcpkg.getVersion())
debian_component = debian_srcpkg.getComponent()
except udtexceptions.PackageNotFoundException as error:
Logger.error(error)
except udtexceptions.PackageNotFoundException, error:
print >> sys.stderr, "E: %s" % error
sys.exit(1)
except udtexceptions.SeriesNotFoundException as error:
Logger.error(error)
except udtexceptions.SeriesNotFoundException, error:
print >> sys.stderr, "E: %s" % error
sys.exit(1)
if ubuntu_component is None:
if debian_component == "main":
ubuntu_component = "universe"
if debian_component == 'main':
ubuntu_component = 'universe'
else:
ubuntu_component = "multiverse"
ubuntu_component = 'multiverse'
# Stop if Ubuntu has already the version from Debian or a newer version
if (ubuntu_version >= debian_version) and args.lpapi:
if (ubuntu_version >= debian_version) and options.lpapi:
# try rmadison
import ubuntutools.requestsync.mail # pylint: disable=import-outside-toplevel
import ubuntutools.requestsync.mail
try:
debian_srcpkg = ubuntutools.requestsync.mail.get_debian_srcpkg(srcpkg, distro)
debian_version = Version(debian_srcpkg.getVersion())
debian_component = debian_srcpkg.getComponent()
except udtexceptions.PackageNotFoundException as error:
Logger.error(error)
except udtexceptions.PackageNotFoundException, error:
print >> sys.stderr, "E: %s" % error
sys.exit(1)
if ubuntu_version == debian_version:
Logger.error(
"The versions in Debian and Ubuntu are the same already (%s). Aborting.",
ubuntu_version,
)
print >> sys.stderr, ('E: The versions in Debian and Ubuntu are the '
'same already (%s). Aborting.' % ubuntu_version)
sys.exit(1)
if ubuntu_version > debian_version:
Logger.error(
"The version in Ubuntu (%s) is newer than the version in Debian (%s). Aborting.",
ubuntu_version,
debian_version,
)
print >> sys.stderr, ('E: The version in Ubuntu (%s) is newer than '
'the version in Debian (%s). Aborting.'
% (ubuntu_version, debian_version))
sys.exit(1)
# -s flag not specified - check if we do need sponsorship
if not sponsorship:
sponsorship = need_sponsorship(srcpkg, ubuntu_component, args.release)
sponsorship = need_sponsorship(srcpkg, ubuntu_component, release)
if not sponsorship and not ffe:
Logger.error(
"Consider using syncpackage(1) for syncs that "
"do not require feature freeze exceptions."
)
print >> sys.stderr, ('Consider using syncpackage(1) for syncs that '
'do not require feature freeze exceptions.')
# Check for existing package reports
if not newsource:
check_existing_reports(srcpkg)
# Generate bug report
pkg_to_sync = (
f"{srcpkg} {debian_version} ({ubuntu_component})"
f" from Debian {distro} ({debian_component})"
)
title = f"Sync {pkg_to_sync}"
pkg_to_sync = ('%s %s (%s) from Debian %s (%s)'
% (srcpkg, debian_version, ubuntu_component,
distro, debian_component))
title = "Sync %s" % pkg_to_sync
if ffe:
title = "FFe: " + title
report = f"Please sync {pkg_to_sync}\n\n"
report = "Please sync %s\n\n" % pkg_to_sync
if "ubuntu" in str(ubuntu_version):
if 'ubuntu' in str(ubuntu_version):
need_interaction = True
Logger.info("Changes have been made to the package in Ubuntu.")
Logger.info("Please edit the report and give an explanation.")
Logger.info("Not saving the report file will abort the request.")
report += (
f"Explanation of the Ubuntu delta and why it can be dropped:\n"
f"{get_ubuntu_delta_changelog(ubuntu_srcpkg)}\n>>> ENTER_EXPLANATION_HERE <<<\n\n"
)
print('Changes have been made to the package in Ubuntu.\n'
'Please edit the report and give an explanation.\n'
'Not saving the report file will abort the request.')
report += (u'Explanation of the Ubuntu delta and why it can be '
u'dropped:\n%s\n>>> ENTER_EXPLANATION_HERE <<<\n\n'
% get_ubuntu_delta_changelog(ubuntu_srcpkg))
if ffe:
need_interaction = True
Logger.info("To approve FeatureFreeze exception, you need to state")
Logger.info("the reason why you feel it is necessary.")
Logger.info("Not saving the report file will abort the request.")
report += "Explanation of FeatureFreeze exception:\n>>> ENTER_EXPLANATION_HERE <<<\n\n"
print('To approve FeatureFreeze exception, you need to state\n'
'the reason why you feel it is necessary.\n'
'Not saving the report file will abort the request.')
report += ('Explanation of FeatureFreeze exception:\n'
'>>> ENTER_EXPLANATION_HERE <<<\n\n')
if need_interaction:
confirmation_prompt()
base_version = args.base_version or ubuntu_version
base_version = force_base_version or ubuntu_version
if newsource:
report += "All changelog entries:\n\n"
report += 'All changelog entries:\n\n'
else:
report += f"Changelog entries since current {args.release} version {ubuntu_version}:\n\n"
report += ('Changelog entries since current %s version %s:\n\n'
% (release, ubuntu_version))
changelog = debian_srcpkg.getChangelog(since_version=base_version)
if not changelog:
if not args.missing_changelog_ok:
Logger.error(
"Did not retrieve any changelog entries. "
"Do you need to specify '-C'? "
"Was the package recently uploaded? (check "
"http://packages.debian.org/changelogs/)"
)
if not options.missing_changelog_ok:
print >> sys.stderr, ("E: Did not retrieve any changelog entries. "
"Do you need to specify '-C'? "
"Was the package recently uploaded? (check "
"http://packages.debian.org/changelogs/)")
sys.exit(1)
else:
need_interaction = True
@ -355,51 +324,38 @@ def main():
editor.edit(optional=not need_interaction)
title, report = editor.get_report()
if "XXX FIXME" in report:
Logger.error(
"changelog boilerplate found in report, "
"please manually add changelog when using '-C'"
)
if 'XXX FIXME' in report:
print >> sys.stderr, ("E: changelog boilerplate found in report, "
"please manually add changelog when using '-C'")
sys.exit(1)
# bug status and bug subscriber
status = "confirmed"
subscribe = "ubuntu-archive"
status = 'confirmed'
subscribe = 'ubuntu-archive'
if sponsorship:
status = "new"
subscribe = "ubuntu-sponsors"
status = 'new'
subscribe = 'ubuntu-sponsors'
if ffe:
status = "new"
subscribe = "ubuntu-release"
status = 'new'
subscribe = 'ubuntu-release'
srcpkg = None if newsource else srcpkg
srcpkg = not newsource and srcpkg or None
if lpapi:
# Map status to the values expected by LP API
mapping = {"new": "New", "confirmed": "Confirmed"}
mapping = {'new': 'New', 'confirmed': 'Confirmed'}
# Post sync request using LP API
post_bug(srcpkg, subscribe, mapping[status], title, report)
else:
email_from = ubu_email(export=False)[1]
# Mail sync request
mail_bug(
srcpkg,
subscribe,
status,
title,
report,
bug_mail_domain,
args.keyid,
email_from,
mailserver_host,
mailserver_port,
mailserver_user,
mailserver_pass,
)
mail_bug(srcpkg, subscribe, status, title, report, bug_mail_domain,
options.keyid, email_from, mailserver_host, mailserver_port,
mailserver_user, mailserver_pass)
if __name__ == "__main__":
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
Logger.error("User abort.")
print "\nUser abort."
sys.exit(2)
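The requestsync hunks above differ mostly in how option values reach main(): the newer side reads argparse attributes (args.release, args.lpapi, args.source_package), while the 0.163 side unpacks optparse's (options, args) pair and indexes args[0] and args[1] by hand. A minimal, hedged sketch of the argparse pattern follows; the flag and argument names are illustrative stand-ins, not the script's real option set.

import argparse

# Hypothetical flags that mirror the attribute access seen above
# (args.lpapi, args.source_package, args.release); they are not
# requestsync's actual option definitions.
parser = argparse.ArgumentParser()
parser.add_argument("--lpapi", action="store_true")
parser.add_argument("source_package")
parser.add_argument("release", nargs="?")

args = parser.parse_args(["--lpapi", "hello", "noble"])
# Values arrive as attributes instead of a positional args list.
print(args.source_package, args.release, args.lpapi)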
View File
@ -1,8 +0,0 @@
python-debian
python-debianbts
dateutil
distro-info
httplib2
launchpadlib
requests
setuptools
10
reverse-build-depends Executable file
View File
@ -0,0 +1,10 @@
#!/bin/sh
cat >&2 <<EOF
reverse-build-depends has been replaced by reverse-depends -b
This script now wraps reverse-depends.
Please use it in the future.
EOF
exec $(dirname $0)/reverse-depends -b "$@"

View File
#!/usr/bin/python3
#!/usr/bin/python
#
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
#
@ -14,239 +14,157 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import argparse
import optparse
import sys
from distro_info import DistroDataOutdated
from ubuntutools import getLogger
from ubuntutools.misc import codename_to_distribution, system_distribution, vendor_to_distroinfo
from ubuntutools.rdepends import RDependsException, query_rdepends
Logger = getLogger()
DEFAULT_MAX_DEPTH = 10 # We want avoid any infinite loop...
from ubuntutools.logger import Logger
from ubuntutools.misc import (system_distribution, vendor_to_distroinfo,
codename_to_distribution)
from ubuntutools.rdepends import query_rdepends, RDependsException
def main():
system_distro_info = vendor_to_distroinfo(system_distribution())()
try:
default_release = system_distro_info.devel()
except DistroDataOutdated as e:
Logger.warning(e)
default_release = "unstable"
except DistroDataOutdated, e:
Logger.warn(e)
default_release = 'unstable'
description = (
"List reverse-dependencies of package. "
"If the package name is prefixed with src: then the "
"reverse-dependencies of all the binary packages that "
"the specified source package builds will be listed."
)
parser = optparse.OptionParser(
'%prog [options] package',
description="List reverse-dependencies of package. "
"If the package name is prefixed with src: then the "
"reverse-dependencies of all the binary packages that "
"the specified source package builds will be listed.")
parser.add_option('-r', '--release', metavar='RELEASE',
default=default_release,
help='Query dependencies in RELEASE. '
'Default: %s' % default_release)
parser.add_option('-R', '--without-recommends',
action='store_false', dest='recommends', default=True,
help='Only consider Depends relationships, '
'not Recommends')
parser.add_option('-s', '--with-suggests',
action='store_true', dest='suggests', default=False,
help='Also consider Suggests relationships')
parser.add_option('-b', '--build-depends',
action='store_const', dest='arch', const='source',
help='Query build dependencies (synonym for --arch=source)')
parser.add_option('-a', '--arch', metavar='ARCH', default='any',
help='Query dependencies in ARCH. '
'Default: any')
parser.add_option('-c', '--component', metavar='COMPONENT',
action='append',
help='Only consider reverse-dependencies in COMPONENT. '
'Can be specified multiple times. Default: all')
parser.add_option('-l', '--list',
action='store_true', default=False,
help='Display a simple, machine-readable list')
parser.add_option('-u', '--service-url', metavar='URL',
dest='server', default=None,
help='Reverse Dependencies webservice URL. '
'Default: UbuntuWire')
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
"-r",
"--release",
default=default_release,
help="Query dependencies in RELEASE. Default: %(default)s",
)
parser.add_argument(
"-R",
"--without-recommends",
action="store_false",
dest="recommends",
help="Only consider Depends relationships, not Recommends",
)
parser.add_argument(
"-s", "--with-suggests", action="store_true", help="Also consider Suggests relationships"
)
parser.add_argument(
"-b",
"--build-depends",
action="store_true",
help="Query build dependencies (synonym for --arch=source)",
)
parser.add_argument(
"-a", "--arch", default="any", help="Query dependencies in ARCH. Default: any"
)
parser.add_argument(
"-c",
"--component",
action="append",
help="Only consider reverse-dependencies in COMPONENT. "
"Can be specified multiple times. Default: all",
)
parser.add_argument(
"-l", "--list", action="store_true", help="Display a simple, machine-readable list"
)
parser.add_argument(
"-u",
"--service-url",
metavar="URL",
dest="server",
default=None,
help="Reverse Dependencies webservice URL. Default: UbuntuWire",
)
parser.add_argument(
"-x",
"--recursive",
action="store_true",
help="Consider to find reverse dependencies recursively.",
)
parser.add_argument(
"-d",
"--recursive-depth",
type=int,
default=DEFAULT_MAX_DEPTH,
help="If recusive, you can specify the depth.",
)
parser.add_argument("package")
options, args = parser.parse_args()
options = parser.parse_args()
if len(args) != 1:
parser.error("One (and only one) package must be specified")
package = args[0]
opts = {}
if options.server is not None:
opts["server"] = options.server
opts['server'] = options.server
# Convert unstable/testing aliases to codenames:
distribution = codename_to_distribution(options.release)
if not distribution:
parser.error(f"Unknown release codename {options.release}")
parser.error('Unknown release codename %s' % options.release)
distro_info = vendor_to_distroinfo(distribution)()
try:
options.release = distro_info.codename(options.release, default=options.release)
options.release = distro_info.codename(options.release,
default=options.release)
except DistroDataOutdated:
# We already logged a warning
# We already printed a warning
pass
if options.build_depends:
options.arch = "source"
try:
data = query_rdepends(package, options.release, options.arch, **opts)
except RDependsException, e:
Logger.error(str(e))
sys.exit(1)
if options.arch == "source":
fields = [
"Reverse-Build-Depends",
"Reverse-Build-Depends-Indep",
"Reverse-Build-Depends-Arch",
"Reverse-Testsuite-Triggers",
]
if options.arch == 'source':
fields = ['Reverse-Build-Depends', 'Reverse-Build-Depends-Indep']
else:
fields = ["Reverse-Depends"]
fields = ['Reverse-Depends']
if options.recommends:
fields.append("Reverse-Recommends")
if options.with_suggests:
fields.append("Reverse-Suggests")
fields.append('Reverse-Recommends')
if options.suggests:
fields.append('Reverse-Suggests')
def build_results(package, result, fields, component, recursive):
try:
data = query_rdepends(package, options.release, options.arch, **opts)
except RDependsException as e:
Logger.error(str(e))
sys.exit(1)
if not data:
return
for field in data.keys():
if field not in fields:
del data[field]
if fields:
data = {k: v for k, v in data.items() if k in fields}
if component:
data = {
k: [rdep for rdep in v if rdep["Component"] in component] for k, v in data.items()
}
data = {k: v for k, v in data.items() if v}
result[package] = data
if recursive > 0:
for rdeps in result[package].values():
for rdep in rdeps:
build_results(rdep["Package"], result, fields, component, recursive - 1)
result = {}
build_results(
options.package,
result,
fields,
options.component,
options.recursive and options.recursive_depth or 0,
)
if options.component:
for field, rdeps in data.items():
filtered = [rdep for rdep in rdeps
if rdep['Component'] in options.component]
if not filtered:
del data[field]
else:
data[field] = filtered
if options.list:
display_consise(result)
display_consise(data)
else:
display_verbose(options.package, result)
display_verbose(data)
def display_verbose(package, values):
if not values:
Logger.info("No reverse dependencies found")
def display_verbose(data):
if not data:
print "No reverse dependencies found"
return
def log_package(values, package, arch, dependency, visited, offset=0):
line = f"{' ' * offset}* {package}"
if all_archs and set(arch) != all_archs:
line += f" [{' '.join(sorted(arch))}]"
if dependency:
if len(line) < 30:
line += " " * (30 - len(line))
line += f" (for {dependency})"
Logger.info(line)
if package in visited:
return
visited = visited.copy().add(package)
data = values.get(package)
if data:
offset = offset + 1
for rdeps in data.values():
for rdep in rdeps:
log_package(
values,
rdep["Package"],
rdep.get("Architectures", all_archs),
rdep.get("Dependency"),
visited,
offset,
)
all_archs = set()
# This isn't accurate, but we make up for it by displaying what we found
for data in values.values():
for rdeps in data.values():
for rdep in rdeps:
if "Architectures" in rdep:
all_archs.update(rdep["Architectures"])
for field, rdeps in values[package].items():
Logger.info("%s", field)
Logger.info("%s", "=" * len(field))
rdeps.sort(key=lambda x: x["Package"])
for rdeps in data.itervalues():
for rdep in rdeps:
log_package(
values,
rdep["Package"],
rdep.get("Architectures", all_archs),
rdep.get("Dependency"),
{package},
)
Logger.info("")
if 'Architectures' in rdep:
all_archs.update(rdep['Architectures'])
for field, rdeps in data.iteritems():
print field
print '=' * len(field)
rdeps.sort(key=lambda x: x['Package'])
for rdep in rdeps:
line = '* %s' % rdep['Package']
if all_archs and set(rdep['Architectures']) != all_archs:
line += ' [%s]' % ' '.join(sorted(rdep['Architectures']))
if 'Dependency' in rdep:
if len(line) < 30:
line += ' ' * (30 - len(line))
line += ' (for %s)' % rdep['Dependency']
print line
print
if all_archs:
Logger.info(
"Packages without architectures listed are reverse-dependencies in: %s",
", ".join(sorted(list(all_archs))),
)
print ("Packages without architectures listed are "
"reverse-dependencies in: %s"
% ', '.join(sorted(list(all_archs))))
def display_consise(values):
def display_consise(data):
result = set()
for data in values.values():
for rdeps in data.values():
for rdep in rdeps:
result.add(rdep["Package"])
for rdeps in data.itervalues():
for rdep in rdeps:
result.add(rdep['Package'])
Logger.info("\n".join(sorted(list(result))))
print u'\n'.join(sorted(list(result)))
if __name__ == "__main__":
if __name__ == '__main__':
main()
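One detail behind the field-filtering rewrite in the reverse-depends hunks above: the Python 2-era code deletes unwanted keys while looping over data.keys(), which is safe there because keys() returns a list, but raises RuntimeError on Python 3, where keys() is a live view; the ported code therefore rebuilds the dict with comprehensions. A small self-contained sketch with dummy data:

fields = ["Reverse-Depends", "Reverse-Recommends"]
data = {"Reverse-Depends": ["pkg-a"], "Reverse-Suggests": ["pkg-b"]}

# Python 2 style: delete while iterating. Under Python 3 this raises
# "RuntimeError: dictionary changed size during iteration".
#   for field in data.keys():
#       if field not in fields:
#           del data[field]

# Python 3 style, as in build_results() above: rebuild the dict instead.
data = {k: v for k, v in data.items() if k in fields}
print(data)  # {'Reverse-Depends': ['pkg-a']}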
View File
@ -1,19 +0,0 @@
#!/bin/sh
set -eu
# Copyright 2023, Canonical Ltd.
# SPDX-License-Identifier: GPL-3.0
PYTHON_SCRIPTS=$(grep -l -r '^#! */usr/bin/python3$' .)
echo "Running black..."
black --check --diff . $PYTHON_SCRIPTS
echo "Running isort..."
isort --check-only --diff .
echo "Running flake8..."
flake8 --max-line-length=99 --ignore=E203,W503 . $PYTHON_SCRIPTS
echo "Running pylint..."
pylint $(find * -name '*.py') $PYTHON_SCRIPTS
View File
@ -1,81 +0,0 @@
#!/usr/bin/python3
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# Authors:
# Andy P. Whitcroft
# Christian Ehrhardt
# Chris Peterson <chris.peterson@canonical.com>
#
# Copyright (C) 2024 Canonical Ltd.
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Dumps a list of currently running tests in Autopkgtest"""
__example__ = """
Display first listed test running on amd64 hardware:
$ running-autopkgtests | grep amd64 | head -n1
R 0:01:40 systemd-upstream - focal amd64\
upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb',\
'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true',\
'UPSTREAM_PULL_REQUEST=23153',\
'GITHUB_STATUSES_URL=https://api.github.com/repos/\
systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
"""
import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from ubuntutools.running_autopkgtests import get_queued, get_running
def parse_args():
description = (
"Dumps a list of currently running and queued tests in Autopkgtest. "
"Pass --running to only see running tests, or --queued to only see "
"queued tests. Passing both will print both, which is the default behavior. "
)
parser = ArgumentParser(
prog="running-autopkgtests",
description=description,
epilog=f"example: {__example__}",
formatter_class=RawDescriptionHelpFormatter,
)
parser.add_argument(
"-r", "--running", action="store_true", help="Print runnning autopkgtests (default: true)"
)
parser.add_argument(
"-q", "--queued", action="store_true", help="Print queued autopkgtests (default: true)"
)
options = parser.parse_args()
# If neither flag was specified, default to both not neither
if not options.running and not options.queued:
options.running = True
options.queued = True
return options
def main() -> int:
args = parse_args()
if args.running:
print(get_running())
if args.queued:
print(get_queued())
return 0
if __name__ == "__main__":
sys.exit(main())
View File
@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/python
#
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
#
@ -14,138 +14,130 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import argparse
import collections
import gzip
import json
import optparse
import os
import time
import urllib.request
import urllib
from ubuntutools import getLogger
from ubuntutools.lp.lpapicache import Distribution, Launchpad, PackageNotFoundException
from ubuntutools.lp.lpapicache import (Distribution, Launchpad,
PackageNotFoundException)
from ubuntutools.logger import Logger
Logger = getLogger()
DATA_URL = "http://qa.ubuntuwire.org/ubuntu-seeded-packages/seeded.json.gz"
DATA_URL = 'http://qa.ubuntuwire.org/ubuntu-seeded-packages/seeded.json.gz'
def load_index(url):
"""Download a new copy of the image contents index, if necessary,
'''Download a new copy of the image contents index, if necessary,
and read it.
"""
cachedir = os.path.expanduser("~/.cache/ubuntu-dev-tools")
seeded = os.path.join(cachedir, "seeded.json.gz")
'''
cachedir = os.path.expanduser('~/.cache/ubuntu-dev-tools')
fn = os.path.join(cachedir, 'seeded.json.gz')
if not os.path.isfile(seeded) or time.time() - os.path.getmtime(seeded) > 60 * 60 * 2:
if (not os.path.isfile(fn)
or time.time() - os.path.getmtime(fn) > 60 * 60 * 2):
if not os.path.isdir(cachedir):
os.makedirs(cachedir)
urllib.request.urlretrieve(url, seeded)
urllib.urlretrieve(url, fn)
try:
with gzip.open(seeded, "r") as f:
with gzip.open(fn, 'r') as f:
return json.load(f)
except Exception as e: # pylint: disable=broad-except
Logger.error(
"Unable to parse seed data: %s. Deleting cached data, please try again.", str(e)
)
os.unlink(seeded)
return None
except Exception, e:
Logger.error("Unable to parse seed data: %s. "
"Deleting cached data, please try again.",
str(e))
os.unlink(fn)
def resolve_binaries(sources):
"""Return a dict of source:binaries for all binary packages built by
'''Return a dict of source:binaries for all binary packages built by
sources
"""
archive = Distribution("ubuntu").getArchive()
'''
archive = Distribution('ubuntu').getArchive()
binaries = {}
for source in sources:
try:
spph = archive.getSourcePackage(source)
except PackageNotFoundException as e:
except PackageNotFoundException, e:
Logger.error(str(e))
continue
binaries[source] = sorted(set(bpph.getPackageName() for bpph in spph.getBinaries()))
binaries[source] = sorted(set(bpph.getPackageName()
for bpph in spph.getBinaries()))
return binaries
def present_on(appearences):
"""Format a list of (flavor, type) tuples into a human-readable string"""
'''Format a list of (flavor, type) tuples into a human-readable string'''
present = collections.defaultdict(set)
for flavor, type_ in appearences:
present[flavor].add(type_)
for flavor, types in present.items():
for flavor, types in present.iteritems():
if len(types) > 1:
types.discard("supported")
output = [f" {flavor}: {', '.join(sorted(types))}" for flavor, types in present.items()]
types.discard('supported')
output = [' %s: %s' % (flavor, ', '.join(sorted(types)))
for flavor, types in present.iteritems()]
output.sort()
return "\n".join(output)
return '\n'.join(output)
def output_binaries(index, binaries):
"""Print binaries found in index"""
'''Print binaries found in index'''
for binary in binaries:
if binary in index:
Logger.info("%s is seeded in:", binary)
Logger.info(present_on(index[binary]))
print "%s is seeded in:" % binary
print present_on(index[binary])
else:
Logger.info("%s is not seeded (and may not exist).", binary)
print "%s is not seeded (and may not exist)." % binary
def output_by_source(index, by_source):
"""Logger.Info(binaries found in index. Grouped by source"""
for source, binaries in by_source.items():
'''Print binaries found in index. Grouped by source'''
for source, binaries in by_source.iteritems():
seen = False
if not binaries:
Logger.info(
"Status unknown: No binary packages built by the latest "
"%s.\nTry again using -b and the expected binary packages.",
source,
)
print ("Status unknown: No binary packages built by the latest "
"%s.\nTry again using -b and the expected binary packages."
% source)
continue
for binary in binaries:
if binary in index:
seen = True
Logger.info("%s (from %s) is seeded in:", binary, source)
Logger.info(present_on(index[binary]))
print "%s (from %s) is seeded in:" % (binary, source)
print present_on(index[binary])
if not seen:
Logger.info("%s's binaries are not seeded.", source)
print "%s's binaries are not seeded." % source
def main():
"""Query which images the specified packages are on"""
parser = argparse.ArgumentParser(usage="%(prog)s [options] package...")
parser.add_argument(
"-b",
"--binary",
default=False,
action="store_true",
help="Binary packages are being specified, not source packages (fast)",
)
parser.add_argument(
"-u",
"--data-url",
metavar="URL",
default=DATA_URL,
help="URL for the seeded packages index. Default: UbuntuWire",
)
parser.add_argument("packages", metavar="package", nargs="+", help=argparse.SUPPRESS)
args = parser.parse_args()
'''Query which images the specified packages are on'''
parser = optparse.OptionParser('%prog [options] package...')
parser.add_option('-b', '--binary',
default=False, action='store_true',
help="Binary packages are being specified, "
"not source packages (fast)")
parser.add_option('-u', '--data-url', metavar='URL',
default=DATA_URL,
help='URL for the seeded packages index. '
'Default: UbuntuWire')
options, args = parser.parse_args()
if len(args) < 1:
parser.error("At least one package must be specified")
# Login anonymously to LP
Launchpad.login_anonymously()
index = load_index(args.data_url)
if args.binary:
output_binaries(index, args.packages)
index = load_index(options.data_url)
if options.binary:
output_binaries(index, args)
else:
binaries = resolve_binaries(args.packages)
binaries = resolve_binaries(args)
output_by_source(index, binaries)
if __name__ == "__main__":
if __name__ == '__main__':
main()
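To make present_on() in the hunks above concrete: it groups (flavor, type) tuples per flavor and drops the generic "supported" label whenever a more specific image type is also present. A hedged sketch with invented seed data (the real input comes from the UbuntuWire seeded.json index):

import collections

def present_on(appearences):
    # Copy of the helper above, following the newer side's form.
    present = collections.defaultdict(set)
    for flavor, type_ in appearences:
        present[flavor].add(type_)
    for flavor, types in present.items():
        if len(types) > 1:
            types.discard("supported")
    output = [f"  {flavor}: {', '.join(sorted(types))}" for flavor, types in present.items()]
    output.sort()
    return "\n".join(output)

print(present_on([("ubuntu", "supported"), ("ubuntu", "desktop"), ("kubuntu", "supported")]))
# prints:
#   kubuntu: supported
#   ubuntu: desktop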
View File
@ -104,7 +104,7 @@ echo "In order to do packaging work, you'll need a minimal set of packages."
echo "Those, together with other packages which, though optional, have proven"
echo "to be useful, will now be installed."
echo
sudo apt-get install ubuntu-dev-tools devscripts debhelper patchutils pbuilder build-essential
sudo apt-get install ubuntu-dev-tools devscripts debhelper cdbs patchutils pbuilder build-essential
separator2
echo "Enabling the source repository"
155
setup.py
View File
@ -1,100 +1,77 @@
#!/usr/bin/python3
import glob
import pathlib
import re
#!/usr/bin/python
from setuptools import setup
import glob
import os
import re
import sys
import codecs
# look/set what version we have
changelog = "debian/changelog"
if os.path.exists(changelog):
head = codecs.open(changelog, 'r', 'utf-8', 'replace').readline()
match = re.compile(".*\((.*)\).*").match(head)
if match:
version = match.group(1)
def get_debian_version() -> str:
"""Look what Debian version we have."""
changelog = pathlib.Path(__file__).parent / "debian" / "changelog"
with changelog.open("r", encoding="utf-8") as changelog_f:
head = changelog_f.readline()
match = re.compile(r".*\((.*)\).*").match(head)
if not match:
raise ValueError(f"Failed to extract Debian version from '{head}'.")
return match.group(1)
if sys.version_info[0] >= 3:
scripts = []
data_files = []
else:
scripts = [
'404main',
'backportpackage',
'bitesize',
'check-mir',
'check-symbols',
'dch-repeat',
'grab-merge',
'grep-merges',
'hugdaylist',
'import-bug-from-debian',
'merge-changelog',
'mk-sbuild',
'pbuilder-dist',
'pbuilder-dist-simple',
'pull-debian-debdiff',
'pull-debian-source',
'pull-lp-source',
'pull-revu-source',
'pull-uca-source',
'requestbackport',
'requestsync',
'reverse-build-depends',
'reverse-depends',
'seeded-in-ubuntu',
'setup-packaging-environment',
'sponsor-patch',
'submittodebian',
'syncpackage',
'ubuntu-build',
'ubuntu-iso',
'ubuntu-upload-permission',
'update-maintainer',
]
data_files = [
('share/bash-completion/completions', glob.glob("bash_completion/*")),
('share/man/man1', glob.glob("doc/*.1")),
('share/man/man5', glob.glob("doc/*.5")),
('share/ubuntu-dev-tools', ['enforced-editing-wrapper']),
]
def make_pep440_compliant(version: str) -> str:
"""Convert the version into a PEP440 compliant version."""
public_version_re = re.compile(r"^([0-9][0-9.]*(?:(?:a|b|rc|.post|.dev)[0-9]+)*)\+?")
_, public, local = public_version_re.split(version, maxsplit=1)
if not local:
return version
sanitized_local = re.sub("[+~]+", ".", local).strip(".")
pep440_version = f"{public}+{sanitized_local}"
assert re.match("^[a-zA-Z0-9.]+$", sanitized_local), f"'{pep440_version}' not PEP440 compliant"
return pep440_version
scripts = [
"backportpackage",
"check-mir",
"check-symbols",
"dch-repeat",
"grab-merge",
"grep-merges",
"import-bug-from-debian",
"lp-bitesize",
"merge-changelog",
"mk-sbuild",
"pbuilder-dist",
"pbuilder-dist-simple",
"pm-helper",
"pull-pkg",
"pull-debian-debdiff",
"pull-debian-source",
"pull-debian-debs",
"pull-debian-ddebs",
"pull-debian-udebs",
"pull-lp-source",
"pull-lp-debs",
"pull-lp-ddebs",
"pull-lp-udebs",
"pull-ppa-source",
"pull-ppa-debs",
"pull-ppa-ddebs",
"pull-ppa-udebs",
"pull-uca-source",
"pull-uca-debs",
"pull-uca-ddebs",
"pull-uca-udebs",
"requestbackport",
"requestsync",
"reverse-depends",
"running-autopkgtests",
"seeded-in-ubuntu",
"setup-packaging-environment",
"sponsor-patch",
"submittodebian",
"syncpackage",
"ubuntu-build",
"ubuntu-iso",
"ubuntu-upload-permission",
"update-maintainer",
]
data_files = [
("share/bash-completion/completions", glob.glob("bash_completion/*")),
("share/man/man1", glob.glob("doc/*.1")),
("share/man/man5", glob.glob("doc/*.5")),
("share/ubuntu-dev-tools", ["enforced-editing-wrapper"]),
]
if __name__ == "__main__":
if __name__ == '__main__':
setup(
name="ubuntu-dev-tools",
version=make_pep440_compliant(get_debian_version()),
name='ubuntu-dev-tools',
version=version,
scripts=scripts,
packages=[
"ubuntutools",
"ubuntutools/lp",
"ubuntutools/requestsync",
"ubuntutools/sponsor_patch",
"ubuntutools/test",
'ubuntutools',
'ubuntutools/lp',
'ubuntutools/requestsync',
'ubuntutools/sponsor_patch',
'ubuntutools/test',
],
data_files=data_files,
test_suite="ubuntutools.test",
test_suite='ubuntutools.test.discover',
)
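The newer setup.py above reads the package version from debian/changelog and converts it into a PEP 440-compliant string before handing it to setuptools. A hedged, self-contained sketch of make_pep440_compliant() with invented version strings (not entries from this package's changelog):

import re

def make_pep440_compliant(version: str) -> str:
    # Same logic as the helper above, without its assert.
    public_version_re = re.compile(r"^([0-9][0-9.]*(?:(?:a|b|rc|.post|.dev)[0-9]+)*)\+?")
    _, public, local = public_version_re.split(version, maxsplit=1)
    if not local:
        return version
    sanitized_local = re.sub("[+~]+", ".", local).strip(".")
    return f"{public}+{sanitized_local}"

print(make_pep440_compliant("0.200"))             # 0.200 (already compliant)
print(make_pep440_compliant("0.200ubuntu1"))      # 0.200+ubuntu1
print(make_pep440_compliant("0.200~bpo22.04.1"))  # 0.200+bpo22.04.1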
View File
@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/python
#
# Copyright (C) 2010-2011, Benjamin Drung <bdrung@ubuntu.com>
#
@ -14,153 +14,119 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# pylint: disable=invalid-name
# pylint: enable=invalid-name
import argparse
import logging
import optparse
import os
import shutil
import sys
import tempfile
from ubuntutools import getLogger
from ubuntutools.builder import get_builder
from ubuntutools.config import UDTConfig
from ubuntutools.sponsor_patch.sponsor_patch import check_dependencies, sponsor_patch
Logger = getLogger()
from ubuntutools.logger import Logger
from ubuntutools.sponsor_patch.sponsor_patch import sponsor_patch, check_dependencies
def parse(script_name):
"""Parse the command line parameters."""
usage = (
"%(prog)s [options] <bug number>\n"
"One of --upload, --workdir, or --sponsor must be specified."
)
epilog = f"See {script_name}(1) for more info."
parser = argparse.ArgumentParser(usage=usage, epilog=epilog)
usage = ("%s [options] <bug number>\n" % (script_name)
+ "One of --upload, --workdir, or --sponsor must be specified.")
epilog = "See %s(1) for more info." % (script_name)
parser = optparse.OptionParser(usage=usage, epilog=epilog)
parser.add_argument(
"-b",
"--build",
dest="build",
help="Build the package with the specified builder.",
action="store_true",
)
parser.add_argument(
"-B", "--builder", dest="builder", help="Specify the package builder (default pbuilder)"
)
parser.add_argument(
"-e",
"--edit",
help="launch sub-shell to allow editing of the patch",
dest="edit",
action="store_true",
)
parser.add_argument(
"-k", "--key", dest="keyid", help="Specify the key ID to be used for signing."
)
parser.add_argument(
"-l",
"--lpinstance",
dest="lpinstance",
help="Launchpad instance to connect to (default: production)",
metavar="INSTANCE",
)
parser.add_argument(
"--no-conf",
dest="no_conf",
help="Don't read config files or environment variables.",
action="store_true",
)
parser.add_argument(
"-s",
"--sponsor",
help="sponsoring; equals -b -u ubuntu",
dest="sponsoring",
action="store_true",
)
parser.add_argument(
"-u", "--upload", dest="upload", help="Specify an upload destination (default none)."
)
parser.add_argument(
"-U",
"--update",
dest="update",
action="store_true",
help="Update the build environment before building.",
)
parser.add_argument(
"-v", "--verbose", help="print more information", dest="verbose", action="store_true"
)
parser.add_argument(
"-w",
"--workdir",
dest="workdir",
help="Specify a working directory (default is a "
"temporary directory, deleted afterwards).",
)
parser.add_argument("bug_number", type=int, help=argparse.SUPPRESS)
parser.add_option("-b", "--build", dest="build",
help="Build the package with the specified builder.",
action="store_true", default=False)
parser.add_option("-B", "--builder", dest="builder", default=None,
help="Specify the package builder (default pbuilder)")
parser.add_option("-e", "--edit",
help="launch sub-shell to allow editing of the patch",
dest="edit", action="store_true", default=False)
parser.add_option("-k", "--key", dest="keyid", default=None,
help="Specify the key ID to be used for signing.")
parser.add_option("-l", "--lpinstance", dest="lpinstance", default=None,
help="Launchpad instance to connect to "
"(default: production)",
metavar="INSTANCE")
parser.add_option("--no-conf", dest="no_conf", default=False,
help="Don't read config files or environment variables.",
action="store_true")
parser.add_option("-s", "--sponsor", help="sponsoring; equals -b -u ubuntu",
dest="sponsoring", action="store_true", default=False)
parser.add_option("-u", "--upload", dest="upload", default=None,
help="Specify an upload destination (default none).")
parser.add_option("-U", "--update", dest="update", default=False,
action="store_true",
help="Update the build environment before building.")
parser.add_option("-v", "--verbose", help="print more information",
dest="verbose", action="store_true", default=False)
parser.add_option("-w", "--workdir", dest="workdir", default=None,
help="Specify a working directory (default is a "
"temporary directory, deleted afterwards).")
args = parser.parse_args()
if args.verbose:
Logger.setLevel(logging.DEBUG)
(options, args) = parser.parse_args()
Logger.set_verbosity(options.verbose)
check_dependencies()
config = UDTConfig(args.no_conf)
if args.builder is None:
args.builder = config.get_value("BUILDER")
if args.lpinstance is None:
args.lpinstance = config.get_value("LPINSTANCE")
if not args.update:
args.update = config.get_value("UPDATE_BUILDER", boolean=True)
if args.workdir is None:
args.workdir = config.get_value("WORKDIR")
if args.keyid is None:
args.keyid = config.get_value("KEYID")
if len(args) == 0:
Logger.error("No bug number specified.")
sys.exit(1)
elif len(args) > 1:
Logger.error("Multiple bug numbers specified: %s" % (", ".join(args)))
sys.exit(1)
if args.sponsoring:
args.build = True
args.upload = "ubuntu"
bug_number = args[0]
if bug_number.isdigit():
bug_number = int(bug_number)
else:
Logger.error("Invalid bug number specified: %s" % (bug_number))
sys.exit(1)
return args
config = UDTConfig(options.no_conf)
if options.builder is None:
options.builder = config.get_value("BUILDER")
if options.lpinstance is None:
options.lpinstance = config.get_value("LPINSTANCE")
if not options.update:
options.update = config.get_value("UPDATE_BUILDER", boolean=True)
if options.workdir is None:
options.workdir = config.get_value("WORKDIR")
if options.keyid is None:
options.keyid = config.get_value("KEYID")
if options.sponsoring:
options.build = True
options.upload = "ubuntu"
return (options, bug_number)
def main():
script_name = os.path.basename(sys.argv[0])
args = parse(script_name)
(options, bug_number) = parse(script_name)
builder = get_builder(args.builder)
builder = get_builder(options.builder)
if not builder:
sys.exit(1)
if not args.upload and not args.workdir:
Logger.error("Please specify either a working directory or an upload target!")
if not options.upload and not options.workdir:
Logger.error("Please specify either a working directory or an upload "
"target!")
sys.exit(1)
if args.workdir is None:
workdir = tempfile.mkdtemp(prefix=script_name + "-")
if options.workdir is None:
workdir = tempfile.mkdtemp(prefix=script_name+"-")
else:
workdir = args.workdir
workdir = options.workdir
try:
sponsor_patch(
args.bug_number,
args.build,
builder,
args.edit,
args.keyid,
args.lpinstance,
args.update,
args.upload,
workdir,
)
sponsor_patch(bug_number, options.build, builder, options.edit,
options.keyid, options.lpinstance, options.update,
options.upload, workdir)
except KeyboardInterrupt:
Logger.error("User abort.")
print "\nUser abort."
sys.exit(2)
finally:
if args.workdir is None:
if options.workdir is None:
shutil.rmtree(workdir)
View File
@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# submittodebian - tool to submit patches to Debian's BTS
@ -22,36 +22,33 @@
#
# ##################################################################
"""Submit the Ubuntu changes in a package to Debian.
Run inside an unpacked Ubuntu source package.
"""
import argparse
import optparse
import os
import re
import shutil
import sys
from subprocess import DEVNULL, PIPE, Popen, call, check_call, run
from tempfile import mkdtemp
from debian.changelog import Changelog
from distro_info import DistroDataOutdated, UbuntuDistroInfo
from distro_info import UbuntuDistroInfo, DistroDataOutdated
from ubuntutools import getLogger
from ubuntutools.config import ubu_email
from ubuntutools.question import EditFile, YesNoQuestion
from ubuntutools.update_maintainer import restore_maintainer, update_maintainer
from ubuntutools.question import YesNoQuestion, EditFile
from ubuntutools.subprocess import call, check_call, Popen, PIPE
from ubuntutools.update_maintainer import update_maintainer, restore_maintainer
Logger = getLogger()
try:
from debian.changelog import Changelog
except ImportError:
print(u"This utility requires modules from the «python-debian» package, "
u"which isn't currently installed.")
sys.exit(1)
def get_most_recent_debian_version(changelog):
for block in changelog:
version = block.version.full_version
if not re.search("(ubuntu|build)", version):
if not re.search('(ubuntu|build)', version):
return version
return None
def get_bug_body(changelog):
@ -69,20 +66,19 @@ In Ubuntu, the attached patch was applied to achieve the following:
%s
Thanks for considering the patch.
""" % (
"\n".join(entry.changes())
)
""" % ("\n".join([a for a in entry.changes()]))
return msg
def build_source_package():
if os.path.isdir(".bzr"):
cmd = ["bzr", "bd", "--builder=dpkg-buildpackage", "-S", "--", "-uc", "-us", "-nc"]
if os.path.isdir('.bzr'):
cmd = ['bzr', 'bd', '--builder=dpkg-buildpackage', '-S',
'--', '-uc', '-us', '-nc']
else:
cmd = ["dpkg-buildpackage", "-S", "-uc", "-us", "-nc"]
cmd = ['dpkg-buildpackage', '-S', '-uc', '-us', '-nc']
env = os.environ.copy()
# Unset DEBEMAIL in case there's an @ubuntu.com e-mail address
env.pop("DEBEMAIL", None)
env.pop('DEBEMAIL', None)
check_call(cmd, env=env)
@ -93,35 +89,35 @@ def gen_debdiff(tmpdir, changelog):
newver = next(changelog_it).version
oldver = next(changelog_it).version
debdiff = os.path.join(tmpdir, f"{pkg}_{newver}.debdiff")
debdiff = os.path.join(tmpdir, '%s_%s.debdiff' % (pkg, newver))
diff_cmd = ["bzr", "diff", "-r", "tag:" + str(oldver)]
if call(diff_cmd, stdout=DEVNULL, stderr=DEVNULL) == 1:
Logger.info("Extracting bzr diff between %s and %s", oldver, newver)
devnull = open('/dev/null', 'w')
diff_cmd = ['bzr', 'diff', '-r', 'tag:' + str(oldver)]
if call(diff_cmd, stdout=devnull, stderr=devnull) == 1:
print "Extracting bzr diff between %s and %s" % (oldver, newver)
else:
if oldver.epoch is not None:
oldver = str(oldver)[str(oldver).index(":") + 1 :]
oldver = str(oldver)[str(oldver).index(":") + 1:]
if newver.epoch is not None:
newver = str(newver)[str(newver).index(":") + 1 :]
newver = str(newver)[str(newver).index(":") + 1:]
olddsc = f"../{pkg}_{oldver}.dsc"
newdsc = f"../{pkg}_{newver}.dsc"
olddsc = '../%s_%s.dsc' % (pkg, oldver)
newdsc = '../%s_%s.dsc' % (pkg, newver)
check_file(olddsc)
check_file(newdsc)
Logger.info("Generating debdiff between %s and %s", oldver, newver)
diff_cmd = ["debdiff", olddsc, newdsc]
print "Generating debdiff between %s and %s" % (oldver, newver)
diff_cmd = ['debdiff', olddsc, newdsc]
with Popen(diff_cmd, stdout=PIPE, encoding="utf-8") as diff:
with open(debdiff, "w", encoding="utf-8") as debdiff_f:
run(
["filterdiff", "-x", "*changelog*"],
check=False,
stdin=diff.stdout,
stdout=debdiff_f,
encoding="utf-8",
)
diff = Popen(diff_cmd, stdout=PIPE)
debdiff_f = open(debdiff, 'w')
filterdiff = Popen(['filterdiff', '-x', '*changelog*'],
stdin=diff.stdout, stdout=debdiff_f)
diff.stdout.close()
filterdiff.wait()
debdiff_f.close()
devnull.close()
return debdiff
@ -129,95 +125,88 @@ def gen_debdiff(tmpdir, changelog):
def check_file(fname, critical=True):
if os.path.exists(fname):
return fname
if not critical:
return False
Logger.info("Couldn't find «%s».\n", fname)
sys.exit(1)
else:
if not critical:
return False
print u"Couldn't find «%s».\n" % fname
sys.exit(1)
def submit_bugreport(body, debdiff, deb_version, changelog):
try:
devel = UbuntuDistroInfo().devel()
except DistroDataOutdated as e:
Logger.info(str(e))
devel = ""
except DistroDataOutdated, e:
print str(e)
devel = ''
if os.path.dirname(sys.argv[0]).startswith("/usr/bin"):
editor_path = "/usr/share/ubuntu-dev-tools"
if os.path.dirname(sys.argv[0]).startswith('/usr/bin'):
editor_path = '/usr/share/ubuntu-dev-tools'
else:
editor_path = os.path.dirname(sys.argv[0])
env = dict(os.environ.items())
if "EDITOR" in env:
env["UDT_EDIT_WRAPPER_EDITOR"] = env["EDITOR"]
if "VISUAL" in env:
env["UDT_EDIT_WRAPPER_VISUAL"] = env["VISUAL"]
env["EDITOR"] = os.path.join(editor_path, "enforced-editing-wrapper")
env["VISUAL"] = os.path.join(editor_path, "enforced-editing-wrapper")
env["UDT_EDIT_WRAPPER_TEMPLATE_RE"] = ".*REPLACE THIS WITH ACTUAL INFORMATION.*"
env["UDT_EDIT_WRAPPER_FILE_DESCRIPTION"] = "bug report"
if 'EDITOR' in env:
env['UDT_EDIT_WRAPPER_EDITOR'] = env['EDITOR']
if 'VISUAL' in env:
env['UDT_EDIT_WRAPPER_VISUAL'] = env['VISUAL']
env['EDITOR'] = os.path.join(editor_path, 'enforced-editing-wrapper')
env['VISUAL'] = os.path.join(editor_path, 'enforced-editing-wrapper')
env['UDT_EDIT_WRAPPER_TEMPLATE_RE'] = (
'.*REPLACE THIS WITH ACTUAL INFORMATION.*')
env['UDT_EDIT_WRAPPER_FILE_DESCRIPTION'] = 'bug report'
# In external mua mode, attachments are lost (Reportbug bug: #679907)
internal_mua = True
for cfgfile in ("/etc/reportbug.conf", "~/.reportbugrc"):
for cfgfile in ('/etc/reportbug.conf', '~/.reportbugrc'):
cfgfile = os.path.expanduser(cfgfile)
if not os.path.exists(cfgfile):
continue
with open(cfgfile, "r", encoding="utf-8") as f:
with open(cfgfile, 'r') as f:
for line in f:
line = line.strip()
if line in ("gnus", "mutt", "nmh") or line.startswith("mua "):
if line in ('gnus', 'mutt', 'nmh') or line.startswith('mua '):
internal_mua = False
break
cmd = (
"reportbug",
"--no-check-available",
"--no-check-installed",
"--pseudo-header",
"User: ubuntu-devel@lists.ubuntu.com",
"--pseudo-header",
f"Usertags: origin-ubuntu {devel} ubuntu-patch",
"--tag",
"patch",
"--bts",
"debian",
"--include",
body,
"--attach" if internal_mua else "--include",
debdiff,
"--package-version",
deb_version,
changelog.package,
)
cmd = ('reportbug',
'--no-check-available',
'--no-check-installed',
'--pseudo-header', 'User: ubuntu-devel@lists.ubuntu.com',
'--pseudo-header', 'Usertags: origin-ubuntu %s ubuntu-patch'
% devel,
'--tag', 'patch',
'--bts', 'debian',
'--include', body,
'--attach' if internal_mua else '--include', debdiff,
'--package-version', deb_version,
changelog.package)
check_call(cmd, env=env)
def check_reportbug_config():
reportbugrc_filename = os.path.expanduser("~/.reportbugrc")
if os.path.exists(reportbugrc_filename):
fn = os.path.expanduser('~/.reportbugrc')
if os.path.exists(fn):
return
email = ubu_email()[1]
reportbugrc = f"""# Reportbug configuration generated by submittodebian(1)
reportbugrc = """# Reportbug configuration generated by submittodebian(1)
# See reportbug.conf(5) for the configuration file format.
# Use Debian's reportbug SMTP Server:
# Note: it's limited to 5 connections per hour, and cannot CC you at submission
# time. See /usr/share/doc/reportbug/README.Users.gz for more details.
smtphost reportbug.debian.org:587
header "X-Debbugs-CC: {email}"
header "X-Debbugs-CC: %s"
no-cc
# Use GMail's SMTP Server:
#smtphost smtp.googlemail.com:587
#smtpuser "<your address>@gmail.com"
#smtptls
"""
""" % email
with open(reportbugrc_filename, "w", encoding="utf-8") as f:
with file(fn, 'w') as f:
f.write(reportbugrc)
Logger.info(
"""\
print """\
You have not configured reportbug. Assuming this is the first time you have
used it. Writing a ~/.reportbugrc that will use Debian's mail server, and CC
the bug to you at <%s>
@ -228,43 +217,40 @@ the bug to you at <%s>
If this is not correct, please exit now and edit ~/.reportbugrc or run
reportbug --configure for its configuration wizard.
""",
email,
reportbugrc.strip(),
)
""" % (email, reportbugrc.strip())
if YesNoQuestion().ask("Continue submitting this bug", "yes") == "no":
sys.exit(1)
def main():
parser = argparse.ArgumentParser(description=__doc__)
description = 'Submit the Ubuntu changes in a package to Debian. ' + \
'Run inside an unpacked Ubuntu source package.'
parser = optparse.OptionParser(description=description)
parser.parse_args()
if not os.path.exists("/usr/bin/reportbug"):
Logger.error(
"This utility requires the «reportbug» package, which isn't currently installed."
)
if not os.path.exists('/usr/bin/reportbug'):
print(u"This utility requires the «reportbug» package, which isn't "
u"currently installed.")
sys.exit(1)
check_reportbug_config()
changelog_file = check_file("debian/changelog", critical=False) or check_file(
"../debian/changelog"
)
with open(changelog_file, encoding="utf-8") as f:
changelog = Changelog(f.read())
changelog_file = (check_file('debian/changelog', critical=False) or
check_file('../debian/changelog'))
changelog = Changelog(file(changelog_file).read())
deb_version = get_most_recent_debian_version(changelog)
bug_body = get_bug_body(changelog)
tmpdir = mkdtemp()
body = os.path.join(tmpdir, "bug_body")
with open(body, "wb") as f:
f.write(bug_body.encode("utf-8"))
body = os.path.join(tmpdir, 'bug_body')
fp = open(body, 'w')
fp.write(bug_body.encode('utf-8'))
fp.close()
restore_maintainer("debian")
restore_maintainer('debian')
build_source_package()
update_maintainer("debian")
update_maintainer('debian')
debdiff = gen_debdiff(tmpdir, changelog)
@ -272,7 +258,7 @@ def main():
# reverted in the most recent build
build_source_package()
EditFile(debdiff, "debdiff").edit(optional=True)
EditFile(debdiff, 'debdiff').edit(optional=True)
submit_bugreport(body, debdiff, deb_version, changelog)
os.unlink(body)
@ -280,5 +266,5 @@ def main():
shutil.rmtree(tmpdir)
if __name__ == "__main__":
if __name__ == '__main__':
main()
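gen_debdiff() in the hunks above pipes debdiff output through filterdiff -x '*changelog*' so the patch attached to the Debian bug omits changelog-only hunks. A hedged, stand-alone sketch of that pipeline; the .dsc names are placeholders, whereas the real script derives them from debian/changelog:

import subprocess

olddsc = "../example_1.0-1.dsc"         # placeholder file names
newdsc = "../example_1.0-1ubuntu1.dsc"  # (submittodebian computes these)

with subprocess.Popen(["debdiff", olddsc, newdsc], stdout=subprocess.PIPE,
                      encoding="utf-8") as diff:
    with open("example.debdiff", "w", encoding="utf-8") as debdiff_f:
        # filterdiff drops hunks whose file name matches *changelog*;
        # everything else lands in example.debdiff.
        subprocess.run(["filterdiff", "-x", "*changelog*"], check=False,
                       stdin=diff.stdout, stdout=debdiff_f, encoding="utf-8")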
File diff suppressed because it is too large
View File
@ -0,0 +1 @@
upstream
View File
@ -0,0 +1 @@
7
View File
@ -0,0 +1,12 @@
Source: example
Section: misc
Priority: extra
Maintainer: Ubuntu Developers <ubuntu-dev-team@lists.alioth.debian.org>
Build-Depends: debhelper (>= 7.0.50~)
Standards-Version: 3.9.1
Package: example
Architecture: all
Depends: ${misc:Depends}, ${shlibs:Depends}
Description: Example package for testing purposes
An example package used by the test suite. Useless.
View File
@ -0,0 +1,17 @@
Format: http://svn.debian.org/wsvn/dep/web/deps/dep5.mdwn?op=file&rev=152
Source: https://launchpad.net/ubuntu-dev-tools
Files: *
Copyright: 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
License: ISC
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
View File
@ -0,0 +1,4 @@
#!/usr/bin/make -f
%:
dh $@
View File
@ -0,0 +1 @@
3.0 (quilt)
View File
@ -0,0 +1 @@
abort-on-upstream-changes
View File
@ -0,0 +1 @@
compression=xz
Some files were not shown because too many files have changed in this diff