commit be8f7e326b
parent fe640925f7
@@ -0,0 +1,3 @@
This is a Bazaar control directory.
Do not change any files in this directory.
See http://bazaar.canonical.com/ for more information about Bazaar.
@@ -0,0 +1 @@
Bazaar-NG meta directory, format 1
@@ -0,0 +1 @@
parent_location = bzr+ssh://bazaar.launchpad.net/+branch/ubuntu-archive-tools/
@@ -0,0 +1 @@
Bazaar Branch Format 7 (needs bzr 1.6)
@@ -0,0 +1 @@
1209 cjwatson@canonical.com-20190220074107-dvkdscxl2y2ww9j6
@@ -0,0 +1 @@
BZR conflict list format 1
Binary file not shown.
@@ -0,0 +1 @@
Bazaar Working Tree Format 6 (bzr 1.14)
@@ -0,0 +1 @@
Bazaar repository format 2a (needs bzr 1.16 or later)
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,2 @@
Debian_*_Sources
__pycache__
@@ -0,0 +1 @@
some kind of packaging?
@@ -0,0 +1,272 @@
#!/usr/bin/env python

# Check for override mismatches between architectures
# Copyright (C) 2005, 2008, 2009, 2010, 2011, 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

from __future__ import print_function

import atexit
from collections import defaultdict
import csv
import gzip
try:
    from html import escape
except ImportError:
    from cgi import escape
from optparse import OptionParser
import os
import shutil
import sys
import tempfile
from textwrap import dedent
import time

import apt_pkg
from launchpadlib.launchpad import Launchpad

from charts import make_chart, make_chart_header


tempdir = None


def ensure_tempdir():
    global tempdir
    if not tempdir:
        tempdir = tempfile.mkdtemp(prefix='architecture-mismatches')
        atexit.register(shutil.rmtree, tempdir)


def decompress_open(tagfile):
    ensure_tempdir()
    decompressed = tempfile.mktemp(dir=tempdir)
    fin = gzip.GzipFile(filename=tagfile)
    with open(decompressed, 'wb') as fout:
        fout.write(fin.read())
    return open(decompressed, 'r')


def print_section(options, header, items):
    print("%s:" % header)
    print("-" * (len(header) + 1))
    print()
    for item in items:
        print(item)
    print()

    if options.html_output is not None:
        print("<h2>%s</h2>" % escape(header), file=options.html_output)
        print("<ul>", file=options.html_output)
        for item in items:
            print("<li>%s</li>" % escape(item), file=options.html_output)
        print("</ul>", file=options.html_output)


def process(options, suite, components, arches):
    results = {}
    results["time"] = int(options.time * 1000)

    archive = os.path.expanduser('~/mirror/ubuntu/')

    pkgcomp = defaultdict(lambda: defaultdict(list))
    pkgsect = defaultdict(lambda: defaultdict(list))
    pkgprio = defaultdict(lambda: defaultdict(list))
    archall = defaultdict(set)
    archany = set()
    for component in components:
        for arch in arches:
            for suffix in '', '/debian-installer':
                binaries_path = "%s/dists/%s/%s%s/binary-%s/Packages.gz" % (
                    archive, suite, component, suffix, arch)
                for section in apt_pkg.TagFile(decompress_open(binaries_path)):
                    if 'Package' in section:
                        pkg = section['Package']
                        pkgcomp[pkg][component].append(arch)
                        if 'Section' in section:
                            pkgsect[pkg][section['Section']].append(arch)
                        if 'Priority' in section:
                            pkgprio[pkg][section['Priority']].append(arch)
                        if 'Architecture' in section:
                            if section['Architecture'] == 'all':
                                archall[pkg].add(arch)
                            else:
                                archany.add(pkg)

    packages = sorted(pkgcomp)

    items = []
    for pkg in packages:
        if len(pkgcomp[pkg]) > 1:
            out = []
            for component in sorted(pkgcomp[pkg]):
                out.append("%s [%s]" %
                           (component,
                            ' '.join(sorted(pkgcomp[pkg][component]))))
            items.append("%s: %s" % (pkg, ' '.join(out)))
    print_section(
        options, "Packages with inconsistent components between architectures",
        items)
    results["inconsistent components"] = len(items)

    items = []
    for pkg in packages:
        if pkg in pkgsect and len(pkgsect[pkg]) > 1:
            out = []
            for section in sorted(pkgsect[pkg]):
                out.append("%s [%s]" %
                           (section,
                            ' '.join(sorted(pkgsect[pkg][section]))))
            items.append("%s: %s" % (pkg, ' '.join(out)))
    print_section(
        options, "Packages with inconsistent sections between architectures",
        items)
    results["inconsistent sections"] = len(items)

    items = []
    for pkg in packages:
        if pkg in pkgprio and len(pkgprio[pkg]) > 1:
            out = []
            for priority in sorted(pkgprio[pkg]):
                out.append("%s [%s]" %
                           (priority,
                            ' '.join(sorted(pkgprio[pkg][priority]))))
            items.append("%s: %s" % (pkg, ' '.join(out)))
    print_section(
        options, "Packages with inconsistent priorities between architectures",
        items)
    results["inconsistent priorities"] = len(items)

    items = []
    archesset = set(arches)
    for pkg in packages:
        if (pkg not in archany and
                pkg in archall and len(archall[pkg]) < len(arches)):
            missing = sorted(archesset - archall[pkg])
            items.append("%s [%s]" % (pkg, ' '.join(missing)))
    print_section(
        options,
        "Architecture-independent packages missing from some architectures",
        items)
    results["missing arch-indep"] = len(items)

    return results


def main():
    parser = OptionParser(
        description='Check for override mismatches between architectures.')
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option('-o', '--output-file', help='output to this file')
    parser.add_option('--html-output-file', help='output HTML to this file')
    parser.add_option(
        '--csv-file', help='record CSV time series data in this file')
    parser.add_option('-s', '--suite', help='check this suite')
    options, args = parser.parse_args()

    if options.suite is None:
        launchpad = Launchpad.login_anonymously(
            'architecture-mismatches', options.launchpad_instance)
        options.suite = launchpad.distributions['ubuntu'].current_series.name

    suite = options.suite
    components = ["main", "restricted", "universe", "multiverse"]
    arches = ["amd64", "arm64", "armhf", "i386", "ppc64el", "s390x"]

    if options.output_file is not None:
        sys.stdout = open('%s.new' % options.output_file, 'w')
    if options.html_output_file is not None:
        options.html_output = open('%s.new' % options.html_output_file, 'w')
    else:
        options.html_output = None

    options.time = time.time()
    options.timestamp = time.strftime(
        '%a %b %e %H:%M:%S %Z %Y', time.gmtime(options.time))
    print('Generated: %s' % options.timestamp)
    print()

    if options.html_output is not None:
        print(dedent("""\
            <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
              "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
            <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
            <head>
              <meta http-equiv="Content-Type"
                    content="text/html; charset=utf-8" />
              <title>Architecture mismatches for %s</title>
              <style type="text/css">
                body { background: #CCCCB0; color: black; }
              </style>
              %s
            </head>
            <body>
            <h1>Architecture mismatches for %s</h1>
            """) % (
                escape(options.suite), make_chart_header(),
                escape(options.suite)),
            file=options.html_output)

    results = process(options, suite, components, arches)

    if options.html_output_file is not None:
        print("<h2>Over time</h2>", file=options.html_output)
        print(
            make_chart("architecture-mismatches.csv", [
                "inconsistent components",
                "inconsistent sections",
                "inconsistent priorities",
                "missing arch-indep",
            ]),
            file=options.html_output)
        print(
            "<p><small>Generated: %s</small></p>" % escape(options.timestamp),
            file=options.html_output)
        print("</body></html>", file=options.html_output)
        options.html_output.close()
        os.rename(
            '%s.new' % options.html_output_file, options.html_output_file)
    if options.output_file is not None:
        sys.stdout.close()
        os.rename('%s.new' % options.output_file, options.output_file)
    if options.csv_file is not None:
        if sys.version < "3":
            open_mode = "ab"
            open_kwargs = {}
        else:
            open_mode = "a"
            open_kwargs = {"newline": ""}
        csv_is_new = not os.path.exists(options.csv_file)
        with open(options.csv_file, open_mode, **open_kwargs) as csv_file:
            # Field names deliberately hardcoded; any changes require
            # manually rewriting the output file.
            fieldnames = [
                "time",
                "inconsistent components",
                "inconsistent sections",
                "inconsistent priorities",
                "missing arch-indep",
            ]
            csv_writer = csv.DictWriter(csv_file, fieldnames)
            if csv_is_new:
                csv_writer.writeheader()
            csv_writer.writerow(results)


if __name__ == '__main__':
    main()
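The mismatch checks above all follow one pattern: build a two-level mapping
of package -> override value -> architectures, then flag any package whose
inner mapping has more than one key. A minimal standalone sketch of that
pattern (the index data here is invented, not read from a real archive):

    from collections import defaultdict

    # package -> component -> [architectures publishing it there]
    pkgcomp = defaultdict(lambda: defaultdict(list))
    for pkg, component, arch in [
            ("foo", "main", "amd64"),
            ("foo", "universe", "arm64"),  # same binary, two components
            ("bar", "main", "amd64"),
            ("bar", "main", "arm64")]:
        pkgcomp[pkg][component].append(arch)

    for pkg in sorted(pkgcomp):
        if len(pkgcomp[pkg]) > 1:  # published in more than one component
            out = ["%s [%s]" % (component, ' '.join(sorted(archs)))
                   for component, archs in sorted(pkgcomp[pkg].items())]
            print("%s: %s" % (pkg, ' '.join(out)))
    # -> foo: main [amd64] universe [arm64]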
@@ -0,0 +1,368 @@
#! /usr/bin/python
# Copyright 2009-2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3.

from __future__ import print_function

from collections import defaultdict
import logging
import optparse
import os
import re
import subprocess
import sys
import tempfile

import apt_pkg
from launchpadlib.errors import HTTPError
from launchpadlib.launchpad import Launchpad


re_extract_src_version = re.compile(r"(\S+)\s*\((.*)\)")


class ArchiveCruftCheckerError(Exception):
    """ArchiveCruftChecker specific exception.

    Mostly used to describe errors in the initialization of this object.
    """


class TagFileNotFound(Exception):
    """Raised when an archive tag file could not be found."""


class ArchiveCruftChecker:
    """Perform overall checks to identify and remove obsolete records.

    Use initialize() method to validate passed parameters and build the
    infrastructure variables. It will raise ArchiveCruftCheckerError if
    something goes wrong.
    """

    # XXX cprov 2006-05-15: the default archive path should come
    # from the config.
    def __init__(self, launchpad_instance='production',
                 distribution_name='ubuntu', suite=None,
                 archive_path='/srv/launchpad.net/ubuntu-archive'):
        """Store passed arguments.

        Also initialize empty variables for storing preliminary results.
        """
        self.launchpad = Launchpad.login_anonymously(
            'archive-cruft-check', launchpad_instance)
        self.distribution_name = distribution_name
        self.suite = suite
        self.archive_path = archive_path
        # initialize a group of variables to store temporary results
        # available versions of published sources
        self.source_versions = {}
        # available binaries produced by published sources
        self.source_binaries = {}
        # 'Not Built From Source' binaries
        self.nbs = defaultdict(lambda: defaultdict(dict))
        # published binary package names
        self.bin_pkgs = defaultdict(list)
        # Architecture specific binary packages
        self.arch_any = defaultdict(lambda: "0")
        # proposed NBS (before clean up)
        self.dubious_nbs = defaultdict(lambda: defaultdict(set))
        # NBS after clean up
        self.real_nbs = defaultdict(lambda: defaultdict(set))
        # definitive NBS organized for clean up
        self.nbs_to_remove = []

    @property
    def components_and_di(self):
        components_and_di = []
        for component in self.components:
            components_and_di.append(component)
            components_and_di.append('%s/debian-installer' % (component))
        return components_and_di

    @property
    def dist_archive(self):
        return os.path.join(
            self.archive_path, self.distro.name, 'dists', self.suite)

    def gunzipTagFileContent(self, filename):
        """Gunzip the contents of the passed filename.

        Check that the file is present in the filesystem; if it is not,
        raise TagFileNotFound. Use tempfile.mkstemp() to store the
        uncompressed content, and invoke the system gunzip, which fails
        loudly (via subprocess.check_call) if it cannot decompress the
        file.

        This method doesn't close the file descriptor used and does not
        remove the temporary file from the filesystem; those actions
        are required at the call site. (apt_pkg.TagFile is lazy.)

        Return a tuple containing:
         * temp file descriptor
         * temp filename
         * the contents parsed by apt_pkg.TagFile()
        """
        if not os.path.exists(filename):
            raise TagFileNotFound("File does not exist: %s" % filename)

        temp_fd, temp_filename = tempfile.mkstemp()
        subprocess.check_call(['gunzip', '-c', filename], stdout=temp_fd)

        os.lseek(temp_fd, 0, os.SEEK_SET)
        temp_file = os.fdopen(temp_fd)
        # XXX cprov 2006-05-15: maybe we need some sort of data integrity
        # check at this point, and maybe keep the uncompressed file
        # for debug purposes, let's see how it behaves in real conditions.
        parsed_contents = apt_pkg.TagFile(temp_file)

        return temp_file, temp_filename, parsed_contents

    def processSources(self):
        """Process archive sources index.

        Build source_binaries, source_versions and bin_pkgs lists.
        """
        logging.debug("Considering Sources:")
        for component in self.components:
            filename = os.path.join(
                self.dist_archive, "%s/source/Sources.gz" % component)

            logging.debug("Processing %s" % filename)
            try:
                temp_fd, temp_filename, parsed_sources = (
                    self.gunzipTagFileContent(filename))
            except TagFileNotFound as warning:
                logging.warning(warning)
                return
            try:
                for section in parsed_sources:
                    source = section.find("Package")
                    source_version = section.find("Version")
                    binaries = section.find("Binary")
                    for binary in [
                            item.strip() for item in binaries.split(',')]:
                        self.bin_pkgs[binary].append(source)

                    self.source_binaries[source] = binaries
                    self.source_versions[source] = source_version
            finally:
                # close fd and remove temporary file used to store
                # uncompressed tag file content from the filesystem.
                temp_fd.close()
                os.unlink(temp_filename)

    def buildNBS(self):
        """Build the group of 'not built from source' binaries."""
        # Checks based on the Packages files
        logging.debug("Building not built from source list (NBS):")
        for component in self.components_and_di:
            for architecture in self.architectures:
                self.buildArchNBS(component, architecture)

    def buildArchNBS(self, component, architecture):
        """Build NBS per architecture.

        Store results in self.nbs; also build the architecture-specific
        binaries group (stored in self.arch_any).
        """
        filename = os.path.join(
            self.dist_archive,
            "%s/binary-%s/Packages.gz" % (component, architecture))

        logging.debug("Processing %s" % filename)
        try:
            temp_fd, temp_filename, parsed_packages = (
                self.gunzipTagFileContent(filename))
        except TagFileNotFound as warning:
            logging.warning(warning)
            return

        try:
            for section in parsed_packages:
                package = section.find('Package')
                source = section.find('Source', "")
                version = section.find('Version')
                architecture = section.find('Architecture')

                if source == "":
                    source = package

                if source.find("(") != -1:
                    m = re_extract_src_version.match(source)
                    source = m.group(1)
                    version = m.group(2)

                if package not in self.bin_pkgs:
                    self.nbs[source][package][version] = ""

                if architecture != "all":
                    if apt_pkg.version_compare(
                            version, self.arch_any[package]) < 1:
                        self.arch_any[package] = version
        finally:
            # close fd and remove temporary file used to store uncompressed
            # tag file content from the filesystem.
            temp_fd.close()
            os.unlink(temp_filename)

    def addNBS(self, nbs_d, source, version, package):
        """Add a new entry to the given organized nbs_d list.

        Ensure the package is still published in the suite before adding
        it.
        """
        result = self.archive.getPublishedBinaries(
            binary_name=package, exact_match=True, status='Published')
        result = [bpph for bpph in result
                  if bpph.distro_arch_series_link in self.das_urls]

        if result:
            nbs_d[source][version].add(package)

    def refineNBS(self):
        """Distinguish dubious from real NBS.

        They are 'dubious' if the version numbers match and 'real'
        if the versions don't match.
        It stores results in self.dubious_nbs and self.real_nbs.
        """
        for source in self.nbs:
            for package in self.nbs[source]:
                versions = sorted(
                    self.nbs[source][package], cmp=apt_pkg.version_compare)
                latest_version = versions.pop()

                source_version = self.source_versions.get(source, "0")

                if apt_pkg.version_compare(latest_version,
                                           source_version) == 0:
                    # We don't actually do anything with dubious_nbs for
                    # now, so let's not waste time computing it.
                    # self.addNBS(self.dubious_nbs, source, latest_version,
                    #             package)
                    pass
                else:
                    self.addNBS(self.real_nbs, source, latest_version,
                                package)

    def outputNBS(self):
        """Properly display built NBS entries.

        Also organize the 'real' NBSs for removal in the
        self.nbs_to_remove attribute.
        """
        output = "Not Built from Source\n"
        output += "---------------------\n\n"

        nbs_keys = sorted(self.real_nbs)

        for source in nbs_keys:
            proposed_bin = self.source_binaries.get(
                source, "(source does not exist)")
            proposed_version = self.source_versions.get(source, "??")
            output += (" * %s_%s builds: %s\n"
                       % (source, proposed_version, proposed_bin))
            output += "\tbut no longer builds:\n"
            versions = sorted(
                self.real_nbs[source], cmp=apt_pkg.version_compare)

            for version in versions:
                packages = sorted(self.real_nbs[source][version])

                for pkg in packages:
                    self.nbs_to_remove.append(pkg)

                output += "        o %s: %s\n" % (
                    version, ", ".join(packages))

            output += "\n"

        if self.nbs_to_remove:
            print(output)
        else:
            logging.debug("No NBS found")

    def run(self):
        """Initialize and build required lists of obsolete entries in archive.

        Check the integrity of the passed parameters and store organised
        data. The result list is self.nbs_to_remove, which should contain
        obsolete packages that can no longer be built from source.
        Other preliminary lists can be inspected for a better idea of
        what was computed.
        If anything goes wrong mid-process, it raises
        ArchiveCruftCheckerError; otherwise a list of packages to be
        removed is printed.
        """
        try:
            self.distro = self.launchpad.distributions[
                self.distribution_name]
        except KeyError:
            raise ArchiveCruftCheckerError(
                "Invalid distribution: '%s'" % self.distribution_name)

        if not self.suite:
            self.distroseries = self.distro.current_series
            self.suite = self.distroseries.name
        else:
            try:
                self.distroseries = self.distro.getSeries(
                    name_or_version=self.suite.split('-')[0])
            except HTTPError:
                raise ArchiveCruftCheckerError(
                    "Invalid suite: '%s'" % self.suite)

        if not os.path.exists(self.dist_archive):
            raise ArchiveCruftCheckerError(
                "Invalid archive path: '%s'" % self.dist_archive)

        self.archive = self.distro.main_archive
        self.distroarchseries = list(self.distroseries.architectures)
        self.das_urls = [das.self_link for das in self.distroarchseries]
        self.architectures = [a.architecture_tag
                              for a in self.distroarchseries]
        self.components = self.distroseries.component_names

        apt_pkg.init()
        self.processSources()
        self.buildNBS()
        self.refineNBS()
        self.outputNBS()


def main():
    parser = optparse.OptionParser()

    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-d", "--distro", dest="distro", default="ubuntu", help="check DISTRO")
    parser.add_option(
        "-s", "--suite", dest="suite", help="only act on SUITE")
    parser.add_option(
        "-n", "--no-action", dest="action", default=True, action="store_false",
        help="unused compatibility option")
    parser.add_option(
        "-v", "--verbose", dest="verbose", default=False, action="store_true",
        help="emit verbose debugging messages")

    options, args = parser.parse_args()

    if args:
        archive_path = args[0]
    else:
        logging.error('Archive path is required')
        return 1

    if options.verbose:
        logging.basicConfig(level=logging.DEBUG)

    checker = ArchiveCruftChecker(
        launchpad_instance=options.launchpad_instance,
        distribution_name=options.distro, suite=options.suite,
        archive_path=archive_path)
    checker.run()

    return 0


if __name__ == '__main__':
    sys.exit(main())
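The decision at the heart of refineNBS() above is a single version
comparison: a binary is only "real" NBS if the newest version it is still
published at no longer matches the version of the source package that once
built it. A standalone sketch of that decision (toy version strings; assumes
python-apt is available):

    import apt_pkg
    from functools import cmp_to_key

    apt_pkg.init()
    source_version = "2.0-1"
    binary_versions = ["1.9-2", "2.0-1"]  # still-published binary versions
    latest = sorted(
        binary_versions, key=cmp_to_key(apt_pkg.version_compare))[-1]
    if apt_pkg.version_compare(latest, source_version) == 0:
        print("dubious NBS: versions still match")
    else:
        print("real NBS: candidate for removal")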
@@ -0,0 +1,807 @@
#! /usr/bin/python3

# Copyright 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>
# Based loosely but rather distantly on Launchpad's sync-source.py.
# TODO: This should share more code with syncpackage.

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""Sync all packages without Ubuntu-specific modifications from Debian."""

from __future__ import print_function

import atexit
from contextlib import closing
import errno
import fnmatch
from functools import cmp_to_key
import gzip
from optparse import OptionParser, Values
import os
import re
import shutil
import ssl
import subprocess
import sys
import tempfile
import time
try:
    from urllib.error import HTTPError
    from urllib.request import urlopen
except ImportError:
    from urllib2 import HTTPError, urlopen

import apt_pkg
from debian import deb822
from launchpadlib.launchpad import Launchpad
from lazr.restfulclient.errors import ServerError
from ubuntutools.archive import DownloadError, SourcePackage

import lputils


CONSUMER_KEY = "auto-sync"


default_suite = {
    # TODO: map from unstable
    "debian": "sid",
}


class Percentages:
    """Helper to compute percentage ratios compared to a fixed total."""

    def __init__(self, total):
        self.total = total

    def get_ratio(self, number):
        """Report the ratio of `number` to `self.total`, as a percentage."""
        return (float(number) / self.total) * 100

def read_blacklist(url):
    """Parse resource at given URL as a 'blacklist'.

    Format:

    {{{
    # [comment]
    <sourcename> # [comment]
    }}}

    Return a list of patterns (fnmatch-style) matching blacklisted source
    package names.

    Return an empty list if the given URL doesn't exist.
    """
    # TODO: The blacklist should migrate into LP, at which point this
    # function will be unnecessary.
    blacklist = []

    try:
        with closing(urlopen(url)) as url_file:
            for line in url_file:
                try:
                    line = line[:line.index(b'#')]
                except ValueError:
                    pass
                line = line.strip()
                if not line:
                    continue
                blacklist.append(line.decode('utf-8'))
    except HTTPError as e:
        if e.code != 404:
            raise

    return blacklist

def is_blacklisted(blacklist, src):
    for pattern in blacklist:
        if fnmatch.fnmatch(src, pattern):
            return True
    return False

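Blacklist entries are fnmatch-style patterns, so one line such as "linux-*"
can mask a whole family of source packages. A minimal sketch of the matching
(the patterns here are invented, not from the real sync-blacklist):

    import fnmatch

    blacklist = ["linux-*", "grub2"]
    print(any(fnmatch.fnmatch("linux-signed", p) for p in blacklist))  # True
    print(any(fnmatch.fnmatch("coreutils", p) for p in blacklist))     # False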
tempdir = None


def ensure_tempdir():
    global tempdir
    if not tempdir:
        tempdir = tempfile.mkdtemp(prefix='auto-sync')
        atexit.register(shutil.rmtree, tempdir)


def read_ubuntu_sources(options):
    """Read information from the Ubuntu Sources files.

    Returns a sequence of:
     * a mapping of source package names to versions
     * a mapping of binary package names to (source, version) tuples
    """
    if options.target.distribution.name != 'ubuntu':
        return

    print("Reading Ubuntu sources ...")
    source_map = {}
    binary_map = {}

    ensure_tempdir()
    suites = [options.target.suite]
    if options.target.pocket != "Release":
        suites.insert(0, options.target.series.name)
    for suite in suites:
        for component in ("main", "restricted", "universe", "multiverse"):
            url = ("http://archive.ubuntu.com/ubuntu/dists/%s/%s/source/"
                   "Sources.gz" % (suite, component))
            sources_path = os.path.join(
                tempdir, "Ubuntu_%s_%s_Sources" % (suite, component))
            with closing(urlopen(url)) as url_file:
                with open("%s.gz" % sources_path, "wb") as comp_file:
                    comp_file.write(url_file.read())
            with closing(gzip.GzipFile("%s.gz" % sources_path)) as gz_file:
                with open(sources_path, "wb") as out_file:
                    out_file.write(gz_file.read())
            with open(sources_path) as sources_file:
                apt_sources = apt_pkg.TagFile(sources_file)
                for section in apt_sources:
                    src = section["Package"]
                    ver = section["Version"]
                    if (src not in source_map or
                            apt_pkg.version_compare(source_map[src], ver) < 0):
                        source_map[src] = ver
                    binaries = apt_pkg.parse_depends(
                        section.get("Binary", src))
                    for pkg in [b[0][0] for b in binaries]:
                        if (pkg not in binary_map or
                                apt_pkg.version_compare(
                                    binary_map[pkg][1], ver) < 0):
                            binary_map[pkg] = (src, ver)

    return source_map, binary_map


def read_debian_sources(options):
    """Read information from the Debian Sources files.

    Returns a mapping of source package names to (version, set of
    architectures) tuples.
    """
    if options.source.distribution.name != 'debian':
        return

    print("Reading Debian sources ...")
    source_map = {}

    ensure_tempdir()
    for component in ("main", "contrib", "non-free"):
        url = ("http://ftp.debian.org/debian/dists/%s/%s/source/"
               "Sources.gz" % (options.source.suite, component))
        sources_path = os.path.join(
            tempdir,
            "Debian_%s_%s_Sources" % (options.source.suite, component))
        with closing(urlopen(url)) as url_file:
            with open("%s.gz" % sources_path, "wb") as compressed_file:
                compressed_file.write(url_file.read())
        with closing(gzip.GzipFile("%s.gz" % sources_path)) as gz_file:
            with open(sources_path, "wb") as out_file:
                out_file.write(gz_file.read())
        with open(sources_path) as sources_file:
            apt_sources = apt_pkg.TagFile(sources_file)
            for section in apt_sources:
                src = section["Package"]
                ver = section["Version"]
                if (src not in source_map or
                        apt_pkg.version_compare(source_map[src][0], ver) < 0):
                    source_map[src] = (
                        ver, set(section.get("Architecture", "").split()))

    return source_map


def read_new_queue(options):
    """Return the set of packages already in the NEW queue."""
    new_queue = options.target.series.getPackageUploads(
        archive=options.target.archive, status="New")
    return set([pu.package_name for pu in new_queue
                if pu.contains_source or pu.contains_copy])


def question(options, message, choices, default):
    choices = "/".join([c.upper() if c == default else c for c in choices])
    if options.batch:
        print("%s (%s)? %s" % (message, choices, default.lower()))
        return default.lower()
    else:
        sys.stdout.write("%s (%s)? " % (message, choices))
        sys.stdout.flush()
        return sys.stdin.readline().rstrip().lower()


def filter_pockets(spphs):
    """Filter SourcePackagePublishingHistory entries to useful pockets."""
    return [spph for spph in spphs if spph.pocket in ("Release", "Proposed")]


def version_sort_spphs(spphs):
    """Sort a list of SourcePackagePublishingHistory entries by version.

    We return the list in reversed form (highest version first), since
    that's what the consumers of this function prefer anyway.
    """
    def version_compare(x, y):
        return apt_pkg.version_compare(
            x.source_package_version, y.source_package_version)

    return sorted(
        spphs, key=cmp_to_key(version_compare), reverse=True)

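Debian version strings do not sort lexically ("1.10" sorts after "1.9", and
"1.0~rc1" sorts before "1.0"), which is why version_sort_spphs() above wraps
apt_pkg.version_compare in cmp_to_key rather than relying on Python's default
string ordering. For example (assumes python-apt):

    import apt_pkg
    from functools import cmp_to_key

    apt_pkg.init()
    versions = ["1.9-1", "1.10-1", "1.0~rc1-1"]
    print(sorted(versions, key=cmp_to_key(apt_pkg.version_compare),
                 reverse=True))
    # -> ['1.10-1', '1.9-1', '1.0~rc1-1']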
class FakeDifference:
    """A partial stub for DistroSeriesDifference.

    Used when the destination series was initialised with a different
    parent, so we don't get real DSDs.
    """
    def __init__(self, options, src, ver):
        self.options = options
        self.status = "Needs attention"
        self.sourcepackagename = src
        self.source_version = ver
        self.real_parent_source_version = None
        self.fetched_parent_source_version = False

    @property
    def parent_source_version(self):
        """The version in the parent series.

        We can't take this directly from read_debian_sources, since we need
        the version imported into Launchpad and Launchpad may be behind; so
        we have to call Archive.getPublishedSources to find this out.  As
        such, this is expensive, so we only do it when necessary.
        """
        if not self.fetched_parent_source_version:
            spphs = self.options.source.archive.getPublishedSources(
                distro_series=self.options.source.series,
                pocket=self.options.source.pocket,
                source_name=self.sourcepackagename, exact_match=True,
                status="Published")
            spphs = version_sort_spphs(spphs)
            if spphs:
                self.real_parent_source_version = \
                    spphs[0].source_package_version
            self.fetched_parent_source_version = True
        return self.real_parent_source_version


def get_differences(options, ubuntu_sources, debian_sources):
    # DSDs are not quite sufficiently reliable for us to use them here,
    # regardless of the parent series.  See:
    #   https://bugs.launchpad.net/launchpad/+bug/1003969
    # Also, how would this work with non-Release pockets?
    # if options.source.series in options.target.series.getParentSeries():
    if False:
        for status in (
                "Needs attention",
                "Blacklisted current version",
                "Blacklisted always",
                ):
            for difference in options.target.series.getDifferencesTo(
                    parent_series=options.source.series, status=status):
                yield difference
    else:
        # Hack around missing DSDs if the series was initialised with a
        # different parent.
        for src in sorted(debian_sources):
            if (src not in ubuntu_sources or
                    apt_pkg.version_compare(
                        ubuntu_sources[src], debian_sources[src][0]) < 0):
                yield FakeDifference(options, src, ubuntu_sources.get(src))


def published_in_source_series(options, difference):
    # Oddly, sometimes packages seem to show up as a difference without
    # actually being published in options.source.series.  Filter those out.
    src = difference.sourcepackagename
    from_version = difference.parent_source_version
    from_src = options.source.archive.getPublishedSources(
        distro_series=options.source.series, pocket=options.source.pocket,
        source_name=src, version=from_version, exact_match=True,
        status="Published")
    if not from_src:
        if options.verbose:
            print(
                "No published sources for %s_%s in %s/%s?" % (
                    src, from_version,
                    options.source.distribution.display_name,
                    options.source.suite),
                file=sys.stderr)
        return False
    else:
        return True


def already_in_target_series(options, difference):
    # The published Sources files may be out of date, and if we're
    # particularly unlucky with timing relative to a proposed-migration run
    # it's possible for them to miss something that's in the process of
    # being moved between pockets.  To make sure, check whether an equal or
    # higher version has already been removed from the destination archive.
    src = difference.sourcepackagename
    from_version = difference.parent_source_version
    to_src = version_sort_spphs(filter_pockets(
        options.target.archive.getPublishedSources(
            distro_series=options.target.series, source_name=src,
            exact_match=True)))
    if (to_src and
            apt_pkg.version_compare(
                from_version, to_src[0].source_package_version) <= 0):
        return True
    else:
        return False


def architectures_allowed(dsc, target):
    """Return True if the architecture set dsc is compatible with target."""
    if dsc == set(["all"]):
        return True
    for dsc_arch in dsc:
        for target_arch in target:
            command = [
                "dpkg-architecture", "-a%s" % target_arch, "-i%s" % dsc_arch]
            env = dict(os.environ)
            env["CC"] = "true"
            if subprocess.call(command, env=env) == 0:
                return True
    return False


def retry_errors(func):
    for retry_count in range(7):
        try:
            return func()
        except ssl.SSLError:
            pass
        except DownloadError as e:
            # These are unpleasantly difficult to parse, but we have little
            # choice since the exception object lacks useful metadata.
            code = None
            match = re.match(r".*?: (.*?) ", str(e))
            if match is not None:
                try:
                    code = int(match.group(1))
                except ValueError:
                    pass
            if code in (502, 503):
                time.sleep(int(2 ** (retry_count - 1)))
            else:
                raise

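retry_errors() above retries a callable up to seven times, sleeping
int(2 ** (retry_count - 1)) seconds between attempts (0, 1, 2, 4, ..., i.e.
exponential backoff), and only for transient failures: SSL errors, and
download errors whose parsed status code is 502 or 503; anything else
propagates immediately. It takes a zero-argument callable, so arguments are
bound up front, as in the calls later in this script:

    retry_errors(from_srcpkg.pull_dsc)
    # or, with arguments, via a lambda (hypothetical function name):
    # retry_errors(lambda: fetch_dsc(src, version))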
def sync_one_difference(options, binary_map, difference, source_names):
    src = difference.sourcepackagename
    print(" * Trying to add %s ..." % src)

    # We use SourcePackage directly here to avoid having to hardcode Debian
    # and Ubuntu, and because we got the package list from
    # DistroSeries.getDifferencesTo() so we can guarantee that Launchpad
    # knows about all of them.
    from_srcpkg = SourcePackage(
        package=src, version=difference.parent_source_version,
        lp=options.launchpad)
    from_srcpkg.distribution = options.source.distribution.name
    retry_errors(from_srcpkg.pull_dsc)

    if difference.source_version is not None:
        # Check whether this will require a fakesync.
        to_srcpkg = SourcePackage(
            package=src, version=difference.source_version,
            lp=options.launchpad)
        to_srcpkg.distribution = options.target.distribution.name
        retry_errors(to_srcpkg.pull_dsc)
        if not from_srcpkg.dsc.compare_dsc(to_srcpkg.dsc):
            print("[Skipping (requires fakesync)] %s_%s (vs %s)" % (
                src, difference.parent_source_version,
                difference.source_version))
            return False

    from_binary = deb822.PkgRelation.parse_relations(
        from_srcpkg.dsc["binary"])
    pkgs = [entry[0]["name"] for entry in from_binary]

    for pkg in pkgs:
        if pkg in binary_map:
            current_src, current_ver = binary_map[pkg]

            # TODO: Check that a non-main source package is not trying to
            # override a main binary package (we don't know binary
            # components yet).

            # Check that a source package is not trying to override an
            # Ubuntu-modified binary package.
            if "ubuntu" in current_ver:
                answer = question(
                    options,
                    "%s_%s is trying to override modified binary %s_%s. "
                    "OK" % (
                        src, difference.parent_source_version,
                        pkg, current_ver), "yn", "n")
                if answer != "y":
                    return False

            print("I: %s -> %s_%s." % (src, pkg, current_ver))

    source_names.append(src)
    return True


failed_copy = None


def copy_packages(options, source_names):
    global failed_copy
    if failed_copy is not None and len(source_names) >= failed_copy:
        source_names_left = source_names[:len(source_names) // 2]
        source_names_right = source_names[len(source_names) // 2:]
        copy_packages(options, source_names_left)
        copy_packages(options, source_names_right)
        return

    try:
        options.target.archive.copyPackages(
            source_names=source_names,
            from_archive=options.source.archive,
            from_series=options.source.series.name,
            to_series=options.target.series.name,
            to_pocket=options.target.pocket,
            include_binaries=False, sponsored=options.requestor,
            auto_approve=True, silent=True)
    except ServerError as e:
        if len(source_names) < 100:
            raise
        if e.response.status != 503:
            raise
        print("Cannot copy %d packages at once; bisecting ..." %
              len(source_names))
        failed_copy = len(source_names)
        source_names_left = source_names[:len(source_names) // 2]
        source_names_right = source_names[len(source_names) // 2:]
        copy_packages(options, source_names_left)
        copy_packages(options, source_names_right)

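A note on the bisection above: Launchpad's copyPackages() call can time out
on very large batches, surfacing as a ServerError with HTTP status 503. When
that happens to a batch of at least 100 names, copy_packages() records the
failing batch size in failed_copy and recursively halves the list until the
copies succeed; any later call with a batch at least that large skips
straight to the halving. Roughly, for a hypothetical failing batch of 200
names:

    # copy(200 names) -> 503; failed_copy = 200
    # -> copy(first 100) + copy(last 100); each 100 < failed_copy, so the
    #    copy is attempted, and a further 503 would halve again to 50 + 50.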
def sync_differences(options):
    stat_us = 0
    stat_cant_update = 0
    stat_updated = 0
    stat_uptodate_modified = 0
    stat_uptodate = 0
    stat_count = 0
    stat_blacklisted = 0

    blacklist = read_blacklist(
        "http://people.canonical.com/~ubuntu-archive/sync-blacklist.txt")
    ubuntu_sources, binary_map = read_ubuntu_sources(options)
    debian_sources = read_debian_sources(options)
    new_queue = read_new_queue(options)

    print("Getting differences between %s/%s and %s/%s ..." % (
        options.source.distribution.display_name, options.source.suite,
        options.target.distribution.display_name, options.target.suite))
    new_differences = []
    updated_source_names = []
    new_source_names = []
    seen_differences = set()
    for difference in get_differences(options, ubuntu_sources, debian_sources):
        status = difference.status
        if status == "Resolved":
            stat_uptodate += 1
            continue

        stat_count += 1
        src = difference.sourcepackagename
        if src in seen_differences:
            continue
        seen_differences.add(src)
        to_version = difference.source_version
        if to_version is None:
            src_ver = src
        else:
            src_ver = "%s_%s" % (src, to_version)
        src_is_blacklisted = is_blacklisted(blacklist, src)
        if src_is_blacklisted or status == "Blacklisted always":
            if options.verbose:
                if src_is_blacklisted:
                    print("[BLACKLISTED] %s" % src_ver)
                else:
                    comments = options.target.series.getDifferenceComments(
                        source_package_name=src)
                    if comments:
                        print("""[BLACKLISTED] %s (%s: "%s")""" % (
                            src_ver, comments[-1].comment_author.name,
                            comments[-1].body_text))
                    else:
                        print("[BLACKLISTED] %s" % src_ver)
            stat_blacklisted += 1
        # "Blacklisted current version" is supposed to mean that the version
        # in options.target.series is higher than that in
        # options.source.series.  However, I've seen cases that suggest that
        # this status isn't necessarily always kept up to date properly.
        # Since we're perfectly capable of checking the versions for
        # ourselves anyway, let's just be cautious and check everything with
        # both plausible statuses.
        elif status in ("Needs attention", "Blacklisted current version"):
            from_version = difference.parent_source_version
            if from_version is None:
                if options.verbose:
                    print("[Ubuntu Specific] %s" % src_ver)
                stat_us += 1
                continue
            if to_version is None:
                if not published_in_source_series(options, difference):
                    continue
                # Handle new packages at the end, since they require more
                # interaction.
                if options.new:
                    new_differences.append(difference)
                    continue
            elif options.new_only:
                stat_uptodate += 1
            elif apt_pkg.version_compare(to_version, from_version) < 0:
                if "ubuntu" in to_version:
                    if options.verbose:
                        print("[NOT Updating - Modified] %s (vs %s)" % (
                            src_ver, from_version))
                    stat_cant_update += 1
                else:
                    if not published_in_source_series(options, difference):
                        continue
                    if already_in_target_series(options, difference):
                        continue
                    print("[Updating] %s (%s [%s] < %s [%s])" % (
                        src, to_version,
                        options.target.distribution.display_name,
                        from_version,
                        options.source.distribution.display_name))
                    if sync_one_difference(
                            options, binary_map, difference,
                            updated_source_names):
                        stat_updated += 1
                    else:
                        stat_cant_update += 1
            elif "ubuntu" in to_version:
                if options.verbose:
                    print("[Nothing to update (Modified)] %s (vs %s)" % (
                        src_ver, from_version))
                stat_uptodate_modified += 1
            else:
                if options.verbose:
                    print("[Nothing to update] %s (%s [%s] >= %s [%s])" % (
                        src, to_version,
                        options.target.distribution.display_name,
                        from_version,
                        options.source.distribution.display_name))
                stat_uptodate += 1
        else:
            print("[Unknown status] %s (%s)" % (src_ver, status),
                  file=sys.stderr)

    target_architectures = set(
        a.architecture_tag for a in options.target.architectures)
    for difference in new_differences:
        src = difference.sourcepackagename
        from_version = difference.parent_source_version
        if src in new_queue:
            print("[Skipping (already in NEW)] %s_%s" % (src, from_version))
            continue
        if not architectures_allowed(
                debian_sources[src][1], target_architectures):
            if options.verbose:
                print(
                    "[Skipping (not built on any target architecture)] %s_%s" %
                    (src, from_version))
            continue
        to_src = version_sort_spphs(filter_pockets(
            options.target.archive.getPublishedSources(
                source_name=src, exact_match=True)))
        if (to_src and
                apt_pkg.version_compare(
                    from_version, to_src[0].source_package_version) <= 0):
            # Equal or higher version already removed from destination
            # distribution.
            continue
        print("[New] %s_%s" % (src, from_version))
        if to_src:
            print("Previous publications in %s:" %
                  options.target.distribution.display_name)
            for spph in to_src[:10]:
                desc = "  %s (%s): %s" % (
                    spph.source_package_version, spph.distro_series.name,
                    spph.status)
                if (spph.status == "Deleted" and
                        spph.removed_by is not None and
                        spph.removal_comment is not None):
                    desc += " (removed by %s: %s)" % (
                        spph.removed_by.display_name, spph.removal_comment)
                print(desc)
            if len(to_src) > 10:
                history_url = "%s/+source/%s/+publishinghistory" % (
                    options.target.distribution.web_link, src)
                print("  ... plus %d more; see %s" %
                      (len(to_src) - 10, history_url))
        else:
            print("No previous publications in %s" %
                  options.target.distribution.display_name)
        answer = question(options, "OK", "yn", "y")
        new_ok = (answer != "n")
        if new_ok:
            if sync_one_difference(
                    options, binary_map, difference, new_source_names):
                stat_updated += 1
            else:
                stat_cant_update += 1
        else:
            stat_blacklisted += 1

    percentages = Percentages(stat_count)
    print()
    print("Out-of-date BUT modified: %3d (%.2f%%)" % (
        stat_cant_update, percentages.get_ratio(stat_cant_update)))
    print("Updated: %3d (%.2f%%)" % (
        stat_updated, percentages.get_ratio(stat_updated)))
    print("Ubuntu Specific: %3d (%.2f%%)" % (
        stat_us, percentages.get_ratio(stat_us)))
    print("Up-to-date [Modified]: %3d (%.2f%%)" % (
        stat_uptodate_modified, percentages.get_ratio(stat_uptodate_modified)))
    print("Up-to-date: %3d (%.2f%%)" % (
        stat_uptodate, percentages.get_ratio(stat_uptodate)))
    print("Blacklisted: %3d (%.2f%%)" % (
        stat_blacklisted, percentages.get_ratio(stat_blacklisted)))
    print(" -----------")
    print("Total: %s" % stat_count)

    if updated_source_names + new_source_names:
        print()
        if updated_source_names:
            print("Updating: %s" % " ".join(updated_source_names))
        if new_source_names:
            print("New: %s" % " ".join(new_source_names))
        if options.dry_run:
            print("Not copying packages in dry-run mode.")
        else:
            answer = question(options, "OK", "yn", "y")
            if answer != "n":
                copy_packages(options, updated_source_names + new_source_names)


def main():
    if sys.version >= '3':
        # Force encoding to UTF-8 even in non-UTF-8 locales.
        import io
        sys.stdout = io.TextIOWrapper(
            sys.stdout.detach(), encoding="UTF-8", line_buffering=True)
    else:
        # Avoid having to do .encode('UTF-8') everywhere. This is a pain; I
        # wish Python supported something like
        # "sys.stdout.encoding = 'UTF-8'".
        def fix_stdout():
            import codecs
            sys.stdout = codecs.EncodedFile(sys.stdout, 'UTF-8')

            def null_decode(input, errors='strict'):
                return input, len(input)
            sys.stdout.decode = null_decode

        fix_stdout()

    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-v", "--verbose", dest="verbose",
        default=False, action="store_true", help="be more verbose")
    parser.add_option(
        "--log-directory", help="log to a file under this directory")
    parser.add_option(
        "-d", "--to-distro", dest="todistro", default="ubuntu",
        metavar="DISTRO", help="sync to DISTRO")
    parser.add_option(
        "-s", "--to-suite", dest="tosuite",
        metavar="SUITE", help="sync to SUITE")
    parser.add_option(
        "-D", "--from-distro", dest="fromdistro", default="debian",
        metavar="DISTRO", help="sync from DISTRO")
    parser.add_option(
        "-S", "--from-suite", dest="fromsuite",
        metavar="SUITE", help="sync from SUITE")
    parser.add_option(
        "--new-only", dest="new_only",
        default=False, action="store_true", help="only sync new packages")
    parser.add_option(
        "--no-new", dest="new",
        default=True, action="store_false", help="don't sync new packages")
    parser.add_option(
        "--batch", dest="batch", default=False, action="store_true",
        help="assume default answer to all questions")
    parser.add_option(
        "--dry-run", default=False, action="store_true",
        help="only show what would be done; don't copy packages")
    options, args = parser.parse_args()
    if args:
        parser.error("This program does not accept any non-option arguments.")

    apt_pkg.init()
    options.launchpad = Launchpad.login_with(
        CONSUMER_KEY, options.launchpad_instance, version="devel")

    if options.log_directory is not None:
        now = time.gmtime()
        log_relative_path = os.path.join(
            time.strftime("%F", now), "%s.log" % time.strftime("%T", now))
        log_file = os.path.join(options.log_directory, log_relative_path)
        if not os.path.isdir(os.path.dirname(log_file)):
            os.makedirs(os.path.dirname(log_file))
        sys.stdout = open(log_file, "w", buffering=1)
    else:
        log_file = None

    options.source = Values()
    options.source.launchpad = options.launchpad
    options.source.distribution = options.fromdistro
    options.source.suite = options.fromsuite
    if options.source.suite is None and options.fromdistro in default_suite:
        options.source.suite = default_suite[options.fromdistro]
    lputils.setup_location(options.source)

    options.target = Values()
    options.target.launchpad = options.launchpad
    options.target.distribution = options.todistro
    options.target.suite = options.tosuite
    lputils.setup_location(options.target, default_pocket="Proposed")

    # This is a very crude check, and easily bypassed.  It's simply here to
    # discourage people from causing havoc by mistake.  A mass auto-sync is
    # a disruptive operation, and, generally speaking, archive
    # administrators know when it's OK to do one.  If you aren't an archive
    # administrator, you should think very hard, and ask on #ubuntu-release
    # if options.target.distribution is Ubuntu, before disabling this check.
    owner = options.target.archive.owner
    if (not options.dry_run and
            options.launchpad.me != owner and
            options.launchpad.me not in owner.participants):
        print("You are not an archive administrator for %s.  Exiting." %
              options.target.distribution.display_name, file=sys.stderr)
        sys.exit(1)

    options.requestor = options.launchpad.people["katie"]

    sync_differences(options)

    if options.log_directory is not None:
        sys.stdout.close()
        current_link = os.path.join(options.log_directory, "current.log")
        try:
            os.unlink(current_link)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
        os.symlink(log_relative_path, current_link)


if __name__ == '__main__':
    main()
@@ -0,0 +1,93 @@
#! /usr/bin/python

# Copyright (C) 2016 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""Bootstrap a package build using injected build-dependencies."""

from __future__ import print_function
import sys

from optparse import (
    OptionParser,
    SUPPRESS_HELP,
    )

from launchpadlib.launchpad import Launchpad

import lputils


def bootstrap_package(options, package):
    source = lputils.find_latest_published_source(options, package)
    arch_tags = [a.architecture_tag for a in options.architectures]
    for build in source.getBuilds():
        if build.arch_tag in arch_tags:
            if (build.buildstate != "Needs building" and
                    not build.can_be_retried):
                print("%s cannot be retried" % build.web_link, file=sys.stderr)
            elif options.dry_run:
                print("Would bootstrap %s" % build.web_link)
            else:
                print("Bootstrapping %s" % build.web_link)
                build.external_dependencies = (
                    "deb [trusted=yes] "
                    "http://archive-team.internal/bootstrap/%s %s main" %
                    (build.arch_tag, source.distro_series.name))
                build.lp_save()
                build.retry()


def bootstrap_packages(options, packages):
    for package in packages:
        bootstrap_package(options, package)


def main():
    parser = OptionParser(
        usage="usage: %prog [options] package [...]",
        epilog=lputils.ARCHIVE_REFERENCE_DESCRIPTION)
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-n", "--dry-run", default=False, action="store_true",
        help="only show what would be done")
    parser.add_option("-A", "--archive", help="bootstrap in ARCHIVE")
    parser.add_option(
        "-s", "--suite", metavar="SUITE", help="bootstrap in SUITE")
    parser.add_option(
        "-a", "--architecture", dest="architectures", action="append",
        metavar="ARCHITECTURE",
        help="architecture tag (may be given multiple times)")
    parser.add_option(
        "-d", "--distribution", default="ubuntu", help=SUPPRESS_HELP)
    parser.add_option(
        "-e", "--version",
        metavar="VERSION", help="package version (default: current version)")

    options, args = parser.parse_args()

    options.launchpad = Launchpad.login_with(
        "bootstrap-package", options.launchpad_instance, version="devel")
    lputils.setup_location(options, default_pocket="Proposed")

    if not args:
        parser.error("You must specify some packages to bootstrap.")

    bootstrap_packages(options, args)


if __name__ == "__main__":
    main()
@ -0,0 +1,86 @@
#! /usr/bin/python

# Copyright (C) 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Branch a set of live filesystem configurations for the next release."""

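# Typical invocation (hypothetical owner/series names):
#   branch-livefses --source-series focal --dest-series groovy some-team
# Every live filesystem owned by OWNER in the source series is re-created
# against the destination series with the same name, metadata, and build
# settings.
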
from __future__ import print_function

from optparse import OptionParser

from launchpadlib.launchpad import Launchpad


def branch_livefses(options, owner):
    for livefs in list(options.launchpad.livefses):
        if (livefs.owner == owner and
                livefs.distro_series == options.source_series):
            print("Branching %s for %s ..." % (
                livefs.web_link, options.dest_series.name))
            new_livefs = options.launchpad.livefses.new(
                owner=owner, distro_series=options.dest_series,
                name=livefs.name, metadata=livefs.metadata)
            new_livefs.require_virtualized = livefs.require_virtualized
            new_livefs.relative_build_score = livefs.relative_build_score
            new_livefs.lp_save()
            print(" %s" % new_livefs.web_link)


def main():
    parser = OptionParser(usage="usage: %prog [options] OWNER")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-d", "--distribution", default="ubuntu", metavar="DISTRIBUTION",
        help="branch live filesystems for DISTRIBUTION")
    parser.add_option(
        "--source-series",
        help="source series (default: current stable release)")
    parser.add_option(
        "--dest-series",
        help="destination series (default: series in pre-release freeze)")
    options, args = parser.parse_args()
    if not args:
        parser.error(
            "You must specify an owner whose live filesystems you want to "
            "copy.")

    options.launchpad = Launchpad.login_with(
        "branch-livefses", options.launchpad_instance, version="devel")

    distro = options.launchpad.distributions[options.distribution]
    if options.source_series is None:
        options.source_series = [
            series for series in distro.series
            if series.status == "Current Stable Release"][0]
    else:
        options.source_series = distro.getSeries(
            name_or_version=options.source_series)
    if options.dest_series is None:
        options.dest_series = [
            series for series in distro.series
            if series.status == "Pre-release Freeze"][0]
    else:
        options.dest_series = distro.getSeries(
            name_or_version=options.dest_series)

    owner = options.launchpad.people[args[0]]

    branch_livefses(options, owner)


if __name__ == '__main__':
    main()
@ -0,0 +1,175 @@
#! /usr/bin/python

# Copyright (C) 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Branch a set of Ubuntu seeds for the next release."""

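# Typical invocation (hypothetical collection name):
#   branch-seeds platform
# This expects a checkout named <collection>.<source-series> in the current
# directory, branches or renames it for the destination series, and rewrites
# "include" lines in its STRUCTURE file to point at the new series.
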
from __future__ import print_function

from optparse import OptionParser
import os
import re
import subprocess
try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

from launchpadlib.launchpad import Launchpad
from enum import Enum


class VCS(Enum):
    Git = 1
    Bazaar = 2

    @staticmethod
    def detect_vcs(source):
        if os.path.exists(os.path.join(source, ".git")):
            return VCS.Git
        elif os.path.exists(os.path.join(source, ".bzr")):
            return VCS.Bazaar
        else:
            return None


def remote_bzr_branch(source):
    # TODO: should really use bzrlib instead
    info = subprocess.check_output(
        ["bzr", "info", source], universal_newlines=True)
    for line in info.splitlines():
        if "checkout of branch:" in line:
            return line.split(": ")[1].rstrip("/")
    else:
        raise Exception("Unable to find remote branch for %s" % source)


def remote_git_repository(source, srcbranch):
    fullbranch = subprocess.check_output(
        ["git", "rev-parse", "--symbolic-full-name",
         srcbranch + "@{upstream}"],
        universal_newlines=True, cwd=source)
    return subprocess.check_output(
        ["git", "ls-remote", "--get-url", fullbranch.split("/")[2]],
        universal_newlines=True, cwd=source).rstrip("\n")


def lp_branch(options, url):
    return options.launchpad.branches.getByUniqueName(
        unique_name=urlparse(url).path.lstrip("/"))


def branch(options, collection):
    source = "%s.%s" % (collection, options.source_series)
    dest = "%s.%s" % (collection, options.dest_series)
    vcs = VCS.detect_vcs(source)
    if vcs:
        if vcs is VCS.Bazaar:
            subprocess.check_call(["bzr", "up", source])
            remote_source = remote_bzr_branch(source)
            remote_dest = os.path.join(os.path.dirname(remote_source), dest)
            subprocess.check_call(["bzr", "branch", source, dest])
            subprocess.check_call(["bzr", "push", "-d", dest, remote_dest])
            subprocess.check_call(["bzr", "bind", ":push"], cwd=dest)

            lp_source = lp_branch(options, remote_source)
            lp_source.lifecycle_status = "Mature"
            lp_source.lp_save()

            lp_dest = lp_branch(options, remote_dest)
            lp_dest.lifecycle_status = "Development"
            lp_dest.lp_save()
        elif vcs is VCS.Git:
            subprocess.check_call(["git", "fetch"], cwd=source)
            subprocess.check_call(
                ["git", "reset", "--hard", "FETCH_HEAD"], cwd=source)
            os.rename(source, dest)
            subprocess.check_call(
                ["git", "checkout", "-b", options.dest_series], cwd=dest)

    re_include_source = re.compile(
        r"^(include )(.*)\.%s" % options.source_series)
    new_lines = []
    message = []
    with open(os.path.join(dest, "STRUCTURE")) as structure:
        for line in structure:
            match = re_include_source.match(line)
            if match:
                new_lines.append(re_include_source.sub(
                    r"\1\2.%s" % options.dest_series, line))
                message.append(
                    "%s.%s -> %s.%s" %
                    (match.group(2), options.source_series,
                     match.group(2), options.dest_series))
            else:
                new_lines.append(line)
    if message:
        with open(os.path.join(dest, "STRUCTURE.new"), "w") as structure:
            for line in new_lines:
                print(line, end="", file=structure)
        os.rename(
            os.path.join(dest, "STRUCTURE.new"),
            os.path.join(dest, "STRUCTURE"))
        if vcs is VCS.Bazaar:
            subprocess.check_call(
                ["bzr", "commit", "-m", "; ".join(message)], cwd=dest)
        elif vcs is VCS.Git:
            subprocess.check_call(["git", "add", "STRUCTURE"], cwd=dest)
            subprocess.check_call(
                ["git", "commit", "-m", "; ".join(message)], cwd=dest)
            subprocess.check_call(
                ["git", "push", "origin", options.dest_series], cwd=dest)

            remote = remote_git_repository(dest, options.source_series)
            if "git.launchpad.net" in remote:
                lp_git_repo = options.launchpad.git_repositories.getByPath(
                    path=urlparse(remote).path.lstrip("/"))
                lp_git_repo.default_branch = options.dest_series
                lp_git_repo.lp_save()
            else:
                raise Exception(
                    "Git remote URL must be on git.launchpad.net.")


def main():
    parser = OptionParser(usage="usage: %prog [options] collection ...")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "--source-series",
        help="source series (default: current stable release)")
    parser.add_option(
        "--dest-series",
        help="destination series (default: series in pre-release freeze)")
    options, args = parser.parse_args()
    if not args:
        parser.error("You must specify at least one seed collection.")

    options.launchpad = Launchpad.login_with(
        "branch-seeds", options.launchpad_instance, version="devel")

    distro = options.launchpad.distributions["ubuntu"]
    if options.source_series is None:
        options.source_series = [
            series.name for series in distro.series
            if series.status == "Current Stable Release"][0]
    if options.dest_series is None:
        options.dest_series = [
            series.name for series in distro.series
            if series.status == "Pre-release Freeze"][0]

    for collection in args:
        branch(options, collection)


if __name__ == '__main__':
    main()
@ -0,0 +1,204 @@
#! /usr/bin/python

# Copyright 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Override a publication."""

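# Typical invocation (hypothetical package name):
#   change-override -s focal -c universe libfoo
# At least one of component (-c), section (-x), priority (-p), or phased
# update percentage (-z) must be given; matching publications are listed and
# confirmation is requested before anything is changed.
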
from __future__ import print_function

from collections import OrderedDict
from optparse import OptionParser, SUPPRESS_HELP

from launchpadlib.launchpad import Launchpad
from ubuntutools.question import YesNoQuestion

import lputils


def find_publications(options, packages):
    for package in packages:
        # Change matching source.
        if (options.source_and_binary or options.binary_and_source or
                options.source_only):
            source = lputils.find_latest_published_source(options, package)
            yield "source", source

        # Change all binaries for matching source.
        if options.source_and_binary:
            for binary in source.getPublishedBinaries():
                if not binary.is_debug:
                    yield "binary", binary
        # Change matching binaries.
        elif not options.source_only:
            for binary in lputils.find_latest_published_binaries(
                    options, package):
                if not binary.is_debug:
                    yield "binary", binary


def stringify_phased_update_percentage(phased_update_percentage):
    if phased_update_percentage is None:
        return "100%"
    else:
        return '%s%%' % phased_update_percentage


def stringify_binary_kwargs(binary_kwargs):
    for key, value in binary_kwargs.items():
        if key == "new_phased_update_percentage":
            yield stringify_phased_update_percentage(value)
        else:
            yield value


def change_overrides(options, packages):
    source_kwargs = OrderedDict()
    binary_kwargs = OrderedDict()
    if options.component:
        print("Override component to %s" % options.component)
        source_kwargs["new_component"] = options.component
        binary_kwargs["new_component"] = options.component
    if options.section:
        print("Override section to %s" % options.section)
        source_kwargs["new_section"] = options.section
        binary_kwargs["new_section"] = options.section
    if options.priority:
        print("Override priority to %s" % options.priority)
        binary_kwargs["new_priority"] = options.priority
    if options.percentage is not None:
        print("Override percentage to %s" % options.percentage)
        binary_kwargs["new_phased_update_percentage"] = options.percentage

    publications = []
    for pubtype, publication in find_publications(options, packages):
        if pubtype == "source" and not source_kwargs:
            continue

        publications.append((pubtype, publication))

        if pubtype == "source":
            print("%s: %s/%s -> %s" % (
                publication.display_name,
                publication.component_name, publication.section_name,
                "/".join(source_kwargs.values())))
        else:
            print("%s: %s/%s/%s/%s -> %s" % (
                publication.display_name,
                publication.component_name, publication.section_name,
                publication.priority_name.lower(),
                stringify_phased_update_percentage(
                    publication.phased_update_percentage),
                "/".join(stringify_binary_kwargs(binary_kwargs))))

    if options.dry_run:
        print("Dry run; no publications overridden.")
    else:
        if not options.confirm_all:
            if YesNoQuestion().ask("Override", "no") == "no":
                return

        num_overridden = 0
        num_same = 0
        for pubtype, publication in publications:
            if pubtype == "source":
                kwargs = source_kwargs
            else:
                kwargs = binary_kwargs
            if publication.changeOverride(**kwargs):
                num_overridden += 1
            else:
                print("%s remained the same" % publication.display_name)
                num_same += 1

        summary = []
        if num_overridden:
            summary.append("%d %s overridden" % (
                num_overridden,
                "publication" if num_overridden == 1 else "publications"))
        if num_same:
            summary.append("%d %s remained the same" % (
                num_same, "publication" if num_same == 1 else "publications"))
        if summary:
            print("%s." % "; ".join(summary))


def main():
    parser = OptionParser(
        usage="usage: %prog -s suite [options] package [...]",
        epilog=lputils.ARCHIVE_REFERENCE_DESCRIPTION)
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-n", "--dry-run", default=False, action="store_true",
        help="only show overrides that would be performed")
    parser.add_option(
        "-y", "--confirm-all", default=False, action="store_true",
        help="do not ask for confirmation")
    parser.add_option("-A", "--archive", help="override in ARCHIVE")
    parser.add_option(
        "-s", "--suite", metavar="SUITE", help="override in SUITE")
    parser.add_option(
        "-a", "--architecture", dest="architectures", action="append",
        metavar="ARCHITECTURE",
        help="architecture tag (may be given multiple times)")
    parser.add_option(
        "-e", "--version",
        metavar="VERSION", help="package version (default: current version)")
    parser.add_option(
        "-S", "--source-and-binary", default=False, action="store_true",
        help="select source and all binaries from this source")
    parser.add_option(
        "-B", "--binary-and-source", default=False, action="store_true",
        help="select source and binary (of the same name)")
    parser.add_option(
        "-t", "--source-only", default=False, action="store_true",
        help="select source packages only")
    parser.add_option(
        "-c", "--component",
        metavar="COMPONENT", help="move package to COMPONENT")
    parser.add_option(
        "-p", "--priority",
        metavar="PRIORITY", help="move package to PRIORITY")
    parser.add_option(
        "-x", "--section",
        metavar="SECTION", help="move package to SECTION")
    parser.add_option(
        "-z", "--percentage", type="int", default=None,
        metavar="PERCENTAGE", help="set phased update percentage")

    # Deprecated in favour of -A.
    parser.add_option(
        "-d", "--distribution", default="ubuntu", help=SUPPRESS_HELP)
    parser.add_option(
        "-j", "--partner", default=False, action="store_true",
        help=SUPPRESS_HELP)
    options, args = parser.parse_args()

    if (not options.component and not options.section and not options.priority
            and options.percentage is None):
        parser.error(
            "You must override at least one of component, section, "
            "priority, and percentage.")

    options.launchpad = Launchpad.login_with(
        "change-override", options.launchpad_instance, version="devel")
    lputils.setup_location(options)

    change_overrides(options, args)


if __name__ == '__main__':
    main()
@ -0,0 +1,103 @@
# Copyright 2014 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Show charts using YUI."""

from textwrap import dedent


def make_chart_header(chart_name="chart", width=960, height=550):
    """Return HTML to declare the chart style and load YUI.

    This should be included in the <head> element.
    """
    params = {"chart_name": chart_name, "width": width, "height": height}
    return dedent("""\
        <style type="text/css">
        #%(chart_name)s {
            width: %(width)dpx;
            height: %(height)dpx;
        }
        </style>
        <script src="http://yui.yahooapis.com/3.17.2/build/yui/yui-min.js">
        </script>
        """) % params


def make_chart(source, keys, chart_name="chart"):
    """Return HTML to render a chart."""
    params = {
        "source": source,
        "chart_name": chart_name,
        "series_keys": ", ".join('"%s"' % key for key in keys),
        "series_styles": ", ".join(
            '"%s": { line: { weight: "2mm" } }' % key for key in keys),
        "series_schema_fields": ", ".join(
            '{key: "%s", parser: parseNum}' % key for key in keys),
        }
    return dedent("""\
        <div id="%(chart_name)s"></div>
        <script>
        YUI().use(['charts-legend', 'datasource'], function (Y) {
            var chart = new Y.Chart({
                dataProvider: [],
                render: "#%(chart_name)s",
                styles: {
                    axes: {
                        time: {
                            label: { rotation: -45, color: "#000000" }
                        },
                        values: {
                            label: { color: "#000000" },
                            alwaysShowZero: true,
                            scaleType: "logarithmic"
                        }
                    },
                    series: {
                        %(series_styles)s
                    }
                },
                categoryKey: "time",
                categoryType: "time",
                valueAxisName: "values",
                seriesKeys: [ %(series_keys)s ],
                showMarkers: false,
                legend: { position: "bottom" }
            });

            var parseDate = function (val) { return new Date(+val); };
            var parseNum = function (val) { return +val; };

            var csv = new Y.DataSource.IO({source: "%(source)s"});
            csv.plug(Y.Plugin.DataSourceTextSchema, {
                schema: {
                    resultDelimiter: "\\n",
                    fieldDelimiter: ",",
                    resultFields: [
                        {key: "time", parser: parseDate},
                        %(series_schema_fields)s
                    ]}});
            csv.sendRequest({request: "", on: {
                success: function (e) {
                    e.response.results.shift(); // remove CSV header
                    chart.set("dataProvider", e.response.results);
                },
                failure: function (e) {
                    console.log("Failed to fetch %(source)s: " +
                                e.error.message);
                }}});
        });
        </script>
        """) % params
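
# Example usage, matching how component-mismatches (later in this tree)
# builds its report; the CSV path and series names are the ones used there:
#   header = make_chart_header()  # goes into the <head> element
#   chart = make_chart("component-mismatches.csv",
#                      ["source promotions", "binary promotions",
#                       "source demotions", "binary demotions"])
#   # `chart` is then embedded in the <body> of the generated page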
@ -0,0 +1,300 @@
#! /usr/bin/python

# Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function

from collections import defaultdict
import gzip
import optparse
import os
import re
import sys
import tempfile

import apt_pkg


default_base = '/home/ubuntu-archive/mirror/ubuntu'
default_suite = 'disco'
components = ('main', 'restricted', 'universe', 'multiverse')

# Cut-down RE from deb822.PkgRelation.
re_dep = re.compile(r'^\s*([a-zA-Z0-9.+\-]{2,})')

re_kernel_image_di = re.compile(r'^kernel-image-(.+)-di')


# Cheaper version of deb822.PkgRelation.parse_relations.
def parse_relation_packages(raw):
    for or_dep in raw.split(','):
        for dep in or_dep.split('|'):
            match = re_dep.match(dep.strip())
            if match:
                yield match.group(1)
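
# For example (illustrative relation string):
#   parse_relation_packages("libfoo (>= 1.0) | libbar, baz")
# yields "libfoo", "libbar" and "baz": version constraints are dropped and
# alternatives are flattened, which is all this script needs.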


def primary_arches(suite):
    return ('amd64', 'i386')


def ports_arches(suite):
    if suite == 'lucid':
        return ('armel', 'ia64', 'powerpc', 'sparc')
    elif suite == 'precise':
        return ('armel', 'armhf', 'powerpc')
    elif suite in ('14.09', '14.09-factory'):
        return ('armhf',)
    elif suite in ('trusty', 'vivid', 'wily'):
        return ('arm64', 'armhf', 'powerpc', 'ppc64el')
    elif suite in ('xenial', 'yakkety'):
        return ('arm64', 'armhf', 'powerpc', 'ppc64el', 's390x')
    else:
        return ('arm64', 'armhf', 'ppc64el', 's390x')


def read_tag_file(path):
    tmp = tempfile.NamedTemporaryFile(prefix='checkrdepends.', delete=False)
    try:
        compressed = gzip.open(path)
        try:
            tmp.write(compressed.read())
        finally:
            compressed.close()
        tmp.close()
        with open(tmp.name) as uncompressed:
            tag_file = apt_pkg.TagFile(uncompressed)
            prev_name = None
            prev_stanza = None
            for stanza in tag_file:
                try:
                    name = stanza['package']
                except KeyError:
                    continue
                if name != prev_name and prev_stanza is not None:
                    yield prev_stanza
                prev_name = name
                prev_stanza = stanza
            if prev_stanza is not None:
                yield prev_stanza
    finally:
        os.unlink(tmp.name)


def read_sources(path):
    ret = {
        'binary': {},
        'source': defaultdict(set),
        'build_deps': defaultdict(set),
        }
    binary = ret['binary']
    source = ret['source']
    build_deps = ret['build_deps']
    for stanza in read_tag_file(path):
        if 'binary' not in stanza:
            continue
        name = stanza['package']
        binpkgs = [b.rstrip(',') for b in stanza['binary'].split()]
        binary[name] = binpkgs
        for binpkg in binpkgs:
            source[binpkg].add(stanza['package'])
        for field in ('build-depends', 'build-depends-indep'):
            if field not in stanza:
                continue
            for depname in parse_relation_packages(stanza[field]):
                build_deps[depname].add(name)
    return ret


def read_packages(debs, path, sources, ignores=[], missing_ok=False):
    ret = {'deps': defaultdict(dict)}
    deps = ret['deps']
    try:
        for stanza in read_tag_file(path):
            name = stanza['package']
            for field in ('pre-depends', 'depends', 'recommends'):
                if field not in stanza:
                    continue
                for depname in parse_relation_packages(stanza[field]):
                    if depname not in debs:
                        continue
                    # skip dependencies that are built from the same source,
                    # when we're doing a sourceful removal.
                    if name in ignores:
                        continue
                    deps[depname][name] = (field, stanza['architecture'])
    except IOError:
        if not missing_ok:
            raise
    return ret


def read_di(debs, path):
    ret = set()
    try:
        with open(path) as manifest:
            for line in manifest:
                udeb = line.split()[0]
                ret.add(udeb)
                match = re_kernel_image_di.match(udeb)
                if match:
                    re_modules = re.compile(r'-modules-%s-di' % match.group(1))
                    for pkg in debs:
                        if re_modules.search(pkg):
                            ret.add(pkg)
    except IOError:
        pass
    return ret


def pockets(opts):
    if '-' in opts.suite:
        return ('',)
    else:
        return ('', '-updates', '-security', '-backports')


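# Annotates a dependency name for display, e.g. (illustrative values):
#   render_dep("libfoo1", "recommends", "all") == "libfoo1 (r) [all]"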
def render_dep(name, field, arch):
    ret = name
    if field == "recommends":
        ret += " (r)"
    if arch == "all":
        ret += " [all]"
    return ret


def search(opts, pkgs):
    for pocket in pockets(opts):
        pocket_base = '%s/dists/%s%s' % (opts.archive_base, opts.suite, pocket)
        if opts.arches:
            arches = opts.arches
        else:
            arches = list(primary_arches(opts.suite))
            if opts.ports:
                arches.extend(ports_arches(opts.suite))

        packages = defaultdict(dict)
        sources = {}
        for comp in components:
            comp_base = '%s/%s' % (pocket_base, comp)
            sources[comp] = read_sources('%s/source/Sources.gz' % comp_base)

        if opts.binary:
            debs = pkgs
            ignores = []
        else:
            debs = set()
            for src in pkgs:
                for comp in components:
                    if src in sources[comp]['binary']:
                        debs.update(set(sources[comp]['binary'][src]))
            ignores = debs = sorted(debs)

        # Now we have the source<->binary mapping, we can read Packages
        # files but only bother to remember the dependencies we need.
        for comp in components:
            comp_base = '%s/%s' % (pocket_base, comp)
            di_comp = '%s/debian-installer' % comp
            di_comp_base = '%s/%s' % (pocket_base, di_comp)

            build_deps = sources[comp]['build_deps']
            for deb in debs:
                if opts.directory is not None:
                    out = open(os.path.join(opts.directory, deb), 'a')
                else:
                    out = sys.stdout

                # build dependencies
                if deb in build_deps:
                    print("-- %s%s/%s build deps on %s:" %
                          (opts.suite, pocket, comp, deb), file=out)
                    for pkg in sorted(build_deps[deb]):
                        print(pkg, file=out)

                # binary dependencies
                for arch in arches:
                    if arch not in packages[comp]:
                        packages[comp][arch] = \
                            read_packages(debs,
                                          '%s/binary-%s/Packages.gz' %
                                          (comp_base, arch),
                                          sources[comp], ignores)
                    if arch not in packages[di_comp]:
                        packages[di_comp][arch] = \
                            read_packages(debs,
                                          '%s/binary-%s/Packages.gz' %
                                          (di_comp_base, arch),
                                          sources[comp], ignores,
                                          missing_ok=True)
                        if comp == 'main':
                            di_images = \
                                read_di(debs,
                                        '%s/installer-%s/current/images/'
                                        'udeb.list' % (comp_base, arch))
                            di_deps = packages[di_comp][arch]['deps']
                            for udeb in di_images:
                                di_deps[udeb]['debian-installer-images'] = (
                                    'depends', arch)

                    deps = packages[comp][arch]['deps']
                    di_deps = packages[di_comp][arch]['deps']
                    if deb in deps:
                        print("-- %s%s/%s %s deps on %s:" %
                              (opts.suite, pocket, comp, arch, deb), file=out)
                        for pkg, (field, pkgarch) in sorted(deps[deb].items()):
                            print(render_dep(pkg, field, pkgarch), file=out)
                    if deb in di_deps:
                        print("-- %s%s/%s %s deps on %s:" %
                              (opts.suite, pocket, di_comp, arch, deb),
                              file=out)
                        for pkg, (field, pkgarch) in sorted(
                                di_deps[deb].items()):
                            print(render_dep(pkg, field, pkgarch), file=out)

                if opts.directory is not None:
                    out.close()


def main():
    parser = optparse.OptionParser(usage='%prog [options] pkg [...]')
    parser.add_option('-B', '--archive-base', dest='archive_base',
                      help=('archive base directory (default: %s)' %
                            default_base),
                      default=default_base)
    parser.add_option('-s', '--suite', dest='suite',
                      help='suite to check (default: %s)' % default_suite,
                      default=default_suite)
    parser.add_option('-a', '--arch', dest='arches', action='append',
                      help='check only this architecture '
                           '(may be given multiple times)')
    parser.add_option('-b', '--binary', dest='binary', action='store_true',
                      help='treat arguments as binary packages, not source')
    parser.add_option('--no-ports', dest='ports',
                      default=True, action='store_false',
                      help='skip ports architectures')
    parser.add_option('-d', '--directory', dest='directory', metavar='DIR',
                      help='output to directory DIR (one file per package) '
                           'instead of standard output')
    opts, args = parser.parse_args()

    if 'CHECKRDEPENDS_PROFILE' in os.environ:
        import profile
        # profile.run() executes in the __main__ namespace, where opts and
        # args are not visible; runctx passes this frame's names explicitly.
        profile.runctx('search(opts, args)', globals(), locals())
    else:
        search(opts, args)


if __name__ == '__main__':
    main()
@ -0,0 +1,940 @@
#!/usr/bin/env python

# Sync a suite with a Seed list.
# Copyright (C) 2004, 2005, 2009, 2010, 2011, 2012 Canonical Ltd.
# Author: James Troup <james.troup@canonical.com>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# XXX - add indication if all of the binaries of a source package are
#       listed for promotion at once
#       i.e. to allow 'change-override -S' usage

from __future__ import print_function

__metaclass__ = type

import atexit
from collections import defaultdict, OrderedDict
import copy
import csv
import gzip
try:
    from html import escape
except ImportError:
    from cgi import escape
import json
from optparse import OptionParser
import os
import shutil
import sys
import tempfile
from textwrap import dedent
import time
try:
    from urllib.parse import quote_plus
except ImportError:
    from urllib import quote_plus

import apt_pkg
from launchpadlib.launchpad import Launchpad

from charts import make_chart, make_chart_header


tempdir = None

archive_source = {}
archive_binary = {}

current_source = {}
current_binary = {}

germinate_source = {}
germinate_binary = {}

seed_source = defaultdict(set)
seed_binary = defaultdict(set)


class MIRLink:
    def __init__(self, id, status, title, assignee):
        self.id = id
        self.status = status
        self.title = title
        self.assignee = assignee

    def __str__(self):
        if (self.status not in ('Fix Committed', 'Fix Released') and
                self.assignee):
            s = "MIR: #%d (%s for %s)" % (self.id, self.status,
                                          self.assignee.display_name)
        else:
            s = "MIR: #%d (%s)" % (self.id, self.status)
        # no need to repeat the standard title
        if not self.title.startswith("[MIR]"):
            s += " %s" % self.title
        return s

    def html(self):
        h = 'MIR: <a href="https://launchpad.net/bugs/%d">#%d</a> (%s)' % (
            self.id, self.id, escape(self.status))
        # no need to repeat the standard title
        if not self.title.startswith("[MIR]"):
            h += " %s" % escape(self.title)
        return h


def ensure_tempdir():
    global tempdir
    if not tempdir:
        tempdir = tempfile.mkdtemp(prefix='component-mismatches')
        atexit.register(shutil.rmtree, tempdir)


def decompress_open(tagfile):
    ensure_tempdir()
    decompressed = tempfile.mktemp(dir=tempdir)
    fin = gzip.GzipFile(filename=tagfile)
    with open(decompressed, 'wb') as fout:
        fout.write(fin.read())
    return open(decompressed, 'r')


def read_current_source(options):
    for suite in options.suites:
        for component in options.all_components:
            sources_path = "%s/dists/%s/%s/source/Sources.gz" % (
                options.archive_dir, suite, component)
            for section in apt_pkg.TagFile(decompress_open(sources_path)):
                if 'Package' in section and 'Version' in section:
                    (pkg, version) = (section['Package'], section['Version'])
                    if pkg not in archive_source:
                        archive_source[pkg] = (version, component)
                    else:
                        if apt_pkg.version_compare(
                                archive_source[pkg][0], version) < 0:
                            archive_source[pkg] = (
                                version, component.split("/")[0])

    for pkg, (version, component) in archive_source.items():
        if component in options.components:
            current_source[pkg] = (version, component)


def read_current_binary(options):
    components_with_di = []
    for component in options.all_components:
        components_with_di.append(component)
        components_with_di.append('%s/debian-installer' % component)
    for suite in options.suites:
        for component in components_with_di:
            for arch in [
                    "i386", "amd64", "armhf", "arm64", "ppc64el",
                    "s390x"]:
                binaries_path = "%s/dists/%s/%s/binary-%s/Packages.gz" % (
                    options.archive_dir, suite, component, arch)
                for section in apt_pkg.TagFile(decompress_open(binaries_path)):
                    if 'Package' in section and 'Version' in section:
                        (pkg, version) = (section['Package'],
                                          section['Version'])
                        if 'source' in section:
                            src = section['Source'].split(" ", 1)[0]
                        else:
                            src = section['Package']
                        if pkg not in archive_binary:
                            archive_binary[pkg] = (
                                version, component.split("/")[0], src)
                        else:
                            if apt_pkg.version_compare(
                                    archive_binary[pkg][0], version) < 0:
                                archive_binary[pkg] = (version, component, src)

    for pkg, (version, component, src) in archive_binary.items():
        if component in options.components:
            current_binary[pkg] = (version, component, src)


def read_germinate(options):
    for flavour in reversed(options.flavours.split(",")):
        # List of seeds
        seeds = ["all"]
        try:
            filename = "%s/structure_%s_%s_i386" % (
                options.germinate_path, flavour, options.suite)
            with open(filename) as structure:
                for line in structure:
                    if not line or line.startswith('#') or ':' not in line:
                        continue
                    seeds.append(line.split(':')[0])
        except IOError:
            continue
        # ideally supported+build-depends too, but Launchpad's
        # cron.germinate doesn't save this

        for arch in ["i386", "amd64", "armhf", "arm64", "ppc64el",
                     "s390x"]:
            for seed in seeds:
                filename = "%s/%s_%s_%s_%s" % (
                    options.germinate_path, seed, flavour, options.suite, arch)
                with open(filename) as f:
                    for line in f:
                        # Skip header and footer
                        if (line[0] == "-" or line.startswith("Package") or
                                line[0] == " "):
                            continue
                        # Skip empty lines
                        line = line.strip()
                        if not line:
                            continue
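                        # Remaining lines look roughly like
                        # "pkg | source | why | ..." (assumed germinate
                        # output format); only the first three columns
                        # are used here.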
                        pkg, source, why = [word.strip()
                                            for word in line.split('|')][:3]
                        if seed == "all":
                            germinate_binary[pkg] = (
                                source, why, flavour, arch)
                            germinate_source[source] = (flavour, arch)
                        else:
                            seed_binary[seed].add(pkg)
                            seed_source[seed].add(source)


def is_included_binary(options, pkg):
    if options.include:
        for seed in options.include.split(","):
            if seed in seed_binary and pkg in seed_binary[seed]:
                return True
        return False
    return True


def is_excluded_binary(options, pkg):
    if options.exclude:
        seeds = set(seed_binary) - set(options.exclude.split(","))
        for seed in seeds:
            if seed in seed_binary and pkg in seed_binary[seed]:
                return False
        for seed in options.exclude.split(","):
            if seed in seed_binary and pkg in seed_binary[seed]:
                return True
    return False


def is_included_source(options, pkg):
    if options.include:
        for seed in options.include.split(","):
            if seed in seed_source and pkg in seed_source[seed]:
                return True
        return False
    return True


def is_excluded_source(options, pkg):
    if options.exclude:
        seeds = set(seed_source) - set(options.exclude.split(","))
        for seed in seeds:
            if seed in seed_source and pkg in seed_source[seed]:
                return False
        for seed in options.exclude.split(","):
            if seed in seed_source and pkg in seed_source[seed]:
                return True
    return False


def get_source(binary):
    return current_binary[binary][2]


def find_signer(options, source):
    # look at the source package publishing history for the most recent
    # package_signer; a copy from Debian won't have a package signer
    series = options.distro.getSeries(name_or_version=options.suite)
    publications = options.archive.getPublishedSources(
        distro_series=series, source_name=source,
        exact_match=True)
    if not publications:
        return ('no publications found', '')
    sorted_pubs = sorted([(ps.date_published, ps)
                          for ps in publications
                          if ps.date_published is not None], reverse=True)
    for pub in sorted_pubs:
        if pub[1].package_signer:
            signer = pub[1].package_signer.name
            web_link = pub[1].package_signer.web_link
            return (signer, web_link)
    else:
        signer = ''
        web_link = ''
        return (signer, web_link)


def do_reverse(options, source, binaries, why_d):
    global signers
    try:
        signers.keys()
    except NameError:
        signers = {}
    output = []
    depend = {}
    recommend = {}
    build_depend = {}
    for binary in binaries:
        why = why_d[source][binary]
        if why.find("Build-Depend") != -1:
            why = why.replace("(Build-Depend)", "").strip()
            build_depend[why] = ""
        elif why.find("Recommends") != -1:
            why = why.replace("(Recommends)", "").strip()
            recommend[why] = ""
        else:
            depend[why] = ""

    def do_category(map, category):
        keys = []
        for k in map:
            if k.startswith('Rescued from '):
                pkg = k.replace('Rescued from ', '')
            else:
                pkg = k
            # seed names have spaces in them
            if ' ' not in pkg:
                try:
                    source = get_source(pkg)
                except KeyError:
                    source = pkg
                if source not in signers:
                    signer, web_link = find_signer(options, source)
                    if signer and web_link:
                        signers[source] = (signer, web_link)
            if k in current_binary:
                keys.append('%s (%s)' % (k, current_binary[k][1].upper()))
            elif k in current_source:
                keys.append('%s (%s)' % (k, current_source[k][1].upper()))
            else:
                keys.append(k)
        keys.sort()
        if keys:
            return ["[Reverse-%s: %s]" % (category, ", ".join(keys))]
        else:
            return []

    output.extend(do_category(depend, 'Depends'))
    output.extend(do_category(recommend, 'Recommends'))
    output.extend(do_category(build_depend, 'Build-Depends'))

    return output


def do_dot(why, fd, mir_bugs, suite):
    # write dot graph for given why dictionary

    written_nodes = set()

    fd.write(
        'digraph "component-mismatches: movements to main/restricted" {\n')
    for s, binwhy in why.items():
        for binary, why in binwhy.items():
            # ignore binaries from this source, and "rescued"
            if why in binwhy or why.startswith('Rescued'):
                continue

            if "(Recommends)" in why:
                relation = " R "
                color = "gray"
                why = why.replace(" (Recommends)", "")
            elif "Build-Depend" in why:
                relation = " B"
                color = "blue"
                why = why.replace(" (Build-Depend)", "")
            else:
                relation = ""
                color = "black"

            try:
                why = get_source(why)
            except KeyError:
                # happens for sources which are in universe, or seeds
                try:
                    why = germinate_binary[why][0]
                except KeyError:
                    pass

            # helper function to write a node
            def write_node(name):
                # ensure to only write it once
                if name in written_nodes:
                    return name
                written_nodes.add(name)

                fd.write(' "%s" [label="%s" style="filled" tooltip="%s"' %
                         (name, name, ', '.join(package_team_mapping[name])))

                mirs = mir_bugs.get(name, [])
                approved_mirs = [
                    id for id, status, title, assignee in mirs
                    if status in ('Fix Committed', 'Fix Released')]

                url = None
                if name.endswith(' seed'):
                    fc = "green"
                elif name in current_source:
                    fc = "lightgreen"
                    url = ("https://launchpad.net/ubuntu/+source/%s" %
                           quote_plus(name))
                elif approved_mirs:
                    fc = "yellow"
                    url = "https://launchpad.net/bugs/%i" % approved_mirs[0]
                elif mirs:
                    if mirs[0][1] == 'Incomplete':
                        fc = "darkkhaki"
                    else:
                        fc = "darksalmon"
                    url = "https://launchpad.net/bugs/%i" % mirs[0][0]
                else:
                    fc = "white"
                    # Need to use &amp; otherwise the svg will have a
                    # syntax error
                    url = ("https://launchpad.net/ubuntu/+source/%s/+filebug?"
                           "field.title=%s&amp;field.status=Incomplete"
                           "&amp;field.tags=%s" %
                           (quote_plus(name), quote_plus("[MIR] %s" % name),
                            quote_plus(suite)))
                fd.write(' fillcolor="%s"' % fc)
                if url:
                    fd.write(' URL="%s"' % url)
                fd.write("]\n")
                return name

            s_node = write_node(s)
            why_node = write_node(why)

            # generate relation
            fd.write(' "%s" -> "%s" [label="%s" color="%s" '
                     'fontcolor="%s"]\n' %
                     (why_node, s_node, relation, color, color))

    # add legend
    fd.write("""
 {
    rank="source"
    NodeLegend[shape=none, margin=0, label=<
    <table border="0" cellborder="1" cellspacing="0" cellpadding="4">
      <tr><td>Nodes</td></tr>
      <tr><td bgcolor="green">seed</td></tr>
      <tr><td bgcolor="lightgreen">in main/restricted </td></tr>
      <tr><td bgcolor="yellow">approved MIR (clickable)</td></tr>
      <tr><td bgcolor="darksalmon">unapproved MIR (clickable)</td></tr>
      <tr><td bgcolor="darkkhaki">Incomplete/stub MIR (clickable)</td></tr>
      <tr><td bgcolor="white">No MIR (click to file one)</td></tr>
    </table>
   >];

    EdgeLegend[shape=none, margin=0, label=<
    <table border="0" cellborder="1" cellspacing="0" cellpadding="4">
      <tr><td>Edges</td></tr>
      <tr><td>Depends:</td></tr>
      <tr><td><font color="gray">Recommends:</font></td></tr>
      <tr><td><font color="blue">Build-Depends: </font></td></tr>
    </table>
   >];
 }
}
""")


def filter_source(component, sources):
    return [
        s for s in sources
        if s in archive_source and archive_source[s][1] == component]


def filter_binary(component, binaries):
    return [
        b for b in binaries
        if b in archive_binary and archive_binary[b][1] == component]


package_team_mapping = defaultdict(set)


def get_teams(options, source):
    global package_team_mapping

    if os.path.exists(options.package_team_mapping):
        with open(options.package_team_mapping) as ptm_file:
            for team, packages in json.load(ptm_file).items():
                if team == "unsubscribed":
                    continue
                for package in packages:
                    package_team_mapping[package].add(team)

    if source in package_team_mapping:
        for team in package_team_mapping[source]:
            yield team
    elif package_team_mapping:
        yield "unsubscribed"


def print_section_text(options, header, body,
                       source_and_binary=False, binary_only=False):
    if body:
        print(" %s" % header)
        print(" %s" % ("-" * len(header)))
        print()
        for entry in body:
            line = entry[0]
            source = line[0]
            binaries = " ".join(line[1:])
            if source_and_binary:
                print(" o %s: %s" % (source, binaries))
            elif binary_only:
                indent_right = 75 - len(binaries) - len(source) - 2
                print(" o %s%s{%s}" % (binaries, " " * indent_right, source))
            else:
                print(" o %s" % source)
            for line in entry[1:]:
                print(" %s" % line)
            if len(entry) != 1:
                print()
        if len(body[-1]) == 1:
            print()
        print("=" * 70)
        print()


def print_section_html(options, header, body,
                       source_and_binary=False, binary_only=False):
    if body:
        def print_html(*args, **kwargs):
            print(*args, file=options.html_output, **kwargs)

        def source_link(source):
            return (
                '<a href="https://launchpad.net/ubuntu/+source/%s">%s</a>' % (
                    escape(source, quote=True), escape(source)))

        print_html("<h2>%s</h2>" % escape(header))
        print_html("<table>")
        for entry in body:
            line = entry[0]
            source = line[0]
            binaries = " ".join(line[1:])
            if source_and_binary:
                print_html(
                    '<tr><th colspan="2">%s: %s' % (
                        source_link(source), escape(binaries)))
            elif binary_only:
                print_html('<tr><th>%s</th>' % escape(binaries), end="")
                print_html(
                    "<th><small>%s</small></th></tr>" % source_link(source))
            else:
                print_html(
                    '<tr><th colspan="2">%s</th></tr>' % source_link(source))
            for line in entry[1:]:
                if isinstance(line, MIRLink):
                    line = line.html()
                else:
                    for item in line.strip('[]').split(' '):
                        if item.strip(',') in signers:
                            comma = ''
                            if item.endswith(','):
                                comma = ','
                                pkg = item.strip(',')
                            else:
                                pkg = item
                            # neither of these will help fix the issue
                            if signers[pkg][0] in ['ps-jenkins',
                                                   'ci-train-bot']:
                                continue
                            line = line.replace(item, '%s (Uploader: %s)%s' %
                                                (pkg, signers[pkg][0], comma))
                    line = escape(line)
                print_html(
                    '<tr><td colspan="2"><span class="note">%s'
                    '</span></td></tr>' % line)
        print_html("</table>")


def do_output(options,
              orig_source_add, orig_source_remove, binary_add, binary_remove,
              mir_bugs):
    results = {}
    results["time"] = int(options.time * 1000)

    global package_team_mapping
    package_team_mapping = defaultdict(set)
    if os.path.exists(options.package_team_mapping):
        with open(options.package_team_mapping) as ptm_file:
            for team, packages in json.load(ptm_file).items():
                if team == "unsubscribed":
                    continue
                for package in packages:
                    package_team_mapping[package].add(team)

    if options.html_output is not None:
        print(dedent("""\
            <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
              "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
            <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
            <head>
              <meta http-equiv="Content-Type"
                    content="text/html; charset=utf-8" />
              <title>Component mismatches for %s</title>
              <style type="text/css">
                body { background: #CCCCB0; color: black; }
                a { text-decoration: none; }
                table { border-collapse: collapse; border-style: none none;
                        margin-bottom: 3ex; empty-cells: show; }
                table th { text-align: left;
                           border-style: groove none none none;
                           border-width: 3px; padding-right: 10px;
                           font-weight: normal; }
                table td { vertical-align: top; text-align: left;
                           border-style: none none;
                           border-width: 1px; padding-right: 10px; }
                .note { margin-left: 3ex; }
              </style>
              %s
            </head>
            <body>
            <h1>Component mismatches for %s</h1>
            """) % (escape(options.suite), make_chart_header(),
                    escape(options.suite)), file=options.html_output)

    # Additions

    binary_only = defaultdict(dict)
    both = defaultdict(dict)

    source_add = copy.copy(orig_source_add)
    source_remove = copy.copy(orig_source_remove)

    for pkg in binary_add:
        (source, why, flavour, arch) = binary_add[pkg]
        if source not in orig_source_add:
            binary_only[source][pkg] = why
        else:
            both[source][pkg] = why
            if source in source_add:
                source_add.remove(source)

    all_output = OrderedDict()
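    # all_output maps a section title to {"output": [...]} plus optional
    # "source_and_binary"/"binary_only" flags; the rendering loop below
    # treats missing flags as False.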
    results["source promotions"] = 0
    results["binary promotions"] = 0
    for component in options.components:
        if component == "main":
            counterpart = "universe"
        elif component == "restricted":
            counterpart = "multiverse"
        else:
            continue

        output = []
        for source in filter_source(counterpart, sorted(both)):
            binaries = sorted(both[source])
            entry = [[source] + binaries]

            for (id, status, title, assignee) in mir_bugs.get(source, []):
                entry.append(MIRLink(id, status, title, assignee))

            entry.extend(do_reverse(options, source, binaries, both))
            output.append(entry)

        all_output["Source and binary movements to %s" % component] = {
            "output": output,
            "source_and_binary": True,
            }
        results["source promotions"] += len(output)

        output = []
        for source in sorted(binary_only):
            binaries = filter_binary(counterpart, sorted(binary_only[source]))

            if binaries:
                entry = [[source] + binaries]
                entry.extend(do_reverse(options, source, binaries,
                                        binary_only))
                output.append(entry)

        all_output["Binary only movements to %s" % component] = {
            "output": output,
            "binary_only": True,
            }
        results["binary promotions"] += len(output)

        output = []
        for source in filter_source(counterpart, sorted(source_add)):
            output.append([[source]])

        all_output["Source only movements to %s" % component] = {
            "output": output,
            }
        results["source promotions"] += len(output)

    if options.dot:
        with open(options.dot, 'w') as f:
            do_dot(both, f, mir_bugs, options.suite)

    # Removals

    binary_only = defaultdict(dict)
    both = defaultdict(dict)
    for pkg in binary_remove:
        source = get_source(pkg)
        if source not in orig_source_remove:
            binary_only[source][pkg] = ""
        else:
            both[source][pkg] = ""
            if source in source_remove:
                source_remove.remove(source)

    results["source demotions"] = 0
    results["binary demotions"] = 0
    for component in options.components:
        if component == "main":
            counterpart = "universe"
        elif component == "restricted":
            counterpart = "multiverse"
        else:
            continue

        output = []
        for source in filter_source(component, sorted(both)):
            binaries = sorted(both[source])
            output.append([[source] + binaries])

        all_output["Source and binary movements to %s" % counterpart] = {
            "output": output,
            "source_and_binary": True,
            }
        results["source demotions"] += len(output)

        output = []
        for source in sorted(binary_only):
            binaries = filter_binary(component, sorted(binary_only[source]))

            if binaries:
                output.append([[source] + binaries])

        all_output["Binary only movements to %s" % counterpart] = {
            "output": output,
            "binary_only": True,
            }
        results["binary demotions"] += len(output)

        output = []
        for source in filter_source(component, sorted(source_remove)):
            output.append([[source]])

        all_output["Source only movements to %s" % counterpart] = {
            "output": output,
            }
        results["source demotions"] += len(output)

    for title, output_spec in all_output.items():
        source_and_binary = output_spec.get("source_and_binary", False)
        binary_only = output_spec.get("binary_only", False)
        print_section_text(
            options, title, output_spec["output"],
            source_and_binary=source_and_binary, binary_only=binary_only)
        if options.html_output is not None and package_team_mapping:
            by_team = defaultdict(list)
            for entry in output_spec["output"]:
                source = entry[0][0]
                for team in package_team_mapping[source]:
                    by_team[team].append(entry)
                if not package_team_mapping[source]:
                    by_team["unsubscribed"].append(entry)
            for team, entries in sorted(by_team.items()):
                print_section_html(
                    options, "%s (%s)" % (title, team), entries,
                    source_and_binary=source_and_binary,
                    binary_only=binary_only)

    if options.html_output is not None:
        print("<h2>Over time</h2>", file=options.html_output)
        print(
            make_chart("component-mismatches.csv", [
                "source promotions", "binary promotions",
                "source demotions", "binary demotions",
                ]),
            file=options.html_output)
        print(
            "<p><small>Generated: %s</small></p>" % escape(options.timestamp),
            file=options.html_output)
        print("</body></html>", file=options.html_output)

    return results


def do_source_diff(options):
    removed = []
    added = []
    removed = list(set(current_source).difference(set(germinate_source)))
    for pkg in germinate_source:
        if (pkg not in current_source and
                is_included_source(options, pkg) and
                not is_excluded_source(options, pkg)):
            added.append(pkg)
    removed.sort()
    added.sort()
    return (added, removed)


def do_binary_diff(options):
    removed = []
    added = {}
    removed = list(set(current_binary).difference(set(germinate_binary)))
    for pkg in germinate_binary:
        if (pkg not in current_binary and
                is_included_binary(options, pkg) and
                not is_excluded_binary(options, pkg)):
            added[pkg] = germinate_binary[pkg]
    removed.sort()
    return (added, removed)


def get_mir_bugs(options, sources):
    '''Return MIR bug information for a set of source packages.

    Return a map source -> [(id, status, title, assignee), ...]
    '''
    result = defaultdict(list)
    mir_team = options.launchpad.people['ubuntu-mir']
    bug_statuses = ("New", "Incomplete", "Won't Fix", "Confirmed", "Triaged",
                    "In Progress", "Fix Committed", "Fix Released")
    for source in sources:
        tasks = options.distro.getSourcePackage(name=source).searchTasks(
            bug_subscriber=mir_team, status=bug_statuses)
        for task in tasks:
            result[source].append((task.bug.id, task.status, task.bug.title,
                                   task.assignee))

    return result


def main():
    apt_pkg.init()

    parser = OptionParser(description='Sync a suite with a Seed list.')
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option('-o', '--output-file', help='output to this file')
    parser.add_option('--html-output-file', help='output HTML to this file')
    parser.add_option(
        '--csv-file', help='record CSV time series data in this file')
    parser.add_option(
        '--package-team-mapping',
        default=os.path.expanduser('~/public_html/package-team-mapping.json'),
        help='path to package-team-mapping.json')
    parser.add_option('-s', '--suite', help='check this suite')
    parser.add_option('-f', '--flavours', default='ubuntu',
                      help='check these flavours (comma-separated)')
    parser.add_option('-i', '--include', help='include these seeds')
    parser.add_option('-e', '--exclude', help='exclude these seeds')
    parser.add_option('-d', '--dot',
                      help='generate main promotion graph suitable for dot')
    parser.add_option(
        '--germinate-path',
        default=os.path.expanduser('~/mirror/ubuntu-germinate/'),
        help='read Germinate output from this directory')
    parser.add_option(
        '--archive-dir',
        default=os.path.expanduser('~/mirror/ubuntu/'),
        help='use Ubuntu archive located in this directory')
    options, args = parser.parse_args()

    options.launchpad = Launchpad.login_anonymously(
        'component-mismatches', options.launchpad_instance)
    options.distro = options.launchpad.distributions['ubuntu']
    options.archive = options.distro.getArchive(name='primary')

    options.component = "main,restricted"
    options.components = options.component.split(',')
|
||||
options.all_components = ["main", "restricted", "universe", "multiverse"]
|
||||
|
||||
if options.suite is None:
|
||||
options.suite = options.distro.current_series.name
|
||||
|
||||
    # We treat a suite as the union of the pockets needed to make it fully
    # installable. So:
    #   -security = release + -security
    #   -updates  = release + -security + -updates
    #   -proposed = release + -security + -updates + -proposed
    if "-" in options.suite:
        options.suite, options.pocket = options.suite.split("-")
        options.suites = [options.suite]
        if options.pocket in ["updates", "security", "proposed"]:
            options.suites.append("%s-security" % options.suite)
        if options.pocket in ["updates", "proposed"]:
            options.suites.append("%s-updates" % options.suite)
        if options.pocket in ["proposed"]:
            options.suites.append("%s-proposed" % options.suite)
    else:
        options.suites = [options.suite]
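
    # For example (illustrative): "--suite disco-updates" yields
    # options.suite == "disco", options.pocket == "updates", and
    # options.suites == ["disco", "disco-security", "disco-updates"].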

    if options.output_file is not None:
        sys.stdout = open('%s.new' % options.output_file, 'w')
    if options.html_output_file is not None:
        options.html_output = open('%s.new' % options.html_output_file, 'w')
    else:
        options.html_output = None

    options.time = time.time()
    options.timestamp = time.strftime(
        '%a %b %e %H:%M:%S %Z %Y', time.gmtime(options.time))
    print('Generated: %s' % options.timestamp)
    print()

    read_germinate(options)
    read_current_source(options)
    read_current_binary(options)
    source_add, source_remove = do_source_diff(options)
    binary_add, binary_remove = do_binary_diff(options)
    mir_bugs = get_mir_bugs(options, source_add)
    results = do_output(
        options, source_add, source_remove, binary_add, binary_remove,
        mir_bugs)

    if options.html_output_file is not None:
        options.html_output.close()
        os.rename(
            '%s.new' % options.html_output_file, options.html_output_file)
    if options.output_file is not None:
        sys.stdout.close()
        os.rename('%s.new' % options.output_file, options.output_file)
    if options.csv_file is not None:
        if sys.version < "3":
            open_mode = "ab"
            open_kwargs = {}
        else:
            open_mode = "a"
            open_kwargs = {"newline": ""}
        csv_is_new = not os.path.exists(options.csv_file)
        with open(options.csv_file, open_mode, **open_kwargs) as csv_file:
            # Field names deliberately hardcoded; any changes require
            # manually rewriting the output file.
            fieldnames = [
                "time",
                "source promotions",
                "binary promotions",
                "source demotions",
                "binary demotions",
                ]
            csv_writer = csv.DictWriter(csv_file, fieldnames)
            if csv_is_new:
                csv_writer.writeheader()
            csv_writer.writerow(results)


if __name__ == '__main__':
    main()
@ -0,0 +1,190 @@
#! /usr/bin/python3

# Copyright (C) 2011, 2012 Canonical Ltd.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# This script can be used to reschedule some of the copy archive
# builds so that they are processed like regular PPA builds.
#
# Copy archive builds have a huge penalty applied to them, which means
# that they are only processed when there is nothing else being processed
# by the build farm. That's usually fine, but for some rebuilds we want
# more timely processing, while at the same time we do want to continue to
# service regular PPA builds.
#
# This script will try to have a portion of the build farm processing copy
# builds. It does that by rescoring builds into the normal build priority
# range, but it will only rescore a few builds at a time, so as not to take
# over the build pool. By default, it won't rescore more than 1/4 of the
# number of available builders. So for example, if there are 12 i386
# builders, only 3 builds at a time will have a "normal priority".
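#
# Example invocation (illustrative, not part of the original script):
#   copy-build-scheduler -v -p amd64 -p i386 -r 0.25 my-rebuild-archive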

import argparse
from collections import defaultdict
import logging
import time

from launchpadlib.launchpad import Launchpad


API_NAME = 'copy-build-scheduler'

NEEDS_BUILDING = 'Needs building'
BUILDING = 'Currently building'
COPY_ARCHIVE_SCORE_PENALTY = 2600
# Number of minutes to wait between scheduling runs.
SCHEDULE_PERIOD = 5


def determine_builder_capacity(lp, args):
    """Find how many builders to use for copy builds by processor."""
    capacity = {}
    for processor in args.processors:
        queue = [
            builder for builder in lp.builders.getBuildersForQueue(
                processor='/+processors/%s' % processor, virtualized=True)
            if builder.active]
        max_capacity = len(queue)
        capacity[processor] = round(max_capacity * args.builder_ratio)
        # Make sure at least one builder is used.
        if capacity[processor] == 0:
            capacity[processor] = 1
        logging.info(
            'Will use %d out of %d %s builders', capacity[processor],
            max_capacity, processor)
    return capacity


def get_archive_used_builders_capacity(archive):
    """Return the number of builds currently being done for the archive."""
    capacity = defaultdict(int)
    building = archive.getBuildRecords(build_state=BUILDING)
    for build in building:
        capacity[build.arch_tag] += 1
    return capacity


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--lp-instance', default='production', dest='lp_instance',
        help="Select the Launchpad instance to run against. Defaults to "
        "'production'.")
    parser.add_argument(
        '-v', '--verbose', default=0, action='count', dest='verbose',
        help="Increase verbosity of the script. -v prints info messages; "
        "-vv will print debug messages.")
    parser.add_argument(
        '-c', '--credentials', default=None, action='store',
        dest='credentials',
        help="Use the OAuth credentials in FILE instead of the desktop "
        "one.", metavar='FILE')
    parser.add_argument(
        '-d', '--distribution', default='ubuntu', action='store',
        dest='distribution',
        help="The archive distribution. Defaults to 'ubuntu'.")
    parser.add_argument(
        '-p', '--processor', action='append', dest='processors',
        help="The processor for which to schedule builds. "
        "Defaults to i386 and amd64.")
    parser.add_argument(
        '-r', '--ratio', default=0.25, action='store', type=float,
        dest='builder_ratio',
        help="The ratio of builders that you want to use for the copy "
        "builds. Defaults to 25%% of the available builders.")
    parser.add_argument('copy_archive_name', help='Name of copy archive')
    args = parser.parse_args()

    if args.verbose >= 2:
        log_level = logging.DEBUG
    elif args.verbose == 1:
        log_level = logging.INFO
    else:
        log_level = logging.WARNING
    logging.basicConfig(level=log_level)

    if args.builder_ratio >= 1 or args.builder_ratio < 0:
        parser.error(
            'ratio should be a float between 0 and 1: %s' %
            args.builder_ratio)

    if not args.processors:
        args.processors = ['amd64', 'i386']

    lp = Launchpad.login_with(
        API_NAME, args.lp_instance,
        credentials_file=args.credentials,
        version='devel')

    try:
        distribution = lp.distributions[args.distribution]
    except KeyError:
        parser.error('unknown distribution: %s' % args.distribution)

    archive = distribution.getArchive(name=args.copy_archive_name)
    if archive is None:
        parser.error('unknown archive: %s' % args.copy_archive_name)

    iteration = 0
    while True:
        # On the first iteration - and then on every fifth scheduling run -
        # recompute the available capacity.
        if (iteration % 5) == 0:
            capacity = determine_builder_capacity(lp, args)
        iteration += 1

        pending_builds = archive.getBuildRecords(build_state=NEEDS_BUILDING)
        logging.debug('Found %d pending builds.' % len(pending_builds))
        if len(pending_builds) == 0:
            logging.info('No more builds pending. We are done.')
            break

        used_capacity = get_archive_used_builders_capacity(archive)

        # For each processor, rescore up as many builds as we have
        # capacity for.
        for processor in args.processors:
            builds_to_rescore = (
                capacity[processor] - used_capacity.get(processor, 0))
            logging.debug(
                'Will try to rescore %d %s builds', builds_to_rescore,
                processor)
            for build in pending_builds:
                if builds_to_rescore <= 0:
                    break

                if build.arch_tag != processor:
                    continue

                if build.score < 0:
                    # Only rescore builds that still look like they have
                    # the negative copy archive penalty applied.
                    logging.info('Rescoring %s' % build.title)
                    # This should make them considered like a regular build.
                    build.rescore(
                        score=build.score + COPY_ARCHIVE_SCORE_PENALTY)
                else:
                    logging.debug('%s already rescored', build.title)

                # If the score was already above 0, it was probably
                # rescored already; count it against our limit anyway.
                builds_to_rescore -= 1

        # Reschedule in a while.
        logging.debug('Sleeping for %d minutes.', SCHEDULE_PERIOD)
        time.sleep(SCHEDULE_PERIOD * 60)


if __name__ == '__main__':
    main()
@ -0,0 +1,258 @@
#! /usr/bin/python

# Copyright (C) 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Copy package publication records."""

from __future__ import print_function

import argparse
import sys

from launchpadlib.errors import HTTPError
from launchpadlib.launchpad import Launchpad
try:
    from ubuntutools.question import YesNoQuestion
except ImportError:
    print("No ubuntutools installed: sudo apt-get install ubuntu-dev-tools")
    sys.exit(1)

import lputils


def find_publications(args, package):
    source = lputils.find_latest_published_source(args, package)
    yield source, source.source_package_version

    if args.include_binaries:
        for binary in source.getPublishedBinaries():
            yield binary, binary.binary_package_version


def copy_packages(args):
    ret = True

    for package in args.packages:
        print("Copy candidates:")

        try:
            source = lputils.find_latest_published_source(args, package)
        except lputils.PackageMissing as error:
            print(error)
            if args.skip_missing:
                print('Skipping')
                continue
            else:
                # Bail with a non-zero exit code.
                return False
        print("\t%s" % source.display_name)
        num_copies = 1

        if args.include_binaries:
            for binary in source.getPublishedBinaries():
                print("\t%s" % binary.display_name)
                num_copies += 1

        print("Candidate copy target: %s" % args.destination.archive)
        if args.sponsoree:
            print("Sponsored for: %s" % args.sponsoree)
        if args.dry_run:
            print("Dry run; no packages copied.")
        else:
            if not args.confirm_all:
                if YesNoQuestion().ask("Copy", "no") == "no":
                    continue

            try:
                args.destination.archive.copyPackage(
                    source_name=package, version=source.source_package_version,
                    from_archive=args.archive,
                    from_series=args.series.name,
                    from_pocket=args.pocket,
                    to_series=args.destination.series.name,
                    to_pocket=args.destination.pocket,
                    include_binaries=args.include_binaries,
                    unembargo=args.unembargo,
                    auto_approve=args.auto_approve,
                    silent=args.silent,
                    sponsored=args.sponsoree)

                print("%d %s requested." % (
                    num_copies, "copy" if num_copies == 1 else "copies"))
            except HTTPError as e:
                print(e.content, file=sys.stderr)
                ret = False

    return ret


def main():
    parser = argparse.ArgumentParser(
        epilog=lputils.ARCHIVE_REFERENCE_DESCRIPTION)
    parser.add_argument(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_argument(
        "-n", "--dry-run", default=False, action="store_true",
        help="only show copies that would be performed")
    parser.add_argument(
        "-y", "--confirm-all", default=False, action="store_true",
        help="do not ask for confirmation")
    parser.add_argument(
        "--from", metavar="ARCHIVE", dest="archive",
        help="copy from ARCHIVE (default: ubuntu)")
    parser.add_argument(
        "-s", "--suite", "--from-suite", metavar="SUITE",
        help="copy from SUITE (default: development release pocket)")
    parser.add_argument(
        "--to", metavar="ARCHIVE",
        help="copy to ARCHIVE (default: copy from archive)")
    parser.add_argument(
        "--to-suite", metavar="SUITE",
        help="copy to SUITE (default: copy from suite)")
    parser.add_argument(
        "-e", "--version",
        metavar="VERSION", help="package version (default: current version)")
    parser.add_argument(
        "-b", "--include-binaries", default=False, action="store_true",
        help="copy related binaries")
    parser.add_argument(
        "--unembargo", default=False, action="store_true",
        help="allow copying from a private archive to a public archive")
    parser.add_argument(
        "--auto-approve", default=False, action="store_true",
        help="automatically approve copy (requires queue admin permissions)")
    parser.add_argument(
        "--silent", default=False, action="store_true",
        help="suppress mail notifications (requires queue admin permissions)")
    parser.add_argument(
        "--force-same-destination", default=False, action="store_true",
        help=(
            "force copy when source == destination (e.g. when reverting to "
            "a previous version in the same suite)"))
    parser.add_argument(
        "--skip-missing", default=False, action="store_true",
        help=(
            "When a package cannot be copied, normally this script exits "
            "with a non-zero status. With --skip-missing instead, the "
            "error is printed and copying continues."))
    parser.add_argument(
        "--sponsor", metavar="USERNAME", dest="sponsoree", default=None,
        help="Sponsor the sync for USERNAME (a Launchpad username).")

    # Deprecated in favour of --to and --from.
    parser.add_argument(
        "-d", "--distribution", default="ubuntu", help=argparse.SUPPRESS)
    parser.add_argument("-p", "--ppa", help=argparse.SUPPRESS)
    parser.add_argument("--ppa-name", help=argparse.SUPPRESS)
    parser.add_argument(
        "-j", "--partner", default=False, action="store_true",
        help=argparse.SUPPRESS)
    parser.add_argument(
        "--to-primary", default=False, action="store_true",
        help=argparse.SUPPRESS)
    parser.add_argument("--to-distribution", help=argparse.SUPPRESS)
    parser.add_argument("--to-ppa", help=argparse.SUPPRESS)
    parser.add_argument("--to-ppa-name", help=argparse.SUPPRESS)
    parser.add_argument(
        "--to-partner", default=False, action="store_true",
        help=argparse.SUPPRESS)

    parser.add_argument(
        "packages", metavar="package", nargs="+",
        help="name of package to copy")

    args = parser.parse_args()

    args.launchpad = Launchpad.login_with(
        "copy-package", args.launchpad_instance, version="devel")
    args.destination = argparse.Namespace()
    args.destination.launchpad = args.launchpad
    args.destination.suite = args.to_suite or args.suite

    if args.archive or args.to:
        # Use modern single-option archive references.
        if ((args.distribution and args.distribution != u'ubuntu') or
                args.ppa or args.ppa_name or args.partner or
                args.to_distribution or args.to_ppa or
                args.to_ppa_name or args.to_partner):
            parser.error(
                "cannot use --to/--from and the deprecated archive selection "
                "options together")
        args.destination.archive = args.to or args.archive
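        # The reference just assigned looks like, e.g., "ubuntu",
        # "ubuntu/partner" or "~someuser/ubuntu/some-ppa" (illustrative).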
    else:
        # Use the deprecated four-option archive specifiers.
        if args.ppa and args.partner:
            parser.error(
                "cannot copy from partner archive and PPA simultaneously")
        if args.to_ppa and args.to_partner:
            parser.error(
                "cannot copy to partner archive and PPA simultaneously")

        args.destination.distribution = (
            args.to_distribution or args.distribution)
        args.destination.ppa = args.to_ppa
        args.destination.ppa_name = args.to_ppa_name
        args.destination.partner = args.to_partner

        # In cases where the source is specified but the destination is not,
        # default to destination = source.
        if (args.ppa is not None and args.to_ppa is None and
                not args.to_primary and not args.destination.partner):
            args.destination.ppa = args.ppa
        if (args.ppa_name is not None and args.to_ppa_name is None and
                args.destination.ppa is not None):
            args.destination.ppa_name = args.ppa_name
        if (args.partner and not args.destination.partner and
                not args.ppa):
            args.destination.partner = args.partner

        if args.to_primary and args.to_ppa_name is not None:
            parser.error(
                "--to-ppa-name option set for copy to primary archive")

    lputils.setup_location(args)
    lputils.setup_location(args.destination)

    if args.archive.private and not args.destination.archive.private:
        if not args.unembargo:
            parser.error(
                "copying from a private archive to a public archive requires "
                "the --unembargo option")

    # TODO: some equivalent of canModifySuite check?

    if (not args.force_same_destination and
            args.distribution == args.destination.distribution and
            args.suite == args.destination.suite and
            args.pocket == args.destination.pocket and
            args.archive.reference == args.destination.archive.reference):
        parser.error("copy destination must differ from source")

    if args.sponsoree:
        try:
            args.sponsoree = args.launchpad.people[args.sponsoree]
        except KeyError:
            parser.error(
                "Person to sponsor not found: %s" % args.sponsoree)

    if copy_packages(args):
        return 0
    else:
        return 1


if __name__ == '__main__':
    sys.exit(main())
@ -0,0 +1,116 @@
#!/usr/bin/python

# Copyright (C) 2011, 2012 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@canonical.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

'''Copy a kernel from the kernel team's PPA to -proposed.

USAGE:
   copy-proposed-kernel [--security] <release> <sourcepackage>
'''

from __future__ import print_function

import argparse
import sys

from launchpadlib.launchpad import Launchpad


parser = argparse.ArgumentParser(
    description='Copy a proposed kernel to the appropriate archive pocket')
parser.add_argument(
    '--dry-run', action='store_true',
    help='Do everything but actually copy the package')
parser.add_argument(
    '--security', '-S', action='store_true',
    help='Copy from the kernel security PPA')
parser.add_argument(
    '--security2', action='store_true',
    help='Copy from the kernel security PPA2')
parser.add_argument(
    '--esm', '-E', action='store_true',
    help='Copy from the kernel ESM PPA and to the kernel ESM proposed PPA')
parser.add_argument(
    '--no-auto', action='store_true',
    help='Turn off automatic detection of ESM et al based on series')
parser.add_argument(
    'series', action='store', help='The series the source package is in')
parser.add_argument(
    'source', action='store', help='The source package name')

args = parser.parse_args()

to = 'ubuntu'
ppa_name = '~canonical-kernel-team/ubuntu/ppa'
security = False

# If we are allowed to intuit destinations, do so:
# 1) precise is now destined for the ESM PPAs
if not args.no_auto:
    if args.series == 'precise' and not args.esm:
        print("NOTE: directing copy from and to ESM for precise")
        args.esm = True

if args.esm:
    ppa_name = '~canonical-kernel-esm/ubuntu/ppa'
    to = '~canonical-kernel-esm/ubuntu/proposed'
    to_pocket = 'release'
if args.security:
    ppa_name = '~canonical-kernel-security-team/ubuntu/ppa'
    if not args.esm:
        security = True
    else:
        ppa_name = '~canonical-kernel-security-team/ubuntu/esm'
if args.security2:
    ppa_name = '~canonical-kernel-security-team/ubuntu/ppa2'
    if not args.esm:
        security = True

(release, pkg) = (args.series, args.source)

launchpad = Launchpad.login_with(
    'ubuntu-archive-tools', 'production', version='devel')
ubuntu = launchpad.distributions['ubuntu']
distro_series = ubuntu.getSeries(name_or_version=release)
kernel_ppa = launchpad.archives.getByReference(
    reference=ppa_name)

# get the current version in the PPA for that series
versions = kernel_ppa.getPublishedSources(
    source_name=pkg, exact_match=True, status='Published', pocket='Release',
    distro_series=distro_series)
assert versions.total_size == 1
version = versions[0].source_package_version

include_binaries = (pkg not in ('debian-installer',)
                    and not pkg.startswith('linux-signed'))

# Grab a reference to the 'to' archive and select a pocket.
to_archive = launchpad.archives.getByReference(reference=to)
if to == 'ubuntu':
    to_pocket = 'proposed'
else:
    to_pocket = 'release'

print("""Copying {}/{}:
 From: {} release
 To: {} {}""".format(pkg, version, kernel_ppa, to_archive, to_pocket))

if args.dry_run:
    print("Dry run; no packages copied.")
    sys.exit(0)

# Finally ready to actually copy this.
to_archive.copyPackage(
    from_archive=kernel_ppa, include_binaries=include_binaries,
    source_name=pkg, to_series=release, to_pocket=to_pocket, version=version,
    auto_approve=True, unembargo=security)

# TODO: adjust this script to use find-bin-overrides or rewrite
# find-bin-overrides to use lpapi and use it here.
print('''
IMPORTANT: Please verify the overrides are correct for this source package.
Failure to do so may result in uninstallability when it is ultimately copied to
-updates/-security. lp:ubuntu-qa-tools/security-tools/find-bin-overrides can
help with this.
''')
@ -0,0 +1,289 @@
#! /usr/bin/env python

from __future__ import print_function

import atexit
from collections import namedtuple
import gzip
import optparse
import os
import re
import shutil
import subprocess
import tempfile
try:
    from urllib.parse import unquote
    from urllib.request import urlretrieve
except ImportError:
    from urllib import unquote, urlretrieve

import apt_pkg
from launchpadlib.launchpad import Launchpad


# from dak, more or less
re_no_epoch = re.compile(r"^\d+:")
re_strip_revision = re.compile(r"-[^-]+$")
re_changelog_versions = re.compile(r"^\w[-+0-9a-z.]+ \(([^\(\) \t]+)\)")
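# Illustrative examples (not from the original source):
#   re_no_epoch turns "1:2.30-6" into "2.30-6"; re_strip_revision turns
#   "2.30-6" into "2.30"; re_changelog_versions captures "2.30-6" from a
#   changelog line such as "hello (2.30-6) unstable; urgency=medium".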

default_mirrors = ":".join([
    '/home/ubuntu-archive/mirror/ubuntu',
    '/srv/archive.ubuntu.com/ubuntu',
    ])
tempdir = None

series_by_name = {}


def ensure_tempdir():
    global tempdir
    if not tempdir:
        tempdir = tempfile.mkdtemp(prefix='copy-report')
        atexit.register(shutil.rmtree, tempdir)


def decompress_open(tagfile):
    if tagfile.startswith('http:') or tagfile.startswith('ftp:'):
        url = tagfile
        tagfile = urlretrieve(url)[0]

    if tagfile.endswith('.gz'):
        ensure_tempdir()
        decompressed = tempfile.mktemp(dir=tempdir)
        fin = gzip.GzipFile(filename=tagfile)
        with open(decompressed, 'wb') as fout:
            fout.write(fin.read())
        return open(decompressed, 'r')
    else:
        return open(tagfile, 'r')


Section = namedtuple("Section", ["version", "directory", "files"])


def tagfiletodict(tagfile):
    suite = {}
    for section in apt_pkg.TagFile(decompress_open(tagfile)):
        files = [s.strip().split()[2] for s in section["Files"].split('\n')]
        suite[section["Package"]] = Section(
            version=section["Version"], directory=section["Directory"],
            files=files)
    return suite


def find_dsc(options, pkg, section):
    dsc_filename = [s for s in section.files if s.endswith('.dsc')][0]
    for mirror in options.mirrors:
        path = '%s/%s/%s' % (mirror, section.directory, dsc_filename)
        if os.path.exists(path):
            yield path
    ensure_tempdir()
    spph = options.archive.getPublishedSources(
        source_name=pkg, version=section.version, exact_match=True)[0]
    outdir = tempfile.mkdtemp(dir=tempdir)
    filenames = []
    for url in spph.sourceFileUrls():
        filename = os.path.join(outdir, unquote(os.path.basename(url)))
        urlretrieve(url, filename)
        filenames.append(filename)
    yield [s for s in filenames if s.endswith('.dsc')][0]


class BrokenSourcePackage(Exception):
    pass


def get_changelog_versions(pkg, dsc, version):
    ensure_tempdir()

    upstream_version = re_no_epoch.sub('', version)
    upstream_version = re_strip_revision.sub('', upstream_version)

    with open(os.devnull, 'w') as devnull:
        ret = subprocess.call(
            ['dpkg-source', '-q', '--no-check', '-sn', '-x', dsc],
            stdout=devnull, cwd=tempdir)

    # It's in the archive, so these assertions must hold.
    if ret != 0:
        raise BrokenSourcePackage(dsc)

    unpacked = '%s/%s-%s' % (tempdir, pkg, upstream_version)
    assert os.path.isdir(unpacked)
    changelog_path = '%s/debian/changelog' % unpacked
    assert os.path.exists(changelog_path)

    with open(changelog_path) as changelog:
        versions = set()
        for line in changelog:
            m = re_changelog_versions.match(line)
            if m:
                versions.add(m.group(1))

    shutil.rmtree(unpacked)

    return versions


def descended_from(options, pkg, section1, section2):
    if apt_pkg.version_compare(section1.version, section2.version) <= 0:
        return False
    exception = None
    for dsc in find_dsc(options, pkg, section1):
        try:
            versions = get_changelog_versions(pkg, dsc, section1.version)
        except BrokenSourcePackage as e:
            # Save the exception: "except ... as" unbinds its target when
            # the block exits, so it cannot be re-raised directly below.
            exception = e
            continue
        # section1 descends from section2 if the older version appears in
        # the newer source package's changelog.
        return section2.version in versions
    raise exception


Candidate = namedtuple(
    "Candidate", ["package", "suite1", "suite2", "version1", "version2"])


def get_series(options, name):
    if name not in series_by_name:
        series_by_name[name] = options.distro.getSeries(name_or_version=name)
    return series_by_name[name]


def already_copied(options, candidate):
    if "-" in candidate.suite2:
        series, pocket = candidate.suite2.split("-", 1)
        pocket = pocket.title()
    else:
        series = candidate.suite2
        pocket = "Release"
    series = get_series(options, series)
    pubs = options.archive.getPublishedSources(
        source_name=candidate.package, version=candidate.version1,
        exact_match=True, distro_series=series, pocket=pocket)
    for pub in pubs:
        if pub.status in ("Pending", "Published"):
            return True
    return False


def copy(options, candidate):
    if "-" in candidate.suite2:
        to_series, to_pocket = candidate.suite2.split("-", 1)
        to_pocket = to_pocket.title()
    else:
        to_series = candidate.suite2
        to_pocket = "Release"
    options.archive.copyPackage(
        source_name=candidate.package, version=candidate.version1,
        from_archive=options.archive, to_pocket=to_pocket, to_series=to_series,
        include_binaries=True, auto_approve=True)


def candidate_string(candidate):
    string = ('copy-package -y -b -s %s --to-suite %s -e %s %s' %
              (candidate.suite1, candidate.suite2, candidate.version1,
               candidate.package))
    if candidate.version2 is not None:
        string += ' # %s: %s' % (candidate.suite2, candidate.version2)
    return string
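
# candidate_string produces lines like the following (illustrative):
#   copy-package -y -b -s disco-security --to-suite disco-updates -e 2.0-1 foo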


def main():
    apt_pkg.init_system()

    parser = optparse.OptionParser(usage="usage: %prog [options] [suites]")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "--quick", action="store_true", help="don't examine changelogs")
    parser.add_option(
        "--copy-safe", action="store_true",
        help="automatically copy safe candidates")
    parser.add_option(
        "--mirrors", default=default_mirrors,
        help="colon-separated list of local mirrors")
    options, args = parser.parse_args()

    options.launchpad = Launchpad.login_with(
        "copy-report", options.launchpad_instance, version="devel")
    options.distro = options.launchpad.distributions["ubuntu"]
    options.archive = options.distro.main_archive
    options.mirrors = options.mirrors.split(":")

    if args:
        suites = args
    else:
        suites = reversed([
            series.name
            for series in options.launchpad.distributions["ubuntu"].series
            if series.status in ("Supported", "Current Stable Release")])

    yes = []
    maybe = []
    no = []

    for suite in suites:
        for component in 'main', 'restricted', 'universe', 'multiverse':
            tagfile1 = '%s/dists/%s-security/%s/source/Sources.gz' % (
                options.mirrors[0], suite, component)
            tagfile2 = '%s/dists/%s-updates/%s/source/Sources.gz' % (
                options.mirrors[0], suite, component)
            name1 = '%s-security' % suite
            name2 = '%s-updates' % suite

            suite1 = tagfiletodict(tagfile1)
            suite2 = tagfiletodict(tagfile2)

            for package in sorted(suite1):
                section1 = suite1[package]
                section2 = suite2.get(package)
                if (section2 is None or
                        (not options.quick and
                         descended_from(options, package, section1, section2))):
                    candidate = Candidate(
                        package=package, suite1=name1, suite2=name2,
                        version1=section1.version, version2=None)
                    if not already_copied(options, candidate):
                        yes.append(candidate)
                elif apt_pkg.version_compare(
                        section1.version, section2.version) > 0:
                    candidate = Candidate(
                        package=package, suite1=name1, suite2=name2,
                        version1=section1.version, version2=section2.version)
                    if already_copied(options, candidate):
                        pass
                    elif not options.quick:
                        no.append(candidate)
                    else:
                        maybe.append(candidate)

    if yes:
        print("The following packages can be copied safely:")
        print("--------------------------------------------")
        print()
        for candidate in yes:
            print(candidate_string(candidate))
        print()

        if options.copy_safe:
            for candidate in yes:
                copy(options, candidate)

    if maybe:
        print("Check that these packages are descendants before copying:")
        print("---------------------------------------------------------")
        print()
        for candidate in maybe:
            print('#%s' % candidate_string(candidate))
        print()

    if no:
        print("The following packages need to be merged by hand:")
        print("-------------------------------------------------")
        print()
        for candidate in no:
            print('#%s' % candidate_string(candidate))
        print()


if __name__ == '__main__':
    main()
@ -0,0 +1,47 @@
#!/bin/sh
set -e

# Copyright (C) 2009, 2010, 2011 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@canonical.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# Run archive-cruft-check and run checkrdepends on every NBS package.

MIRROR=$HOME/mirror
DISTRIBUTION="${DISTRIBUTION:-ubuntu}"
RELEASE="${RELEASE:-disco}"
OUTDIR="${OUTDIR:-$HOME/public_html/NBS}"
OUTFILE="${OUTFILE:-$HOME/public_html/nbs.html}"

CURBINS=`zgrep -h ^Binary: "$MIRROR/$DISTRIBUTION/dists/$RELEASE"/*/source/Sources.gz | cut -f 2- -d\ |sed 's/,[[:space:]]*/\n/g'`

D=`mktemp -d`
trap "rm -rf $D" 0 2 3 5 10 13 15
chmod 755 $D

CHECK=
for i in $(archive-cruft-check -d "$DISTRIBUTION" -s "$RELEASE" "$MIRROR" 2>&1 | grep '^ *o ' | sed 's/^.*://; s/,//g'); do
    if echo "$CURBINS" | fgrep -xq $i; then
        echo "$i" >> $D/00FTBFS
    else
        CHECK="$CHECK $i"
    fi
done
checkrdepends -B "$MIRROR/$DISTRIBUTION" -s $RELEASE -b -d "$D" $CHECK

rsync -a --delete "$D/" "$OUTDIR/"

nbs-report -d "$DISTRIBUTION" -s "$RELEASE" --csv "${OUTFILE%.html}.csv" \
    "$OUTDIR/" >"$OUTFILE.new" && \
    mv "$OUTFILE.new" "$OUTFILE"
@ -0,0 +1,113 @@
#! /usr/bin/python

# Copyright (C) 2013 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Demote packages to proposed pocket.

This is useful in the case where a package fails to build or is otherwise
broken, but we don't want to remove it from the archive permanently and
would be happy to take a fix by way of a sync from Debian or similar. In
cases where the package comes from Debian, make sure that any demotion to
proposed is accompanied by a Debian bug report.

This is analogous to removing a package from Debian testing.
"""

from __future__ import print_function

from optparse import OptionParser
import sys

from launchpadlib.launchpad import Launchpad
from ubuntutools.question import YesNoQuestion

import lputils


def demote(options, packages):
    print("Demoting packages to %s-proposed:" % options.suite)
    try:
        demotables = []
        for package in packages:
            source = lputils.find_latest_published_source(options, package)
            demotables.append(source)
            print("\t%s" % source.display_name)
    except lputils.PackageMissing as message:
        print("%s. Exiting." % message)
        sys.exit(1)
    print("Comment: %s" % options.comment)

    if options.dry_run:
        print("Dry run; no packages demoted.")
    else:
        if not options.confirm_all:
            if YesNoQuestion().ask("Demote", "no") == "no":
                return

        for source in demotables:
            options.archive.copyPackage(
                source_name=source.source_package_name,
                version=source.source_package_version,
                from_archive=options.archive,
                from_series=options.series.name, from_pocket="Release",
                to_series=options.series.name, to_pocket="Proposed",
                include_binaries=True, auto_approve=True)
            if not options.confirm_all:
                if YesNoQuestion().ask(
                        "Remove %s from release" % source.source_package_name,
                        "no") == "no":
                    continue
            source.requestDeletion(removal_comment=options.comment)

        print("%d %s successfully demoted." % (
            len(demotables),
            "package" if len(demotables) == 1 else "packages"))


def main():
    parser = OptionParser(
        usage='usage: %prog -m "comment" [options] package [...]')
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-n", "--dry-run", default=False, action="store_true",
        help="only show demotions that would be performed")
    parser.add_option(
        "-y", "--confirm-all", default=False, action="store_true",
        help="do not ask for confirmation")
    parser.add_option(
        "-d", "--distribution", default="ubuntu",
        metavar="DISTRIBUTION", help="demote from DISTRIBUTION")
    parser.add_option(
        "-s", "--suite", metavar="SUITE", help="demote from SUITE")
    parser.add_option(
        "-e", "--version",
        metavar="VERSION", help="package version (default: current version)")
    parser.add_option("-m", "--comment", help="demotion comment")
    options, args = parser.parse_args()

    options.launchpad = Launchpad.login_with(
        "demote-to-proposed", options.launchpad_instance, version="devel")
    lputils.setup_location(options)

    if options.comment is None:
        parser.error("You must provide a comment/reason for all demotions.")

    demote(options, args)


if __name__ == '__main__':
    main()
@ -0,0 +1,529 @@
#! /usr/bin/python

# Copyright (C) 2014 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# Requires germinate >= 2.18.

"""Copy a subset of one distribution into a derived distribution."""

from __future__ import print_function

import atexit
from collections import OrderedDict
from contextlib import closing, contextmanager
import io
import logging
from optparse import OptionParser, Values
import os
import shutil
import subprocess
import sys
import tempfile
import time
try:
    from urllib.request import urlopen
except ImportError:
    from urllib2 import urlopen

import apt_pkg
from dateutil import parser as dateutil_parser
from germinate.archive import TagFile
from germinate.germinator import Germinator
from germinate.seeds import Seed, SeedError, SeedStructure
from launchpadlib.errors import HTTPError
from launchpadlib.launchpad import Launchpad
import pytz
from ubuntutools.question import YesNoQuestion

import lputils


_bzr_cache_dir = None


@contextmanager
def open_url_as_text(url):
    with closing(urlopen(url)) as raw:
        with closing(io.BytesIO(raw.read())) as binary:
            with closing(io.TextIOWrapper(binary)) as text:
                yield text


class ManifestError(Exception):
    pass


class TimeTravellingSeed(Seed):
    def __init__(self, options, *args):
        self.options = options
        super(TimeTravellingSeed, self).__init__(*args, bzr=True)

    def _bzr_revision_at_date(self, branch_url, date_obj):
        """Work out the bzr revision of a branch at a particular date.

        Unfortunately, bzr's date: revisionspec is unhelpful for this, as it
        asks for the first revision *after* the given date, and fails if the
        given date is after the last revision on the branch. We could
        probably do this properly with bzrlib, but life's too short and this
        will do.

        This assumes all sorts of things, like the exact ordering of field
        names in log output. Since bzr is no longer being heavily
        developed, hopefully this won't be a problem until we can switch the
        seeds to git ...
        """
        command = ["bzr", "log", branch_url]
        bzr_log = subprocess.Popen(
            command, stdout=subprocess.PIPE, universal_newlines=True)
        revno = None
        for line in bzr_log.stdout:
            line = line.rstrip("\n")
            if line.startswith("revno: "):
                revno = line[len("revno: "):].split(" ", 1)[0]
            elif line.startswith("timestamp: "):
                timestamp = dateutil_parser.parse(line[len("timestamp: "):])
                if timestamp < date_obj:
                    break
        else:
            revno = None
        bzr_log.stdout.close()
        bzr_log.wait()
        if revno is None:
            raise SeedError("No revision found at %s in %s" % (
                date_obj, branch_url))
        return revno

    def _open_seed(self, base, branch, name, bzr=False):
        if not bzr:
            raise Exception("Non-bzr-based time travel is not supported.")

        global _bzr_cache_dir
        if _bzr_cache_dir is None:
            _bzr_cache_dir = tempfile.mkdtemp(prefix="derive-distribution-")
            atexit.register(shutil.rmtree, _bzr_cache_dir, ignore_errors=True)

        path = os.path.join(base, branch)
        checkout = os.path.join(_bzr_cache_dir, branch)
        if not os.path.isdir(checkout):
            revno = self._bzr_revision_at_date(path, self.options.date)
            logging.info("Checking out %s at r%s" % (path, revno))
            command = [
                "bzr", "checkout", "--lightweight", "-r%s" % revno, path,
                checkout,
                ]
            status = subprocess.call(command)
            if status != 0:
                raise SeedError(
                    "Command failed with exit status %d:\n '%s'" % (
                        status, " ".join(command)))
        return open(os.path.join(checkout, name))


class TimeTravellingSeedStructure(SeedStructure):
    def __init__(self, options, *args, **kwargs):
        kwargs["bzr"] = True
        self.options = options
        super(TimeTravellingSeedStructure, self).__init__(*args, **kwargs)

    def make_seed(self, bases, branches, name, bzr=False):
        if not bzr:
            raise Exception("Non-bzr-based time travel is not supported.")
        return TimeTravellingSeed(self.options, bases, branches, name)


class TimeTravellingGerminator:
    apt_mirror = "http://people.canonical.com/~ubuntu-archive/apt-mirror.cgi"

    def __init__(self, options):
        self.options = options

    @property
    def components(self):
        return ["main", "restricted", "universe", "multiverse"]

    @property
    def mirror(self):
        if self.options.date is not None:
            timestamp = int(time.mktime(self.options.date.timetuple()))
            return "%s/%d" % (self.apt_mirror, timestamp)
        else:
            return self.apt_mirror
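
    # Illustrative: with --date set, the mirror URL becomes
    # "<apt_mirror>/<seconds since the epoch>", pointing germination at a
    # snapshot of the archive as it was at that moment.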
|
||||
|
||||
def makeSeedStructures(self, suite, flavours, extra_packages):
|
||||
series_name = suite.split("-")[0]
|
||||
if self.options.seed_source is None:
|
||||
seed_bases = None
|
||||
else:
|
||||
seed_bases = self.options.seed_source.split(",")
|
||||
structures = {}
|
||||
for flavour in flavours:
|
||||
try:
|
||||
structure = TimeTravellingSeedStructure(
|
||||
self.options, "%s.%s" % (flavour, series_name),
|
||||
seed_bases=seed_bases)
|
||||
if len(structure):
|
||||
extra_seed = []
|
||||
for extra_package in extra_packages:
|
||||
extra_seed.append(" * " + extra_package)
|
||||
if extra_seed:
|
||||
structure.add("extra-packages", extra_seed, "required")
|
||||
# Work around inability to specify extra packages
|
||||
# with no parent seeds.
|
||||
structure._inherit["extra-packages"] = []
|
||||
structures[flavour] = structure
|
||||
# TODO: We could save time later by mangling the
|
||||
# structure to remove seeds we don't care about.
|
||||
else:
|
||||
logging.warning(
|
||||
"Skipping empty seed structure for %s.%s",
|
||||
flavour, series_name)
|
||||
except SeedError as e:
|
||||
logging.warning(
|
||||
"Failed to fetch seeds for %s.%s: %s",
|
||||
flavour, series_name, e)
|
||||
return structures
|
||||
|
||||
def germinateArchFlavour(self, germinator, suite, arch, flavour, structure,
|
||||
seed_names):
|
||||
"""Germinate seeds on a single flavour for a single architecture."""
|
||||
germinator.plant_seeds(structure)
|
||||
germinator.grow(structure)
|
||||
germinator.add_extras(structure)
|
||||
|
||||
# Unfortunately we have to use several bits of Germinate internals
|
||||
# here. I promise not to change them under myself without notifying
|
||||
# myself.
|
||||
all_seeds = OrderedDict()
|
||||
for seed_name in seed_names:
|
||||
seed = germinator._get_seed(structure, seed_name)
|
||||
for inner_seed in germinator._inner_seeds(seed):
|
||||
if inner_seed.name not in all_seeds:
|
||||
all_seeds[inner_seed.name] = inner_seed
|
||||
if "extra-packages" in structure:
|
||||
seed = germinator._get_seed(structure, "extra-packages")
|
||||
for inner_seed in germinator._inner_seeds(seed):
|
||||
if inner_seed.name not in all_seeds:
|
||||
all_seeds[inner_seed.name] = inner_seed
|
||||
for seed in all_seeds.values():
|
||||
sources = seed._sourcepkgs | seed._build_sourcepkgs
|
||||
for source in sources:
|
||||
version = germinator._sources[source]["Version"]
|
||||
if (source in self._versions and
|
||||
self._versions[source] != version):
|
||||
# This requires manual investigation, as the resulting
|
||||
# derived distribution series can only have one version
|
||||
# of any given source package.
|
||||
raise Exception(
|
||||
"Conflicting source versions: seed %s/%s requires "
|
||||
"%s %s, but already present at version %s" % (
|
||||
flavour, seed, source, version,
|
||||
self._versions[source]))
|
||||
self._versions[source] = version
|
||||
|
||||
def checkImageManifest(self, germinator, arch, manifest_url):
|
||||
ok = True
|
||||
with open_url_as_text(manifest_url) as manifest:
|
||||
for line in manifest:
|
||||
try:
|
||||
package, version = line.split()
|
||||
if package.startswith("click:"):
|
||||
continue
|
||||
package = package.split(":", 1)[0]
|
||||
if package not in germinator._packages:
|
||||
raise ManifestError(
|
||||
"%s not found for %s (from %s)" % (
|
||||
package, arch, manifest_url))
|
||||
gpkg = germinator._packages[package]
|
||||
if gpkg["Version"] != version:
|
||||
raise ManifestError(
|
||||
"Found %s %s for %s, but wanted %s "
|
||||
"(from %s)" % (
|
||||
package, gpkg["Version"], arch, version,
|
||||
manifest_url))
|
||||
if gpkg["Source"] not in self._versions:
|
||||
raise ManifestError(
|
||||
"%s not copied (from %s)" % (
|
||||
gpkg["Source"], manifest_url))
|
||||
except ManifestError as e:
|
||||
logging.error(e.message)
|
||||
ok = False
|
||||
return ok
|
||||
|
||||
def germinateArch(self, suite, components, arch, flavours, structures):
|
||||
"""Germinate seeds on all flavours for a single architecture."""
|
||||
germinator = Germinator(arch)
|
||||
|
||||
# Read archive metadata.
|
||||
logging.info("Reading archive for %s/%s", suite, arch)
|
||||
archive = TagFile(suite, components, arch, self.mirror, cleanup=True)
|
||||
germinator.parse_archive(archive)
|
||||
|
||||
if self.options.all_packages:
|
||||
for source in germinator._sources:
|
||||
self._versions[source] = germinator._sources[source]["Version"]
|
||||
else:
|
||||
for flavour, seed_names in flavours.items():
|
||||
logging.info("Germinating for %s/%s/%s", flavour, suite, arch)
|
||||
self.germinateArchFlavour(
|
||||
germinator, suite, arch, flavour, structures[flavour],
|
||||
seed_names)
|
||||
|
||||
ok = True
|
||||
if self.options.check_image_manifest:
|
||||
for manifest_id in self.options.check_image_manifest:
|
||||
manifest_arch, manifest_url = manifest_id.split(":", 1)
|
||||
if arch != manifest_arch:
|
||||
continue
|
||||
if not self.checkImageManifest(germinator, arch, manifest_url):
|
||||
ok = False
|
||||
|
||||
return ok
|
||||
|
||||
def getVersions(self, full_seed_names, extra_packages):
|
||||
self._versions = {}
|
||||
|
||||
suite = self.options.suite
|
||||
components = self.components
|
||||
architectures = [
|
||||
a.architecture_tag for a in self.options.architectures]
|
||||
flavours = OrderedDict()
|
||||
for full_seed_name in full_seed_names:
|
||||
flavour, seed_name = full_seed_name.split("/")
|
||||
flavours.setdefault(flavour, []).append(seed_name)
|
||||
|
||||
if self.options.all_packages:
|
||||
structures = None
|
||||
else:
|
||||
logging.info("Reading seed structures")
|
||||
structures = self.makeSeedStructures(
|
||||
suite, flavours, extra_packages)
|
||||
if self.options.all_packages or structures:
|
||||
ok = True
|
||||
for arch in architectures:
|
||||
if not self.germinateArch(
|
||||
suite, components, arch, flavours, structures):
|
||||
ok = False
|
||||
if not ok:
|
||||
sys.exit(1)
|
||||
|
||||
return self._versions
|
||||
|
||||
|
||||
def retry_on_error(func, *args, **kwargs):
|
||||
# Since a failure partway through would be expensive to restart from
|
||||
# scratch, retry a few times before giving up.
|
||||
for i in range(3):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except HTTPError as e:
|
||||
print(e.content, file=sys.stderr)
|
||||
if i == 2:
|
||||
raise
|
||||
time.sleep(15)
|
||||
|
||||
|
||||
def derive_distribution(options, args):
|
||||
full_seed_names = [
|
||||
arg[len("seed:"):] for arg in args if arg.startswith("seed:")]
|
||||
if not full_seed_names and not options.all_packages:
|
||||
raise Exception(
|
||||
"You must specify at least one seed name (in the form "
|
||||
"seed:COLLECTION/NAME).")
|
||||
extra_packages = [arg for arg in args if not arg.startswith("seed:")]
|
||||
ttg = TimeTravellingGerminator(options)
|
||||
versions = ttg.getVersions(full_seed_names, extra_packages)
|
||||
|
||||
if options.excludes:
|
||||
for exclude in options.excludes:
|
||||
versions.pop(exclude, None)
|
||||
|
||||
# Skip anything we already have, to simplify incremental copies.
|
||||
original_versions = dict(versions)
|
||||
removable = {}
|
||||
newer = {}
|
||||
for spph in options.destination.archive.getPublishedSources(
|
||||
distro_series=options.destination.series,
|
||||
pocket=options.destination.pocket, status="Published"):
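|
||||
# Classify what the destination already publishes: drop exact matches
|
||||
# from the copy list, note sources we no longer want as removable, and
|
||||
# remember published versions newer than the target (those must be
|
||||
# deleted before copyPackage can rewind them).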
|
||||
source = spph.source_package_name
|
||||
if source not in versions:
|
||||
removable[source] = spph.source_package_version
|
||||
else:
|
||||
diff = apt_pkg.version_compare(
|
||||
versions[source], spph.source_package_version)
|
||||
if diff < 0:
|
||||
newer[source] = spph
|
||||
elif diff == 0:
|
||||
del versions[source]
|
||||
|
||||
print("Copy candidates:")
|
||||
for source, version in sorted(versions.items()):
|
||||
print("\t%s\t%s" % (source, version))
|
||||
print()
|
||||
|
||||
if newer:
|
||||
print("These packages previously had newer version numbers:")
|
||||
for source, spph in sorted(newer.items()):
|
||||
print(
|
||||
"\t%s\t%s -> %s" % (
|
||||
source, spph.source_package_version, versions[source]))
|
||||
print()
|
||||
|
||||
if removable:
|
||||
print("These packages could possibly be removed:")
|
||||
for source, version in sorted(removable.items()):
|
||||
print("\t%s\t%s" % (source, version))
|
||||
print()
|
||||
|
||||
if options.dry_run:
|
||||
print("Dry run; no packages copied.")
|
||||
else:
|
||||
if YesNoQuestion().ask("Copy", "no") == "no":
|
||||
return False
|
||||
|
||||
print("Setting packaging information ...")
|
||||
for source in sorted(versions.keys()):
|
||||
sp = options.series.getSourcePackage(name=source)
|
||||
if sp.productseries_link is not None:
|
||||
derived_sp = options.destination.series.getSourcePackage(
|
||||
name=source)
|
||||
if derived_sp.productseries_link is None:
|
||||
retry_on_error(
|
||||
derived_sp.setPackaging,
|
||||
productseries=sp.productseries_link)
|
||||
print(".", end="")
|
||||
sys.stdout.flush()
|
||||
print()
|
||||
|
||||
# Wouldn't it be lovely if we could do a single copyPackages call
|
||||
# with a giant dictionary of source package names to versions? As
|
||||
# it is we need to call copyPackage a few thousand times instead.
|
||||
archive = options.destination.archive
|
||||
for source, version in sorted(versions.items()):
|
||||
print("\t%s\t%s" % (source, version))
|
||||
if source in newer:
|
||||
retry_on_error(
|
||||
newer[source].requestDeletion,
|
||||
removal_comment=(
|
||||
"derive-distribution rewinding to %s" % version))
|
||||
retry_on_error(
|
||||
archive.copyPackage,
|
||||
source_name=source, version=version,
|
||||
from_archive=options.archive,
|
||||
to_pocket=options.destination.pocket,
|
||||
to_series=options.destination.series.name,
|
||||
include_binaries=True, auto_approve=True, silent=True)
|
||||
|
||||
print("Checking package sets ...")
|
||||
found_any_set = False
|
||||
for source_set in options.launchpad.packagesets.getBySeries(
|
||||
distroseries=options.series):
|
||||
sources = source_set.getSourcesIncluded(direct_inclusion=True)
|
||||
if set(sources) & set(original_versions):
|
||||
print("\t%s" % source_set.name)
|
||||
found_any_set = True
|
||||
if found_any_set:
|
||||
print(
|
||||
"A member of ~techboard needs to copy the above package sets "
|
||||
"to the new series.")
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def main():
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logging.getLogger("germinate").setLevel(logging.CRITICAL)
|
||||
apt_pkg.init()
|
||||
|
||||
parser = OptionParser(
|
||||
usage="usage: %prog --to-distribution distribution [options]")
|
||||
parser.add_option(
|
||||
"-l", "--launchpad", dest="launchpad_instance", default="production")
|
||||
parser.add_option(
|
||||
"-n", "--dry-run", default=False, action="store_true",
|
||||
help="only show actions that would be performed")
|
||||
parser.add_option(
|
||||
"-d", "--distribution", default="ubuntu",
|
||||
metavar="DISTRIBUTION", help="copy from DISTRIBUTION")
|
||||
parser.add_option("-s", "--suite", metavar="SUITE", help="copy from SUITE")
|
||||
parser.add_option(
|
||||
"-a", "--architecture", dest="architectures", action="append",
|
||||
metavar="ARCHITECTURE",
|
||||
help="architecture tag (may be given multiple times)")
|
||||
parser.add_option(
|
||||
"--to-distribution", metavar="DISTRIBUTION",
|
||||
help="copy to DISTRIBUTION")
|
||||
parser.add_option(
|
||||
"--to-suite", metavar="SUITE",
|
||||
help="copy to SUITE (default: copy from suite)")
|
||||
parser.add_option(
|
||||
"--date", metavar="DATE", help=(
|
||||
"copy from suite as it existed on DATE; assumes UTC if timezone "
|
||||
"not specified"))
|
||||
parser.add_option("--seed-source", help="fetch seeds from SOURCE")
|
||||
parser.add_option(
|
||||
"--check-image-manifest", action="append", metavar="ARCH:URL", help=(
|
||||
"ensure that all packages from the manifest at URL for "
|
||||
"architecture ARCH are copied (may be given multiple times)"))
|
||||
parser.add_option(
|
||||
"--exclude", dest="excludes", action="append", metavar="PACKAGE",
|
||||
help="don't copy PACKAGE (may be given multiple times)")
|
||||
parser.add_option(
|
||||
"--all-packages", default=False, action="store_true",
|
||||
help="copy all packages in source suite rather than germinating")
|
||||
options, args = parser.parse_args()
|
||||
|
||||
if not args and not options.all_packages:
|
||||
parser.error("You must specify some seeds or packages to copy.")
|
||||
|
||||
if options.launchpad_instance == "dogfood":
|
||||
# Work around old service root in some versions of launchpadlib.
|
||||
options.launchpad_instance = "https://api.dogfood.paddev.net/"
|
||||
options.launchpad = Launchpad.login_with(
|
||||
"derive-distribution", options.launchpad_instance, version="devel")
|
||||
lputils.setup_location(options)
|
||||
options.destination = Values()
|
||||
options.destination.launchpad = options.launchpad
|
||||
options.destination.distribution = options.to_distribution
|
||||
options.destination.suite = options.to_suite
|
||||
options.destination.architectures = [
|
||||
a.architecture_tag for a in options.architectures]
|
||||
|
||||
# In cases where source is specified, but destination is not, default to
|
||||
# destination = source.
|
||||
if options.destination.distribution is None:
|
||||
options.destination.distribution = options.distribution
|
||||
if options.destination.suite is None:
|
||||
options.destination.suite = options.suite
|
||||
|
||||
if options.date is not None:
|
||||
options.date = dateutil_parser.parse(options.date)
|
||||
if options.date.tzinfo is None:
|
||||
options.date = options.date.replace(tzinfo=pytz.UTC)
|
||||
|
||||
lputils.setup_location(options.destination)
|
||||
|
||||
if (options.distribution == options.destination.distribution and
|
||||
options.suite == options.destination.suite):
|
||||
parser.error("copy destination must differ from source")
|
||||
|
||||
if derive_distribution(options, args):
|
||||
return 0
|
||||
else:
|
||||
return 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
@ -0,0 +1,717 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2008, 2009, 2010, 2011, 2012 Canonical Ltd.
|
||||
# Copyright (C) 2010 Stéphane Graber <stgraber@stgraber.org>
|
||||
# Copyright (C) 2010 Michael Bienia <geser@ubuntu.com>
|
||||
# Copyright (C) 2011 Iain Lane <laney@ubuntu.com>
|
||||
# Copyright (C) 2011 Soren Hansen <soren@linux2go.dk>
|
||||
# Copyright (C) 2012 Stefano Rivera <stefanor@ubuntu.com>
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""Edit uploader permissions for the Ubuntu distro in Launchpad."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from optparse import OptionParser, SUPPRESS_HELP
|
||||
import sys
|
||||
|
||||
import launchpadlib.errors
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
import lputils
|
||||
|
||||
if sys.version < '3':
|
||||
input = raw_input
|
||||
|
||||
|
||||
CONSUMER_KEY = "edit-acl"
|
||||
|
||||
|
||||
def print_perms(perms, series=None):
|
||||
for perm in perms:
|
||||
if (series is not None and perm.distro_series_name is not None and
|
||||
series.name != perm.distro_series_name):
|
||||
continue
|
||||
desc = []
|
||||
desc.append("archive '%s'" % perm.archive.name)
|
||||
if perm.component_name:
|
||||
desc.append("component '%s'" % perm.component_name)
|
||||
if series:
|
||||
desc[-1] += ' in %s' % series
|
||||
if perm.package_set_name:
|
||||
desc.append("package set '%s' in %s" % (perm.package_set_name,
|
||||
perm.distro_series_name))
|
||||
if perm.source_package_name:
|
||||
desc.append("source package '%s'" % perm.source_package_name)
|
||||
if perm.pocket:
|
||||
desc.append("pocket '%s'" % perm.pocket)
|
||||
if perm.distro_series_name is not None:
|
||||
desc[-1] += " in %s" % perm.distro_series_name
|
||||
print("%s for %s: %s" % (perm.permission, perm.person.name,
|
||||
', '.join(desc)))
|
||||
|
||||
|
||||
def multiline_input(prompt):
|
||||
print(prompt)
|
||||
print("End with a line containing only a full-stop '.'")
|
||||
buf = []
|
||||
while True:
|
||||
line = input()
|
||||
if line == '.':
|
||||
return '\n'.join(buf)
|
||||
buf.append(line)
|
||||
|
||||
|
||||
def get_archive(options, launchpad):
|
||||
# We default to looking up by archive reference (ubuntu,
|
||||
# ubuntu/partner or ~owner/ubuntu/ppa).
|
||||
if options.archive is not None:
|
||||
archive = launchpad.archives.getByReference(reference=options.archive)
|
||||
if archive is not None:
|
||||
return archive
|
||||
|
||||
# But we also still support combining a distro name in -d and an
|
||||
# archive name or old PPA reference in -A (-d ubuntu,
|
||||
# -d ubuntu -A partner, or -d ubuntu -A owner/ppa).
|
||||
distro = launchpad.distributions[options.distro]
|
||||
if options.archive is None:
|
||||
return distro.main_archive
|
||||
else:
|
||||
if '/' in options.archive:
|
||||
owner, ppa_name = options.archive.split('/')
|
||||
return launchpad.people[owner].getPPAByName(
|
||||
distribution=distro, name=ppa_name)
|
||||
for archive in distro.archives:
|
||||
if archive.name == options.archive:
|
||||
return archive
|
||||
raise AssertionError("No such archive in Ubuntu: %s" % options.archive)
|
||||
|
||||
|
||||
def get_source_components(options, launchpad, archive, source):
|
||||
try:
|
||||
from debian import debian_support
|
||||
except ImportError:
|
||||
from debian_bundle import debian_support
|
||||
|
||||
args = {}
|
||||
if options.series:
|
||||
args['distro_series'] = options.series
|
||||
|
||||
newest = {}
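|
||||
# Track the newest published version of the source in each active
|
||||
# series, so we can report which component it is in.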
|
||||
for spph in archive.getPublishedSources(
|
||||
source_name=source, exact_match=True, status='Published', **args):
|
||||
if not spph.distro_series.active:
|
||||
continue
|
||||
new_version = debian_support.Version(spph.source_package_version)
|
||||
if (spph.distro_series.name not in newest or
|
||||
new_version > newest[spph.distro_series.name][0]):
|
||||
newest[spph.distro_series.name] = (new_version,
|
||||
spph.component_name)
|
||||
|
||||
for series in sorted(newest, key=lambda s: newest[s][0]):
|
||||
yield series, newest[series][1]
|
||||
|
||||
|
||||
permission_names = dict(upload='Archive Upload Rights',
|
||||
admin='Queue Administration Rights')
|
||||
|
||||
|
||||
def do_query(options):
|
||||
"""Query existing permissions and show on stdout."""
|
||||
if options.archive.self_link == options.distro.main_archive_link:
|
||||
archives = options.distro.archives
|
||||
else:
|
||||
archives = [options.archive]
|
||||
|
||||
if options.person:
|
||||
for person in options.person:
|
||||
if '@' in person:
|
||||
lp_person = launchpad.people.getByEmail(email=person)
|
||||
else:
|
||||
try:
|
||||
lp_person = launchpad.people[person]
|
||||
except KeyError:
|
||||
print("Person '%s' doesn't exist." % person)
|
||||
sys.exit(1)
|
||||
perms = []
|
||||
for archive in archives:
|
||||
perms.extend(archive.getPermissionsForPerson(
|
||||
person=lp_person))
|
||||
if options.acl_type:
|
||||
perm_name = permission_names[options.acl_type]
|
||||
perms = [p for p in perms if p.permission == perm_name]
|
||||
print("== All rights for %s ==" % lp_person.name)
|
||||
print_perms(perms, options.series)
|
||||
|
||||
if options.component:
|
||||
perms = []
|
||||
if not options.acl_type or options.acl_type == 'upload':
|
||||
for archive in archives:
|
||||
perms.extend(archive.getUploadersForComponent(
|
||||
component_name=options.component))
|
||||
if not options.acl_type or options.acl_type == 'admin':
|
||||
for archive in archives:
|
||||
perms.extend(archive.getQueueAdminsForComponent(
|
||||
component_name=options.component))
|
||||
print("== All rights for component '%s' ==" % options.component)
|
||||
print_perms(perms, options.series)
|
||||
|
||||
if options.packageset:
|
||||
for packageset in options.packageset:
|
||||
lp_set = launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=options.series)
|
||||
|
||||
perms = []
|
||||
for archive in archives:
|
||||
perms.extend(archive.getUploadersForPackageset(
|
||||
packageset=lp_set))
|
||||
print(("== All uploaders for package set '%s' in '%s' "
|
||||
"(owned by '%s') ==" %
|
||||
(packageset, options.series.name,
|
||||
lp_set.owner.display_name)))
|
||||
print_perms(perms, options.series)
|
||||
|
||||
sources = sorted(lp_set.getSourcesIncluded(direct_inclusion=True))
|
||||
if sources:
|
||||
print()
|
||||
print("== All source packages in package set '%s' "
|
||||
"in '%s' ==" % (packageset, options.series.name))
|
||||
for source in sources:
|
||||
print(source)
|
||||
child_sets = list(lp_set.setsIncluded(direct_inclusion=True))
|
||||
if child_sets:
|
||||
print()
|
||||
print("== All package sets in package set '%s' in '%s' ==" %
|
||||
(packageset, options.series.name))
|
||||
for child_set in child_sets:
|
||||
print(child_set.name)
|
||||
|
||||
if options.source:
|
||||
for source in options.source:
|
||||
perms = []
|
||||
perms_set = []
|
||||
for archive in archives:
|
||||
perms.extend(archive.getUploadersForPackage(
|
||||
source_package_name=source))
|
||||
perms_set.extend(archive.getPackagesetsForSource(
|
||||
sourcepackagename=source))
|
||||
print("== All uploaders for package '%s' ==" % source)
|
||||
print_perms(perms, options.series)
|
||||
print_perms(perms_set, options.series)
|
||||
for archive in archives:
|
||||
for series, component in get_source_components(
|
||||
options, launchpad, archive, source):
|
||||
perms_component = archive.getUploadersForComponent(
|
||||
component_name=component)
|
||||
print_perms(perms_component, series=series)
|
||||
|
||||
if options.pocket:
|
||||
perms = []
|
||||
if not options.acl_type or options.acl_type == 'upload':
|
||||
for archive in archives:
|
||||
perms.extend(archive.getUploadersForPocket(
|
||||
pocket=options.pocket))
|
||||
if not options.acl_type or options.acl_type == 'admin':
|
||||
for archive in archives:
|
||||
perms.extend(archive.getQueueAdminsForPocket(
|
||||
pocket=options.pocket))
|
||||
print("== All rights for pocket '%s' ==" % options.pocket)
|
||||
print_perms(perms, options.series)
|
||||
|
||||
if (not options.person and not options.component and
|
||||
not options.packageset and not options.source and
|
||||
not options.pocket):
|
||||
perms = []
|
||||
for archive in archives:
|
||||
perms.extend(archive.getAllPermissions())
|
||||
if options.acl_type:
|
||||
perm_name = permission_names[options.acl_type]
|
||||
perms = [p for p in perms if p.permission == perm_name]
|
||||
print("== All rights ==")
|
||||
print_perms(perms, options.series)
|
||||
|
||||
|
||||
def validate_add_delete_options(options, requires_person=True):
|
||||
if options.packageset and options.source:
|
||||
# Special options to manage package sets, bodged into this tool
|
||||
# since they aren't entirely inconvenient here.
|
||||
if options.component or options.person:
|
||||
print("-P <packageset> -s <source> cannot be used with a "
|
||||
"component or person as well")
|
||||
return False
|
||||
return True
|
||||
|
||||
if requires_person and not options.person:
|
||||
print("You must specify at least one person to (de-)authorise.")
|
||||
return False
|
||||
|
||||
count = 0
|
||||
if options.component:
|
||||
count += 1
|
||||
if options.packageset:
|
||||
count += 1
|
||||
if options.source:
|
||||
count += 1
|
||||
if options.pocket:
|
||||
count += 1
|
||||
if count > 1:
|
||||
print("You can only specify one of package set, source, component, "
|
||||
"or pocket")
|
||||
return False
|
||||
|
||||
if count == 0:
|
||||
print("You must specify one of package set, source, component, or "
|
||||
"pocket")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def do_add(options):
|
||||
"""Add a new permission."""
|
||||
if not validate_add_delete_options(options):
|
||||
return False
|
||||
|
||||
if options.packageset and options.source:
|
||||
for packageset in options.packageset:
|
||||
lp_set = launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=options.series)
|
||||
lp_set.addSources(names=options.source)
|
||||
print("Added:")
|
||||
for source in options.source:
|
||||
print(source)
|
||||
return
|
||||
|
||||
people = [launchpad.people[person] for person in options.person]
|
||||
|
||||
if options.source:
|
||||
for source in options.source:
|
||||
for person in people:
|
||||
perm = options.archive.newPackageUploader(
|
||||
person=person, source_package_name=source)
|
||||
print("Added:")
|
||||
print_perms([perm])
|
||||
return
|
||||
|
||||
if options.packageset:
|
||||
for packageset in options.packageset:
|
||||
lp_set = launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=options.series)
|
||||
for person in people:
|
||||
perm = options.archive.newPackagesetUploader(
|
||||
person=person, packageset=lp_set)
|
||||
print("Added:")
|
||||
print_perms([perm])
|
||||
return
|
||||
|
||||
if options.component:
|
||||
for person in people:
|
||||
if not options.acl_type or options.acl_type == 'upload':
|
||||
perm = options.archive.newComponentUploader(
|
||||
person=person, component_name=options.component)
|
||||
else:
|
||||
perm = options.archive.newQueueAdmin(
|
||||
person=person, component_name=options.component)
|
||||
print("Added:")
|
||||
print_perms([perm])
|
||||
return
|
||||
|
||||
if options.pocket:
|
||||
admin_kwargs = {}
|
||||
if options.series:
|
||||
admin_kwargs["distroseries"] = options.series
|
||||
for person in people:
|
||||
if not options.acl_type or options.acl_type == 'upload':
|
||||
perm = options.archive.newPocketUploader(
|
||||
person=person, pocket=options.pocket)
|
||||
else:
|
||||
perm = options.archive.newPocketQueueAdmin(
|
||||
person=person, pocket=options.pocket, **admin_kwargs)
|
||||
print("Added:")
|
||||
print_perms([perm])
|
||||
return
|
||||
|
||||
|
||||
def do_delete(options):
|
||||
"""Delete a permission."""
|
||||
# We kind of hacked packageset management into here.
|
||||
# Deleting packagesets doesn't require a person...
|
||||
requires_person = not (options.packageset and not options.source)
|
||||
if not validate_add_delete_options(options, requires_person):
|
||||
return False
|
||||
|
||||
if options.packageset and options.source:
|
||||
for packageset in options.packageset:
|
||||
lp_set = launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=options.series)
|
||||
lp_set.removeSources(names=options.source)
|
||||
print("Deleted:")
|
||||
for source in options.source:
|
||||
print(source)
|
||||
return
|
||||
|
||||
if options.packageset and not options.person:
|
||||
for packageset in options.packageset:
|
||||
lp_set = launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=options.series)
|
||||
uploaders = options.archive.getUploadersForPackageset(
|
||||
direct_permissions=True, packageset=lp_set)
|
||||
if len(uploaders) > 0:
|
||||
print("Cannot delete packageset with defined uploaders")
|
||||
print("Current uploaders:")
|
||||
for permission in uploaders:
|
||||
print(" %s" % permission.person.name)
|
||||
continue
|
||||
print("Confirm removal of packageset '%s'" % lp_set.name)
|
||||
print("Description:")
|
||||
print(" " + lp_set.description.replace("\n", "\n "))
|
||||
print("Containing Sources:")
|
||||
for member in lp_set.getSourcesIncluded():
|
||||
print(" %s" % member)
|
||||
print("Containing packagesets:")
|
||||
for member in lp_set.setsIncluded():
|
||||
print(" %s" % member.name)
|
||||
ack = input("Remove? (y/N): ")
|
||||
if ack.lower() == 'y':
|
||||
lp_set.lp_delete()
|
||||
print("Deleted %s/%s" % (lp_set.name, options.series.name))
|
||||
return
|
||||
|
||||
lp_people = [launchpad.people[person] for person in options.person]
|
||||
|
||||
if options.source:
|
||||
for source in options.source:
|
||||
for lp_person in lp_people:
|
||||
try:
|
||||
options.archive.deletePackageUploader(
|
||||
person=lp_person, source_package_name=source)
|
||||
print("Deleted %s/%s" % (lp_person.name, source))
|
||||
except Exception:
|
||||
print("Failed to delete %s/%s" % (lp_person.name, source))
|
||||
return
|
||||
|
||||
if options.packageset:
|
||||
for packageset in options.packageset:
|
||||
lp_set = launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=options.series)
|
||||
for lp_person in lp_people:
|
||||
options.archive.deletePackagesetUploader(
|
||||
person=lp_person, packageset=lp_set)
|
||||
print("Deleted %s/%s/%s" % (lp_person.name, packageset,
|
||||
options.series.name))
|
||||
return
|
||||
|
||||
if options.component:
|
||||
for lp_person in lp_people:
|
||||
if not options.acl_type or options.acl_type == 'upload':
|
||||
options.archive.deleteComponentUploader(
|
||||
person=lp_person, component_name=options.component)
|
||||
print("Deleted %s/%s" % (lp_person.name, options.component))
|
||||
else:
|
||||
options.archive.deleteQueueAdmin(
|
||||
person=lp_person, component_name=options.component)
|
||||
print("Deleted %s/%s (admin)" % (lp_person.name,
|
||||
options.component))
|
||||
return
|
||||
|
||||
if options.pocket:
|
||||
admin_kwargs = {}
|
||||
if options.series:
|
||||
admin_kwargs["distroseries"] = options.series
|
||||
for lp_person in lp_people:
|
||||
if not options.acl_type or options.acl_type == 'upload':
|
||||
options.archive.deletePocketUploader(
|
||||
person=lp_person, pocket=options.pocket)
|
||||
print("Deleted %s/%s" % (lp_person.name, options.pocket))
|
||||
else:
|
||||
options.archive.deletePocketQueueAdmin(
|
||||
person=lp_person, pocket=options.pocket, **admin_kwargs)
|
||||
if options.series:
|
||||
print(
|
||||
"Deleted %s/%s/%s (admin)" %
|
||||
(lp_person.name, options.pocket, options.series.name))
|
||||
else:
|
||||
print("Deleted %s/%s (admin)" %
|
||||
(lp_person.name, options.pocket))
|
||||
return
|
||||
|
||||
|
||||
def do_create(options):
|
||||
if not options.packageset:
|
||||
print("You can only create a package set, not something else.")
|
||||
return False
|
||||
|
||||
if not options.person or len(options.person) != 1:
|
||||
print("You must specify exactly one person to own the new package "
|
||||
"set.")
|
||||
return False
|
||||
|
||||
distro_series = options.series or options.distro.current_series
|
||||
lp_person = launchpad.people[options.person[0]]
|
||||
|
||||
for packageset in options.packageset:
|
||||
try:
|
||||
if launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=distro_series):
|
||||
print("Package set %s already exists" % packageset)
|
||||
continue
|
||||
except (TypeError, launchpadlib.errors.HTTPError):
|
||||
pass
|
||||
desc = multiline_input("Description for new package set %s:"
|
||||
% packageset)
|
||||
ps = launchpad.packagesets.new(
|
||||
name=packageset, description=desc, distroseries=distro_series,
|
||||
owner=lp_person)
|
||||
print(ps)
|
||||
|
||||
|
||||
def do_modify(options):
|
||||
if not options.packageset:
|
||||
print("You can only modify a package set, not something else.")
|
||||
return False
|
||||
|
||||
if options.person and len(options.person) > 1:
|
||||
print("You can only specify one person as the new packageset owner.")
|
||||
return False
|
||||
|
||||
distro_series = options.series or options.distro.current_series
|
||||
|
||||
lp_person = None
|
||||
if options.person:
|
||||
lp_person = launchpad.people[options.person[0]]
|
||||
|
||||
for packageset in options.packageset:
|
||||
lp_set = launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=distro_series)
|
||||
if lp_person:
|
||||
print("Making %s the owner of %s/%s"
|
||||
% (lp_person.name, lp_set.name, distro_series.name))
|
||||
lp_set.owner = lp_person
|
||||
lp_set.lp_save()
|
||||
continue
|
||||
|
||||
print("Current description of %s:" % lp_set.name)
|
||||
print(" " + lp_set.description.replace("\n", "\n "))
|
||||
desc = multiline_input("New description [blank=leave unmodified]:")
|
||||
if desc:
|
||||
print("Modifying description of %s/%s"
|
||||
% (lp_set.name, distro_series.name))
|
||||
lp_set.description = desc
|
||||
lp_set.lp_save()
|
||||
continue
|
||||
|
||||
rename = input("Rename %s to? [blank=don't rename]: " % lp_set.name)
|
||||
if rename:
|
||||
print("Renaming %s/%s -> %s"
|
||||
% (lp_set.name, distro_series.name, rename))
|
||||
lp_set.name = rename
|
||||
lp_set.lp_save()
|
||||
continue
|
||||
|
||||
|
||||
def do_copy(options):
|
||||
if options.archive.self_link == options.distro.main_archive_link:
|
||||
archives = options.distro.archives
|
||||
else:
|
||||
archives = [options.archive]
|
||||
|
||||
if not options.packageset:
|
||||
print("You can only copy a package set, not something else.")
|
||||
return False
|
||||
|
||||
distro_series = options.series or options.distro.current_series
|
||||
|
||||
dst = input("Name of the destination series: ")
|
||||
dst_series = options.distro.getSeries(name_or_version=dst)
|
||||
|
||||
for packageset in options.packageset:
|
||||
src_pkgset = launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=distro_series)
|
||||
if not src_pkgset:
|
||||
print("Package set %s doesn't exist" % packageset)
|
||||
|
||||
ps = launchpad.packagesets.new(
|
||||
name=packageset, description=src_pkgset.description,
|
||||
distroseries=dst_series, owner=src_pkgset.owner_link,
|
||||
related_set=src_pkgset)
|
||||
print(ps)
|
||||
|
||||
ps.addSources(names=src_pkgset.getSourcesIncluded())
|
||||
|
||||
perms = []
|
||||
for archive in archives:
|
||||
perms.extend(archive.getUploadersForPackageset(
|
||||
packageset=src_pkgset))
|
||||
|
||||
for perm in perms:
|
||||
perm.archive.newPackagesetUploader(
|
||||
person=perm.person_link, packageset=ps)
|
||||
|
||||
|
||||
def do_check(options):
|
||||
"""Check if a person can upload a package."""
|
||||
if not options.person:
|
||||
print("A person needs to be specified to check.")
|
||||
return False
|
||||
if not options.source:
|
||||
print("A source package needs to be specified to check.")
|
||||
return False
|
||||
|
||||
people = [launchpad.people[person] for person in options.person]
|
||||
distro_series = options.series or options.distro.current_series
|
||||
|
||||
if options.pocket:
|
||||
pocket = options.pocket
|
||||
else:
|
||||
pocket = 'Release'
|
||||
|
||||
for person in people:
|
||||
for srcpkg in options.source:
|
||||
try:
|
||||
spph = options.archive.getPublishedSources(
|
||||
distro_series=distro_series,
|
||||
exact_match=True,
|
||||
pocket=pocket,
|
||||
source_name=srcpkg,
|
||||
status='Published',
|
||||
)[0]
|
||||
except IndexError:
|
||||
if not options.pocket:
|
||||
raise
|
||||
# Not yet in options.pocket, but maybe in Release?
|
||||
spph = options.archive.getPublishedSources(
|
||||
distro_series=distro_series,
|
||||
exact_match=True,
|
||||
pocket='Release',
|
||||
source_name=srcpkg,
|
||||
status='Published',
|
||||
)[0]
|
||||
try:
|
||||
options.archive.checkUpload(
|
||||
component=spph.component_name,
|
||||
distroseries=distro_series,
|
||||
person=person,
|
||||
pocket=pocket,
|
||||
sourcepackagename=srcpkg,
|
||||
)
|
||||
print("%s (%s) can upload %s to %s/%s" % (
|
||||
person.display_name, person.name,
|
||||
srcpkg, distro_series.displayname, pocket))
|
||||
except launchpadlib.errors.HTTPError as e:
|
||||
if e.response.status == 403:
|
||||
print("%s (%s) cannot upload %s to %s/%s" % (
|
||||
person.display_name, person.name,
|
||||
srcpkg, distro_series.displayname, pocket))
|
||||
else:
|
||||
print("There was a %s error:" % e.response.status)
|
||||
print(e.content)
|
||||
|
||||
|
||||
def main(options, action):
|
||||
|
||||
if action == "query":
|
||||
do_query(options)
|
||||
elif action == "add":
|
||||
do_add(options)
|
||||
elif action in ("delete", "remove"):
|
||||
do_delete(options)
|
||||
elif action == "create":
|
||||
do_create(options)
|
||||
elif action == "modify":
|
||||
do_modify(options)
|
||||
elif action == "copy":
|
||||
do_copy(options)
|
||||
elif action == "check":
|
||||
do_check(options)
|
||||
else:
|
||||
raise AssertionError("Invalid action %s" % action)
|
||||
|
||||
return
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = OptionParser(
|
||||
usage="usage: %prog [options] "
|
||||
"query|add|delete|create|modify|copy|check",
|
||||
epilog=lputils.ARCHIVE_REFERENCE_DESCRIPTION)
|
||||
|
||||
parser.add_option(
|
||||
"-l", "--launchpad", dest="launchpad_instance", default="production")
|
||||
parser.add_option("-A", "--archive", dest="archive")
|
||||
parser.add_option("-S", "--series", dest="series")
|
||||
parser.add_option("-p", "--person", dest="person", action="append")
|
||||
parser.add_option("-c", "--component", dest="component")
|
||||
parser.add_option("-P", "--packageset", dest="packageset", action="append")
|
||||
parser.add_option("-s", "--source", dest="source", action="append")
|
||||
parser.add_option("--pocket", dest="pocket")
|
||||
parser.add_option("-t", "--acl-type", dest="acl_type",
|
||||
help="ACL type: upload or admin")
|
||||
parser.add_option("--anon", dest="anon_login", action="store_true",
|
||||
default=False, help="Login anonymously to Launchpad")
|
||||
|
||||
# Deprecated in favour of -A.
|
||||
parser.add_option(
|
||||
"-d", "--distribution", dest="distro", default="ubuntu",
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
options, args = parser.parse_args()
|
||||
|
||||
possible_actions = ('query', 'add', 'delete', 'create', 'modify', 'copy', 'check')
|
||||
|
||||
if len(args) != 1:
|
||||
parser.error(
|
||||
"You must specify an action, one of:\n%s" %
|
||||
", ".join(possible_actions))
|
||||
|
||||
if options.anon_login and args[0] not in ('query', 'check'):
|
||||
print("E: Anonymous login not supported for this action.")
|
||||
sys.exit(1)
|
||||
|
||||
if (args[0] != 'query' and
|
||||
not options.person and not options.component and
|
||||
not options.packageset and not options.source and
|
||||
not options.pocket):
|
||||
parser.error("Provide at least one of "
|
||||
"person/component/packageset/source/pocket")
|
||||
if options.packageset and not options.series:
|
||||
parser.error("Package set requires an associated series")
|
||||
if options.acl_type and options.acl_type not in ('upload', 'admin'):
|
||||
parser.error("Invalid ACL type '%s' (valid: 'upload', 'admin')" %
|
||||
options.acl_type)
|
||||
if options.acl_type == 'admin' and options.packageset:
|
||||
parser.error("ACL type admin not allowed for package sets")
|
||||
if options.acl_type == 'admin' and options.source:
|
||||
parser.error("ACL type admin not allowed for source packages")
|
||||
if options.pocket:
|
||||
options.pocket = options.pocket.title()
|
||||
|
||||
if options.anon_login:
|
||||
launchpad = Launchpad.login_anonymously(
|
||||
CONSUMER_KEY, options.launchpad_instance, version="devel")
|
||||
else:
|
||||
launchpad = Launchpad.login_with(
|
||||
CONSUMER_KEY, options.launchpad_instance, version="devel")
|
||||
|
||||
options.archive = get_archive(options, launchpad)
|
||||
options.distro = options.archive.distribution
|
||||
if options.series is not None:
|
||||
options.series = options.distro.getSeries(
|
||||
name_or_version=options.series)
|
||||
|
||||
try:
|
||||
main(options, args[0])
|
||||
except launchpadlib.errors.HTTPError as err:
|
||||
print("There was a %s error:" % err.response.status)
|
||||
print(err.content)
|
@ -0,0 +1 @@
|
||||
edit-acl
|
@ -0,0 +1,126 @@
|
||||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2013 Canonical Ltd.
|
||||
# Author: Iain Lane <iain.lane@canonical.com>
|
||||
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
|
||||
# USA
|
||||
|
||||
import argparse
|
||||
import apt_pkg
|
||||
import gzip
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import urllib.request
|
||||
import sys
|
||||
|
||||
PARSED_SEEDS_URL = \
|
||||
'http://qa.ubuntuwire.org/ubuntu-seeded-packages/seeded.json.gz'
|
||||
LOGGER = logging.getLogger(os.path.basename(sys.argv[0]))
|
||||
|
||||
|
||||
class GetPackage():
|
||||
apt_cache_initialised = False
|
||||
|
||||
def __init__(self):
|
||||
# Initialise python-apt
|
||||
if not GetPackage.apt_cache_initialised:
|
||||
apt_pkg.init()
|
||||
GetPackage.apt_cache_initialised = True
|
||||
|
||||
self.cache = apt_pkg.Cache(None)
|
||||
self.pkgrecords = apt_pkg.PackageRecords(self.cache)
|
||||
self.depcache = apt_pkg.DepCache(self.cache)
|
||||
|
||||
# Download & parse the seeds
|
||||
response = urllib.request.urlopen(PARSED_SEEDS_URL)
|
||||
|
||||
buf = io.BytesIO(response.read())
|
||||
f = gzip.GzipFile(fileobj=buf)
|
||||
data = f.read().decode('utf-8')
|
||||
self.seeded_packages = json.loads(data)
|
||||
|
||||
def getsourcepackage(self, pkg):
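|
||||
# Map a binary package name to its source package, stripping any
|
||||
# architecture qualifier (e.g. "libfoo:i386") first.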
|
||||
pkg = re.sub(':.*', '', pkg)
|
||||
try:
|
||||
candidate = self.depcache.get_candidate_ver(self.cache[pkg])
|
||||
except KeyError: # no package found (arch specific?)
|
||||
return
|
||||
try:
|
||||
self.pkgrecords.lookup(candidate.file_list[0])
|
||||
except AttributeError: # no source (pure virtual?)
|
||||
return
|
||||
return self.pkgrecords.source_pkg or pkg
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description='Generate a freeze block for'
|
||||
+ ' an Ubuntu milestone')
|
||||
parser.add_argument('flavours', nargs='+',
|
||||
help='The participating flavours')
|
||||
parser.add_argument('--only-unique', '-u', action='store_true',
|
||||
help='Block only packages unique to FLAVOURS')
|
||||
parser.add_argument('--debug', '-d', action='store_true',
|
||||
help='Output some extra debugging')
|
||||
args = parser.parse_args()
|
||||
|
||||
logging.basicConfig(stream=sys.stderr,
|
||||
level=(logging.DEBUG if args.debug
|
||||
else logging.WARNING))
|
||||
|
||||
packages = GetPackage()
|
||||
|
||||
output = set()
|
||||
skip = set()
|
||||
|
||||
flavours = set(args.flavours)
|
||||
|
||||
# binary package: [ [ product, seed ] ]
|
||||
# e.g. "gbrainy": [["edubuntu", "dvd"]]
|
||||
for k, v in packages.seeded_packages.items():
|
||||
source_pkg = packages.getsourcepackage(k)
|
||||
seeding_flavours = set([x[0] for x in v if x[1] != "supported"])
|
||||
|
||||
# You don't get to freeze packages seeded by flavours that aren't participating
|
||||
if args.only_unique:
|
||||
not_releasing_seeding_flavours = seeding_flavours - flavours
|
||||
else:
|
||||
not_releasing_seeding_flavours = None
|
||||
|
||||
if not_releasing_seeding_flavours:
|
||||
LOGGER.debug(("Skipping %s (%s binary package) because it's"
|
||||
+ " seeded on %s") % (source_pkg, k, v))
|
||||
output.discard(source_pkg)
|
||||
skip.add(source_pkg)
|
||||
continue
|
||||
|
||||
if source_pkg and source_pkg in skip:
|
||||
continue
|
||||
|
||||
if seeding_flavours.intersection(flavours) and source_pkg:
|
||||
LOGGER.debug("Adding %s (%s binary package) due to %s"
|
||||
% (source_pkg, k, v))
|
||||
output.add(source_pkg)
|
||||
skip.add(source_pkg)
|
||||
|
||||
print ("block", " ".join(sorted(output)))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -0,0 +1,113 @@
|
||||
#! /usr/bin/python
|
||||
# Copyright 2012 Canonical Ltd.
|
||||
#
|
||||
# This script will write update metrics for a given Ubuntu release in CSV
|
||||
# format. It will output a file with updates (broken down into Security
|
||||
# vs Updates) by month, as well as the number per package.
|
||||
|
||||
from collections import defaultdict
|
||||
import csv
|
||||
import logging
|
||||
from optparse import OptionParser
|
||||
import sys
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
API_NAME = 'ubuntu-update-metrics'
|
||||
|
||||
|
||||
def write_metrics_csv(filename, key_name, metrics):
|
||||
"""Write the metrics to a CSV file.
|
||||
|
||||
:param filename: The CSV filename.
|
||||
:param key_name: The name of the metrics key.
|
||||
:param metrics: This should be a sequence of
|
||||
[key, {'Updates': X, 'Security': X, 'Total': X}] records.
|
||||
"""
|
||||
logging.info('Writing metrics by %s to %s', key_name.lower(), filename)
|
||||
writer = csv.writer(open(filename, 'wb'))
|
||||
writer.writerow([key_name, 'Updates', 'Security', 'Total'])
|
||||
for key, row_metrics in metrics:
|
||||
writer.writerow(
|
||||
[key, row_metrics['Updates'], row_metrics['Security'], row_metrics['Total']])
|
||||
|
||||
|
||||
def main(argv):
|
||||
parser = OptionParser(
|
||||
usage="%prog [options] ubuntu-release-name")
|
||||
parser.add_option(
|
||||
'-l', '--launchpad', default='production', dest='lp_instance',
|
||||
help="Select the Launchpad instance to run against. Defaults to "
|
||||
"'production'")
|
||||
parser.add_option(
|
||||
'-v', '--verbose', default=0, action='count', dest='verbose',
|
||||
help="Increase verbosity of the script. -v prints info messages, "
|
||||
"-vv will print debug messages.")
|
||||
parser.add_option(
|
||||
'-c', '--credentials', default=None, action='store',
|
||||
dest='credentials',
|
||||
help="Use the OAuth credentials in FILE instead of the desktop "
|
||||
"one.", metavar='FILE')
|
||||
parser.add_option(
|
||||
'-d', '--distribution', default='ubuntu', action='store',
|
||||
dest='distribution',
|
||||
help="The distribution to compute metrics for. Defaults to 'ubuntu'.")
|
||||
options, args = parser.parse_args(argv[1:])
|
||||
if len(args) != 1:
|
||||
parser.error('Missing release name.')
|
||||
|
||||
if options.verbose >= 2:
|
||||
log_level = logging.DEBUG
|
||||
elif options.verbose == 1:
|
||||
log_level = logging.INFO
|
||||
else:
|
||||
log_level = logging.WARNING
|
||||
logging.basicConfig(level=log_level)
|
||||
|
||||
lp = Launchpad.login_with(
|
||||
API_NAME, options.lp_instance, credentials_file=options.credentials,
|
||||
version='devel')
|
||||
|
||||
try:
|
||||
distribution = lp.distributions[options.distribution]
|
||||
except KeyError:
|
||||
parser.error('unknown distribution: %s' % options.distribution)
|
||||
|
||||
series = distribution.getSeries(name_or_version=args[0])
|
||||
if series is None:
|
||||
parser.error('unknown series: %s' % args[0])
|
||||
archive = series.main_archive
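|
||||
# Tally publications per package and per month, split by pocket
|
||||
# ('Updates' vs 'Security') plus a running 'Total'.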
|
||||
|
||||
updates_by_package = defaultdict(lambda: defaultdict(int))
|
||||
updates_by_month = defaultdict(lambda: defaultdict(int))
|
||||
for pocket in ['Updates', 'Security']:
|
||||
logging.info(
|
||||
'Retrieving published %s sources for %s...', pocket, args[0])
|
||||
published_history = archive.getPublishedSources(
|
||||
component_name='main', created_since_date=series.datereleased,
|
||||
distro_series=series, pocket=pocket)
|
||||
for source in published_history:
|
||||
package_metrics = updates_by_package[source.source_package_name]
|
||||
package_metrics[source.pocket] += 1
|
||||
package_metrics['Total'] += 1
|
||||
|
||||
month = source.date_published.strftime('%Y-%m')
|
||||
month_metrics = updates_by_month[month]
|
||||
month_metrics[source.pocket] += 1
|
||||
month_metrics['Total'] += 1
|
||||
|
||||
by_month_filename = '%s-%s-updates-by-month.csv' % (
|
||||
options.distribution, args[0])
|
||||
write_metrics_csv(
|
||||
by_month_filename, 'Month', sorted(updates_by_month.items()))
|
||||
|
||||
by_package_filename = '%s-%s-updates-by-package.csv' % (
|
||||
options.distribution, args[0])
|
||||
write_metrics_csv(
|
||||
by_package_filename, 'Package', sorted(
|
||||
updates_by_package.items(),
|
||||
key=lambda m: m[1]['Total'], reverse=True))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv)
|
@ -0,0 +1,89 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (C) 2011 Iain Lane
|
||||
# Copyright (C) 2011 Stefano Rivera
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
from optparse import OptionParser
|
||||
import re
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
|
||||
def blacklist_package(options, pkg, reason):
|
||||
try:
|
||||
dsd = options.devel_series.getDifferencesTo(
|
||||
source_package_name_filter=pkg, parent_series=options.sid)[0]
|
||||
except IndexError:
|
||||
print("Couldn't blacklist %s (no DSD)" % pkg)
|
||||
return False
|
||||
|
||||
if dsd.status == "Blacklisted always":
|
||||
print("%s already blacklisted" % pkg)
|
||||
return False
|
||||
|
||||
if not options.dry_run:
|
||||
dsd.blacklist(all=True, comment=reason)
|
||||
return True
|
||||
|
||||
|
||||
def main():
|
||||
parser = OptionParser(usage="usage: %prog [options] sync-blacklist.txt")
|
||||
parser.add_option(
|
||||
"-l", "--launchpad", dest="launchpad_instance", default="production")
|
||||
parser.add_option(
|
||||
"-n", "--dry-run", default=False, action="store_true",
|
||||
help="don't actually blacklist anything")
|
||||
options, args = parser.parse_args()
|
||||
if len(args) < 1:
|
||||
parser.error("requires input file as argument")
|
||||
blacklist = args[0]
|
||||
|
||||
lp = Launchpad.login_with(
|
||||
'sync-blacklist', options.launchpad_instance, version='devel')
|
||||
ubuntu = lp.distributions['ubuntu']
|
||||
debian = lp.distributions['debian']
|
||||
|
||||
options.devel_series = ubuntu.current_series
|
||||
options.sid = debian.current_series
|
||||
|
||||
# Read blacklist
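|
||||
# The file format (inferred from the parser below): blank lines
|
||||
# separate blocks; "# comment" lines set reasons for the whole block;
|
||||
# "package  # comment" lines name a package to blacklist, e.g.:
|
||||
#
|
||||
#   # GNOME 2 leftovers
|
||||
#   foo-applet  # also seeded on xubuntu
|
||||
#   bar-applet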
|
||||
applicable_comments = []
|
||||
with open(blacklist) as blacklist_file:
|
||||
for line in blacklist_file:
|
||||
if not line.strip():
|
||||
applicable_comments = []
|
||||
continue
|
||||
|
||||
m = re.match(r'^\s*([a-z0-9.+-]+)?\s*(?:#\s*(.+)?)?$', line)
|
||||
source, comment = m.groups()
|
||||
if source:
|
||||
comments = applicable_comments[:]
|
||||
if comment:
|
||||
comments.append(comment)
|
||||
if not comments:
|
||||
comments.append("None given")
|
||||
comments.append("(from sync-blacklist.txt)")
|
||||
print("blacklisting %s (reason: %s)..."
|
||||
% (source, '; '.join(comments)), end="")
|
||||
if blacklist_package(options, source, '\n'.join(comments)):
|
||||
print("done")
|
||||
elif comment:
|
||||
applicable_comments.append(comment)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -0,0 +1,94 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2010, 2012 Canonical Ltd.
|
||||
# Author: Martin Pitt <martin.pitt@ubuntu.com>
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Do a size comparison of the files on two ISOs. This can be used to find out
|
||||
# which packages were added, removed, and significantly changed in size between
|
||||
# two releases or daily builds. Note that this only really works for
|
||||
# alternates, since desktop CDs by and large just have one big squashfs image.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def deb_size_map(iso_path):
|
||||
map = {} # package -> (version, size)
|
||||
isoinfo = subprocess.Popen(
|
||||
['isoinfo', '-lR', '-i', iso_path],
|
||||
stdout=subprocess.PIPE, universal_newlines=True)
|
||||
out = isoinfo.communicate()[0]
|
||||
assert isoinfo.returncode == 0
|
||||
|
||||
for l in out.splitlines():
|
||||
l = l.strip()
|
||||
if not l.endswith('.deb'):
|
||||
continue
|
||||
|
||||
fields = l.split()
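|
||||
# isoinfo -lR prints an ls -l style listing; field 4 is the file
|
||||
# size and field 11 the file name.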
|
||||
size = int(fields[4])
|
||||
fname = fields[11]
|
||||
|
||||
(pkg, version, _) = fname.split('_')
|
||||
map[pkg] = (version, size)
|
||||
|
||||
return map
|
||||
|
||||
#
|
||||
# main
|
||||
#
|
||||
|
||||
if len(sys.argv) != 3:
|
||||
print('Usage: %s <old iso> <new iso>' % sys.argv[0], file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
old_map = deb_size_map(sys.argv[1])
|
||||
new_map = deb_size_map(sys.argv[2])
|
||||
|
||||
print('== Removed packages ==')
|
||||
sum = 0
|
||||
for p, (v, s) in old_map.items():
|
||||
if p not in new_map:
|
||||
print('%s (%.1f MB)' % (p, s / 1000000.))
|
||||
sum += s
|
||||
print('TOTAL: -%.1f MB' % (sum / 1000000.))
|
||||
|
||||
sum = 0
|
||||
print('\n== Added packages ==')
|
||||
for p, (v, s) in new_map.items():
|
||||
if p not in old_map:
|
||||
print('%s (%.1f MB)' % (p, s / 1000000.))
|
||||
sum += s
|
||||
print('TOTAL: +%.1f MB' % (sum / 1000000.))
|
||||
|
||||
print('\n== Changed packages ==')
|
||||
sum = 0
|
||||
for p, (v, s) in old_map.items():
|
||||
if p not in new_map:
|
||||
continue
|
||||
|
||||
new_s = new_map[p][1]
|
||||
sum += new_s - s
|
||||
|
||||
# only show differences > 100 kB to filter out noise
|
||||
if new_s - s > 100000:
|
||||
print('%s (Δ %.1f MB - %s: %.1f MB %s: %.1f MB)' % (
|
||||
p, (new_s - s) / 1000000., v, s / 1000000., new_map[p][0],
|
||||
new_s / 1000000.))
|
||||
|
||||
print('TOTAL difference: %.1f MB' % (sum / 1000000.))
|
@ -0,0 +1,169 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2011, 2012 Canonical Ltd.
|
||||
# Author: Stéphane Graber <stgraber@ubuntu.com>
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# To use this module, you need an ini configuration file at ~/.isotracker.conf
|
||||
# example:
|
||||
# [general]
|
||||
# url=http://iso.qa.ubuntu.com/xmlrpc.php
|
||||
# username=stgraber
|
||||
# password=blablabla
|
||||
# default_milestone=Precise Daily
|
||||
#
|
||||
# [localized]
|
||||
# url=http://localized-iso.qa.ubuntu.com/xmlrpc.php
|
||||
# password=differentpassword
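|
||||
#
|
||||
# Typical usage (the product name and version below are only examples):
|
||||
#   from isotracker import ISOTracker
|
||||
#   isotracker = ISOTracker()
|
||||
#   isotracker.post_build('Kubuntu Desktop amd64', '20120425')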
|
||||
|
||||
from __future__ import print_function
|
||||
try:
|
||||
import configparser
|
||||
except ImportError:
|
||||
import ConfigParser as configparser
|
||||
|
||||
from qatracker import QATracker, QATrackerMilestone, QATrackerProduct
|
||||
import os
|
||||
|
||||
class NoConfigurationError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class ISOTracker:
|
||||
def __init__(self, target=None):
|
||||
# Store the alternative target (configuration section)
|
||||
self.target = target
|
||||
|
||||
# Read configuration
|
||||
configfile = os.path.expanduser('~/.isotracker.conf')
|
||||
if not os.path.exists(configfile):
|
||||
raise NoConfigurationError(
|
||||
"Missing configuration file at: %s" % configfile)
|
||||
|
||||
# Load the config
|
||||
self.config = configparser.ConfigParser()
|
||||
self.config.read([configfile])
|
||||
|
||||
# Connect to the tracker
|
||||
url = self.config.get('general', 'url')
|
||||
username = self.config.get('general', 'username')
|
||||
password = self.config.get('general', 'password')
|
||||
|
||||
# Override with custom URL and credentials for the target
|
||||
if self.target:
|
||||
if self.config.has_section(self.target):
|
||||
if self.config.has_option(self.target, 'url'):
|
||||
url = self.config.get(self.target, 'url')
|
||||
if self.config.has_option(self.target, 'username'):
|
||||
username = self.config.get(self.target, 'username')
|
||||
if self.config.has_option(self.target, 'password'):
|
||||
password = self.config.get(self.target, 'password')
|
||||
else:
|
||||
print("Couldn't find a '%s' target, using the default." %
|
||||
self.target)
|
||||
|
||||
self.qatracker = QATracker(url, username, password)
|
||||
|
||||
# Get the required list of products and milestones
|
||||
self.tracker_products = self.qatracker.get_products()
|
||||
self.tracker_milestones = self.qatracker.get_milestones()
|
||||
|
||||
def default_milestone(self):
|
||||
"""
|
||||
Get the default milestone from the configuration file.
|
||||
"""
|
||||
|
||||
milestone_name = None
|
||||
|
||||
if self.target:
|
||||
# Series-specific default milestone
|
||||
try:
|
||||
milestone_name = self.config.get(self.target,
|
||||
'default_milestone')
|
||||
except (KeyError, configparser.NoSectionError,
|
||||
configparser.NoOptionError):
|
||||
pass
|
||||
|
||||
if not milestone_name:
|
||||
# Generic default milestone
|
||||
try:
|
||||
milestone_name = self.config.get('general',
|
||||
'default_milestone')
|
||||
except (KeyError, configparser.NoSectionError,
|
||||
configparser.NoOptionError):
|
||||
pass
|
||||
|
||||
if not milestone_name:
|
||||
raise KeyError("No default milestone selected")
|
||||
else:
|
||||
return self.get_milestone_by_name(milestone_name)
|
||||
|
||||
def get_product_by_name(self, product):
|
||||
"""
|
||||
Get a QATrackerProduct from the product's name.
|
||||
"""
|
||||
|
||||
for entry in self.tracker_products:
|
||||
if entry.title.lower() == product.lower():
|
||||
return entry
|
||||
else:
|
||||
raise KeyError("Product '%s' not found" % product)
|
||||
|
||||
def get_milestone_by_name(self, milestone):
|
||||
"""
|
||||
Get a QATrackerMilestone from the milestone's name.
|
||||
"""
|
||||
|
||||
for entry in self.tracker_milestones:
|
||||
if entry.title.lower() == milestone.lower():
|
||||
return entry
|
||||
else:
|
||||
raise KeyError("Milestone '%s' not found" % milestone)
|
||||
|
||||
def get_builds(self, milestone=None,
|
||||
status=['Active', 'Re-building', 'Ready']):
|
||||
"""
|
||||
Get a list of QATrackerBuild for the given milestone and status.
|
||||
"""
|
||||
|
||||
if not milestone:
|
||||
milestone = self.default_milestone()
|
||||
elif not isinstance(milestone, QATrackerMilestone):
|
||||
milestone = self.get_milestone_by_name(milestone)
|
||||
|
||||
return milestone.get_builds(status)
|
||||
|
||||
def post_build(self, product, version, milestone=None, note="",
|
||||
notify=True):
|
||||
"""
|
||||
Post a new build to the given milestone.
|
||||
"""
|
||||
|
||||
if not isinstance(product, QATrackerProduct):
|
||||
product = self.get_product_by_name(product)
|
||||
|
||||
notefile = os.path.expanduser('~/.isotracker.note')
|
||||
if note == "" and os.path.exists(notefile):
|
||||
with open(notefile, 'r') as notefd:
|
||||
note = notefd.read()
|
||||
|
||||
if not milestone:
|
||||
milestone = self.default_milestone()
|
||||
elif not isinstance(milestone, QATrackerMilestone):
|
||||
milestone = self.get_milestone_by_name(milestone)
|
||||
|
||||
if milestone.add_build(product, version, note, notify):
|
||||
print("Build successfully added to the tracker")
|
||||
else:
|
||||
print("Failed to add build to the tracker")
|
@ -0,0 +1,253 @@
#! /usr/bin/python

# Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Apply suitable overrides to new kernel binaries, matching previous ones."""

from __future__ import print_function

import atexit
from collections import defaultdict
from contextlib import closing
import gzip
from optparse import OptionParser, Values
import os
import shutil
import sys
import tempfile
try:
    from urllib.request import urlopen
except ImportError:
    from urllib2 import urlopen

import apt_pkg
from launchpadlib.launchpad import Launchpad
from lazr.restfulclient.errors import ServerError
from ubuntutools.question import YesNoQuestion

import lputils


CONSUMER_KEY = "kernel-overrides"


tempdir = None


def ensure_tempdir():
    global tempdir
    if not tempdir:
        tempdir = tempfile.mkdtemp(prefix='kernel-overrides')
        atexit.register(shutil.rmtree, tempdir)


class FakeBPPH:
    def __init__(self, pkg, component, das):
        self.binary_package_name = pkg
        self.component_name = component
        self.distro_arch_series = das


def get_published_binaries(options, source):
    """If getPublishedBinaries times out, fall back to doing it by hand."""
    try:
        for binary in source.getPublishedBinaries():
            if not binary.is_debug:
                yield binary
    except ServerError as e:
        if e.response.status != 503:
            raise
        print("getPublishedBinaries timed out; fetching Packages instead ...")
        ensure_tempdir()
        for section_name in ("", "debian-installer"):
            for component in ("main", "restricted", "universe", "multiverse"):
                for das in options.old.series.architectures:
                    arch = das.architecture_tag
                    if arch in ("amd64", "i386"):
                        base = "http://archive.ubuntu.com/ubuntu"
                    else:
                        base = "http://ports.ubuntu.com/ubuntu-ports"
                    url = ("%s/dists/%s/%s%s/binary-%s/Packages.gz" %
                           (base, options.old.suite, component,
                            "/%s" % section_name if section_name else "",
                            arch))
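                    # For illustration (hypothetical suite): suite
                    # "bionic-proposed", component "main", no section and
                    # arch "amd64" yield
                    # http://archive.ubuntu.com/ubuntu/dists/bionic-proposed/main/binary-amd64/Packages.gz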
                    path = os.path.join(
                        tempdir, "Ubuntu_%s_%s%s_Packages_%s" %
                        (options.old.suite, component,
                         "_%s" % section_name if section_name else "", arch))
                    with closing(urlopen(url)) as url_file:
                        with open("%s.gz" % path, "wb") as comp_file:
                            comp_file.write(url_file.read())
                    with closing(gzip.GzipFile("%s.gz" % path)) as gz_file:
                        with open(path, "wb") as out_file:
                            out_file.write(gz_file.read())
                    with open(path) as packages_file:
                        apt_packages = apt_pkg.TagFile(packages_file)
                        for section in apt_packages:
                            pkg = section["Package"]
                            src = section.get("Source", pkg).split(" ", 1)[0]
                            if src != options.source:
                                continue
                            yield FakeBPPH(pkg, component, das)


def find_current_binaries(options):
    print("Checking existing binaries in %s ..." % options.old.suite,
          file=sys.stderr)
    sources = options.old.archive.getPublishedSources(
        source_name=options.source, distro_series=options.old.series,
        pocket=options.old.pocket, exact_match=True, status="Published")
    for source in sources:
        binaries = defaultdict(dict)
        for binary in get_published_binaries(options, source):
            print(".", end="")
            sys.stdout.flush()
            arch = binary.distro_arch_series.architecture_tag
            name = binary.binary_package_name
            component = binary.component_name
            if name not in binaries[arch]:
                binaries[arch][name] = component
        if binaries:
            print()
            return binaries
    print()
    return []


def find_matching_uploads(options, newabi):
    print("Checking %s uploads to %s ..." %
          (options.queue.lower(), options.suite), file=sys.stderr)
    uploads = options.series.getPackageUploads(
        name=options.source, exact_match=True, archive=options.archive,
        pocket=options.pocket, status=options.queue)
    for upload in uploads:
        if upload.contains_build:
            # display_name is inaccurate for the theoretical case of an
            # upload containing multiple builds, but in practice it's close
            # enough.
            source = upload.display_name.split(",")[0]
            if source == options.source:
                binaries = upload.getBinaryProperties()
                binaries = [b for b in binaries if "customformat" not in b]
                if [b for b in binaries if newabi in b["version"]]:
                    yield upload, binaries


def equal_except_abi(old, new, abi):
    """Are OLD and NEW the same package name aside from ABI?"""
    # Make sure new always contains the ABI.
    if abi in old:
        old, new = new, old
    if abi not in new:
        return False

    left, _, right = new.partition(abi)
    if not old.startswith(left) or not old.endswith(right):
        return False
    old_abi = old[len(left):]
    if right:
        old_abi = old_abi[:-len(right)]
    return old_abi[0].isdigit() and old_abi[-1].isdigit()
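
# Worked examples (hypothetical package names; behaviour follows from the
# logic above):
#   equal_except_abi('linux-image-4.15.0-22-generic',
#                    'linux-image-4.15.0-23-generic', '4.15.0-23')  # True
#   equal_except_abi('linux-image-extra-4.15.0-22-generic',
#                    'linux-image-4.15.0-23-generic', '4.15.0-23')  # False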


def apply_kernel_overrides(options, newabi):
    current_binaries = find_current_binaries(options)
    all_changes = []

    for upload, binaries in find_matching_uploads(options, newabi):
        print("%s/%s (%s):" %
              (upload.package_name, upload.package_version,
               upload.display_arches.split(",")[0]))
        changes = []
        for binary in binaries:
            if binary["architecture"] not in current_binaries:
                continue
            current_binaries_arch = current_binaries[binary["architecture"]]
            for name, component in current_binaries_arch.items():
                if (binary["component"] != component and
                        equal_except_abi(name, binary["name"], newabi)):
                    print("\t%s: %s -> %s" %
                          (binary["name"], binary["component"], component))
                    changes.append(
                        {"name": binary["name"], "component": component})
        if changes:
            all_changes.append((upload, changes))

    if all_changes:
        if options.dry_run:
            print("Dry run; no changes made.")
        else:
            if not options.confirm_all:
                if YesNoQuestion().ask("Override", "no") == "no":
                    return
            for upload, changes in all_changes:
                upload.overrideBinaries(changes=changes)


def main():
    parser = OptionParser(usage="usage: %prog [options] NEW-ABI")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-d", "--distribution", metavar="DISTRO", default="ubuntu",
        help="look in distribution DISTRO")
    parser.add_option(
        "-S", "--suite", metavar="SUITE",
        help="look in suite SUITE (default: <current series>-proposed)")
    parser.add_option(
        "--old-suite", metavar="SUITE",
        help="look for previous binaries in suite SUITE "
             "(default: value of --suite without -proposed)")
    parser.add_option(
        "-s", "--source", metavar="SOURCE", default="linux",
        help="operate on source package SOURCE")
    parser.add_option(
        "-Q", "--queue", metavar="QUEUE", default="new",
        help="consider packages in QUEUE")
    parser.add_option(
        "-n", "--dry-run", default=False, action="store_true",
        help="don't make any modifications")
    parser.add_option(
        "-y", "--confirm-all", default=False, action="store_true",
        help="do not ask for confirmation")
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.error("must supply NEW-ABI")
    newabi = args[0]

    options.launchpad = Launchpad.login_with(
        CONSUMER_KEY, options.launchpad_instance, version="devel")

    if options.suite is None:
        distribution = options.launchpad.distributions[options.distribution]
        options.suite = "%s-proposed" % distribution.current_series.name
    if options.old_suite is None:
        options.old_suite = options.suite
        if options.old_suite.endswith("-proposed"):
            options.old_suite = options.old_suite[:-9]
    options.queue = options.queue.title()
    options.version = None
    lputils.setup_location(options)
    options.old = Values()
    options.old.launchpad = options.launchpad
    options.old.distribution = options.distribution
    options.old.suite = options.old_suite
    lputils.setup_location(options.old)

    apply_kernel_overrides(options, newabi)


if __name__ == '__main__':
    main()
@ -0,0 +1,102 @@
#!/usr/bin/python3

# Copyright (C) 2016 Canonical Ltd.
# Author: Steve Langasek <steve.langasek@canonical.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Release a kernel stable release update.

Copy packages from -proposed to -updates, and optionally to -security,
following the directions of a kernel SRU workflow bug.

USAGE:
    kernel-sru-release <bug> [<bug> ...]
"""

import re
import subprocess
from optparse import OptionParser

from launchpadlib.launchpad import Launchpad

from kernel_workflow import *


def release_task_callback(lp, bugnum, task, context):
    workflow_re = re.compile(
        r'^%skernel-sru-workflow/(?P<subtask>.*)' % str(lp._root_uri))
    task_match = workflow_re.search(str(task.target))
    if not task_match:
        return {}
    subtask = task_match.group('subtask')
    if subtask == 'promote-to-proposed':
        if task.status != 'Fix Released':
            raise KernelWorkflowError(
                "Ignoring bug %s, promote-to-proposed not done"
                % bugnum)
        return {'proposed': task}
    if subtask == 'promote-to-updates':
        if task.status != 'Confirmed':
            raise KernelWorkflowError(
                "Ignoring bug %s, not ready to promote-to-updates"
                % bugnum)
        return {'updates': task}
    if subtask == 'promote-to-security' and task.status == 'Confirmed':
        return {'security': task}
    return {}


def release_source_callback(lp, bugnum, tasks, full_packages, release, context):
    if 'proposed' not in tasks or 'updates' not in tasks:
        raise KernelWorkflowError()
    cmd = ['sru-release', '--no-bugs', release]
    cmd.extend(full_packages)
    if 'security' in tasks:
        cmd.append('--security')
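    # For example (hypothetical values): for release 'bionic' with
    # full_packages ['linux', 'linux-meta', 'linux-signed'] and a Confirmed
    # promote-to-security task, this runs:
    #   sru-release --no-bugs bionic linux linux-meta linux-signed --security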
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError:
        print("Failed to run sru-release for %s" % bugnum)
        raise

    tasks['updates'].status = 'Fix Committed'
    tasks['updates'].assignee = lp.me
    tasks['updates'].lp_save()
    if 'security' in tasks:
        tasks['security'].status = 'Fix Committed'
        tasks['security'].assignee = lp.me
        tasks['security'].lp_save()


def process_sru_bugs(lp, bugs):
    """Process the list of bugs and call sru-release for each"""
    for bugnum in bugs:
        process_sru_bug(
            lp, bugnum, release_task_callback, release_source_callback)


if __name__ == '__main__':
    parser = OptionParser(
        usage="Usage: %prog bug [bug ...]")

    parser.add_option("-l", "--launchpad", dest="launchpad_instance",
                      default="production")

    options, bugs = parser.parse_args()

    launchpad = Launchpad.login_with(
        'ubuntu-archive-tools', options.launchpad_instance, version='devel')

    process_sru_bugs(launchpad, bugs)
@ -0,0 +1,455 @@
#!/usr/bin/python3

# Copyright (C) 2016 Canonical Ltd.
# Author: Steve Langasek <steve.langasek@canonical.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Show and approve changes in an unapproved kernel upload.

Generate a debdiff between current source package in a given release and the
version in the canonical-kernel ppa, and ask whether or not to approve the
upload.

The debdiff is filtered for noise (abi/* directories; mechanical changes of
ABI strings in debian/control et al.)

USAGE:
    kernel-sru-review <bug number>
"""

import glob
import datetime
import os
import pytz
import re
import shutil
import subprocess
import sys
import time
from contextlib import ExitStack
from tempfile import mkdtemp
from optparse import OptionParser

from launchpadlib.launchpad import Launchpad
from kernel_workflow import *


def get_master_kernel(lp, bugnum):
    current = lp.bugs[bugnum]
    master = None
    backport_re = re.compile(r'^kernel-sru-backport-of-(\d+)$')
    derivative_re = re.compile(r'^kernel-sru-derivative-of-(\d+)$')

    for tag in current.tags:
        num = derivative_re.match(tag)
        if not num:
            num = backport_re.match(tag)
        if num:
            master = lp.bugs[num.group(1)]

    if not master:
        print("No master kernel.")
        return (None, None)
    return get_name_and_version_from_bug(master)
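
# The tags matched above look like (hypothetical bug number)
# 'kernel-sru-derivative-of-1811234' or 'kernel-sru-backport-of-1811234';
# the referenced bug is the master kernel's own SRU tracking bug.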


def get_kernel_dsc(me, archive, source, series=None, version=None):
    kwargs = {
        'order_by_date': True,
        'exact_match': True,
        'source_name': source
    }
    if version:
        kwargs['version'] = version
    if series:
        kwargs['status'] = 'Published'
        kwargs['distro_series'] = series

    # in cases where we have a separate archive for proposed and release,
    # we need to check both places in the order proposed -> release
    target = archive['proposed']
    srcpkgs = target.getPublishedSources(**kwargs)
    if len(srcpkgs) == 0:
        target = archive['release']
        srcpkgs = target.getPublishedSources(**kwargs)
    if len(srcpkgs) == 0 and 'non-esm' in archive:
        target = archive['non-esm']
        srcpkgs = target.getPublishedSources(**kwargs)
    if len(srcpkgs) == 0:
        raise KernelWorkflowError(
            "Selected %s kernel could not be found" % source)
    srcpkg = srcpkgs[0]
    source_ver = srcpkg.source_package_version
    source_dsc = list(filter(
        lambda x: x.endswith('.dsc'),
        srcpkg.sourceFileUrls()))[0]
    if target.private:
        priv_url = me.getArchiveSubscriptionURL(archive=target)
        dsc_file = os.path.basename(source_dsc)
        source_dsc = os.path.join(priv_url, 'pool/main/l', source, dsc_file)

    return (source_dsc, source_ver)


def generate_diff_from_master(me, archive, master_source, master_version,
                              new_source, new_upstream,
                              work_dir, tardir, start_dir):
    master_upstream = master_version.split('-')[0]

    try:
        master_dsc, master_version = get_kernel_dsc(
            me, archive, master_source, version=master_version)
    except KernelWorkflowError:
        print("A master kernel diff was requested but the listed master "
              "kernel could not be found in any known archive.",
              end="")
        sys.stdout.flush()
        sys.stdin.readline()
        return

    fetch_tarball_from_cache(
        work_dir, tardir, master_source, master_upstream, start_dir)

    # grab the old source first
    dget_cmd = ['dget', '-u', master_dsc]
    try:
        subprocess.check_call(dget_cmd)
    except subprocess.CalledProcessError as e:
        print("Failed to get master source for %s at version %s" %
              (master_source, master_version))
        raise e

    # generate the diff
    print("Generating brief diff between new kernel and master (%s) to %s" %
          (master_version, os.path.join(work_dir, 'master_diff')))
    diff_cmd = ('diff -rq "{}-{}" "{}-{}" >master_diff').format(
        master_source, master_upstream, new_source, new_upstream)
    subprocess.call(diff_cmd, shell=True)


def review_task_callback(lp, bugnum, task, context):
    if str(task.target) != \
            ('%skernel-sru-workflow/promote-to-proposed' % str(lp._root_uri)):
        return {}
    if task.status == 'Confirmed':
        return {'proposed': task}
    elif task.status == 'In Progress':
        if lp.me.self_link != task.assignee_link:
            print("This bug is in progress and not assigned to you. Do you "
                  "still want to review it? [yN] ",
                  end="")
            sys.stdout.flush()
            response = sys.stdin.readline()
            if not response.strip().lower().startswith('y'):
                raise KernelWorkflowError("Skipping bug %s" % bugnum)
        return {'proposed': task}

    raise KernelWorkflowError(
        "Ignoring bug %s, not ready to promote-to-proposed"
        % bugnum)


def review_source_callback(lp, bugnum, tasks, full_packages, release, context):
    # as per LP: #1290543, we need to evaluate (load) lp.me for
    # getArchiveSubscriptionURL to work
    me = lp.load(lp.me.self_link)
    master_source = None
    master_version = None
    if context['diff']:
        master_source, master_version = get_master_kernel(lp, bugnum)
    for source in full_packages:
        process_source_package(
            source, release, me, context['archive'], context['ppa'],
            context['ubuntu'], context['startdir'], context['workdir'],
            context['tardir'], context['esm'], context['tarcache'],
            master_source, master_version)
    tasks['proposed'].status = 'Fix Committed'
    tasks['proposed'].assignee = me
    tasks['proposed'].lp_save()


def fetch_tarball_from_cache(directory, tardir, source, version, cwd):
    actual_tardir = None
    tarballs = []

    glob_pattern = '%s_%s.orig.tar.*' % (source, version)
    # first we look in the current working directory where the command was
    # called from
    actual_tardir = cwd
    tarballs = glob.glob(os.path.join(cwd, glob_pattern))
    if not tarballs:
        actual_tardir = tardir
        tarballs = glob.glob(os.path.join(tardir, glob_pattern))
    if tarballs:
        target = os.path.join(directory, os.path.basename(tarballs[0]))
        try:
            os.link(tarballs[0], target)
        except FileExistsError:
            pass
        except OSError:
            # if the hard linking fails, do a copy operation
            shutil.copy(tarballs[0], target)
    else:
        actual_tardir = None
    return actual_tardir
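
# For example (hypothetical kernel): for source 'linux' at upstream version
# '4.15.0', the cache lookup above globs for 'linux_4.15.0.orig.tar.*' in the
# invoking directory first, then in the tarball cache directory.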


def save_tarball_to_cache(directory, tardir, source, version):
    glob_pattern = '%s_%s.orig.tar.*' % (source, version)
    to_copy = glob.glob(os.path.join(directory, glob_pattern))
    for tarball in to_copy:
        target = os.path.join(tardir, os.path.basename(tarball))
        try:
            os.link(tarball, target)
        except FileExistsError:
            pass
        except OSError:
            # if the hard linking fails, do a copy operation
            shutil.copy(tarball, target)


def process_source_package(source, release, me, archive, ppa, ubuntu,
                           start_dir, work_dir, tardir,
                           esm=False, tar_cache=False,
                           master_source=None, master_version=None):
    series = ubuntu.getSeries(name_or_version=release)

    ppa_src = ppa.getPublishedSources(order_by_date=True,
                                      status='Published', exact_match=True,
                                      distro_series=series,
                                      source_name=source)[0]
    ppa_ver = ppa_src.source_package_version
    ppa_dsc = list(filter(
        lambda x: x.endswith('.dsc'), ppa_src.sourceFileUrls()))[0]
    if ppa.private:
        priv_url = me.getArchiveSubscriptionURL(archive=ppa)
        dsc_file = os.path.basename(ppa_dsc)
        ppa_dsc = os.path.join(priv_url, 'pool/main/l', source, dsc_file)

    # since we can have one archive for more than one 'pocket', no need to do
    # API calls more than once
    scanned = set()
    for pocket in archive.values():
        if pocket.self_link in scanned:
            continue
        archive_uploads = series.getPackageUploads(version=ppa_ver,
                                                   name=source,
                                                   archive=pocket,
                                                   exact_match=True)
        for upload in archive_uploads:
            if upload.status != 'Rejected':
                print("%s_%s already copied to Ubuntu archive (%s), skipping" %
                      (source, ppa_ver, upload.status))
                return
        scanned.add(pocket.self_link)

    source_dsc, source_ver = get_kernel_dsc(me, archive, source, series=series)

    new_fullabi = ppa_ver.split('~')[0]
    new_majorabi = re.sub(r"\.[^.]+$", '', new_fullabi)
    new_upstream = new_fullabi.split('-')[0]
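    # For example (hypothetical version): ppa_ver '4.15.0-23.25~16.04.1'
    # gives new_fullabi '4.15.0-23.25', new_majorabi '4.15.0-23' and
    # new_upstream '4.15.0'.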

    old_fullabi = source_ver.split('~')[0]
    old_majorabi = re.sub(r"\.[^.]+$", '', old_fullabi)
    old_upstream = old_fullabi.split('-')[0]

    real_tardir = fetch_tarball_from_cache(
        work_dir, tardir, source, old_upstream, start_dir)

    # grab the old source first
    if esm:
        pull_cmd = ['dget', '-u', source_dsc]
    else:
        # for non-ESM cases, it's just more reliable to use pull-lp-source
        pull_cmd = ['pull-lp-source', source, source_ver]

    try:
        subprocess.check_call(pull_cmd)
    except subprocess.CalledProcessError as e:
        print("Failed to get archive source for %s at version %s" %
              (source, source_ver))
        raise e

    # update contents to match what we think the new ABI should be
    sed_cmd = ('grep -rl "{}" "{}-{}"/debian* | grep -v changelog '
               '| xargs -r sed -i -e"s/{}/{}/g" -e"s/{}/{}/g"').format(
        re.escape(old_majorabi), source, old_upstream,
        re.escape(old_fullabi), re.escape(new_fullabi),
        re.escape(old_majorabi), re.escape(new_majorabi))
    try:
        subprocess.check_call(sed_cmd, shell=True)
    except subprocess.CalledProcessError as e:
        print("Failed to postprocess archive source for %s at version %s" %
              (source, source_ver))
        raise e

    if not real_tardir and tar_cache:
        save_tarball_to_cache(work_dir, tardir, source, old_upstream)

    # move the source dir aside so that it doesn't clobber.
    os.rename(source + '-' + old_upstream,
              source + '-' + old_upstream + '.old')

    real_tardir = fetch_tarball_from_cache(
        work_dir, tardir, source, new_upstream, start_dir)

    # grab the new source
    dget_cmd = ['dget', '-u', ppa_dsc]
    try:
        subprocess.check_call(dget_cmd)
    except subprocess.CalledProcessError as e:
        print("Failed to get ppa source for %s at version %s" %
              (source, ppa_ver))
        raise e

    if not real_tardir and tar_cache:
        save_tarball_to_cache(work_dir, tardir, source, new_upstream)

    if (master_source and master_version and
            '-meta' not in source and '-signed' not in source):
        # if requested, we also generate a brief diff between the new kernel
        # and its 'master' kernel
        generate_diff_from_master(
            me, archive, master_source, master_version, source, new_upstream,
            work_dir, tardir, start_dir)

    # generate the diff
    raw_diff_cmd = ('diff -uNr "{}-{}.old" "{}-{}" | filterdiff -x'
                    ' \'**/abi/**\' >raw_diff').format(
        source, old_upstream, source, new_upstream)
    subprocess.call(raw_diff_cmd, shell=True)

    # look at the diff
    view_cmd = '(diffstat raw_diff; cat raw_diff) | sensible-pager'
    subprocess.call(view_cmd, shell=True)

    print("Accept the package into -proposed? [yN] ", end="")
    sys.stdout.flush()
    response = sys.stdin.readline()
    if response.strip().lower().startswith('y'):
        copy_cmd = ['copy-proposed-kernel', release, source]
        if esm:
            copy_cmd.append('--esm')
        copy_time = datetime.datetime.now(tz=pytz.utc)
        try:
            subprocess.check_call(copy_cmd)
        except subprocess.CalledProcessError as e:
            print("Failed to copy source for %s at version %s" %
                  (source, ppa_ver))
            raise e
        print("Accepted")
        # we know this isn't a kernel package containing signed bits,
        # so don't subject ourselves to extra delays
        if '-meta' in source or '-signed' in source:
            return
        print("Checking for UEFI binaries")
        # Arbitrary 10 second delay, maybe enough to let uefi binaries hit
        # the unapproved queue.
        time.sleep(10)
        # accept any related uefi binaries. We filter as closely as possible
        # on name without hard-coding architecture, and we also filter to
        # only include uefi binaries that have appeared since we started the
        # copy to avoid accepting something that might have been improperly
        # copied into the queue by an "attacker" with upload rights.
        uefis = []
        for signed_type in ('uefi', 'signing'):
            uefis.extend(series.getPackageUploads(
                archive=archive['release'],
                pocket='Proposed',
                status='Unapproved',
                custom_type=signed_type,
                name='{}_{}_'.format(source, ppa_ver),
                created_since_date=copy_time))
        for uefi in uefis:
            print("Accepting {}".format(uefi))
            uefi.acceptFromQueue()


if __name__ == '__main__':

    default_release = 'cosmic'

    parser = OptionParser(
        usage="Usage: %prog [options] bug [bug ...]")

    xdg_cache = os.getenv('XDG_CACHE_HOME', '~/.cache')
    cachedir = os.path.expanduser(
        os.path.join(xdg_cache, 'ubuntu-archive-tools/kernel-tarballs'))

    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-k", "--keep-files", dest="keep_files", action="store_true")
    parser.add_option(
        "-C", "--cache-tarballs", dest="caching", action="store_true")
    parser.add_option(
        "-t", "--tarball-directory", dest="tardir", default=cachedir)
    parser.add_option(
        "-e", "--esm", dest="esm", action="store_true")
    parser.add_option(
        "-d", "--diff-against-master", dest="diff_master",
        action="store_true")

    opts, bugs = parser.parse_args()

    if len(bugs) < 1:
        parser.error('Need to specify at least one bug number')

    tardir = os.path.abspath(opts.tardir)

    if opts.caching:
        # if we enabled tarball caching, make sure the tarball directory exists
        if not os.path.isdir(tardir):
            try:
                os.makedirs(tardir)
            except OSError:
                parser.error(
                    'Invalid tarball directory specified (%s)' % tardir)

    launchpad = Launchpad.login_with(
        'ubuntu-archive-tools', opts.launchpad_instance, version='devel')

    ubuntu = launchpad.distributions['ubuntu']
    # for ESM (precise) we use special PPAs for CKT testing, -proposed and
    # release
    archive = {}
    if opts.esm:
        team = 'canonical-kernel-esm'
        archive['proposed'] = launchpad.people[team].getPPAByName(
            distribution=ubuntu, name='proposed')
        archive['release'] = launchpad.people['ubuntu-esm'].getPPAByName(
            distribution=ubuntu, name='esm')
        archive['non-esm'] = ubuntu.main_archive
    else:
        team = 'canonical-kernel-team'
        archive['proposed'] = archive['release'] = ubuntu.main_archive
    ppa = launchpad.people[team].getPPAByName(
        distribution=ubuntu, name='ppa')

    start_dir = os.getcwd()
    context = {
        'archive': archive, 'ppa': ppa, 'ubuntu': ubuntu,
        'tardir': tardir, 'tarcache': opts.caching, 'startdir': start_dir,
        'esm': opts.esm, 'diff': opts.diff_master
    }
    for bugnum in bugs:
        with ExitStack() as resources:
            cwd = mkdtemp(prefix='kernel-sru-%s-' % bugnum, dir=start_dir)
            if not opts.keep_files:
                resources.callback(shutil.rmtree, cwd)
            os.chdir(cwd)
            context['workdir'] = cwd
            process_sru_bug(
                launchpad, bugnum, review_task_callback,
                review_source_callback, context)
            os.chdir(start_dir)
@ -0,0 +1,118 @@
#!/usr/bin/python3

# Copyright (C) 2016 Canonical Ltd.
# Author: Steve Langasek <steve.langasek@canonical.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.


import re
import sys
import subprocess


class KernelWorkflowError(Exception):
    """An exception occurred with the state of the workflow bug"""


def get_name_and_version_from_bug(bug):
    title_re = re.compile(
        r'^(?P<package>[a-z0-9.-]+): (?P<version>[0-9.-]+[0-9a-z.~-]*)'
        r' -proposed tracker$')
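    # e.g. a tracker bug titled 'linux: 4.15.0-23.25 -proposed tracker'
    # (hypothetical version) yields package 'linux', version '4.15.0-23.25'.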
    match = title_re.search(bug.title)
    if not match:
        print("Ignoring bug %s, not a kernel SRU tracking bug" % bug.id)
        return (None, None)
    package = match.group('package')
    version = match.group('version')
    # FIXME: check that the package version is correct for the suite
    return (package, version)


def process_sru_bug(lp, bugnum, task_callback, source_callback, context=None):
    """Process the indicated bug and call the provided helper functions
    as needed
    """
    package_re = re.compile(
        (r'^%subuntu/(?P<release>[0-9a-z.-]+)/'
         r'\+source/(?P<package>[a-z0-9.-]+)$') % str(lp._root_uri))
    workflow_re = re.compile(
        r'^%skernel-sru-workflow/(?P<subtask>.*)' % str(lp._root_uri))
    prep_re = re.compile(r'prepare-package(?P<subpackage>.*)')

    packages = []
    source_name = None
    proposed_task = None
    updates_task = None
    security_task = None
    bug = lp.bugs[int(bugnum)]
    package, version = get_name_and_version_from_bug(bug)
    if not package or not version:
        return

    task_results = {}
    for task in bug.bug_tasks:
        # If a task is set to invalid, we do not care about it
        if task.status == 'Invalid':
            continue

        # FIXME: ok not exactly what we want, we probably want a hash?
        task_results.update(task_callback(lp, bugnum, task, context))
        task_match = workflow_re.search(str(task.target))
        if task_match:
            subtask = task_match.group('subtask')
            # FIXME: consolidate subtask / prep_match here
            prep_match = prep_re.search(subtask)
            if prep_match:
                packages.append(prep_match.group('subpackage'))

        pkg_match = package_re.search(str(task.target))
        if pkg_match:
            if source_name:
                print("Too many source packages, %s and %s, ignoring bug %s"
                      % (source_name, pkg_match.group('package'), bugnum))
                continue
            source_name = pkg_match.group('package')
            release = pkg_match.group('release')
            continue

    if not source_name:
        print("No source package to act on, skipping bug %s" % bugnum)
        return

    if source_name != package:
        print("Cannot determine base package for %s, %s vs. %s"
              % (bugnum, source_name, package))
        return

    if not packages:
        print("No packages in the prepare list, don't know what to do")
        return

    if '' not in packages:
        print("No kernel package in prepare list, only meta packages. "
              "Continue review? [yN] ", end="")
        sys.stdout.flush()
        response = sys.stdin.readline()
        if not response.strip().lower().startswith('y'):
            return

    full_packages = []
    for pkg in packages:
        if pkg == '-lbm':
            pkg = '-backports-modules-3.2.0'

        real_package = re.sub(r'^linux', 'linux' + pkg, package)
        full_packages.append(real_package)
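    # e.g. (illustrative) prepare-package subtasks '', '-meta' and '-signed'
    # for base package 'linux' expand full_packages to
    # ['linux', 'linux-meta', 'linux-signed'].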

    source_callback(lp, bugnum, task_results, full_packages, release, context)
@ -0,0 +1,46 @@
#!/usr/bin/python
# Copyright (C) 2012 Canonical Ltd.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import argparse

# See isotracker.py for setup instructions.
from isotracker import ISOTracker


def main():
    parser = argparse.ArgumentParser(
        description="List all the builds for a milestone.")
    parser.add_argument('-m', '--milestone',
                        help='list builds for MILESTONE rather than the '
                             'default')
    parser.add_argument('-t', '--target',
                        help='query an alternate QATracker')
    args = parser.parse_args()

    isotracker = ISOTracker(target=args.target)

    if args.milestone is None:
        args.milestone = isotracker.default_milestone()

    products = {}
    for entry in isotracker.tracker_products:
        products[entry.id] = entry.title

    builds = isotracker.get_builds(args.milestone)
    for build in sorted(builds, key=lambda build: products[build.productid]):
        print("{0:<60} {1:<15} {2:<15}".format(
            products[build.productid], build.status_string, build.version))
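    # Each row prints three left-aligned columns (60/15/15 wide): product
    # title, status string and version, e.g. (hypothetical data)
    # 'Ubuntu Desktop amd64', 'Ready', '20190220'.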


if __name__ == '__main__':
    main()
@ -0,0 +1,161 @@
# Copyright 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Launchpad API utility functions."""

from operator import attrgetter

from debian import debian_support
from lazr.restfulclient.resource import Entry


class PackageMissing(Exception):
    "Generic exception generated by `lputils`."

    def __init__(self, message=None):
        Exception.__init__(self, message)
        self.message = message


known_pockets = (
    "Security",
    "Updates",
    "Proposed",
    "Backports",
    )

ARCHIVE_REFERENCE_DESCRIPTION = (
    'ARCHIVE can take one of four forms: "ubuntu" for a primary archive, '
    '"~canonical-kernel-team/ubuntu/ppa" or '
    '"ppa:canonical-kernel-team/ubuntu/ppa" for a PPA, or '
    '"ubuntu/partner" for a partner or copy archive.')


def setup_location(args, default_pocket="Release"):
    archive = None
    if getattr(args, "archive", False):
        # Try parsing an archive reference first.
        archive = args.launchpad.archives.getByReference(
            reference=args.archive)
        if archive is None:
            raise AssertionError("No such archive: %s" % args.archive)
    else:
        # Otherwise derive the archive from the deprecated
        # -d/--ppa/--ppa-name/--partner options.
        if isinstance(args.distribution, Entry):
            distro = args.distribution
        else:
            distro = args.launchpad.distributions[args.distribution]
        if getattr(args, "partner", False):
            archive = [
                archive for archive in distro.archives
                if archive.name == "partner"][0]
        elif getattr(args, "ppa", None):
            archive = args.launchpad.people[args.ppa].getPPAByName(
                distribution=distro, name=args.ppa_name)
        else:
            archive = distro.main_archive

    args.archive = archive
    args.distribution = archive.distribution
    if args.suite:
        if "-" in args.suite:
            args.series, args.pocket = args.suite.rsplit("-", 1)
            args.pocket = args.pocket.title()
            if args.pocket not in known_pockets:
                args.series = args.suite
                args.pocket = "Release"
        else:
            args.series = args.suite
            args.pocket = "Release"
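        # For example (hypothetical series): --suite bionic-proposed selects
        # series 'bionic' with pocket 'Proposed', while plain --suite bionic
        # means pocket 'Release'.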
        args.series = args.distribution.getSeries(name_or_version=args.series)
    else:
        args.series = args.distribution.current_series
        args.pocket = default_pocket
        if args.pocket == "Release":
            args.suite = args.series.name
        else:
            args.suite = "%s-%s" % (args.series.name, args.pocket.lower())

    if getattr(args, "architecture", None) is not None:
        args.architectures = [args.series.getDistroArchSeries(
            archtag=args.architecture)]
    elif getattr(args, "architectures", None) is not None:
        args.architectures = sorted(
            [a for a in args.series.architectures
             if a.architecture_tag in args.architectures],
            key=attrgetter("architecture_tag"))
    else:
        args.architectures = sorted(
            args.series.architectures, key=attrgetter("architecture_tag"))


def find_newest_publication(method, version_attr, **kwargs):
    """Hack around being unable to pass status=("Published", "Pending")."""
    published_pubs = method(status="Published", **kwargs)
    pending_pubs = method(status="Pending", **kwargs)
    try:
        newest_published = published_pubs[0]
        newest_published_ver = getattr(newest_published, version_attr)
    except IndexError:
        try:
            return pending_pubs[0]
        except IndexError:
            if kwargs["version"] is not None:
                try:
                    return method(**kwargs)[0]
                except IndexError:
                    return None
            else:
                return None
    try:
        newest_pending = pending_pubs[0]
        newest_pending_ver = getattr(newest_pending, version_attr)
    except IndexError:
        return newest_published
    if debian_support.version_compare(
            newest_published_ver, newest_pending_ver) > 0:
        return newest_published
    else:
        return newest_pending


def find_latest_published_binaries(args, package):
    target_binaries = []
    for architecture in args.architectures:
        binary = find_newest_publication(
            args.archive.getPublishedBinaries, "binary_package_version",
            binary_name=package, version=args.version,
            distro_arch_series=architecture, pocket=args.pocket,
            exact_match=True)
        if binary is not None:
            target_binaries.append(binary)
    if not target_binaries:
        raise PackageMissing(
            "Could not find binaries for '%s/%s' in %s" %
            (package, args.version, args.suite))
    return target_binaries


def find_latest_published_source(args, package):
    source = find_newest_publication(
        args.archive.getPublishedSources, "source_package_version",
        source_name=package, version=args.version,
        distro_series=args.series, pocket=args.pocket, exact_match=True)
    if source is None:
        raise PackageMissing(
            "Could not find source '%s/%s' in %s" %
            (package, args.version, args.suite))
    return source
Binary file not shown.
@ -0,0 +1,325 @@
#!/usr/bin/python
# Manage the Launchpad build farm.
#
# Copyright 2012-2014 Canonical Ltd.
# Author: William Grant <wgrant@ubuntu.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function

import argparse
from datetime import (
    datetime,
    timedelta,
    )
from itertools import groupby
import re
from textwrap import dedent

from launchpadlib.launchpad import Launchpad
from lazr.restfulclient.errors import PreconditionFailed
import pytz


def format_timedelta(delta):
    value = None
    hours = delta.seconds // 3600
    minutes = (delta.seconds - (hours * 3600)) // 60
    if delta.days > 0:
        value = delta.days
        unit = 'day'
    elif hours > 0:
        value = hours
        unit = 'hour'
    elif minutes > 0:
        value = minutes
        unit = 'minute'
    if value is not None:
        return 'for %d %s%s' % (value, unit, 's' if value > 1 else '')
    return ''
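
# e.g. format_timedelta(timedelta(hours=5)) == 'for 5 hours', while
# format_timedelta(timedelta(seconds=30)) == '' (too short to report).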


parser = argparse.ArgumentParser(description=dedent("""\
    List and manage Launchpad builders.

    If no changes are specified (--auto, --manual, --enable, --disable,
    --set-failnotes, --set-virtual, --set-non-virtual, or --set-vm-host), a
    detailed listing of matching builders will be shown.
    """))
parser.add_argument(
    "-l", "--lp-instance", dest="lp_instance", default="production",
    help="use the specified Launchpad instance (default: production)")

parser.add_argument(
    "-q", "--quiet", dest="quiet", action="store_true", default=None,
    help="only display errors")
parser.add_argument(
    "-v", "--verbose", dest="verbose", action="store_true", default=None,
    help="display more detail")

parser.add_argument(
    "-a", "--arch", dest="arch", default=None,
    help="update only builders of this architecture (e.g. i386)")
parser.add_argument(
    "-b", "--builder", dest="builders", action="append", metavar="BUILDER",
    help="update only this builder (may be given multiple times)")
parser.add_argument(
    "--failnotes", dest="failnotes", default=None,
    help="update only builders with failnotes matching this regexp")
parser.add_argument(
    "-e", "--enabled", action="store_const", dest="ok_filter", const=True,
    help="update only enabled builders")
parser.add_argument(
    "-d", "--disabled", action="store_const", dest="ok_filter", const=False,
    help="update only disabled builders")
parser.add_argument(
    "--cleaning", action="store_const", dest="cleaning_filter", const=True,
    help="update only builders that are stuck cleaning")
parser.add_argument(
    "--virtual", action="store_const", dest="virtual_filter", const=True,
    help="update only virtual builders")
parser.add_argument(
    "--non-virtual", action="store_const", dest="virtual_filter", const=False,
    help="update only non-virtual builders")
parser.add_argument(
    "--builder-version", dest="builder_version", default=None,
    help="update only builders running this launchpad-buildd version")

dispatch_group = parser.add_mutually_exclusive_group()
dispatch_group.add_argument(
    "--auto", dest="auto", action="store_true", default=None,
    help="enable automatic dispatching")
dispatch_group.add_argument(
    "--manual", dest="manual", action="store_true", default=None,
    help="disable automatic dispatching")
ok_group = parser.add_mutually_exclusive_group()
ok_group.add_argument(
    "--enable", dest="enable", action="store_true", default=None,
    help="mark the builder as OK")
ok_group.add_argument(
    "--disable", dest="disable", action="store_true", default=None,
    help="mark the builder as not OK")
ok_group.add_argument(
    "--reset", dest="reset", action="store_true", default=None,
    help="reset the builder by disabling and re-enabling it")
parser.add_argument(
    "--set-failnotes", dest="set_failnotes", default=None,
    help="set the builder's failnotes")
virtual_group = parser.add_mutually_exclusive_group()
virtual_group.add_argument(
    "--set-virtual", dest="set_virtual", action="store_true", default=None,
    help="mark the builder as virtual")
virtual_group.add_argument(
    "--set-non-virtual", dest="set_non_virtual",
    action="store_true", default=None,
    help="mark the builder as non-virtual")
visible_group = parser.add_mutually_exclusive_group()
visible_group.add_argument(
    "--set-visible", dest="set_visible", action="store_true", default=None,
    help="mark the builder as visible")
visible_group.add_argument(
    "--set-invisible", dest="set_invisible", action="store_true", default=None,
    help="mark the builder as invisible")
parser.add_argument(
    "--set-vm-host", dest="set_vm_host", default=None,
    help="set the builder's VM host")

args = parser.parse_args()

changes = {}
if args.manual:
    changes['manual'] = True
if args.auto:
    changes['manual'] = False
if args.enable:
    changes['builderok'] = True
if args.disable or args.reset:
    # In the --reset case, we'll re-enable it manually after applying this.
    changes['builderok'] = False
if args.set_failnotes is not None:
    changes['failnotes'] = args.set_failnotes or None
if args.set_virtual:
    changes['virtualized'] = True
if args.set_non_virtual:
    changes['virtualized'] = False
if args.set_visible:
    changes['active'] = True
if args.set_invisible:
    changes['active'] = False
if args.set_vm_host is not None:
    changes['vm_host'] = args.set_vm_host or None

lp = Launchpad.login_with(
    'manage-builders', args.lp_instance, version='devel')

processor_names = {p.self_link: p.name for p in lp.processors}


def get_processor_name(processor_link):
    if processor_link not in processor_names:
        processor_names[processor_link] = lp.load(processor_link).name
    return processor_names[processor_link]


def get_clean_status_duration(builder):
    return datetime.now(pytz.UTC) - builder.date_clean_status_changed


def is_cleaning(builder):
    return (
        builder.builderok
        and builder.current_build_link is None
        and builder.clean_status in ('Dirty', 'Cleaning')
        and get_clean_status_duration(builder) > timedelta(minutes=10))


candidates = []
for builder in lp.builders:
    if not builder.active:
        continue
    if args.ok_filter is not None and builder.builderok != args.ok_filter:
        continue
    if (args.cleaning_filter is not None
            and is_cleaning(builder) != args.cleaning_filter):
        continue
    if (args.virtual_filter is not None
            and builder.virtualized != args.virtual_filter):
        continue
    if args.builders and builder.name not in args.builders:
        continue
    if (args.arch
            and not any(get_processor_name(p) == args.arch
                        for p in builder.processors)):
        continue
    if (args.failnotes and (
            not builder.failnotes
            or not re.search(args.failnotes, builder.failnotes))):
        continue
    if (args.builder_version is not None and
            args.builder_version != builder.version):
        continue
    candidates.append(builder)


def builder_sort_key(builder):
    return (
        not builder.virtualized,
        # https://launchpad.net/builders sorts by Processor.id, but that
        # isn't accessible on the webservice. This produces vaguely similar
        # results in practice and looks reasonable.
        sorted(builder.processors),
        builder.vm_host,
        builder.vm_reset_protocol if builder.virtualized else '',
        builder.name)

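# Apply the requested attribute changes, retrying a couple of times on
# PreconditionFailed: Launchpad updates builders concurrently, so a stale
# object can fail lp_save() and needs a refresh before retrying.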
def apply_changes(obj, **changes):
    count = 3
    for i in range(count):
        changed = False
        for change, value in changes.items():
            if getattr(obj, change) != value:
                setattr(obj, change, value)
                changed = True
        if changed:
            try:
                obj.lp_save()
                break
            except PreconditionFailed:
                if i == count - 1:
                    raise
                obj.lp_refresh()
    return changed


candidates.sort(key=builder_sort_key)

count_changed = count_unchanged = 0

if changes and not args.quiet:
    print('Updating %d builders.' % len(candidates))

if args.verbose:
    clump_sort_key = lambda b: builder_sort_key(b)[:4]
else:
    clump_sort_key = lambda b: builder_sort_key(b)[:2]
builder_clumps = [
    list(group) for _, group in groupby(candidates, clump_sort_key)]

for clump in builder_clumps:
    if not changes and not args.quiet:
        if clump != builder_clumps[0]:
            print()
        exemplar = clump[0]
        archs = ' '.join(get_processor_name(p) for p in exemplar.processors)
        if args.verbose:
            if exemplar.virtualized:
                virt_desc = '(v %s)' % exemplar.vm_reset_protocol
            else:
                virt_desc = '(nv)'
            print(
                '%s %s%s' % (
                    virt_desc, archs,
                    (' [%s]' % exemplar.vm_host) if exemplar.vm_host else ''))
        else:
            print(
                '%-4s %s' % ('(v)' if exemplar.virtualized else '(nv)', archs))

    for candidate in clump:
        changed = apply_changes(candidate, **changes)
        if args.reset and not candidate.builderok:
            if apply_changes(candidate, builderok=True):
                changed = True
        if changed:
            count_changed += 1
            if not args.quiet:
                print('* %s' % candidate.name)
        elif changes:
            if not args.quiet:
                print('  %s' % candidate.name)
            count_unchanged += 1
        else:
            duration = get_clean_status_duration(candidate)
            if not candidate.builderok:
                # Disabled builders always need explanation.
                if candidate.failnotes:
                    failnote = candidate.failnotes.strip().splitlines()[0]
                else:
                    failnote = 'no failnotes'
                status = 'DISABLED: %s' % failnote
            elif is_cleaning(candidate):
                # Idle builders that have been dirty or cleaning for more
                # than ten minutes are a little suspicious.
                status = '%s %s' % (
                    candidate.clean_status, format_timedelta(duration))
            elif (candidate.current_build_link is not None
                    and duration > timedelta(days=1)):
                # Something building for more than a day deserves
                # investigation.
                status = 'Building %s' % format_timedelta(duration)
            else:
                status = ''
            if args.verbose:
                if candidate.current_build_link is not None:
                    dirty_flag = 'B'
                elif candidate.clean_status == 'Dirty':
                    dirty_flag = 'D'
                elif candidate.clean_status == 'Cleaning':
                    dirty_flag = 'C'
                else:
                    dirty_flag = ' '
                print(
                    '  %-18s %-8s %s%s%s %s' % (
                        candidate.name, candidate.version,
                        dirty_flag, 'M' if candidate.manual else ' ',
                        'X' if not candidate.builderok else ' ',
                        status))
            elif not args.quiet:
                print('  %-20s %s' % (candidate.name, status))

if changes and not args.quiet:
    print("Changed: %d. Unchanged: %d." % (count_changed, count_unchanged))
@ -0,0 +1,215 @@
#! /usr/bin/python

# Copyright 2013-2019 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Manage build base images."""

from __future__ import print_function

__metaclass__ = type

import argparse
import hashlib
import subprocess
import sys
try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

from launchpadlib.launchpad import Launchpad
from launchpadlib.uris import web_root_for_service_root
from ubuntutools.question import YesNoQuestion

import lputils


# Convenience aliases.
image_types = {
    "chroot": "Chroot tarball",
    "lxd": "LXD image",
    }
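# e.g. a command-line "-i lxd" is normalised in main() below to the full
# image type name "LXD image".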


def describe_image_type(image_type):
    if image_type == "Chroot tarball":
        return "base chroot tarball"
    elif image_type == "LXD image":
        return "base LXD image"
    else:
        raise ValueError("unknown image type '%s'" % image_type)


def get_chroot(args):
    das = args.architectures[0]
    suite_arch = "%s/%s" % (args.suite, das.architecture_tag)
    url = das.getChrootURL(pocket=args.pocket, image_type=args.image_type)
    if url is None:
        print("No %s for %s" % (
            describe_image_type(args.image_type), suite_arch))
        return 1
    if args.dry_run:
        print("Would fetch %s" % url)
    else:
        # We use wget here to save on having to implement a progress bar
        # with urlretrieve.
        command = ["wget"]
        if args.filepath is not None:
            command.extend(["-O", args.filepath])
        command.append(url)
        subprocess.check_call(command)
    return 0


def info_chroot(args):
    das = args.architectures[0]
    url = das.getChrootURL(pocket=args.pocket, image_type=args.image_type)
    if url is not None:
        print(url)
    return 0


def remove_chroot(args):
    das = args.architectures[0]
    previous_url = das.getChrootURL(
        pocket=args.pocket, image_type=args.image_type)
    if previous_url is not None:
        print("Previous %s: %s" % (
            describe_image_type(args.image_type), previous_url))
    suite_arch = "%s/%s" % (args.suite, das.architecture_tag)
    if args.dry_run:
        print("Would remove %s from %s" % (
            describe_image_type(args.image_type), suite_arch))
    else:
        if not args.confirm_all:
            if YesNoQuestion().ask(
                    "Remove %s from %s" % (
                        describe_image_type(args.image_type), suite_arch),
                    "no") == "no":
                return 0
        das.removeChroot(pocket=args.pocket, image_type=args.image_type)
    return 0


def set_chroot(args):
    das = args.architectures[0]
    previous_url = das.getChrootURL(
        pocket=args.pocket, image_type=args.image_type)
    if previous_url is not None:
        print("Previous %s: %s" % (
            describe_image_type(args.image_type), previous_url))
    suite_arch = "%s/%s" % (args.suite, das.architecture_tag)
    if args.build_url:
        target = "%s from %s" % (args.filepath, args.build_url)
    else:
        target = args.filepath
    if args.dry_run:
        print("Would set %s for %s to %s" % (
            describe_image_type(args.image_type), suite_arch, target))
    else:
        if not args.confirm_all:
            if YesNoQuestion().ask(
                    "Set %s for %s to %s" % (
                        describe_image_type(args.image_type), suite_arch,
                        target),
                    "no") == "no":
                return 0
        if args.build_url:
            das.setChrootFromBuild(
                livefsbuild=urlparse(args.build_url).path,
                filename=args.filepath,
                pocket=args.pocket, image_type=args.image_type)
        else:
            with open(args.filepath, "rb") as f:
                data = f.read()
            sha1sum = hashlib.sha1(data).hexdigest()
            das.setChroot(
                data=data, sha1sum=sha1sum,
                pocket=args.pocket, image_type=args.image_type)
    return 0


commands = {
    "get": get_chroot,
    "info": info_chroot,
    "remove": remove_chroot,
    "set": set_chroot,
    }


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_argument(
        "-n", "--dry-run", default=False, action="store_true",
        help="only show removals that would be performed")
    parser.add_argument(
        "-y", "--confirm-all", default=False, action="store_true",
        help="do not ask for confirmation")
    parser.add_argument(
        "-d", "--distribution", default="ubuntu",
        metavar="DISTRIBUTION", help="manage base images for DISTRIBUTION")
    parser.add_argument(
        "-s", "--suite", "--series", dest="suite", metavar="SUITE",
        help="manage base images for SUITE")
    parser.add_argument(
        "-a", "--architecture", metavar="ARCHITECTURE", required=True,
        help="manage base images for ARCHITECTURE")
    parser.add_argument(
        "-i", "--image-type", metavar="TYPE", default="Chroot tarball",
        help="manage base images of type TYPE")
    parser.add_argument(
        "--from-build", dest="build_url", metavar="URL",
        help="Live filesystem build URL to set base image from")
    parser.add_argument(
        "-f", "--filepath", metavar="PATH",
        help="Base image file path (or file name if --from-build is given)")
    parser.add_argument("command", choices=sorted(commands.keys()))
    args = parser.parse_args()

    if args.command == "set" and args.filepath is None:
        parser.error("The set command requires a base image file path (-f).")

    if args.image_type not in image_types.values():
        image_type = image_types.get(args.image_type.lower())
        if image_type is not None:
            args.image_type = image_type
        else:
            parser.error("Unknown image type '%s'." % args.image_type)

    if args.command in ("get", "info"):
        login_method = Launchpad.login_anonymously
    else:
        login_method = Launchpad.login_with
    args.launchpad = login_method(
        "manage-chroot", args.launchpad_instance, version="devel")
    lputils.setup_location(args)
|
||||
|
||||
if args.command == "set" and args.build_url:
|
||||
parsed_build_url = urlparse(args.build_url)
|
||||
if parsed_build_url.scheme != "":
|
||||
service_host = args.launchpad._root_uri.host
|
||||
web_host = urlparse(web_root_for_service_root(
|
||||
str(args.launchpad._root_uri))).hostname
|
||||
if parsed_build_url.hostname not in (service_host, web_host):
|
||||
parser.error(
|
||||
"%s is not on this Launchpad instance (%s)" % (
|
||||
args.build_url, web_host))
|
||||
|
||||
return commands[args.command](args)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
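
# Illustrative invocations (suite/architecture values are examples only;
# the script logs in to Launchpad as "manage-chroot"):
#   manage-chroot -s bionic -a amd64 info
#   manage-chroot -s bionic -a amd64 -i lxd get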
@ -0,0 +1,67 @@
#! /usr/bin/python

# Copyright (C) 2017 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Mark a suite dirty.

This is useful on the rare occasions when Launchpad needs to be forced to
republish a suite even though it isn't itself aware of a reason to do so.
"""

from __future__ import print_function

from optparse import OptionParser
import sys

from launchpadlib.launchpad import Launchpad

import lputils


def mark_suite_dirty(options):
    if options.dry_run:
        print(
            "Would mark %s dirty in %s." % (options.suite, options.archive))
    else:
        options.archive.markSuiteDirty(
            distroseries=options.series, pocket=options.pocket)
        print("Marked %s dirty in %s." % (options.suite, options.archive))


def main():
    parser = OptionParser(
        usage="usage: %prog -s suite",
        epilog=lputils.ARCHIVE_REFERENCE_DESCRIPTION)
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-n", "--dry-run", default=False, action="store_true",
        help="only show what would be done")
    parser.add_option("-A", "--archive", help="operate on ARCHIVE")
    parser.add_option(
        "-s", "--suite", metavar="SUITE", help="mark SUITE dirty")
    options, _ = parser.parse_args()

    options.distribution = "ubuntu"
    options.launchpad = Launchpad.login_with(
        "mark-suite-dirty", options.launchpad_instance, version="devel")
    lputils.setup_location(options)

    mark_suite_dirty(options)


if __name__ == '__main__':
    sys.exit(main())
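
# For example (suite name illustrative), a dry run against the default
# primary archive would be:
#   mark-suite-dirty -s bionic-updates -n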
@ -0,0 +1,31 @@
#!/bin/sh

# quick and dirty script to report, for the first transition shown in
# update_output, the list of packages that are blocked only by autopkgtests.

# this looks only at the first transition because this is normally the
# biggest one needing immediate attention.

# Author: Steve Langasek <steve.langasek@ubuntu.com>

set -e

cleanup() {
    if [ -n "$WORKDIR" ]; then
        rm -rf "$WORKDIR"
    fi
}

WORKDIR=
trap cleanup 0 2 3 5 10 13 15
WORKDIR=$(mktemp -d)

URLBASE=https://people.canonical.com/~ubuntu-archive/proposed-migration/
for file in update_output.txt update_output_notest.txt; do
    wget -q "$URLBASE/$file" -O - \
        | sed -e'1,/easy:/d; s/^[[:space:]]\+\* [^:]*: //; q' \
        | sed -e's/, /\n/g' > "$WORKDIR/$file"
done

LC_COLLATE=C join -v2 "$WORKDIR/update_output_notest.txt" \
    "$WORKDIR/update_output.txt"
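
# Note: join -v2 prints the lines unique to update_output.txt, i.e. packages
# held back in the first transition only when autopkgtest results are
# considered, which is exactly the "blocked only by autopkgtests" set.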
@ -0,0 +1,85 @@
#!/usr/bin/python

# Copyright (C) 2011, 2012 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# Move all bugs of a milestone to another milestone. This is usually done after
# the milestone was released.

import optparse
import sys

from launchpadlib.launchpad import Launchpad


def parse_args():
    '''Parse command line.

    Return (options, arguments).
    '''
    parser = optparse.OptionParser(
        usage='Usage: %prog [options] <from milestone> <to milestone>')
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-s", "--series",
        help="Ubuntu release to work on (default: current development "
             "release)")
    parser.add_option(
        "-n", "--no-act", "--dry-run", action="store_true",
        help="Only show bugs that would be moved, but do not act on them")
    options, args = parser.parse_args()

    if len(args) != 2:
        parser.error('Need to specify "from" and "to" milestone. See --help')

    return (options, args)


if __name__ == '__main__':
    (options, (from_ms, to_ms)) = parse_args()

    launchpad = Launchpad.login_with(
        'ubuntu-archive-tools', options.launchpad_instance)
    ubuntu = launchpad.distributions['ubuntu']
    if options.series:
        distro_series = ubuntu.getSeries(name_or_version=options.series)
    else:
        distro_series = ubuntu.current_series

    # convert milestone names to LP objects
    lp_milestones = {}
    for m in distro_series.all_milestones:
        lp_milestones[m.name] = m
    try:
        from_ms = lp_milestones[from_ms]
    except KeyError:
        sys.stderr.write('ERROR: Unknown milestone %s\n' % from_ms)
        sys.exit(1)
    try:
        to_ms = lp_milestones[to_ms]
    except KeyError:
        sys.stderr.write('ERROR: Unknown milestone %s\n' % to_ms)
        sys.exit(1)

    # move them over
    if options.no_act:
        print('Would move the following bug tasks to %s:' % to_ms.name)
    else:
        print('Moving the following bug tasks to %s:' % to_ms.name)
    for task in from_ms.searchTasks():
        print(task.title)
        if not options.no_act:
            task.milestone_link = to_ms
            task.lp_save()
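
# Typical use after a milestone is released (script and milestone names here
# are illustrative): preview with -n first, then run for real, e.g.
#   ./move-milestoned-bugs ubuntu-19.04-beta ubuntu-19.04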
@ -0,0 +1,280 @@
#!/usr/bin/python

# Copyright (C) 2011, 2012 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# Generate a HTML report of current NBS binary packages from a checkrdepends
# output directory

from __future__ import print_function

from collections import defaultdict
import csv
from optparse import OptionParser
import os
import sys
import time

from charts import make_chart, make_chart_header


def parse_checkrdepends_file(path, pkgmap):
    '''Parse one checkrdepends file into the NBS map'''

    cur_component = None
    cur_arch = None

    with open(path) as f:
        for line in f:
            if line.startswith('-- '):
                (cur_component, cur_arch) = line.split('/', 1)[1].split()[:2]
                continue
            assert cur_component
            assert cur_arch

            rdep = line.strip().split()[0]
            pkgmap.setdefault(rdep, (cur_component, []))[1].append(cur_arch)


def _pkg_removable(pkg, nbs, checked_v):
    '''Recursively check if package is removable.

    checked_v is the working set of already checked vertices, to avoid
    infinite loops.
    '''
    checked_v.add(pkg)
    for rdep in nbs.get(pkg, []):
        if rdep in checked_v:
            continue
        if rdep not in nbs:
            try:
                checked_v.remove(rdep)
            except KeyError:
                pass
            return False
        if not _pkg_removable(rdep, nbs, checked_v):
            try:
                checked_v.remove(rdep)
            except KeyError:
                pass
            return False
    return True


def get_removables(nbs):
    '''Get set of removable packages.

    This includes packages with no rdepends and disconnected subgraphs, i. e.
    clusters of NBS packages which only depend on each other.
    '''
    removable = set()

    for p in nbs:
        if p in removable:
            continue
        checked_v = set()
        if _pkg_removable(p, nbs, checked_v):
            # we can add the entire cluster here, not just p; avoids
            # re-checking the other vertices in that cluster
            removable.update(checked_v)

    return removable
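
# A worked example of the cluster logic (package names hypothetical): if
# libfoo1 and libfoo-bin are both NBS and each one's only reverse dependency
# is the other, _pkg_removable() never reaches a non-NBS vertex, so
# get_removables() adds the whole {libfoo1, libfoo-bin} cluster in one go.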


def html_report(options, nbs, removables):
    '''Generate HTML report from NBS map.'''

    print('''\
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>NBS packages</title>
<style type="text/css">
body { background: #CCCCB0; color: black; }
a { text-decoration: none; }
table { border-collapse: collapse; border-style: none none;
margin-bottom: 3ex; empty-cells: show; }
table th { text-align: left; border-style: solid none none none;
border-width: 3px; padding-right: 10px; }
table td { vertical-align:top; text-align: left; border-style: dotted none;
border-width: 1px; padding-right: 10px; }
.normal { }
.removable { color: green; font-weight: bold; }
.nbs { color: blue; }
.componentsup { font-size: 70%%; color: red; font-weight: bold; }
.componentunsup { font-size: 70%%; color: darkred; }
</style>
%s
</head>
<body>
<h1>NBS: Binary packages not built from any source</h1>

<h2>Archive Administrator commands</h2>
<p>Run this command to remove NBS packages which are not required any more:</p>
''' % make_chart_header())

    print('<p style="font-family: monospace">remove-package -m NBS '
          '-d %s -s %s -b -y %s</p>' %
          (options.distribution, options.suite, ' '.join(sorted(removables))))

    print('''
<h2>Reverse dependencies</h2>

<p><span class="nbs">Reverse dependencies which are NBS themselves</span><br/>
<span class="removable">NBS package which can be removed safely</span></p>
<table>
''')
    reverse_nbs = defaultdict(list)  # non_nbs_pkg -> [nbspkg1, ...]
    pkg_component = {}  # non_nbs_pkg -> (component, component_class)

    for pkg in sorted(nbs):
        nbsmap = nbs[pkg]
        if pkg in removables:
            cls = 'removable'
        else:
            cls = 'normal'
        print('<tr><th colspan="4"><span class="%s">%s</span></th></tr>' %
              (cls, pkg), end="")
        for rdep in sorted(nbsmap):
            (component, arches) = nbsmap[rdep]

            if component in ('main', 'restricted'):
                component_cls = 'sup'
            else:
                component_cls = 'unsup'

            if rdep in nbs:
                if rdep in removables:
                    cls = 'removable'
                else:
                    cls = 'nbs'
            else:
                cls = 'normal'
                reverse_nbs[rdep].append(pkg)
                pkg_component[rdep] = (component, component_cls)

            print('<tr><td> </td>', end='')
            print('<td><span class="%s">%s</span></td> ' % (cls, rdep),
                  end='')
            print('<td><span class="component%s">%s</span></td>' %
                  (component_cls, component), end='')
            print('<td>%s</td></tr>' % ' '.join(arches))

    print('''</table>
<h2>Packages which depend on NBS packages</h2>
<table>''')

    def sort_rev_nbs(k1, k2):
        len_cmp = cmp(len(reverse_nbs[k1]), len(reverse_nbs[k2]))
        if len_cmp == 0:
            return cmp(k1, k2)
        else:
            return -len_cmp

    for pkg in sorted(reverse_nbs, cmp=sort_rev_nbs):
        print('<tr><td>%s</td> '
              '<td><span class="component%s">%s</span></td><td>' % (
                  pkg, pkg_component[pkg][1], pkg_component[pkg][0]), end="")
        print(" ".join(sorted(reverse_nbs[pkg])), end="")
        print('</td></tr>')

    print('</table>')

    if options.csv_file is not None:
        print("<h2>Over time</h2>")
        print(make_chart(
            os.path.basename(options.csv_file), ["removable", "total"]))

    print('<p><small>Generated at %s.</small></p>' %
          time.strftime('%Y-%m-%d %H:%M:%S %Z', time.gmtime(options.time)))
    print('</body></html>')


def main():
    parser = OptionParser(
        usage="%prog <checkrdepends output directory>",
        description="Generate an HTML report of current NBS binary packages.")
    parser.add_option('-d', '--distribution', default='ubuntu')
    parser.add_option('-s', '--suite', default='disco')
    parser.add_option(
        '--csv-file', help='record CSV time series data in this file')
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.error("need a checkrdepends output directory")

    options.time = time.time()

    # pkg -> rdep_pkg -> (component, [arch1, arch2, ...])
    nbs = defaultdict(dict)

    for f in os.listdir(args[0]):
        if f.startswith('.') or f.endswith('.html'):
            continue
        parse_checkrdepends_file(os.path.join(args[0], f), nbs[f])

    #with open('/tmp/dot', 'w') as dot:
    #    print('digraph {', file=dot)
    #    print(' ratio 0.1', file=dot)
    #    pkgnames = set(nbs)
    #    for m in nbs.itervalues():
    #        pkgnames.update(m)
    #    for n in pkgnames:
    #        print(' %s [label="%s"' % (n.replace('-', '').replace('.', ''), n),
    #          end="", file=dot)
    #        if n in nbs:
    #            print(', style="filled", fillcolor="lightblue"', end="", file=dot)
    #        print(']', file=dot)
    #    print(file=dot)
    #    for pkg, map in nbs.iteritems():
    #        for rd in map:
    #            print(' %s -> %s' % (
    #                pkg.replace('-', '').replace('.', ''),
    #                rd.replace('-', '').replace('.', '')), file=dot)
    #    print('}', file=dot)

    removables = get_removables(nbs)

    html_report(options, nbs, removables)

    if options.csv_file is not None:
        if sys.version < "3":
            open_mode = "ab"
            open_kwargs = {}
        else:
            open_mode = "a"
            open_kwargs = {"newline": ""}
        csv_is_new = not os.path.exists(options.csv_file)
        with open(options.csv_file, open_mode, **open_kwargs) as csv_file:
            # Field names deliberately hardcoded; any changes require
            # manually rewriting the output file.
            fieldnames = [
                "time",
                "removable",
                "total",
            ]
            csv_writer = csv.DictWriter(csv_file, fieldnames)
            if csv_is_new:
                csv_writer.writeheader()
            csv_writer.writerow({
                "time": int(options.time * 1000),
                "removable": len(removables),
                "total": len(nbs),
            })


if __name__ == '__main__':
    main()
@ -0,0 +1,167 @@
#! /usr/bin/python

# Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.
# Authors:
#   Martin Pitt <martin.pitt@ubuntu.com>
#   Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Process binary NEW entries from Debian.

Find and accept all binary NEW entries built by source packages synced
directly from Debian. These do not typically need significant review.
"""

from __future__ import print_function

import atexit
from optparse import OptionParser
import os
import shutil
import subprocess
import sys
import tempfile
try:
    from urllib.parse import unquote, urlsplit
    from urllib.request import urlopen, urlretrieve
except ImportError:
    from urllib import unquote, urlretrieve
    from urllib2 import urlopen
    from urlparse import urlsplit

from launchpadlib.launchpad import Launchpad
from ubuntutools.question import YesNoQuestion

import lputils


CONSUMER_KEY = "new-binary-debian-universe"


temp_dir = None


def ensure_temp_dir():
    global temp_dir
    if temp_dir is None:
        temp_dir = tempfile.mkdtemp()
        atexit.register(shutil.rmtree, temp_dir)


def find_matching_uploads(options, explicit_suite):
    kwargs = {}
    if explicit_suite:
        kwargs["pocket"] = options.pocket
    uploads = options.series.getPackageUploads(
        archive=options.archive, status="New", **kwargs)
    for upload in uploads:
        if upload.contains_build:
            if upload.changes_file_url is None:
                continue
            # display_name is inaccurate for the theoretical case of an
            # upload containing multiple builds, but in practice it's close
            # enough.
            source = upload.display_name.split(",")[0]
            if source == "linux":
                continue
            binaries = upload.getBinaryProperties()
            binaries = [b for b in binaries if "customformat" not in b]
            if [b for b in binaries if "ubuntu" in b["version"]]:
                continue
            changes_file = urlopen(upload.changes_file_url)
            try:
                changes = changes_file.read()
            finally:
                changes_file.close()
            if (" unstable; urgency=" not in changes and
                    " experimental; urgency=" not in changes):
                continue

            if options.lintian:
                ensure_temp_dir()
                for url in upload.binaryFileUrls():
                    if (not url.endswith("_all.deb") and
                            not url.endswith("_i386.deb")):
                        continue
                    filename = unquote(urlsplit(url)[2].split("/")[-1])
                    print("Fetching %s ..." % filename)
                    path = os.path.join(temp_dir, filename)
                    urlretrieve(url, path)
                    lintian = subprocess.Popen(
                        ["lintian", path], stdout=subprocess.PIPE,
                        universal_newlines=True)
                    out = lintian.communicate()[0]
                    if lintian.returncode != 0:
                        print("\n=== %s ===\n%s" % (filename, out),
                              file=sys.stderr)

            yield upload, binaries


def find_and_accept(options, explicit_suite):
    for upload, binaries in list(
            find_matching_uploads(options, explicit_suite)):
        if options.source and upload.package_name not in options.source:
            continue
        display = "%s/%s (%s)" % (
            upload.display_name, upload.display_version, upload.display_arches)
        if options.dry_run:
            print("Would accept %s" % display)
        else:
            for binary in binaries:
                if "customformat" not in binary:
                    print("%s | %s Component: %s Section: %s Priority: %s" % (
                        "N" if binary["is_new"] else "*", binary["name"],
                        binary["component"], binary["section"],
                        binary["priority"]))
            if not options.confirm_all:
                if YesNoQuestion().ask("Accept %s" % display, "no") == "no":
                    continue
            print("Accepting %s" % display)
            upload.acceptFromQueue()


def main():
    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-d", "--distribution", metavar="DISTRO", default="ubuntu",
        help="look in distribution DISTRO")
    parser.add_option(
        "-s", "--suite", metavar="SUITE", help="look in suite SUITE")
    parser.add_option(
        "-n", "--dry-run", default=False, action="store_true",
        help="don't make any modifications")
    parser.add_option(
        "-y", "--confirm-all", default=False, action="store_true",
        help="do not ask for confirmation")
    parser.add_option(
        "--lintian", default=False, action="store_true",
        help="run packages through Lintian")
    parser.add_option(
        "--source", action="append", metavar="NAME",
        help="only consider source package NAME")
    options, _ = parser.parse_args()

    options.launchpad = Launchpad.login_with(
        CONSUMER_KEY, options.launchpad_instance, version="devel")
    explicit_suite = options.suite is not None
    lputils.setup_location(options)

    find_and_accept(options, explicit_suite)


if __name__ == '__main__':
    main()
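
# In short, an upload qualifies for auto-acceptance only if its changes file
# targets Debian unstable or experimental and none of its binaries carry an
# "ubuntu" version string, i.e. it is a plain sync from Debian.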
@ -0,0 +1,107 @@
#! /usr/bin/python

from __future__ import print_function

import atexit
from contextlib import closing
import gzip
from optparse import OptionParser
import shutil
import sys
import tempfile
try:
    from urllib.request import urlretrieve
except ImportError:
    from urllib import urlretrieve

import apt_pkg
from launchpadlib.launchpad import Launchpad

import lputils


tempdir = None


def ensure_tempdir():
    global tempdir
    if not tempdir:
        tempdir = tempfile.mkdtemp(prefix="orphaned-sources")
        atexit.register(shutil.rmtree, tempdir)


def decompress_open(tagfile):
    if tagfile.startswith("http:") or tagfile.startswith("ftp:"):
        url = tagfile
        tagfile = urlretrieve(url)[0]

    if tagfile.endswith(".gz"):
        ensure_tempdir()
        decompressed = tempfile.mktemp(dir=tempdir)
        with closing(gzip.GzipFile(tagfile)) as fin:
            with open(decompressed, "wb") as fout:
                fout.write(fin.read())
        return open(decompressed, "r")
    else:
        return open(tagfile, "r")


def archive_base(archtag):
    if archtag in ("amd64", "i386", "src"):
        return "http://archive.ubuntu.com/ubuntu"
    else:
        return "http://ports.ubuntu.com/ubuntu-ports"


def source_names(options):
    sources = set()
    for component in "main", "restricted", "universe", "multiverse":
        url = "%s/dists/%s/%s/source/Sources.gz" % (
            archive_base("src"), options.suite, component)
        print("Reading %s ..." % url, file=sys.stderr)
        for section in apt_pkg.TagFile(decompress_open(url)):
            sources.add(section["Package"])
    return sources


def referenced_sources(options):
    sources = set()
    for component in "main", "restricted", "universe", "multiverse":
        for arch in options.architectures:
            archtag = arch.architecture_tag
            for suffix in "", "/debian-installer":
                url = "%s/dists/%s/%s%s/binary-%s/Packages.gz" % (
                    archive_base(archtag), options.suite, component, suffix,
                    archtag)
                print("Reading %s ..." % url, file=sys.stderr)
                for section in apt_pkg.TagFile(decompress_open(url)):
                    if "Source" in section:
                        sources.add(section["Source"].split(" ", 1)[0])
                    else:
                        sources.add(section["Package"])
    return sources


def main():
    parser = OptionParser(
        description="Check for sources without any remaining binaries.")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option("-s", "--suite", help="check this suite")
    options, _ = parser.parse_args()

    options.distribution = "ubuntu"
    options.launchpad = Launchpad.login_anonymously(
        "orphaned-sources", options.launchpad_instance)
    lputils.setup_location(options)

    if options.pocket != "Release":
        parser.error("cannot run on non-release pocket")

    orphaned_sources = source_names(options) - referenced_sources(options)
    for source in sorted(orphaned_sources):
        print(source)


if __name__ == '__main__':
    main()
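
# The final report is a plain set difference: every package named in a
# Sources index that no Packages entry references (via its Source field or
# its own name) has no remaining binaries and gets printed.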
@ -0,0 +1,165 @@
#! /usr/bin/python

from __future__ import print_function

import atexit
import bz2
from collections import defaultdict
import json
import lzma
from optparse import OptionParser
import requests
import shutil
import sys
import tempfile

import apt_pkg
from launchpadlib.launchpad import Launchpad

import lputils


tempdir = None


def ensure_tempdir():
    global tempdir
    if not tempdir:
        tempdir = tempfile.mkdtemp(prefix="unsubscribed-packages")
        atexit.register(shutil.rmtree, tempdir)


def decompress_open(tagfile):
    if tagfile.startswith("http:") or tagfile.startswith("ftp:"):
        url = tagfile
        tagfile = requests.get(url)
        if tagfile.status_code == 404:
            url = url.replace(".xz", ".bz2")
            tagfile = requests.get(url)

    ensure_tempdir()
    decompressed = tempfile.mktemp(dir=tempdir)
    with open(decompressed, "wb") as fout:
        if url.endswith(".xz"):
            fout.write(lzma.decompress(tagfile.content))
        elif url.endswith(".bz2"):
            fout.write(bz2.decompress(tagfile.content))
    return open(decompressed, "r")


def archive_base(archtag):
    if archtag in ("amd64", "i386", "src"):
        return "http://archive.ubuntu.com/ubuntu"
    else:
        return "http://ports.ubuntu.com/ubuntu-ports"


def source_names(options):
    sources = dict()
    for suite in options.suites:
        for component in ["main", "restricted"]:
            url = "%s/dists/%s/%s/source/Sources.xz" % (
                archive_base("src"), suite, component)
            if not options.quiet:
                print("Reading %s ..." % url, file=sys.stderr)
            for section in apt_pkg.TagFile(decompress_open(url)):
                pkg = section["Package"]
                if suite == options.dev_suite:
                    sources[pkg] = True
                else:
                    if sources.get(pkg, False) == True:
                        continue
                    sources[pkg] = False
    return sources


def main():
    parser = OptionParser(
        description="Check for source packages in main or restricted in "
                    "active distro series and return a json file of the "
                    "teams to which they map.")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-u", "--unsubscribed", action="store_true", default=False,
        help="Only return packages which have no subscriber")
    parser.add_option(
        "-p", "--print", action="store_true", default=False,
        dest="display",
        help="Print results to screen instead of a json file")
    parser.add_option(
        "-o", "--output-file", default="package-team-mapping.json",
        help="output JSON to this file")
    parser.add_option(
        "-q", "--quiet", action="store_true", default=False,
        help="Quieten progress messages")
    options, _ = parser.parse_args()
    options.suite = None
    options.distribution = "ubuntu"
    options.launchpad = Launchpad.login_with(
        "unsubscribed-packages", options.launchpad_instance)
    launchpad = options.launchpad
    ubuntu = launchpad.distributions[options.distribution]
    options.suites = []
    for series in ubuntu.series:
        # very few lucid packages are supported
        if series.name == 'lucid':
            continue
        if series.active:
            options.suites.append(series.name)
        # find the dev series
        if series.status in ['Active Development', 'Pre-release Freeze']:
            options.dev_suite = series.name

    lputils.setup_location(options)

    team_names = [
        'checkbox-bugs',
        'desktop-packages',
        'documentation-packages',
        'foundations-bugs',
        'kernel-packages',
        'kubuntu-bugs',
        'landscape',
        'maas-maintainers',
        'mir-team',
        'pkg-ime',
        'snappy-dev',
        'translators-packages',
        'ubuntu-openstack',
        'ubuntu-printing',
        'ubuntu-security',
        'ubuntu-server',
    ]

    data = {"unsubscribed": []}
    subscriptions = defaultdict(list)
    for team_name in team_names:
        data[team_name] = []
        team = launchpad.people[team_name]
        team_subs = team.getBugSubscriberPackages()
        for src_pkg in team_subs:
            subscriptions[src_pkg.name].append(team_name)
            data[team_name].append(src_pkg.name)

    source_packages = source_names(options)
    for source_package in sorted(source_packages):
        # we only care about ones people are not subscribed to in the
        # dev release
        if (source_package not in subscriptions and
                source_packages[source_package]):
            data["unsubscribed"].append(source_package)
            if options.display:
                print("No team is subscribed to: %s" %
                      source_package)
        else:
            if not options.unsubscribed:
                if options.display:
                    print("%s is subscribed to: %s" %
                          (team_name, source_package))

    if not options.display:
        with open(options.output_file, 'w') as json_file:
            json_file.write(json.dumps(data, indent=4))


if __name__ == '__main__':
    main()
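
# Reading the output: source_names() records True only for packages still
# present in the development series, so the "unsubscribed" list contains
# exactly the dev-series packages with no team bug subscription.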
@ -0,0 +1,106 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright (C) 2013 Canonical Ltd.
# Author: Stéphane Graber <stgraber@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import argparse
import os
import time

from launchpadlib.launchpad import Launchpad
from codecs import open

parser = argparse.ArgumentParser(
    description="Generate list of packages and uploaders for all packagesets.")
parser.add_argument("target", metavar="TARGET",
                    help="Target directory")
parser.add_argument("-a", "--all", action="store_true",
                    help="Sync all series instead of just the active ones")
args = parser.parse_args()

# Authenticated login to Launchpad as anonymous
# doesn't let us list the uploaders
lp = Launchpad.login_with('package_sets_report', 'production')

ubuntu = lp.distributions['ubuntu']

# Get the list of series
if args.all:
    ubuntu_series = [series for series in ubuntu.series
                     if series.status != "Future"]
else:
    ubuntu_series = [series for series in ubuntu.series if series.active]

# cache
teams = {}

for series in ubuntu_series:
    series_name = str(series.name)

    if not os.path.exists(os.path.join(args.target, series_name)):
        os.makedirs(os.path.join(args.target, series_name))

    for pkgset in lp.packagesets.getBySeries(distroseries=series):
        report = ""
        report += "Name: %s\n" % pkgset.name
        report += "Description: %s\n" % pkgset.description
        report += "Owner: %s\n" % pkgset.owner.display_name
        report += "Creation date: %s\n" % pkgset.date_created

        # List all the source packages
        report += "\nPackages:\n"
        for pkg in sorted(list(pkgset.getSourcesIncluded())):
            report += " - %s\n" % str(pkg)

        # List all the sub-package sets
        report += "\nSub-package sets:\n"
        for child in sorted(list(pkgset.setsIncluded(direct_inclusion=True))):
            report += " - %s\n" % child.name

        # List all the uploaders, when it's a team, show the members count
        report += "\nUploaders:\n"
        for archive in ubuntu.archives:
            for uploader in sorted(
                    list(archive.getUploadersForPackageset(
                        packageset=pkgset)),
                    key=lambda uploader: uploader.person.display_name):

                if uploader.person.is_team:
                    if uploader.person.name not in teams:
                        team = uploader.person
                        teams[uploader.person.name] = team
                    else:
                        team = teams[uploader.person.name]

                    report += " - %s (%s) (%s) (%s) (%s members)\n" % \
                        (team.display_name,
                         team.name,
                         uploader.permission,
                         archive.displayname,
                         len(team.members))
                    for member in sorted(list(team.members),
                                         key=lambda person: person.name):
                        report += "   - %s (%s)\n" % (member.display_name,
                                                      member.name)
                else:
                    report += " - %s (%s) (%s)\n" % \
                        (uploader.person.name,
                         uploader.person.display_name,
                         uploader.permission)

        report += "\nGenerated at: %s\n" % time.asctime()
        with open(os.path.join(args.target, series_name, pkgset.name),
                  "w+", encoding="utf-8") as fd:
            fd.write(report)
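
# Illustrative run (script name and target path are examples): write one
# report per packageset, per active series, under /tmp/packagesets:
#   ./package-sets-report /tmp/packagesets
# Add --all to also cover every non-Future series.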
@ -0,0 +1,93 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright (C) 2013 Canonical Ltd.
# Author: Stéphane Graber <stgraber@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function
from launchpadlib.launchpad import Launchpad

import argparse
import os

parser = argparse.ArgumentParser(
    description="Generate a user readable report of all archive permissions")
parser.add_argument("target", metavar="TARGET",
                    help="Target directory")
args = parser.parse_args()

if not os.path.exists(args.target):
    os.makedirs(args.target)

lp = Launchpad.login_with('permissions', 'production', version="devel")

entries = {"teams": {}, "individuals": {}}

for archive in lp.distributions['ubuntu'].archives:
    for permission in archive.getAllPermissions():
        if permission.person.is_team:
            target = "teams"
        else:
            target = "individuals"

        if permission.person.name not in entries[target]:
            entries[target][permission.person.name] = []

        if permission.component_name:
            entry = "%s: component '%s'" % (permission.permission,
                                            permission.component_name)
            if permission.distro_series_name:
                entry += " for '%s'" % (permission.distro_series_name)
            entries[target][permission.person.name].append(entry)

        if permission.package_set_name:
            entry = "%s: packageset '%s'" % (permission.permission,
                                             permission.package_set_name)
            if permission.distro_series_name:
                entry += " for '%s'" % (permission.distro_series_name)
            entries[target][permission.person.name].append(entry)

        if permission.source_package_name:
            entry = "%s: source '%s'" % (permission.permission,
                                         permission.source_package_name)
            if permission.distro_series_name:
                entry += " for '%s'" % (permission.distro_series_name)
            entries[target][permission.person.name].append(entry)

        if permission.pocket:
            entry = "%s: pocket '%s'" % (permission.permission,
                                         permission.pocket)
            if permission.distro_series_name:
                entry += " for '%s'" % (permission.distro_series_name)
            entries[target][permission.person.name].append(entry)

ubuntudev = [person.name
             for person in lp.people['ubuntu-dev'].getMembersByStatus(
                 status="Approved")]

# Add known exceptions:
ubuntudev += ["ubuntu-backporters", "ubuntu-security", "ubuntu-archive",
              "ubuntu-release", "ubuntu-sru"]

for target, people in entries.items():
    with open(os.path.join(args.target, target), "w+") as fd:
        for user, permissions in sorted(people.items()):
            fd.write("=== %s ===\n" % user)
            if user not in ubuntudev:
                fd.write("Isn't a direct member of ~ubuntu-dev!\n")

            for package in sorted(permissions):
                fd.write(" - %s\n" % package)
            fd.write("\n")
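
# The output is two files under the target directory, "teams" and
# "individuals", each listing one "=== name ===" block per uploader with
# their component, packageset, source and pocket permissions.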
@ -0,0 +1,730 @@
#!/usr/bin/python

# Copyright (C) 2013 Canonical Ltd.
# Author: Brian Murray <brian.murray@canonical.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

'''Increment the Phased-Update-Percentage for a package

Check to see whether or not there is a regression (new crash bucket or
increase in rate of errors about a package) using errors.ubuntu.com and if
not increment the Phased-Update-Percentage for the package.
Additionally, generate an html report regarding state of phasing of
packages and email uploaders regarding issues with their uploads.
'''

from __future__ import print_function

import apt
import codecs
import csv
import datetime
import lazr
import logging
import os
import simplejson as json
import time

from collections import defaultdict, OrderedDict
from email import utils
from optparse import OptionParser

import lputils

try:
    from urllib.parse import quote
    from urllib.request import urlopen
except ImportError:
    from urllib import quote, urlopen

from launchpadlib.launchpad import Launchpad


def get_primary_email(lp_user):
    try:
        lp_user_email = lp_user.preferred_email_address.email
    except ValueError as e:
        if 'server-side permission' in e.message:
            logging.info("%s has hidden their email addresses" %
                         lp_user.web_link)
            return ''
        logging.info("Error accessing %s's preferred email address: %s" %
                     (lp_user.web_link, e.message))
        return ''
    return lp_user_email


def set_pup(current_pup, new_pup, release, suite, src_pkg):
    options.series = release
    options.suite = suite
    options.pocket = 'Updates'
    options.version = None
    source = lputils.find_latest_published_source(options, src_pkg)
    publications = [
        binary for binary in source.getPublishedBinaries()
        if not binary.is_debug]

    for pub in publications:
        if pub.status != 'Published':
            continue
        pub.changeOverride(new_phased_update_percentage=new_pup)
        if new_pup != 0:
            logging.info('Incremented p-u-p for %s %s from %s%% to %s%%' %
                         (suite, pub.binary_package_name,
                          current_pup, new_pup))
        else:
            logging.info('Set p-u-p to 0%% from %s%% for %s %s' %
                         (current_pup, suite, pub.binary_package_name))
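
# Note: changeOverride() is applied per binary publication, so every
# non-debug binary built from the source moves from current_pup to new_pup
# together in one pass.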
|
||||
|
||||
|
||||
def generate_html_report(releases, buckets):
|
||||
import tempfile
|
||||
import shutil
|
||||
with tempfile.NamedTemporaryFile() as report:
|
||||
report.write('''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
|
||||
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||
<title>Released Ubuntu SRUs</title>
|
||||
<style type="text/css">
|
||||
body { background: #CCCCB0; color: black; }
|
||||
a { text-decoration: none; }
|
||||
table { border-collapse: collapse; border-style: solid none;
|
||||
border-width: 3px; margin-bottom: 3ex; empty-cells: show; }
|
||||
table th { text-align: left; border-style: none none dotted none;
|
||||
border-width: 1px; padding-right: 10px; }
|
||||
table td { text-align: left; border-style: none none dotted none;
|
||||
border-width: 1px; padding-right: 10px; }
|
||||
.noborder { border-style: none; }
|
||||
a { color: blue; }
|
||||
a:visited { color: black; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Phasing %sUbuntu Stable Release Updates</h1>
|
||||
''' % ('', 'and Released ')[options.fully_phased])
|
||||
report.write(
|
||||
'<p>Generated: %s by '
|
||||
'<a href="http://bazaar.launchpad.net/'
|
||||
'~ubuntu-archive/ubuntu-archive-tools/trunk/annotate/head%%3A/'
|
||||
'phased-updater">phased-updater</a></p>' %
|
||||
time.strftime('%F %T UTC', time.gmtime()))
|
||||
report.write('''<p>A <a
|
||||
href="https://wiki.ubuntu.com/StableReleaseUpdates ">stable release
|
||||
update</a> has been created for the following packages, i. e. they have
|
||||
a new version in -updates, and either an increased rate of crashes has
|
||||
been detected or an error has been found that only exists with the new
|
||||
version of the package.\n''')
|
||||
for release in releases:
|
||||
rname = release.name
|
||||
if not buckets[rname]:
|
||||
continue
|
||||
report.write('''<h3>%s</h3>\n''' % rname)
|
||||
report.write('''<table>\n''')
|
||||
report.write('''<tr>
|
||||
<th>Package</th>
|
||||
<th>Version</th>
|
||||
<th>Update Percentage</th>
|
||||
<th>Rate Increase</th>
|
||||
<th>Problems</th>
|
||||
<th>Days</th>
|
||||
</tr>''')
|
||||
for pub_source in buckets[rname]:
|
||||
pkg = pub_source.source_package_name
|
||||
version = pub_source.source_package_version
|
||||
age = (datetime.datetime.now() -
|
||||
pub_source.date_published.replace(tzinfo=None)).days
|
||||
update_percentage = buckets[rname][pub_source].get('pup', 100)
|
||||
if not options.fully_phased and update_percentage == 100:
|
||||
continue
|
||||
lpurl = '%s/ubuntu/+source/%s/' % (LP_BASE_URL, pkg)
|
||||
report.write('''<tr>
|
||||
<td><a href="%s">%s</a></td>
|
||||
<td><a href="%s">%s</a></td>\n''' %
|
||||
(lpurl, pkg, lpurl + version, version))
|
||||
report.write(' <td>')
|
||||
if update_percentage == 0:
|
||||
binary_pub = pub_source.getPublishedBinaries()[0]
|
||||
arch = binary_pub.distro_arch_series.architecture_tag
|
||||
bpph_url = ('%s/ubuntu/%s/%s/%s' %
|
||||
(LP_BASE_URL, rname, arch,
|
||||
binary_pub.binary_package_name))
|
||||
report.write('<a href="%s">%s%% of users' %
|
||||
(bpph_url, update_percentage))
|
||||
previous_pup = \
|
||||
buckets[rname][pub_source]['previous_pup']
|
||||
if previous_pup != 0:
|
||||
report.write(' (was %s%%)</a>' % previous_pup)
|
||||
else:
|
||||
report.write('</a>')
|
||||
else:
|
||||
report.write('%s%% of users' % update_percentage)
|
||||
report.write('</td>\n')
|
||||
if 'rate' in buckets[rname][pub_source]:
|
||||
data = buckets[rname][pub_source]['rate']
|
||||
report.write(' <td><a href="%s">+%s</a></td>\n' %
|
||||
(data[1], data[0]))
|
||||
else:
|
||||
report.write(' <td></td>\n')
|
||||
report.write(' <td>')
|
||||
if 'buckets' in buckets[rname][pub_source]:
|
||||
# TODO: it'd be great if these were sorted
|
||||
for bucket in buckets[rname][pub_source]['buckets']:
|
||||
if 'problem' in bucket:
|
||||
# create a short version of the problem's hash
|
||||
phash = bucket.replace(
|
||||
'https://errors.ubuntu.com/problem/', '')[0:6]
|
||||
report.write('<a href="%s">%s</a> ' % (bucket,
|
||||
phash))
|
||||
else:
|
||||
report.write('<a href="%s">problem</a> ' % bucket)
|
||||
else:
|
||||
report.write('')
|
||||
report.write('</td>\n')
|
||||
report.write(' <td>%s</td>\n' % age)
|
||||
report.write('</tr>\n')
|
||||
report.write('''</table>\n''')
|
||||
report.write('''</body>\n''')
|
||||
report.write('''</html>''')
|
||||
report.flush()
|
||||
shutil.copy2(report.name, '%s/%s' % (os.getcwd(), REPORT_FILE))
|
||||
os.chmod('%s/%s' % (os.getcwd(), REPORT_FILE), 0o644)
|
||||
|
||||
|
||||
def create_email_notifications(releases, spph_buckets):
|
||||
import smtplib
|
||||
from email.mime.text import MIMEText
|
||||
notifications = defaultdict(list)
|
||||
try:
|
||||
with codecs.open(NOTIFICATIONS, 'r', encoding='utf-8') as notify_file:
|
||||
for line in notify_file.readlines():
|
||||
line = line.strip('\n').split(', ')
|
||||
# LP name, problem, pkg_version
|
||||
person = line[0]
|
||||
problem = line[1]
|
||||
pkg = line[2]
|
||||
pkg_version = line[3]
|
||||
notifications[person].append((problem, pkg, pkg_version))
|
||||
except IOError:
|
||||
pass
|
||||
bdmurray_mail = 'brian@ubuntu.com'
|
||||
b_body = ('Your upload of %s version %s to %s has resulted in %s'
|
||||
'error%s that %s first reported about this version of the '
|
||||
'package. The error%s follow%s:\n\n'
|
||||
'%s\n\n')
|
||||
i_body = ('Your upload of %s version %s to %s has resulted in an '
|
||||
'increased daily rate of errors for the package compared '
|
||||
'to the previous two weeks. For problems currently being '
|
||||
'reported about the package see:\n\n'
|
||||
'%s&period=week\n\n')
|
||||
remedy = ('You can view the current status of the phasing of all '
|
||||
'Stable Release Updates, including yours, at:\n\n'
|
||||
'http://people.canonical.com/~ubuntu-archive/%s\n\n'
|
||||
'Further phasing of this update has been stopped until the '
|
||||
'errors have either been fixed or determined to not be a '
|
||||
'result of this Stable Release Update. In the event of '
|
||||
'the latter please let a member of the Ubuntu Stable Release '
|
||||
'Updates team (~ubuntu-sru) know so that phasing of the update '
|
||||
'can proceed.' % (REPORT_FILE))
|
||||
for release in releases:
|
||||
rname = release.name
|
||||
for spph in spph_buckets[rname]:
|
||||
update_percentage = spph_buckets[rname][spph].get('pup', 100)
|
||||
# never send emails about updates that are fully phased
|
||||
if update_percentage == 100:
|
||||
continue
|
||||
if 'buckets' not in spph_buckets[rname][spph] and \
|
||||
'rate' not in spph_buckets[rname][spph]:
|
||||
continue
|
||||
signer = spph.package_signer
|
||||
# copies of packages from debian won't have a signer
|
||||
if not signer:
|
||||
continue
|
||||
# not an active user of Launchpad
|
||||
if not signer.is_valid:
|
||||
logging.info('%s not mailed as they are not a valid LP user' %
|
||||
signer)
|
||||
continue
|
||||
            signer_email = get_primary_email(signer)
            signer_name = signer.name
            # use the changes file as a backup method for determining email addresses
            changes_file_url = spph.changesFileUrl()
            try:
                changes_file = urlopen(changes_file_url)
                for line in changes_file.readlines():
                    line = line.strip()
                    if line.startswith('Changed-By:'):
                        # slice off the field name; lstrip() would strip a
                        # character set, not a prefix
                        changer = line[len('Changed-By:'):].strip().decode(
                            'utf-8')
                        changer_name, changer_email = utils.parseaddr(
                            changer.strip())
                        break
            except IOError:
                pass
            creator = spph.package_creator
            creator_email = ''
            pkg = spph.source_package_name
            version = spph.source_package_version
            if not signer_email and signer_name == creator.name:
                if not changer_email:
                    logging.info("No contact email found for %s %s %s" %
                                 (rname, pkg, version))
                    continue
                signer_email = changer_email
                logging.info(
                    "Used changes file to find contact email for %s %s %s" %
                    (rname, pkg, version))
            if 'buckets' in spph_buckets[rname][spph]:
                # see if they've been emailed about the bucket before
                notices = []
                if signer_name in notifications:
                    notices = notifications[signer_name]
                for notice, notified_pkg, notified_version in notices:
                    if notice in spph_buckets[rname][spph]['buckets']:
                        if (notified_pkg != pkg and
                                notified_version != version):
                            continue
                        spph_buckets[rname][spph]['buckets'].remove(notice)
                if len(spph_buckets[rname][spph]['buckets']) == 0:
                    continue
                receivers = [bdmurray_mail]
                quantity = len(spph_buckets[rname][spph]['buckets'])
                msg = MIMEText(
                    b_body % (pkg, version, rname,
                              ('an ', '')[quantity != 1],
                              ('', 's')[quantity != 1],
                              ('was', 'were')[quantity != 1],
                              ('', 's')[quantity != 1],
                              ('s', '')[quantity != 1],
                              '\n'.join(spph_buckets[rname][spph]['buckets']))
                    + remedy)
                subject = '[%s/%s] Possible Regression' % (rname, pkg)
                msg['Subject'] = subject
                msg['From'] = EMAIL_SENDER
                msg['Reply-To'] = bdmurray_mail
                receivers.append(signer_email)
                msg['To'] = signer_email
                if creator != signer and creator.is_valid:
                    creator_email = get_primary_email(creator)
                    # fall back to the email found in the changes file
                    if not creator_email:
                        creator_email = changer_email
                    receivers.append(creator_email)
                    msg['Cc'] = '%s' % changer_email
                smtp = smtplib.SMTP('localhost')
                smtp.sendmail(EMAIL_SENDER, receivers, msg.as_string())
                smtp.quit()
                logging.info('%s mailed about %s' % (receivers, subject))
                # add signer, problem, pkg, version to notifications csv file
                with codecs.open(NOTIFICATIONS, 'a',
                                 encoding='utf-8') as notify_file:
                    for bucket in spph_buckets[rname][spph]['buckets']:
                        notify_file.write('%s, %s, %s, %s\n' %
                                          (signer_name, bucket,
                                           pkg, version))
                        if changer_email:
                            notify_file.write('%s, %s, %s, %s\n' %
                                              (creator.name, bucket,
                                               pkg, version))
            if 'rate' in spph_buckets[rname][spph]:
                # see if they have been emailed about the increased rate
                # for this package version before
                notices = []
                if signer_name in notifications:
                    notices = notifications[signer_name]
                if ('increased-rate', pkg, version) in notices:
                    continue
                receivers = [bdmurray_mail]
                msg = MIMEText(
                    i_body % (pkg, quote(version), rname,
                              spph_buckets[rname][spph]['rate'][1])
                    + remedy)
                subject = '[%s/%s] Increase in crash rate' % (rname, pkg)
                msg['Subject'] = subject
                msg['From'] = EMAIL_SENDER
                msg['Reply-To'] = bdmurray_mail
                receivers.append(signer_email)
                msg['To'] = signer_email
                if creator != signer and creator.is_valid:
                    # fall back to the email found in the changes file
                    if not creator_email:
                        creator_email = changer_email
                    receivers.append(creator_email)
                    msg['Cc'] = '%s' % creator_email
                smtp = smtplib.SMTP('localhost')
                smtp.sendmail(EMAIL_SENDER, receivers, msg.as_string())
                smtp.quit()
                logging.info('%s mailed about %s' % (receivers, subject))
                # add signer, increased-rate, pkg, version to
                # notifications csv
                with codecs.open(NOTIFICATIONS, 'a',
                                 encoding='utf-8') as notify_file:
                    notify_file.write('%s, increased-rate, %s, %s\n' %
                                      (signer_name, pkg, version))
                    if creator_email:
                        notify_file.write('%s, increased-rate, %s, %s\n' %
                                          (creator.name, pkg, version))

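
# The e-mail bodies above pick word forms with the (singular, plural)[cond]
# idiom: True == 1, so a boolean can index a two-tuple. A minimal sketch of
# the same agreement trick, kept purely for illustration (the real b_body
# and i_body templates are defined earlier in this script):
def _agree(n, singular, plural):
    # index 0 when n == 1, index 1 otherwise
    return (singular, plural)[n != 1]
# e.g. _agree(1, 'was', 'were') == 'was'; _agree(3, '', 's') == 's'

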
def new_buckets(archive, release, src_pkg, version):
    # can't use created_since here because it may have been uploaded
    # before the release date
    spph = archive.getPublishedSources(distro_series=release,
                                       source_name=src_pkg, exact_match=True)
    pubs = [(ph.date_published, ph.source_package_version) for ph in spph
            if ph.status != 'Deleted' and ph.pocket != 'Backports'
            and ph.pocket != 'Proposed'
            and ph.date_published is not None]
    pubs = sorted(pubs)
    # it is possible for the same version to appear multiple times
    numbers = set([pub[1] for pub in pubs])
    versions = sorted(numbers, cmp=apt.apt_pkg.version_compare)
    # it never appeared in release e.g. cedarview-drm-drivers in precise
    try:
        previous_version = versions[-2]
    except IndexError:
        return False
    new_version = versions[-1]
    new_buckets_url = ('%spackage-version-new-buckets/?format=json&'
                       'package=%s&previous_version=%s&new_version=%s' %
                       (BASE_ERRORS_URL, quote(src_pkg),
                        quote(previous_version), quote(new_version)))
    try:
        new_buckets_file = urlopen(new_buckets_url)
    except IOError:
        return 'error'
    # If we don't receive an OK response from the Error Tracker we should
    # not increment the phased-update-percentage.
    if new_buckets_file.getcode() != 200:
        logging.error('HTTP error retrieving %s' % new_buckets_url)
        return 'error'
    try:
        new_buckets_data = json.load(new_buckets_file)
    except json.decoder.JSONDecodeError:
        logging.error('Error getting new buckets at %s' % new_buckets_url)
        return 'error'
    if 'error_message' in new_buckets_data.keys():
        logging.error('Error getting new buckets at %s' % new_buckets_url)
        return 'error'
    if len(new_buckets_data['objects']) == 0:
        return False
    buckets = []
    for bucket in new_buckets_data['objects']:
        # Do not consider package install failures until they have more
        # information added to the instances.
        if bucket['function'].startswith('package:'):
            continue
        # 16.04's duplicate signature for ProblemType: Package doesn't
        # start with 'package:' so check for strings in the bucket.
        if 'is already installed and configured' in bucket['function']:
            logging.info('Skipped already installed bucket %s' %
                         bucket['web_link'])
            continue
        # Skip failed buckets as they don't have useful tracebacks
        if bucket['function'].startswith('failed:'):
            logging.info('Skipped failed to retrace bucket %s' %
                         bucket['web_link'])
            continue
        # check to see if the version appears for the affected release
        versions_url = '%sversions/?format=json&id=%s' % (
            BASE_ERRORS_URL, quote(bucket['function'].encode('utf-8')))
        try:
            versions_data_file = urlopen(versions_url)
        except IOError:
            logging.error('Error getting release versions at %s' %
                          versions_url)
            # don't return an error because it's better to have a false
            # positive in this case
            buckets.append(bucket['web_link'])
            continue
        try:
            versions_data = json.load(versions_data_file)
        except json.decoder.JSONDecodeError:
            logging.error('Error getting release versions at %s' %
                          versions_url)
            # don't return an error because it's better to have a false
            # positive in this case
            buckets.append(bucket['web_link'])
            continue
        if 'error_message' in versions_data:
            # don't return an error because it's better to have a false
            # positive in this case
            buckets.append(bucket['web_link'])
            continue
        # -1 means that release isn't affected
        if len([vd[release.name] for vd in versions_data['objects']
                if vd['version'] == new_version and
                vd[release.name] != -1]) == 0:
            continue
        buckets.append(bucket['web_link'])
    logging.info('Details (new buckets): %s' % new_buckets_url)
    return buckets

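
# new_buckets() above sorts Debian version strings with the Python 2-only
# cmp= keyword argument. A minimal sketch of the Python 3 equivalent,
# assuming python3-apt is installed (illustrative only; nothing in this
# script calls it):
def _sorted_versions_py3(version_strings):
    from functools import cmp_to_key
    import apt_pkg
    apt_pkg.init_system()  # apt_pkg needs initialising before comparisons
    return sorted(version_strings, key=cmp_to_key(apt_pkg.version_compare))

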
def package_previous_version(release, src_pkg, version):
    # return the previous package version from updates or release and
    # the publication date of the current package version
    ubuntu = launchpad.distributions['ubuntu']
    primary = ubuntu.getArchive(name='primary')
    current_version_date = None
    previous_version = None
    # Archive.getPublishedSources returns results ordered by
    # (name, id) where the id number is autocreated, so the newest
    # package versions are returned first
    for spph in primary.getPublishedSources(source_name=src_pkg,
                                            distro_series=release,
                                            exact_match=True):
        if spph.pocket == 'Proposed':
            continue
        if spph.status == 'Deleted':
            continue
        if spph.source_package_version == version:
            if not current_version_date:
                current_version_date = spph.date_published.date()
            elif spph.date_published.date() > current_version_date:
                current_version_date = spph.date_published.date()
        if spph.pocket == 'Updates' and spph.status == 'Superseded':
            return (spph.source_package_version, current_version_date)
        if spph.pocket == 'Release' and spph.status == 'Published':
            return (spph.source_package_version, current_version_date)
    return (None, None)

def crash_rate_increase(release, src_pkg, version, last_pup):
    pvers, date = package_previous_version(release, src_pkg, version)
    date = str(date).replace('-', '')
    if not pvers:
        # joyent-mdata-client was put in updates w/o being in the release
        # pocket
        return False
    release_name = 'Ubuntu ' + release.version
    rate_url = (BASE_ERRORS_URL +
                'package-rate-of-crashes/?format=json'
                '&exclude_proposed=True'
                '&release=%s&package=%s&old_version=%s&new_version=%s'
                '&phased_update_percentage=%s&date=%s' %
                (quote(release_name), quote(src_pkg), quote(pvers),
                 quote(version), last_pup, date))
    try:
        rate_file = urlopen(rate_url)
    except IOError:
        return 'error'
    # If we don't receive an OK response from the Error Tracker we should
    # not increment the phased-update-percentage.
    if rate_file.getcode() != 200:
        logging.error('HTTP error retrieving %s' % rate_url)
        return 'error'
    try:
        rate_data = json.load(rate_file)
    except json.decoder.JSONDecodeError:
        logging.error('Error getting rate at %s' % rate_url)
        return 'error'
    if 'error_message' in rate_data.keys():
        logging.error('Error getting rate at %s' % rate_url)
        return 'error'
    logging.info('Details (rate increase): %s' % rate_url)
    # this may not be useful if the buckets creating the increase have
    # failed to retrace
    for data in rate_data['objects']:
        if data['increase']:
            previous_amount = data['previous_average']
            # this may happen if there were no crashes reported about
            # the previous version of the package
            if not previous_amount:
                logging.info('No previous crash data found for %s %s' %
                             (src_pkg, pvers))
                previous_amount = 0
            if 'difference' in data:
                increase = data['difference']
            elif 'this_count' in data:
                # 2013-06-17 this can be negative due to the portion of the
                # day math (we take the average crashes and multiply them by
                # the fraction of hours that have passed so far in the day)
                current_amount = data['this_count']
                increase = current_amount - previous_amount
            logging.info('[%s/%s] increase: %s, previous_avg: %s' %
                         (release_name.replace('Ubuntu ', ''), src_pkg,
                          increase, previous_amount))
            if '&version=' not in data['web_link']:
                link = data['web_link'] + '&version=%s' % version
            else:
                link = data['web_link']
            logging.info('Details (rate increase): %s' % link)
            return (increase, link)

def main():
    # TODO: make email code less redundant
    # TODO: modify HTTP_USER_AGENT (both versions of urllib)
    # TODO: Open bugs for regressions when false positives reduced
    ubuntu = launchpad.distributions['ubuntu']
    archive = ubuntu.getArchive(name='primary')
    options.archive = archive

    overrides = defaultdict(list)
    rate_overrides = []
    override_file = csv.reader(open(OVERRIDES, 'r'))
    for row in override_file:
        if len(row) < 3:
            continue
        # package, version, problem
        if row[0].startswith('#'):
            continue
        package = row[0].strip()
        version = row[1].strip()
        problem = row[2].strip()
        if problem == 'increased-rate':
            rate_overrides.append((package, version))
        else:
            overrides[(package, version)].append(problem)

    releases = []
    for series in ubuntu.series:
        if series.active:
            if series.status == 'Active Development':
                continue
            releases.append(series)
    releases.reverse()
    issues = {}
    for release in releases:
        # We can't use release.datereleased because some SRUs are 0 day
        cdate = release.date_created
        rname = release.name
        rvers = release.version
        issues[rname] = OrderedDict()
        # XXX - starting with raring
        if rname in ['precise', 'vivid']:
            continue
        pub_sources = archive.getPublishedSources(
            created_since_date=cdate,
            order_by_date=True,
            pocket='Updates', status='Published', distro_series=release)
        for pub_source in pub_sources:
            src_pkg = pub_source.source_package_name
            version = pub_source.source_package_version
            pbs = None
            try:
                pbs = [pb for pb in pub_source.getPublishedBinaries()
                       if pb.phased_update_percentage is not None]
            # workaround for LP: #1695113
            except lazr.restfulclient.errors.ServerError as e:
                if 'HTTP Error 503' in str(e):
                    logging.info('Skipping 503 Error for %s' % src_pkg)
                    pass
            if not pbs:
                continue
            # pbs is known to be non-empty here; the p-u-p is currently the
            # same for all binary packages
            last_pup = pbs[0].phased_update_percentage
            max_pup = 0
            if last_pup == 0:
                for allpb in archive.getPublishedBinaries(
                        exact_match=True, pocket='Updates',
                        binary_name=pbs[0].binary_package_name):
                    if allpb.distro_arch_series.distroseries == release:
                        if allpb.phased_update_percentage > 0:
                            max_pup = allpb.phased_update_percentage
                            break
            if max_pup and last_pup == 0:
                rate_increase = crash_rate_increase(
                    release, src_pkg, version, max_pup)
            else:
                rate_increase = crash_rate_increase(
                    release, src_pkg, version, last_pup)
            problems = new_buckets(archive, release, src_pkg, version)
            # If there was an error connecting to errors.ubuntu.com,
            # neither increase nor stop the phased update.
            if rate_increase == 'error' or problems == 'error':
                logging.info("Skipping %s due to failure to get data "
                             "from Errors." % src_pkg)
                continue
            if problems:
                if (src_pkg, version) in overrides:
                    not_overrode = set(problems).difference(
                        set(overrides[(src_pkg, version)]))
                    if len(not_overrode) > 0:
                        issues[rname][pub_source] = {}
                        issues[rname][pub_source]['buckets'] = not_overrode
                else:
                    issues[rname][pub_source] = {}
                    issues[rname][pub_source]['buckets'] = problems
            if rate_increase and (src_pkg, version) not in rate_overrides:
                if pub_source not in issues[rname]:
                    issues[rname][pub_source] = {}
                issues[rname][pub_source]['rate'] = rate_increase
            if pbs:
                if pub_source not in issues[rname]:
                    issues[rname][pub_source] = {}
                # phasing has stopped so check what the max value was
                if last_pup == 0:
                    issues[rname][pub_source]['max_pup'] = max_pup
                issues[rname][pub_source]['pup'] = last_pup
            suite = rname + '-updates'
            if pub_source not in issues[rname]:
                continue
            elif ('rate' not in issues[rname][pub_source] and
                  'buckets' not in issues[rname][pub_source] and
                  pbs):
                # there is not an error so increment the phasing
                current_pup = issues[rname][pub_source]['pup']
                # if this is an update that is restarting we want to start
                # at the same percentage the stoppage happened at
                if 'max_pup' in issues[rname][pub_source]:
                    current_pup = issues[rname][pub_source]['max_pup']
                new_pup = current_pup + PUP_INCREMENT
                if not options.no_act:
                    set_pup(current_pup, new_pup, release, suite, src_pkg)
                issues[rname][pub_source]['pup'] = new_pup
            elif pbs:
                # there is an error and pup is not None so stop the phasing
                current_pup = issues[rname][pub_source]['pup']
                if 'max_pup' in issues[rname][pub_source]:
                    issues[rname][pub_source]['previous_pup'] = \
                        issues[rname][pub_source]['max_pup']
                else:
                    issues[rname][pub_source]['previous_pup'] = current_pup
                new_pup = 0
                if (not options.no_act and
                        issues[rname][pub_source]['pup'] != 0):
                    set_pup(current_pup, new_pup, release, suite, src_pkg)
                issues[rname][pub_source]['pup'] = new_pup
    generate_html_report(releases, issues)
    if options.email:
        create_email_notifications(releases, issues)


if __name__ == '__main__':
    start_time = time.time()
    BASE_ERRORS_URL = 'https://errors.ubuntu.com/api/1.0/'
    LOCAL_ERRORS_URL = 'http://10.0.3.182/api/1.0/'
    LP_BASE_URL = 'https://launchpad.net'
    OVERRIDES = 'phased-updates-overrides.txt'
    NOTIFICATIONS = 'phased-updates-emails.txt'
    EMAIL_SENDER = 'brian.murray@ubuntu.com'
    PUP_INCREMENT = 10
    REPORT_FILE = 'phased-updates.html'
    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-n", "--no-act", default=False, action="store_true",
        help="do not modify phased update percentages")
    parser.add_option(
        "-e", "--email", default=False, action="store_true",
        help="send email notifications to uploaders")
    parser.add_option(
        "-f", "--fully-phased", default=False, action="store_true",
        help="show packages which have been fully phased")
    options, args = parser.parse_args()
    if options.launchpad_instance != 'production':
        LP_BASE_URL = 'https://%s.launchpad.net' % options.launchpad_instance
    launchpad = Launchpad.login_with(
        'phased-updater', options.launchpad_instance, version='devel')
    logging.basicConfig(filename='phased-updates.log',
                        format='%(asctime)s - %(levelname)s - %(message)s',
                        level=logging.INFO)
    logging.info('Starting phased-updater')
    main()
    end_time = time.time()
    logging.info("Elapsed time was %g seconds" % (end_time - start_time))
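
# A minimal sketch of the phasing walk that main() above performs, assuming
# PUP_INCREMENT is 10; the script itself just adds the increment and does
# not cap the value, so the min() here only keeps the sketch finite:
def _next_percentage(current, max_pup=0, error=False, increment=10):
    if error:
        return 0  # a regression or crash-rate increase stops phasing
    # a restarted update resumes from the percentage it previously reached
    base = max_pup if max_pup else current
    return min(base + increment, 100)
# e.g. _next_percentage(20) == 30; _next_percentage(0, max_pup=40) == 50
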
@ -0,0 +1,234 @@
#!/usr/bin/env python

# Check for override mismatches between pockets
# Copyright (C) 2005, 2008, 2011, 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

from __future__ import print_function

import atexit
from collections import defaultdict
import gzip
try:
    from html import escape
except ImportError:
    from cgi import escape
from optparse import OptionParser
import os
import shutil
import sys
import tempfile
from textwrap import dedent
import time

import apt_pkg
from launchpadlib.launchpad import Launchpad


tempdir = None


def ensure_tempdir():
    global tempdir
    if not tempdir:
        tempdir = tempfile.mkdtemp(prefix='component-mismatches')
        atexit.register(shutil.rmtree, tempdir)


def decompress_open(tagfile):
    ensure_tempdir()
    decompressed = tempfile.mktemp(dir=tempdir)
    fin = gzip.GzipFile(filename=tagfile)
    with open(decompressed, 'wb') as fout:
        fout.write(fin.read())
    return open(decompressed, 'r')


def pockets(series):
    yield series
    yield '%s-security' % series
    yield '%s-proposed' % series
    yield '%s-updates' % series


priorities = {
    'required': 1,
    'important': 2,
    'standard': 3,
    'optional': 4,
    'extra': 5,
}


def priority_key(priority):
    return priorities.get(priority, 6)


def print_section(options, header, items):
    print("%s:" % header)
    print("-" * (len(header) + 1))
    print()
    for item in items:
        print(item)
    print()

    if options.html_output is not None:
        print("<h2>%s</h2>" % escape(header), file=options.html_output)
        print("<ul>", file=options.html_output)
        for item in items:
            print("<li>%s</li>" % escape(item), file=options.html_output)
        print("</ul>", file=options.html_output)


def process(options, series, components, arches):
    archive = os.path.expanduser('~/mirror/ubuntu/')

    pkgcomp = defaultdict(lambda: defaultdict(list))
    pkgsect = defaultdict(lambda: defaultdict(list))
    pkgprio = defaultdict(lambda: defaultdict(list))
    for suite in pockets(series):
        for component in components:
            for arch in arches:
                try:
                    binaries_path = "%s/dists/%s/%s/binary-%s/Packages.gz" % (
                        archive, suite, component, arch)
                    binaries = apt_pkg.TagFile(decompress_open(binaries_path))
                except IOError:
                    continue
                suite_arch = '%s/%s' % (suite, arch)
                for section in binaries:
                    if 'Package' in section:
                        pkg = section['Package']
                        pkgcomp[pkg][component].append(suite_arch)
                        if 'Section' in section:
                            pkgsect[pkg][section['Section']].append(
                                suite_arch)
                        if 'Priority' in section:
                            pkgprio[pkg][section['Priority']].append(
                                suite_arch)

    packages = sorted(pkgcomp)

    items = []
    for pkg in packages:
        if len(pkgcomp[pkg]) > 1:
            out = []
            for component in sorted(pkgcomp[pkg]):
                out.append("%s [%s]" %
                           (component,
                            ' '.join(sorted(pkgcomp[pkg][component]))))
            items.append("%s: %s" % (pkg, ' '.join(out)))
    print_section(
        options, "Packages with inconsistent components between pockets",
        items)

    items = []
    for pkg in packages:
        if pkg in pkgsect and len(pkgsect[pkg]) > 1:
            out = []
            for section in sorted(pkgsect[pkg]):
                out.append("%s [%s]" %
                           (section,
                            ' '.join(sorted(pkgsect[pkg][section]))))
            items.append("%s: %s" % (pkg, ' '.join(out)))
    print_section(
        options, "Packages with inconsistent sections between pockets", items)

    items = []
    for pkg in packages:
        if pkg in pkgprio and len(pkgprio[pkg]) > 1:
            out = []
            for priority in sorted(pkgprio[pkg], key=priority_key):
                out.append("%s [%s]" %
                           (priority,
                            ' '.join(sorted(pkgprio[pkg][priority]))))
            items.append("%s: %s" % (pkg, ' '.join(out)))
    print_section(
        options, "Packages with inconsistent priorities between pockets",
        items)


def main():
    parser = OptionParser(
        description='Check for override mismatches between pockets.')
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option('-o', '--output-file', help='output to this file')
    parser.add_option('--html-output-file', help='output HTML to this file')
    parser.add_option('-s', '--series',
                      help='check these series (comma-separated)')
    options, args = parser.parse_args()

    launchpad = Launchpad.login_with(
        "pocket-mismatches", options.launchpad_instance)
    if options.series is not None:
        all_series = options.series.split(',')
    else:
        # materialise the list: it is iterated once for the page title and
        # again for the per-series processing below
        all_series = list(reversed([
            series.name
            for series in launchpad.distributions["ubuntu"].series
            if series.status in ("Supported", "Current Stable Release")]))
    components = ["main", "restricted", "universe", "multiverse"]
    arches = ["amd64", "arm64", "armhf", "i386", "ppc64el", "s390x"]

    if options.output_file is not None:
        sys.stdout = open('%s.new' % options.output_file, 'w')
    if options.html_output_file is not None:
        options.html_output = open('%s.new' % options.html_output_file, 'w')
    else:
        options.html_output = None

    options.timestamp = time.strftime('%a %b %e %H:%M:%S %Z %Y')
    print('Generated: %s' % options.timestamp)
    print()

    if options.html_output is not None:
        all_series_str = escape(", ".join(all_series))
        print(dedent("""\
            <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
              "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
            <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
            <head>
              <meta http-equiv="Content-Type"
                    content="text/html; charset=utf-8" />
              <title>Pocket mismatches for %s</title>
              <style type="text/css">
                body { background: #CCCCB0; color: black; }
              </style>
            </head>
            <body>
            <h1>Pocket mismatches for %s</h1>
            """) % (all_series_str, all_series_str),
            file=options.html_output)

    for series in all_series:
        process(options, series, components, arches)

    if options.html_output_file is not None:
        print(
            "<p><small>Generated: %s</small></p>" % escape(options.timestamp),
            file=options.html_output)
        print("</body></html>", file=options.html_output)
        options.html_output.close()
        os.rename(
            '%s.new' % options.html_output_file, options.html_output_file)
    if options.output_file is not None:
        sys.stdout.close()
        os.rename('%s.new' % options.output_file, options.output_file)


if __name__ == '__main__':
    main()
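
# A small usage sketch of the two helpers above, with a made-up series name
# (illustrative only; not called anywhere):
def _demo_helpers():
    assert list(pockets('focal')) == [
        'focal', 'focal-security', 'focal-proposed', 'focal-updates']
    # unknown priority values sort after the known ones (priority_key -> 6)
    assert sorted(['extra', 'required', 'bogus'], key=priority_key) == [
        'required', 'extra', 'bogus']
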
@ -0,0 +1,37 @@
#! /usr/bin/env python

from __future__ import print_function

from optparse import OptionParser
import os
import subprocess


def main():
    parser = OptionParser(usage="%prog [options] distroseries snapshot-name")
    parser.add_option(
        "-n", "--dry-run", default=False, action="store_true",
        help="only show actions that would be performed")
    options, args = parser.parse_args()
    if len(args) < 2:
        parser.error("need distroseries and snapshot-name")

    dist = args[0]
    snapshot = args[1]

    base = os.path.expanduser('~/mirror/ubuntu')
    snapshot_base = os.path.expanduser('~/point-releases/%s' % snapshot)

    dst = os.path.join(snapshot_base, 'dists')
    os.makedirs(dst)
    for pocket in ('%s-security' % dist, '%s-updates' % dist):
        src = os.path.join(base, 'dists', pocket)
        if options.dry_run:
            print('cp -a %s %s' % (src, dst))
        else:
            subprocess.call(['cp', '-a', src, dst])


if __name__ == '__main__':
    main()
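
# The script above uses the usual dry-run pattern: build the command first,
# then either print it or run it. A generic, self-contained sketch of the
# same idea (illustrative only):
def _run(cmd, dry_run=False):
    if dry_run:
        print(' '.join(cmd))         # show what would be executed
        return 0
    return subprocess.call(cmd)      # actually execute it
# e.g. _run(['cp', '-a', '/srv/src', '/srv/dst'], dry_run=True) only prints
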
@ -0,0 +1,132 @@
#! /usr/bin/python

# Copyright (C) 2010, 2011, 2012 Canonical Ltd.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function

import argparse
import os
import sys

# See isotracker.py for setup instructions.
from isotracker import ISOTracker

# USAGE: post-amis-to-iso-tracker /<localpath>/published-ec2-daily.txt
# it will return silently if successful.  Check by looking at:
# http://iso.qa.ubuntu.com/qatracker/build/all
#
# URLs to wget locally first are of the form:
# http://uec-images.ubuntu.com/server/natty/20110302.2/published-ec2-daily.txt
#
# See isotracker.py for setup instructions.
#
# Reminder 2011/03/02 - check with jibel what's happening in the iso.tracker
# right now, and if this is still necessary.
# Also, why are the paths for downloading the images from the isotracker not
# synching up with what was in the published-ec2-daily.txt.
# 2011/03/29 - added in ap-northeast images

ec2_to_product_map = {
    'eu-west-1-amd64-ebs': 'Ubuntu Server EC2 EBS (Europe) amd64',
    'eu-west-1-i386-ebs': 'Ubuntu Server EC2 EBS (Europe) i386',
    'eu-west-1-amd64-hvm': 'Ubuntu Server EC2 HVM (Europe) amd64',
    'us-east-1-amd64-ebs': 'Ubuntu Server EC2 EBS (US-East) amd64',
    'us-east-1-i386-ebs': 'Ubuntu Server EC2 EBS (US-East) i386',
    'us-west-1-amd64-ebs': 'Ubuntu Server EC2 EBS (US-West-1) amd64',
    'us-west-1-i386-ebs': 'Ubuntu Server EC2 EBS (US-West-1) i386',
    'us-west-2-amd64-ebs': 'Ubuntu Server EC2 EBS (US-West-2) amd64',
    'us-west-2-i386-ebs': 'Ubuntu Server EC2 EBS (US-West-2) i386',
    'us-west-2-amd64-hvm': 'Ubuntu Server EC2 HVM (US-West-2) amd64',
    'us-east-1-amd64-hvm': 'Ubuntu Server EC2 HVM (US-East) amd64',
    'eu-west-1-amd64-instance': 'Ubuntu Server EC2 instance (Europe) amd64',
    'eu-west-1-i386-instance': 'Ubuntu Server EC2 instance (Europe) i386',
    'us-east-1-amd64-instance': 'Ubuntu Server EC2 instance (US-East) amd64',
    'us-east-1-i386-instance': 'Ubuntu Server EC2 instance (US-East) i386',
    'us-west-1-amd64-instance': 'Ubuntu Server EC2 instance (US-West-1) amd64',
    'us-west-1-i386-instance': 'Ubuntu Server EC2 instance (US-West-1) i386',
    'us-west-2-amd64-instance': 'Ubuntu Server EC2 instance (US-West-2) amd64',
    'us-west-2-i386-instance': 'Ubuntu Server EC2 instance (US-West-2) i386',
    'ap-southeast-1-amd64-instance': (
        'Ubuntu Server EC2 instance (Asia-Pacific-SouthEast) amd64'),
    'ap-southeast-1-i386-instance': (
        'Ubuntu Server EC2 instance (Asia-Pacific-SouthEast) i386'),
    'ap-southeast-1-amd64-ebs': (
        'Ubuntu Server EC2 EBS (Asia-Pacific-SouthEast) amd64'),
    'ap-southeast-1-i386-ebs': (
        'Ubuntu Server EC2 EBS (Asia-Pacific-SouthEast) i386'),
    'ap-northeast-1-amd64-instance': (
        'Ubuntu Server EC2 instance (Asia-Pacific-NorthEast) amd64'),
    'ap-northeast-1-i386-instance': (
        'Ubuntu Server EC2 instance (Asia-Pacific-NorthEast) i386'),
    'ap-northeast-1-amd64-ebs': (
        'Ubuntu Server EC2 EBS (Asia-Pacific-NorthEast) amd64'),
    'ap-northeast-1-i386-ebs': (
        'Ubuntu Server EC2 EBS (Asia-Pacific-NorthEast) i386'),
    'sa-east-1-amd64-ebs': (
        'Ubuntu Server EC2 EBS (South-America-East-1) amd64'),
    'sa-east-1-i386-ebs': 'Ubuntu Server EC2 EBS (South-America-East-1) i386',
    'sa-east-1-amd64-instance': (
        'Ubuntu Server EC2 instance (South-America-East-1) amd64'),
    'sa-east-1-i386-instance': (
        'Ubuntu Server EC2 instance (South-America-East-1) i386'),
}


def main():
    parser = argparse.ArgumentParser(
        description="Publish a provided list of AMIs to the QA tracker.")
    parser.add_argument('-m', '--milestone',
                        help='post to MILESTONE rather than the default')
    parser.add_argument('-n', '--note', default="",
                        help='set the note field on the build')
    parser.add_argument('-t', '--target',
                        help='post to an alternate QATracker')
    parser.add_argument("input_file", type=str,
                        help="An input file (published-ec2-daily.txt)")
    args = parser.parse_args()

    isotracker = ISOTracker(target=args.target)

    if not os.path.exists(args.input_file):
        print("Can't find input file: %s" % args.input_file)
        sys.exit(1)

    if args.milestone is None:
        args.milestone = isotracker.default_milestone()

    with open(args.input_file, 'r') as handle:
        for line in handle:
            zone, ami, arch, store = line.split()[0:4]
            if not ami.startswith('ami-'):
                continue
            if store == 'instance-store':
                store = 'instance'
            try:
                product = ec2_to_product_map['%s-%s-%s' % (zone, arch, store)]
            except KeyError:
                print("Can't find: %s-%s-%s" % (zone, arch, store))
                continue

            try:
                isotracker.post_build(product, ami,
                                      milestone=args.milestone,
                                      note=args.note)
            except KeyError as e:
                print(e)
                continue


if __name__ == '__main__':
    main()
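
# The loop above assumes published-ec2-daily.txt lines are whitespace-
# separated columns of zone, AMI id, architecture and root-store type. A
# tiny runnable mirror of that key construction, with made-up sample values
# (illustrative only):
def _tracker_key(line):
    zone, ami, arch, store = line.split()[0:4]
    if store == 'instance-store':
        store = 'instance'
    return '%s-%s-%s' % (zone, arch, store)
# _tracker_key('us-east-1 ami-0000dead amd64 ebs') == 'us-east-1-amd64-ebs'
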
@ -0,0 +1,47 @@
#!/usr/bin/python

# Copyright (C) 2011 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import sys
from optparse import OptionParser

# See isotracker.py for setup instructions.
from isotracker import ISOTracker


def main():
    parser = OptionParser(usage="Usage: %prog [options] product version")

    parser.add_option('-m', '--milestone',
                      help='post to MILESTONE rather than the default')
    parser.add_option('-n', '--note', default="",
                      help='set the note field on the build')
    parser.add_option('-t', '--target', help='post to an alternate QATracker')

    options, args = parser.parse_args()
    if len(args) < 2:
        parser.error("product and version arguments required")

    isotracker = ISOTracker(target=options.target)
    if options.milestone is None:
        options.milestone = isotracker.default_milestone()

    isotracker.post_build(args[0], args[1], milestone=options.milestone,
                          note=options.note)


if __name__ == '__main__':
    sys.exit(main())
@ -0,0 +1,338 @@
#!/usr/bin/env python

# Synchronise package priorities with germinate output
# Copyright (C) 2005, 2009, 2010, 2011, 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# <vorlon> elmo: grip_3.2.0-5/sparc seems to have gone missing, marked as
#          Uploaded 2 1/2 hours ago and nowhere to be found on newraff
# <elmo> uh?
# <elmo> grip | 3.2.0-5 | unstable | source, alpha, arm, hppa,
#          i386, ia64, m68k, mips, mipsel, powerpc, s390, sparc
# <elmo> I hid it in the pool, being the cunning cabalist that I am

from __future__ import print_function

import atexit
from collections import defaultdict
import csv
import gzip
try:
    from html import escape
except ImportError:
    from cgi import escape
from optparse import OptionParser
import os
import re
import shutil
import sys
import tempfile
from textwrap import dedent
import time

import apt_pkg
from launchpadlib.launchpad import Launchpad

from charts import make_chart, make_chart_header


tempdir = None

# XXX unhardcode, or maybe adjust seeds?
# These packages are not really to be installed by debootstrap, despite
# germinate saying so
re_not_base = re.compile(r"^(linux-(image|restricted|386|generic|server|power|"
                         "cell|imx51).*|nvidia-kernel-common|grub|yaboot)$")

# tuples of (package, desired_priority, architecture) which are known to not
# be fixable and should be ignored; in particular we cannot set per-arch
# priorities
ignore = [
    ('hfsutils', 'standard', 'powerpc'),  # optional on all other arches
    ('bc', 'important', 'powerpc'),  # needed for powerpc-ibm-utils
    ('bc', 'important', 'ppc64el'),  # needed for powerpc-ibm-utils
    ('libsgutils2-2', 'standard', 'powerpc'),  # needed for lsvpd
    ('libsgutils2-2', 'standard', 'ppc64el'),  # needed for lsvpd
    ('libdrm-intel1', 'required', 'amd64'),  # needed for plymouth only on x86
    ('libdrm-intel1', 'required', 'i386'),  # needed for plymouth only on x86
    ('libelf1', 'optional', 'arm64'),  # ltrace not built on arm64
    ('libpciaccess0', 'required', 'amd64'),  # needed for plymouth only on x86
    ('libpciaccess0', 'required', 'i386'),  # needed for plymouth only on x86
    ('libnuma1', 'optional', 's390x'),  # standard on all other arches
    ('libnuma1', 'optional', 'armhf'),  # standard on all other arches
    ('libunwind8', 'standard', 'amd64'),  # wanted by strace on only amd64
    ('multiarch-support', 'optional', 's390x'),  # eventually, all arches will
                                                 # downgrade
]


def ensure_tempdir():
    global tempdir
    if not tempdir:
        tempdir = tempfile.mkdtemp(prefix='priority-mismatches')
        atexit.register(shutil.rmtree, tempdir)


def decompress_open(tagfile):
    ensure_tempdir()
    decompressed = tempfile.mktemp(dir=tempdir)
    fin = gzip.GzipFile(filename=tagfile)
    with open(decompressed, 'wb') as fout:
        fout.write(fin.read())
    return open(decompressed, 'r')


# XXX partial code duplication from component-mismatches
def read_germinate(suite, arch, seed):
    local_germinate = os.path.expanduser('~/mirror/ubuntu-germinate')
    # XXX hardcoding
    filename = "%s_ubuntu_%s_%s" % (seed, suite, arch)
    pkgs = {}

    f = open(local_germinate + '/' + filename)
    for line in f:
        # Skip header and footer
        if line[0] == "-" or line.startswith("Package") or line[0] == " ":
            continue
        # Skip empty lines
        line = line.strip()
        if not line:
            continue
        pkgs[line.split('|', 1)[0].strip()] = None
    f.close()

    return pkgs


def process(options, arch):
    suite = options.suite
    components = options.component.split(',')

    archive = os.path.expanduser('~/mirror/ubuntu/')

    if suite in ("warty", "hoary"):
        required_seed = None
        important_seed = "base"
        standard_seed = None
    elif suite in ("breezy", "dapper", "edgy", "feisty"):
        required_seed = None
        important_seed = "minimal"
        standard_seed = "standard"
    else:
        required_seed = "required"
        important_seed = "minimal"
        standard_seed = "standard"

    # only sort the seed lists that actually exist for this suite; the old
    # unconditional sorts raised NameError when a seed was absent
    if required_seed is not None:
        required_pkgs = read_germinate(suite, arch, required_seed)
        required_pkgs = sorted(
            pkg for pkg in required_pkgs if not re_not_base.match(pkg))
    important_pkgs = read_germinate(suite, arch, important_seed)
    important_pkgs = sorted(
        pkg for pkg in important_pkgs if not re_not_base.match(pkg))
    if standard_seed is not None:
        standard_pkgs = sorted(read_germinate(suite, arch, standard_seed))

    original = {}
    for component in components:
        binaries_path = "%s/dists/%s/%s/binary-%s/Packages.gz" % (
            archive, suite, component, arch)
        for section in apt_pkg.TagFile(decompress_open(binaries_path)):
            if 'Package' in section and 'Priority' in section:
                (pkg, priority) = (section['Package'], section['Priority'])
                original[pkg] = priority

    packages = sorted(original)

    # XXX hardcoding, but who cares really
    priorities = {'required': 1, 'important': 2, 'standard': 3,
                  'optional': 4, 'extra': 5, 'source': 99}

    # If there is a required seed:
    # Force everything in the required seed to >= required.
    # Force everything not in the required seed to < required.
    # Force everything in the important seed to >= important.
    # Force everything not in the important seed to < important.
    # (This allows debootstrap to determine the base system automatically.)
    # If there is a standard seed:
    # Force everything in the standard seed to >= standard.
    # Force everything not in the standard seed to < standard.

    changed = defaultdict(lambda: defaultdict(list))

    for pkg in packages:
        priority = original[pkg]

        if required_seed is not None and pkg in required_pkgs:
            if priorities[priority] > priorities["required"]:
                priority = "required"
        elif pkg in important_pkgs:
            if (required_seed is not None and
                    priorities[priority] < priorities["important"]):
                priority = "important"
            elif priorities[priority] > priorities["important"]:
                priority = "important"
        else:
            # XXX assumes important and standard are adjacent
            if priorities[priority] < priorities["standard"]:
                priority = "standard"

        if standard_seed is not None:
            if pkg in standard_pkgs:
                if priorities[priority] > priorities["standard"]:
                    priority = "standard"
            else:
                # XXX assumes standard and optional are adjacent
                if priorities[priority] < priorities["optional"]:
                    priority = "optional"

        if priority != original[pkg] and (pkg, priority, arch) not in ignore:
            changed[original[pkg]][priority].append(pkg)

    changes = 0
    oldprios = sorted(changed, key=lambda x: priorities[x])
    for oldprio in oldprios:
        newprios = sorted(changed[oldprio], key=lambda x: priorities[x])
        for newprio in newprios:
            changes += len(changed[oldprio][newprio])
            header = ("Packages to change from priority %s to %s" %
                      (oldprio, newprio))
            print(header)
            print("-" * len(header))
            for pkg in changed[oldprio][newprio]:
                print("%s" % pkg)
            print()
            if options.html_output is not None:
                print("<h3>%s</h3>" % escape(header),
                      file=options.html_output)
                print("<ul>", file=options.html_output)
                for pkg in changed[oldprio][newprio]:
                    print(
                        "<li>%s</li>" % escape(pkg), file=options.html_output)
                print("</ul>", file=options.html_output)

    return changes


def main():
    parser = OptionParser(
        description='Synchronise package priorities with germinate output.')
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option('-o', '--output-file', help='output to this file')
    parser.add_option('--html-output-file', help='output HTML to this file')
    parser.add_option(
        '--csv-file', help='record CSV time series data in this file')
    parser.add_option('-a', '--architecture',
                      help='look at germinate output for this architecture')
    parser.add_option('-c', '--component',
                      default='main,restricted,universe,multiverse',
                      help='set overrides by component')
    parser.add_option('-s', '--suite', help='set overrides by suite')
    options, args = parser.parse_args()

    if options.suite is None:
        launchpad = Launchpad.login_anonymously('priority-mismatches',
                                                options.launchpad_instance)
        options.suite = launchpad.distributions['ubuntu'].current_series.name

    if options.output_file is not None:
        sys.stdout = open('%s.new' % options.output_file, 'w')
    if options.html_output_file is not None:
        options.html_output = open('%s.new' % options.html_output_file, 'w')
    else:
        options.html_output = None

    options.time = time.time()
    options.timestamp = time.strftime(
        '%a %b %e %H:%M:%S %Z %Y', time.gmtime(options.time))
    print('Generated: %s' % options.timestamp)
    print()

    if options.html_output is not None:
        print(dedent("""\
            <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
              "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
            <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
            <head>
              <meta http-equiv="Content-Type"
                    content="text/html; charset=utf-8" />
              <title>Priority mismatches for %s</title>
              <style type="text/css">
                body { background: #CCCCB0; color: black; }
              </style>
              %s
            </head>
            <body>
            <h1>Priority mismatches for %s</h1>
            """) % (
                escape(options.suite), make_chart_header(),
                escape(options.suite)),
            file=options.html_output)

    changes = 0
    if options.architecture is None:
        for arch in ('amd64', 'arm64', 'armhf', 'i386', 'ppc64el', 's390x'):
            print(arch)
            print('=' * len(arch))
            print()
            if options.html_output is not None:
                print("<h2>%s</h2>" % escape(arch), file=options.html_output)
            changes += process(options, arch)
    else:
        changes += process(options, options.architecture)

    if options.html_output_file is not None:
        print("<h2>Over time</h2>", file=options.html_output)
        print(
            make_chart("priority-mismatches.csv", ["changes"]),
            file=options.html_output)
        print(
            "<p><small>Generated: %s</small></p>" % escape(options.timestamp),
            file=options.html_output)
        print("</body></html>", file=options.html_output)
        options.html_output.close()
        os.rename(
            '%s.new' % options.html_output_file, options.html_output_file)
    if options.output_file is not None:
        sys.stdout.close()
        os.rename('%s.new' % options.output_file, options.output_file)
    if options.csv_file is not None:
        if sys.version < "3":
            open_mode = "ab"
            open_kwargs = {}
        else:
            open_mode = "a"
            open_kwargs = {"newline": ""}
        csv_is_new = not os.path.exists(options.csv_file)
        with open(options.csv_file, open_mode, **open_kwargs) as csv_file:
            # Field names deliberately hardcoded; any changes require
            # manually rewriting the output file.
            fieldnames = [
                "time",
                "changes",
            ]
            csv_writer = csv.DictWriter(csv_file, fieldnames)
            if csv_is_new:
                csv_writer.writeheader()
            csv_writer.writerow(
                {"time": int(options.time * 1000), "changes": changes})


if __name__ == '__main__':
    main()
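
# read_germinate() above consumes germinate's aligned-table output: header
# and footer rows start with "Package", "-" or a space, and data rows are
# pipe-separated with the package name in the first column. A tiny sketch of
# that extraction on a made-up row (illustrative only):
def _germinate_package(line):
    # mirrors pkgs[line.split('|', 1)[0].strip()] = None above
    return line.split('|', 1)[0].strip()
# _germinate_package('base-files | base-files | Ubuntu.Raring required')
#   == 'base-files'
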
@ -0,0 +1,423 @@
#!/usr/bin/python3

# Parse removals.txt file
# Copyright (C) 2004, 2005, 2009, 2010, 2011, 2012 Canonical Ltd.
# Authors: James Troup <james.troup@canonical.com>,
#          Colin Watson <cjwatson@ubuntu.com>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

############################################################################

# What kind of genius logs 3.5 years of removal data in
# semi-human-parseable text and nothing else?  Hmm.  That would be me. :(

# MAAAAAAAAAAAAAADNESSSSSSSSSSSSSSSSS

############################################################################

from __future__ import print_function

import copy
import optparse
import os
import sys
import time

try:
    from urllib.request import urlopen, urlretrieve
except ImportError:
    from urllib import urlretrieve
    from urllib2 import urlopen
import re
import logging
import gzip
import datetime
import subprocess

import apt_pkg
from launchpadlib.launchpad import Launchpad

import lputils


CONSUMER_KEY = "process-removals"


Debian = None


def parse_removals_file(options, removals_file):
    if options.report_ubuntu_delta and options.no_action:
        me = None
    else:
        me = options.launchpad.me.name

    state = "first separator"
    removal = {}
    for line in removals_file:
        line = line.strip().decode('UTF-8')
        # skip over spurious empty lines
        if not line and state in ("first separator", "next separator",
                                  "date"):
            continue
        if line == ("=" * 73):
            if state == "done":
                state = "next separator"
            elif state in ("first separator", "next separator"):
                state = "date"
            else:
                raise RuntimeError("found separator but state is %s." % state)
        # NB: The 'Reason' is an abbreviation, check any referenced bugs for
        # more
        elif state == "next separator" and line.startswith("NB:"):
            state = "first separator"
        # [Date: Tue, 9 Jan 2001 20:46:15 -0500] [ftpmaster: James Troup]
        elif state in ("date", "next separator"):
            try:
                (date, ftpmaster, unused) = line.split("]")
                date = date.replace("[Date: ", "")
                state = "removed from"
            except ValueError:
                state = "broken date"
        # Sat, 06 May 2017 08:45:30 +0000] [ftpmaster: Chris Lamb]
        elif state == "broken date":
            (date, ftpmaster, unused2) = line.split("]")
            state = "removed from"
        # Removed the following packages from unstable:
        elif state == "removed from":
            # complete processing of the date from the preceding line
            removal["date"] = time.mktime(time.strptime(
                date, "%a, %d %b %Y %H:%M:%S %z"))
            removal["ftpmaster"] = ftpmaster.replace("] [ftpmaster: ", "")

            prefix = "Removed the following packages from "
            if not line.startswith(prefix):
                raise RuntimeError(
                    "state = %s, expected '%s', got '%s'" %
                    (state, prefix, line))
            line = line.replace(prefix, "")[:-1]
            line = line.replace(" and", "")
            line = line.replace(",", "")
            removal["suites"] = line.split()
            state = "before packages"
        elif state == "before packages" or state == "after packages":
            if line:
                raise RuntimeError(
                    "state = %s, expected '', got '%s'" % (state, line))
            if state == "before packages":
                state = "packages"
            elif state == "after packages":
                state = "reason prefix"
        # xroach |      4.0-8 | source, alpha, arm, hppa, i386, ia64, m68k, \
        #   mips, mipsel, powerpc, s390, sparc
        # Closed bugs: 158188
        elif state == "packages":
            if line.find("|") != -1:
                package, version, architectures = [
                    word.strip() for word in line.split("|")]
                architectures = [
                    arch.strip() for arch in architectures.split(",")]
                removal.setdefault("packages", [])
                removal["packages"].append([package, version, architectures])
            elif line.startswith("Closed bugs: "):
                line = line.replace("Closed bugs: ", "")
                removal["closed bugs"] = line.split()
                state = "after packages"
            elif not line:
                state = "reason prefix"
            else:
                raise RuntimeError(
                    "state = %s, expected package list or 'Closed bugs:', "
                    "got '%s'" % (state, line))
        # ------------------- Reason -------------------
        elif state == "reason prefix":
            expected = "------------------- Reason -------------------"
            if not line == expected:
                raise RuntimeError(
                    "state = %s, expected '%s', got '%s'" %
                    (state, expected, line))
            state = "reason"
        # RoSRM; license problems.
        # ----------------------------------------------
        elif state == "reason":
            if line == "----------------------------------------------":
                state = "done"
                do_removal(me, options, removal)
                removal = {}
            else:
                removal.setdefault("reason", "")
                removal["reason"] += "%s\n" % line

    # nothing should go here

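
# A made-up example of the removals.txt record shape walked by the state
# machine above (each record is fenced by rows of 73 "=" characters):
#
#   [Date: Tue, 9 Jan 2001 20:46:15 -0500] [ftpmaster: James Troup]
#   Removed the following packages from unstable:
#
#   xroach |      4.0-8 | source, alpha, i386
#   Closed bugs: 158188
#
#   ------------------- Reason -------------------
#   RoSRM; license problems.
#   ----------------------------------------------
#
# A tiny runnable mirror of the package-row parsing (illustrative only):
def _parse_package_row(line):
    package, version, architectures = [w.strip() for w in line.split("|")]
    return package, version, [a.strip() for a in architectures.split(",")]
# _parse_package_row('xroach |      4.0-8 | source, alpha, i386')
#   == ('xroach', '4.0-8', ['source', 'alpha', 'i386'])

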
def show_reverse_depends(options, package):
    """Show reverse dependencies.

    This is mostly done by calling reverse-depends, but with some tweaks to
    make the output easier to read in context.
    """
    series_name = options.series.name
    commands = (
        ["reverse-depends", "-r", series_name, "src:%s" % package],
        ["reverse-depends", "-r", series_name, "-b", "src:%s" % package],
    )
    for command in commands:
        subp = subprocess.Popen(command, stdout=subprocess.PIPE)
        line = None
        for line in subp.communicate()[0].splitlines():
            line = line.decode('UTF-8').rstrip("\n")
            if line == "No reverse dependencies found":
                line = None
            else:
                print(line)
        if line:
            print()


non_meta_re = re.compile(r'^[a-zA-Z0-9+,./:=@_-]+$')


def shell_escape(arg):
    if non_meta_re.match(arg):
        return arg
    else:
        return "'%s'" % arg.replace("'", "'\\''")


seen_sources = set()


def do_removal(me, options, removal):
    if options.removed_from and options.removed_from not in removal["suites"]:
        return
    if options.date and int(options.date) > int(removal["date"]):
        return
    if options.architecture:
        for architecture in re.split(r'[, ]+', options.architecture):
            package_list = copy.copy(removal["packages"])
            for entry in package_list:
                (package, version, architectures) = entry
                if architecture not in architectures:
                    removal["packages"].remove(entry)
        if not removal["packages"]:
            return

    for entry in removal.get("packages", []):
        (package, version, architectures) = entry
        if options.source and options.source != package:
            continue
        if package in seen_sources:
            continue
        seen_sources.add(package)
        if package in Debian and not (options.force and options.source):
            logging.info("%s (%s): back in sid - skipping.", package, version)
            continue

        spphs = []
        for pocket in ("Release", "Proposed"):
            options.pocket = pocket
            if pocket == "Release":
                options.suite = options.series.name
            else:
                options.suite = "%s-proposed" % options.series.name
            try:
                spphs.append(
                    lputils.find_latest_published_source(options, package))
            except lputils.PackageMissing:
                pass

        if not spphs:
            logging.debug("%s (%s): not found", package, version)
            continue

        if options.report_ubuntu_delta:
            if 'ubuntu' in version:
                print("%s %s" % (package, version))
            continue
        if options.no_action:
            continue

        for spph in spphs:
            logging.info(
                "%s (%s/%s), removed from Debian on %s",
                package, version, spph.source_package_version,
                time.strftime("%Y-%m-%d", time.localtime(removal["date"])))

        removal["reason"] = removal["reason"].rstrip('.\n')
        try:
            bugs = ';' + ','.join(
                [" Debian bug #%s" % item for item in removal["closed bugs"]])
        except KeyError:
            bugs = ''
        reason = "(From Debian) " + removal["reason"] + bugs

        subprocess.call(['seeded-in-ubuntu', package])
        show_reverse_depends(options, package)
        for spph in spphs:
            if options.no_action:
                print("remove-package -s %s%s -e %s -m %s %s" % (
                    options.series.name,
                    "-proposed" if spph.pocket == "Proposed" else "",
                    spph.source_package_version, shell_escape(reason),
                    package))
            else:
                from ubuntutools.question import YesNoQuestion
                print("Removing packages:")
                print("\t%s" % spph.display_name)
                for binary in spph.getPublishedBinaries():
                    print("\t\t%s" % binary.display_name)
                print("Comment: %s" % reason)
                if YesNoQuestion().ask("Remove", "no") == "yes":
                    spph.requestDeletion(removal_comment=reason)


def fetch_removals_file(options):
    removals = "removals-full.txt"
    if options.date:
        thisyear = datetime.datetime.today().year
        if options.date >= time.mktime(time.strptime(str(thisyear), "%Y")):
            removals = "removals.txt"
    logging.debug("Fetching %s" % removals)
    return urlopen("http://ftp-master.debian.org/%s" % removals)


def read_sources():
    global Debian
    Debian = {}
    base = "http://ftp.debian.org/debian"

    logging.debug("Reading Debian sources")
    for component in "main", "contrib", "non-free":
        filename = "Debian_unstable_%s_Sources" % component
        if not os.path.exists(filename):
            url = "%s/dists/unstable/%s/source/Sources.gz" % (base, component)
            logging.info("Fetching %s" % url)
            fetched, headers = urlretrieve(url)
            out = open(filename, 'wb')
            gzip_handle = gzip.GzipFile(fetched)
            while True:
                block = gzip_handle.read(65536)
                if not block:
                    break
                out.write(block)
            out.close()
            gzip_handle.close()
        sources_filehandle = open(filename)
        Sources = apt_pkg.TagFile(sources_filehandle)
        while Sources.step():
            pkg = Sources.section.find("Package")
            version = Sources.section.find("Version")

            if (pkg in Debian and
                    apt_pkg.version_compare(Debian[pkg]["version"],
                                            version) > 0):
                continue

            Debian[pkg] = {}
            Debian[pkg]["version"] = version
        sources_filehandle.close()


def parse_options():
    parser = optparse.OptionParser()

    parser.add_option("-l", "--launchpad", dest="launchpad_instance",
                      default="production")

    parser.add_option("-a", "--architecture", metavar="ARCH",
                      help="remove from ARCH")
    parser.add_option("-d", "--date", metavar="DATE",
                      help="only those removed since DATE "
                           "(Unix epoch or %Y-%m-%d)")

    parser.add_option("-r", "--removed-from", metavar="SUITE",
                      help="only those removed from SUITE (aka distroseries)")

    parser.add_option("-s", "--source", metavar="NAME",
                      help="only source package NAME")
    parser.add_option("--force", action="store_true", default=False,
                      help="force removal even for packages back in unstable "
                           "(only with -s)")

    parser.add_option("-f", "--filename",
                      help="parse FILENAME")

    parser.add_option("-n", "--no-action", action="store_true",
                      help="don't remove packages; just print remove-package "
                           "commands")
    parser.add_option("-v", "--verbose", action="store_true",
                      help="emit verbose debugging messages")

    parser.add_option("--report-ubuntu-delta", action="store_true",
                      help="skip and report packages with Ubuntu deltas")

    options, args = parser.parse_args()
    if len(args):
        parser.error("no arguments expected")

    if options.date:
        if len(options.date) == 8:
            print("The format of date should be %Y-%m-%d.")
            sys.exit(1)
        try:
            int(options.date)
        except ValueError:
            options.date = time.mktime(time.strptime(options.date,
                                                     "%Y-%m-%d"))
    else:
        options.date = time.mktime(time.strptime("2009-02-01", "%Y-%m-%d"))
    if not options.removed_from:
        options.removed_from = "unstable"
|
||||
if not options.architecture:
|
||||
options.architecture = "source"
|
||||
|
||||
if options.verbose:
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
else:
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
return options, args
|
||||
|
||||
|
||||
def main():
|
||||
apt_pkg.init()
|
||||
|
||||
"""Initialization, including parsing of options."""
|
||||
options, args = parse_options()
|
||||
|
||||
if options.report_ubuntu_delta and options.no_action:
|
||||
options.launchpad = Launchpad.login_anonymously(
|
||||
CONSUMER_KEY, options.launchpad_instance)
|
||||
else:
|
||||
options.launchpad = Launchpad.login_with(
|
||||
CONSUMER_KEY, options.launchpad_instance)
|
||||
|
||||
options.distribution = options.launchpad.distributions["ubuntu"]
|
||||
options.series = options.distribution.current_series
|
||||
options.archive = options.distribution.main_archive
|
||||
options.version = None
|
||||
|
||||
read_sources()
|
||||
|
||||
if options.filename:
|
||||
removals_file = open(options.filename, "rb")
|
||||
else:
|
||||
removals_file = fetch_removals_file(options)
|
||||
parse_removals_file(options, removals_file)
|
||||
removals_file.close()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@ -0,0 +1,196 @@
#! /usr/bin/python

# Copyright (C) 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Promote packages to release pocket based on britney output."""

from __future__ import print_function

from optparse import OptionParser
import sys

from launchpadlib.errors import HTTPError
from launchpadlib.launchpad import Launchpad

import lputils


def promote(options, name, version, architecture):
    if architecture is None:
        display = "%s/%s" % (name, version)
    else:
        display = "%s/%s/%s" % (name, version, architecture)

    if architecture is None:
        try:
            release_sources = options.archive.getPublishedSources(
                source_name=name, version=version,
                distro_series=options.series, pocket="Release",
                exact_match=True, status="Published")
        except IndexError:
            release_sources = options.archive.getPublishedSources(
                source_name=name, version=version,
                distro_series=options.series, pocket="Release",
                exact_match=True, status="Pending")
        except HTTPError as e:
            print("getPublishedSources %s: %s" % (display, e.content),
                  file=sys.stderr)
            return True
        if len(release_sources) > 0:
            return True

    if options.dry_run:
        print("Would copy: %s" % display)
        # A dry-run copy counts as success so that promote_all carries on.
        return True
    elif options.verbose:
        print("Copying: %s" % display)
        sys.stdout.flush()

    try:
        options.archive.copyPackage(
            source_name=name, version=version,
            from_archive=options.archive,
            from_series=options.series.name, from_pocket="Proposed",
            to_series=options.series.name, to_pocket="Release",
            include_binaries=True, sponsored=options.requestor,
            auto_approve=True)
    except HTTPError as e:
        print("copyPackage %s: %s" % (display, e.content), file=sys.stderr)
        return False

    try:
        proposed_source = options.archive.getPublishedSources(
            source_name=name, version=version,
            distro_series=options.series, pocket="Proposed",
            exact_match=True)[0]
    except HTTPError as e:
        print("getPublishedSources %s: %s" % (display, e.content),
              file=sys.stderr)
        return True
    except IndexError:
        print("getPublishedSources %s found no publication" % display,
              file=sys.stderr)
        return True

    if architecture is None:
        try:
            proposed_source.requestDeletion(removal_comment="moved to release")
        except HTTPError as e:
            print("requestDeletion %s: %s" % (display, e.content),
                  file=sys.stderr)
    else:
        for bpph in proposed_source.getPublishedBinaries():
            if bpph.is_debug:
                continue
            elif bpph.architecture_specific:
                if architecture != bpph.distro_arch_series.architecture_tag:
                    continue
            else:
                if architecture != "i386":
                    continue
            try:
                bpph.requestDeletion(removal_comment="moved to release")
            except HTTPError as e:
                print("requestDeletion %s/%s/%s: %s" %
                      (bpph.binary_package_name, bpph.binary_package_version,
                       bpph.distro_arch_series.architecture_tag, e.content),
                      file=sys.stderr)

    return True


def promote_all(options, delta):
    with open(delta) as delta_file:
        for line in delta_file:
            if line.startswith("#"):
                continue
            words = line.rstrip("\n").split(" ")
            if len(words) == 1:
                name = words[0]
                print("Cannot handle removal: %s" % name, file=sys.stderr)
                continue
            elif len(words) == 2:
                name = words[0]
                if name.startswith("-"):
                    print("Cannot handle removal: %s" % name[1:],
                          file=sys.stderr)
                    continue
                elif "/" in name:
                    name, architecture = name.split("/", 1)
                else:
                    architecture = None
                version = words[1]
                if not promote(options, name, version, architecture):
                    # Stop on any single failure. Britney's output delta
                    # should be ordered such that the optimal order of
                    # copying is from start to finish, and skipping one is
                    # more likely to cause problems than aborting.
                    return False
            elif len(words) == 3:
                name = words[0]
                if name.startswith("-"):
                    print("Cannot handle removal: %s" % name[1:],
                          file=sys.stderr)
                    continue
                version = words[1]
                architecture = words[2]
                if not promote(options, name, version, architecture):
                    # Stop on any single failure. Britney's output delta
                    # should be ordered such that the optimal order of
                    # copying is from start to finish, and skipping one is
                    # more likely to cause problems than aborting.
                    return False
    return True


def main():
    parser = OptionParser(usage="usage: %prog [options] BRITNEY-OUTPUT-DELTA")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-n", "--dry-run", default=False, action="store_true",
        help="only show copies that would be performed")
    parser.add_option(
        "-v", "--verbose", default=False, action="store_true",
        help="be more verbose (redundant in --dry-run mode)")
    parser.add_option(
        "-d", "--distribution", default="ubuntu",
        metavar="DISTRIBUTION", help="promote within DISTRIBUTION")
    # dest="suite" to make lputils.setup_location's job easier.
    parser.add_option(
        "-s", "--series", dest="suite",
        metavar="SERIES", help="promote from SERIES-proposed to SERIES")
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.error("need britney output delta file")

    options.launchpad = Launchpad.login_with(
        "promote-to-release", options.launchpad_instance, version="devel")
    lputils.setup_location(options)
    options.dases = {}
    for das in options.series.architectures:
        options.dases[das.architecture_tag] = das

    options.requestor = options.launchpad.people["katie"]

    if promote_all(options, args[0]):
        return 0
    else:
        return 1


if __name__ == '__main__':
    sys.exit(main())
@ -0,0 +1,322 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright (C) 2010, 2011, 2012 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# Look at the ISO tracker for currently tested builds, and generate
# publish-release commands for them.

# publish-release needs to be called as follows:

# for-project <derivative> publish-release <dir> <buildstamp> <type>
#     <releaseflag> <name>
#
# <derivative>: ubuntu/kubuntu/edubuntu/xubuntu
# <dir>: daily or daily-live, dir on cdimage.u.c./
# <buildstamp>: e. g. 20070605.3; ubuntu-server/daily/<timestamp> for
#     server/netbook/etc.
# <type>: desktop/alternate/server/serveraddon/src
# <releaseflag>: yes/no/poolonly/named (should appear on releases.u.c.?)
# <name>: name of the release (alpha-2, beta, etc.)

from __future__ import print_function

from collections import defaultdict
import optparse
import re
import sys

# See isotracker.py for setup instructions.
from isotracker import ISOTracker

milestone_name_re = re.compile(r'(Alpha|Beta|RC|Final|Pre-release)(?: (\d))?')
stable_name_re = re.compile(r'(Trusty|Xenial|Bionic) (14|16|18)\.04\.\d+')

# do not warn about not being able to handle those
ignore_product_re = re.compile(
    'Netboot |Upgrade |Server EC2|Server Windows Azure|line-through')
# this identifies known builds
product_re = re.compile(
    r'((?:|u|lu|ku|edu|xu|myth)buntu(?: studio|kylin| kylin| gnome| mate| budgie| next)?) '
    r'(alternate|desktop|dvd|server(?: subiquity)?|mobile|base|active|wubi)(?: preinstalled)? '
    r'(i386|amd64$|amd64\+mac|armel$|armel\+dove|armel\+omap$|armel\+omap4|'
    r'armel\+ac100|armel\+mx5|armhf$|armhf\+omap$|armhf\+omap4|armhf\+ac100|'
    r'armhf\+mx5|armhf\+nexus7|armhf\+raspi2|armhf\+raspi3|arm64$|arm64\+raspi3|'
    r'powerpc|ppc64el|s390x)', re.I)

# map an image type from the ISO tracker to a source directory for
# publish-release
type_map = {
    'desktop': 'daily-live',
    'alternate': 'daily',
    'src': 'source',
    'dvd': 'dvd',
    'mobile': 'daily-live',
    'active': 'daily-live',
    'server': 'daily',
    'base': 'daily',
    'wubi': 'wubi',
    'preinstalled-desktop': 'daily-preinstalled',
    'preinstalled-mobile': 'daily-preinstalled',
    'preinstalled-active': 'daily-preinstalled',
    'preinstalled-server': 'ubuntu-server/daily-preinstalled',
    'live-server': 'ubuntu-server/daily-live',
}


def parse_iso_tracker(opts):
    '''Get release builds information from ISO tracker.

    Return an info dictionary with the following keys:
    - build_map: projectname -> type -> build_stamp -> set(arches)
    - milestone_name: Milestone name (e. g. "Alpha 3")
    - milestone_code: publish-release milestone code (e. g. "alpha-3")
    - stable (optional): stable release name (e. g. "lucid")
    '''
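    # A populated return value might look like this (illustrative only):
    #   {'build_map': {'ubuntu': {'desktop': {'20120405.2': {'amd64', 'i386'}}}},
    #    'milestone_name': 'Beta 2', 'milestone_code': 'beta-2', 'stable': None}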
    build_map = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))
    ret = {'build_map': build_map, 'stable': None}

    # access the ISO tracker
    isotracker = ISOTracker(target=opts.target)

    # get current milestone
    if opts.milestone:
        ms = isotracker.get_milestone_by_name(opts.milestone)
    else:
        ms = isotracker.default_milestone()

    if ms.status_string != 'Testing':
        sys.stderr.write(
            'ERROR: Current milestone is not marked as "Testing"\n')
        sys.exit(1)

    m = milestone_name_re.search(ms.title)
    if m:
        # require number for alphas
        if m.group(1) != 'Alpha' or m.group(2):
            ret['milestone_name'] = ' '.join(
                [g for g in m.groups() if g is not None])
            ret['milestone_code'] = (
                ret['milestone_name'].lower().replace(' ', '-'))

        if 'milestone_name' not in ret:
            sys.stderr.write(
                "ERROR: Milestone '%s' isn't a valid target for publishing.\n"
                % ms.title)
            sys.exit(1)

        if ret['milestone_code'] == 'pre-release':
            ret['milestone_code'] = 'final'
    else:
        m = stable_name_re.search(ms.title)
        if not m:
            sys.stderr.write(
                "ERROR: Milestone '%s' isn't a valid target for publishing.\n"
                % ms.title)
            sys.exit(1)

        ret['milestone_name'] = m.group(0)
        ret['milestone_code'] = 'final'
        ret['stable'] = m.group(1).lower()

    # product name lookup
    products = {}
    for product in isotracker.tracker_products:
        products[product.id] = product.title

    # builds
    for build in isotracker.get_builds(ms):
        product = products[build.productid]

        # Start by skipping anything in the ignore list
        if ignore_product_re.search(product):
            continue

        if opts.prepublish or opts.include_active:
            ready_states = ('Active', 'Ready')
        else:
            ready_states = ('Ready',)
        if build.status_string not in ready_states:
            print('Ignoring %s which has status %s' %
                  (product, build.status_string))
            continue

        # Fail when finding an unknown product
        m = product_re.match(product)
        if not m:
            sys.stderr.write('ERROR: Cannot handle product %s\n' % product)
            sys.exit(1)

        project = m.group(1).lower().replace(' ', '')
        type = m.group(2).lower()
        arch = m.group(3).lower()

        if 'Server armhf+raspi2' in product:
            # This product is mislabeled in the tracker:
            type = 'preinstalled-%s' % type
        if 'Server armhf+raspi3' in product:
            type = 'preinstalled-%s' % type
        if 'Server arm64+raspi3' in product:
            type = 'preinstalled-%s' % type
        if 'Server Subiquity' in product:
            type = 'live-server'
            project = 'ubuntu'
        if 'Preinstalled' in product:
            type = 'preinstalled-%s' % type
        if project == 'kubuntu' and type == 'mobile':
            project = 'kubuntu-mobile'
        if project == 'kubuntu' and type == 'active':
            project = 'kubuntu-active'
            type = 'desktop'
        if project == 'ubuntu' and type == 'base':
            project = 'ubuntu-base'
        if project == 'ubuntugnome':
            project = 'ubuntu-gnome'
        if project == 'ubuntumate':
            project = 'ubuntu-mate'
        if project == 'ubuntubudgie':
            project = 'ubuntu-budgie'
        if project == 'lubuntunext':
            project = 'lubuntu-next'

        build_map[project][type][build.version].add(arch)

    return ret


def do_publish_release(opts, project, type, buildstamp, arches, milestone,
                       stable):
    '''Process a particular build publishing'''

    primary = set(('amd64', 'i386'))

    primary_arches = arches & primary
    ports_arches = arches - primary

    if 'alpha' in milestone:
        official = 'no'
    else:
        official = 'named'
        if (project in ('ubuntu',) and
                type in ('desktop', 'alternate', 'netbook', 'live-server',
                         'wubi') and
                primary_arches):
            official = 'yes'
        if opts.prepublish:
            if official == 'named':
                # no prepublication needed
                return
            elif official == 'yes':
                official = 'poolonly'

    # For pre-Natty: remove "official in ('yes', 'poolonly') and"
    if official in ('yes', 'poolonly') and primary_arches and ports_arches:
        do_publish_release(
            opts, project, type, buildstamp, primary_arches, milestone, stable)
        do_publish_release(
            opts, project, type, buildstamp, ports_arches, milestone, stable)
        return

    cmd = ['for-project', project, 'publish-release']
    if type != 'src':
        cmd.insert(0, "ARCHES='%s'" % ' '.join(sorted(arches)))
    if stable is not None:
        cmd.insert(0, "DIST=%s" % stable)

    if opts.dryrun:
        cmd.append('--dry-run')

    # dir and buildstamp arguments
    try:
        dir = type_map[type]
        # For pre-Natty: uncomment next two lines
        #if ports_arches:
        #    dir = re.sub(r'daily', 'ports/daily', dir)
        # Sometimes a daily build is treated as being one project (e.g.
        # ubuntu-netbook) but should be published under the auspices of
        # another project (e.g. ubuntu).  This is of course NOT AT ALL
        # CONFUSING.
        if project == 'ubuntu' and type == 'server':
            dir = 'ubuntu-server/%s' % dir
        elif project == 'ubuntu' and type == 'netbook' and primary_arches:
            dir = 'ubuntu-netbook/%s' % dir
        cmd.append(dir)
        cmd.append(buildstamp)
    except KeyError:
        print('ERROR: Cannot handle type', type, 'for', project,
              file=sys.stderr)
        return

    # type argument
    cmd.append(type)

    # releaseflag argument
    cmd.append(official)

    # name argument
    if milestone != 'final':
        cmd.append(milestone)

    print(' '.join(cmd))


def main():
    parser = optparse.OptionParser(usage='Usage: %prog [options]')
    parser.add_option('-m', '--milestone',
                      help='post to MILESTONE rather than the default')
    parser.add_option('-n', '--dry-run', dest='dryrun',
                      action='store_true', default=False,
                      help='Generate dry-run commands')
    parser.add_option('-p', '--prepublish', dest='prepublish',
                      action='store_true', default=False,
                      help='Pre-publish images to .pool')
    parser.add_option('-t', '--target', help='post to an alternate QATracker')
    parser.add_option('--include-active', action='store_true', default=False,
                      help='Always include Active (not Ready) images')
    opts, args = parser.parse_args()

    info = parse_iso_tracker(opts)

    print('\n## make backup:')
    print('cd ~/cdimage/; rm -rf www.prev; cp -al www www.prev; cd www')

    print('\n## publish images:')
    source_milestone = None
    for project, builds in info['build_map'].items():
        for type, buildstamps in builds.items():
            for buildstamp, arches in buildstamps.items():
                do_publish_release(opts, project, type, buildstamp, arches,
                                   info['milestone_code'], info['stable'])
                source_milestone = info['milestone_code']
        print()

    if source_milestone:
        do_publish_release(opts, 'ubuntu', 'src', 'current', set(),
                           source_milestone, info['stable'])

    if not opts.prepublish:
        print('\n## fix name in headers:')
        print("find full -path '*/%s*HEADER.html' | "
              "xargs sed -i 's/Daily Build/%s/'" %
              (info['milestone_code'], info['milestone_name']))

    print('\n## check changes against www.prev:')
    print('diff -u <(cd ../www.prev/full && find | sort) '
          '<(cd full && find | sort) | less')


if __name__ == '__main__':
    main()
@ -0,0 +1,535 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-

# Copyright (C) 2011, 2012 Canonical Ltd.
# Author: Stéphane Graber <stgraber@ubuntu.com>

# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.

# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.

# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA

try:
    import xmlrpc.client as xmlrpclib
except ImportError:
    import xmlrpclib

import base64
from datetime import datetime

# Taken from qatracker/qatracker.modules (PHP code)
# cat qatracker.module | grep " = array" | sed -e 's/^\$//g' \
#     -e 's/array(/[/g' -e 's/);/]/g' -e "s/t('/\"/g" -e "s/')/\"/g"
### AUTO-GENERATED ->
qatracker_build_milestone_status = ["Active", "Re-building", "Disabled",
                                    "Superseded", "Ready"]
qatracker_milestone_notify = ["No", "Yes"]
qatracker_milestone_autofill = ["No", "Yes"]
qatracker_milestone_status = ["Testing", "Released", "Archived"]
qatracker_milestone_series_status = ["Active", "Disabled"]
qatracker_milestone_series_manifest_status = ["Active", "Disabled"]
qatracker_product_status = ["Active", "Disabled"]
qatracker_product_type = ["iso", "package", "hardware"]
qatracker_product_download_type = ["HTTP", "RSYNC", "ZSYNC",
                                   "GPG signature", "MD5 checksum", "Comment",
                                   "Torrent"]
qatracker_testsuite_testcase_status = ["Mandatory", "Disabled", "Run-once",
                                       "Optional"]
qatracker_result_result = ["Failed", "Passed", "In progress"]
qatracker_result_status = ["Active", "Disabled"]
qatracker_rebuild_status = ["Requested", "Queued", "Building", "Built",
                            "Published", "Canceled"]
### <- AUTO-GENERATED


class QATrackerRPCObject():
    """Base class for objects received over XML-RPC"""

    CONVERT_BOOL = []
    CONVERT_DATE = []
    CONVERT_INT = []

    def __init__(self, tracker, rpc_dict):
        # Convert the dict we get from the API into an object

        for key in rpc_dict:
            if key in self.CONVERT_INT:
                try:
                    setattr(self, key, int(rpc_dict[key]))
                except ValueError:
                    setattr(self, key, None)
            elif key in self.CONVERT_BOOL:
                setattr(self, key, rpc_dict[key] == "true")
            elif key in self.CONVERT_DATE:
                try:
                    setattr(self, key, datetime.strptime(rpc_dict[key],
                                                         '%Y-%m-%d %H:%M:%S'))
                except ValueError:
                    setattr(self, key, None)
            else:
                setattr(self, key, str(rpc_dict[key]))

        self.tracker = tracker

    def __repr__(self):
        return "%s: %s" % (self.__class__.__name__, self.title)


class QATrackerBug(QATrackerRPCObject):
    """A bug entry"""

    CONVERT_INT = ['bugnumber', 'count']
    CONVERT_DATE = ['earliest_report', 'latest_report']

    def __repr__(self):
        return "%s: %s" % (self.__class__.__name__, self.bugnumber)


class QATrackerBuild(QATrackerRPCObject):
    """A build entry"""

    CONVERT_INT = ['id', 'productid', 'userid', 'status']
    CONVERT_DATE = ['date']

    def __repr__(self):
        return "%s: %s" % (self.__class__.__name__, self.id)

    def add_result(self, testcase, result, comment='', hardware='', bugs={}):
        """Add a result to the build"""

        if (self.tracker.access not in ("user", "admin") and
                self.tracker.access is not None):
            raise Exception("Access denied, you need 'user' but are '%s'" %
                            self.tracker.access)

        build_testcase = None

        # FIXME: Supporting 'str' containing the testcase name would be nice
        if isinstance(testcase, QATrackerTestcase):
            build_testcase = testcase.id
        elif isinstance(testcase, int):
            build_testcase = testcase

        if not build_testcase:
            raise IndexError("Couldn't find testcase: %s" % (testcase,))

        if isinstance(result, list):
            raise TypeError("result must be a string or an integer")

        build_result = self.tracker._get_valid_id_list(qatracker_result_result,
                                                       result)

        if not isinstance(bugs, dict):
            raise TypeError("bugs must be a dict")

        for bug in bugs:
            if not isinstance(bug, int) or bug <= 0:
                raise ValueError("A bugnumber must be a number > 0")

            if not isinstance(bugs[bug], int) or bugs[bug] not in (0, 1):
                raise ValueError("A bugimportance must be in (0,1)")

        resultid = int(self.tracker.tracker.results.add(self.id,
                                                        build_testcase,
                                                        build_result[0],
                                                        str(comment),
                                                        str(hardware),
                                                        bugs))
        if resultid == -1:
            raise Exception("Couldn't post your result.")

        new_result = None
        for entry in self.get_results(build_testcase, 0):
            if entry.id == resultid:
                new_result = entry
                break

        return new_result

    def get_results(self, testcase, status=qatracker_result_status):
        """Get a list of results for the given build and testcase"""

        build_testcase = None

        # FIXME: Supporting 'str' containing the testcase name would be nice
        if isinstance(testcase, QATrackerTestcase):
            build_testcase = testcase.id
        elif isinstance(testcase, int):
            build_testcase = testcase

        if not build_testcase:
            raise IndexError("Couldn't find testcase: %s" % (testcase,))

        record_filter = self.tracker._get_valid_id_list(
            qatracker_result_status,
            status)

        if len(record_filter) == 0:
            return []

        results = []
        for entry in self.tracker.tracker.results.get_list(
                self.id, build_testcase, list(record_filter)):
            results.append(QATrackerResult(self.tracker, entry))

        return results


class QATrackerMilestone(QATrackerRPCObject):
    """A milestone entry"""

    CONVERT_INT = ['id', 'status', 'series']
    CONVERT_BOOL = ['notify']

    def get_bugs(self):
        """Returns a list of all bugs linked to this milestone"""

        bugs = []
        for entry in self.tracker.tracker.bugs.get_list(self.id):
            bugs.append(QATrackerBug(self.tracker, entry))

        return bugs

    def add_build(self, product, version, note="", notify=True):
        """Add a build to the milestone"""

        if self.status != 0:
            raise TypeError("Only active milestones are accepted")

        if self.tracker.access != "admin" and self.tracker.access is not None:
            raise Exception("Access denied, you need 'admin' but are '%s'" %
                            self.tracker.access)

        if not isinstance(notify, bool):
            raise TypeError("notify must be a boolean")

        build_product = None

        if isinstance(product, QATrackerProduct):
            build_product = product
        else:
            valid_products = self.tracker.get_products(0)

            for entry in valid_products:
                if (entry.title.lower() == str(product).lower() or
                        entry.id == product):
                    build_product = entry
                    break

        if not build_product:
            raise IndexError("Couldn't find product: %s" % product)

        if build_product.status != 0:
            raise TypeError("Only active products are accepted")

        self.tracker.tracker.builds.add(build_product.id, self.id,
                                        str(version), str(note), notify)

        new_build = None
        for entry in self.get_builds(0):
            if (entry.productid == build_product.id
                    and entry.version == str(version)):
                new_build = entry
                break

        return new_build

    def get_builds(self, status=qatracker_build_milestone_status):
        """Get a list of builds for the milestone"""

        record_filter = self.tracker._get_valid_id_list(
            qatracker_build_milestone_status, status)

        if len(record_filter) == 0:
            return []

        builds = []
        for entry in self.tracker.tracker.builds.get_list(
                self.id, list(record_filter)):
            builds.append(QATrackerBuild(self.tracker, entry))

        return builds


class QATrackerProduct(QATrackerRPCObject):
    CONVERT_INT = ['id', 'type', 'status']

    def get_testcases(self, series,
                      status=qatracker_testsuite_testcase_status):
        """Get a list of testcases associated with the product"""

        record_filter = self.tracker._get_valid_id_list(
            qatracker_testsuite_testcase_status, status)

        if len(record_filter) == 0:
            return []

        if isinstance(series, QATrackerMilestone):
            seriesid = series.series
        elif isinstance(series, int):
            seriesid = series
        else:
            raise TypeError("series needs to be a valid QATrackerMilestone"
                            " instance or an integer")

        testcases = []
        for entry in self.tracker.tracker.testcases.get_list(
                self.id, seriesid, list(record_filter)):
            testcases.append(QATrackerTestcase(self.tracker, entry))

        return testcases


class QATrackerRebuild(QATrackerRPCObject):
    CONVERT_INT = ['id', 'seriesid', 'productid', 'milestoneid', 'requestedby',
                   'changedby', 'status']
    CONVERT_DATE = ['requestedat', 'changedat']

    def __repr__(self):
        return "%s: %s" % (self.__class__.__name__, self.id)

    def save(self):
        """Save any change that happened on this entry.
        NOTE: At the moment only supports the status field."""

        if (self.tracker.access != "admin" and
                self.tracker.access is not None):
            raise Exception("Access denied, you need 'admin' but are '%s'" %
                            self.tracker.access)

        retval = self.tracker.tracker.rebuilds.update_status(self.id,
                                                             self.status)
        if retval is not True:
            raise Exception("Failed to update rebuild")

        return retval


class QATrackerResult(QATrackerRPCObject):
    CONVERT_INT = ['id', 'reporterid', 'revisionid', 'result', 'changedby',
                   'status']
    CONVERT_DATE = ['date', 'lastchange']
    __deleted = False

    def __repr__(self):
        return "%s: %s" % (self.__class__.__name__, self.id)

    def delete(self):
        """Remove the result from the tracker"""

        if (self.tracker.access not in ("user", "admin") and
                self.tracker.access is not None):
            raise Exception("Access denied, you need 'user' but are '%s'" %
                            self.tracker.access)

        if self.__deleted:
            raise IndexError("Result has already been removed")

        retval = self.tracker.tracker.results.delete(self.id)
        if retval is not True:
            raise Exception("Failed to remove result")

        self.status = 1
        self.__deleted = True

    def save(self):
        """Save any change that happened on this entry"""

        if (self.tracker.access not in ("user", "admin") and
                self.tracker.access is not None):
            raise Exception("Access denied, you need 'user' but are '%s'" %
                            self.tracker.access)

        if self.__deleted:
            raise IndexError("Result no longer exists")

        retval = self.tracker.tracker.results.update(self.id, self.result,
                                                     self.comment,
                                                     self.hardware,
                                                     self.bugs)
        if retval is not True:
            raise Exception("Failed to update result")

        return retval


class QATrackerSeries(QATrackerRPCObject):
    CONVERT_INT = ['id', 'status']

    def get_manifest(self, status=qatracker_milestone_series_manifest_status):
        """Get a list of products in the series' manifest"""

        record_filter = self.tracker._get_valid_id_list(
            qatracker_milestone_series_manifest_status, status)

        if len(record_filter) == 0:
            return []

        manifest_entries = []
        for entry in self.tracker.tracker.series.get_manifest(
                self.id, list(record_filter)):
            manifest_entries.append(QATrackerSeriesManifest(
                self.tracker, entry))

        return manifest_entries


class QATrackerSeriesManifest(QATrackerRPCObject):
    CONVERT_INT = ['id', 'productid', 'status']

    def __repr__(self):
        return "%s: %s" % (self.__class__.__name__, self.product_title)


class QATrackerTestcase(QATrackerRPCObject):
    CONVERT_INT = ['id', 'status', 'weight', 'suite']


class QATracker():
    def __init__(self, url, username=None, password=None):
        class AuthTransport(xmlrpclib.Transport):
            def set_auth(self, auth):
                self.auth = auth

            def get_host_info(self, host):
                host, extra_headers, x509 = \
                    xmlrpclib.Transport.get_host_info(self, host)
                if extra_headers is None:
                    extra_headers = []
                extra_headers.append(('Authorization', 'Basic %s' % auth))
                return host, extra_headers, x509

        if username and password:
            try:
                auth = str(base64.b64encode(
                    bytes('%s:%s' % (username, password), 'utf-8')),
                    'utf-8')
            except TypeError:
                auth = base64.b64encode('%s:%s' % (username, password))

            transport = AuthTransport()
            transport.set_auth(auth)
            drupal = xmlrpclib.ServerProxy(url, transport=transport)
        else:
            drupal = xmlrpclib.ServerProxy(url)

        # Call listMethods() so if something is wrong we know it immediately
        drupal.system.listMethods()

        # Get our current access
        self.access = drupal.qatracker.get_access()

        self.tracker = drupal.qatracker

    def _get_valid_id_list(self, status_list, status):
        """ Get a list of valid keys and a list or just a single
            entry of input to check against the list of valid keys.
            The function looks for valid indexes and content, doing
            case insensitive checking for strings and returns a list
            of indexes for the list of valid keys. """
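        # For example (illustrative): with status_list
        # ["Active", "Re-building", "Disabled"], a status of "disabled"
        # yields [2], 0 yields [0], and ["Active", 2] yields [0, 2]
        # (list order is not guaranteed, as the filter is built from a set).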

        def process(status_list, status):
            valid_status = [entry.lower() for entry in status_list]

            if isinstance(status, int):
                if status < 0 or status >= len(valid_status):
                    raise IndexError("Invalid status: %s" % status)
                return int(status)

            if isinstance(status, str):
                status = status.lower()
                if status not in valid_status:
                    raise IndexError("Invalid status: %s" % status)
                return valid_status.index(status)

            raise TypeError("Invalid status type: %s (expected str or int)" %
                            type(status))

        record_filter = set()

        if isinstance(status, list):
            for entry in status:
                record_filter.add(process(status_list, entry))
        else:
            record_filter.add(process(status_list, status))

        return list(record_filter)

    def get_bugs(self):
        """Get a list of all bugs reported on the site"""

        bugs = []
        for entry in self.tracker.bugs.get_list(0):
            bugs.append(QATrackerBug(self, entry))

        return bugs

    def get_milestones(self, status=qatracker_milestone_status):
        """Get a list of all milestones"""

        record_filter = self._get_valid_id_list(qatracker_milestone_status,
                                                status)

        if len(record_filter) == 0:
            return []

        milestones = []
        for entry in self.tracker.milestones.get_list(list(record_filter)):
            milestones.append(QATrackerMilestone(self, entry))

        return milestones

    def get_products(self, status=qatracker_product_status):
        """Get a list of all products"""

        record_filter = self._get_valid_id_list(qatracker_product_status,
                                                status)

        if len(record_filter) == 0:
            return []

        products = []
        for entry in self.tracker.products.get_list(list(record_filter)):
            products.append(QATrackerProduct(self, entry))

        return products

    def get_rebuilds(self, status=qatracker_rebuild_status):
        """Get a list of all rebuilds"""

        record_filter = self._get_valid_id_list(
            qatracker_rebuild_status, status)

        if len(record_filter) == 0:
            return []

        rebuilds = []
        for entry in self.tracker.rebuilds.get_list(list(record_filter)):
            rebuilds.append(QATrackerRebuild(self, entry))

        return rebuilds

    def get_series(self, status=qatracker_milestone_series_status):
        """Get a list of all series"""

        record_filter = self._get_valid_id_list(
            qatracker_milestone_series_status, status)

        if len(record_filter) == 0:
            return []

        series = []
        for entry in self.tracker.series.get_list(list(record_filter)):
            series.append(QATrackerSeries(self, entry))

        return series
@ -0,0 +1,497 @@
#! /usr/bin/python

# Copyright (C) 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Manipulate Ubuntu upload queues."""

from __future__ import print_function

import collections
from datetime import datetime
from operator import attrgetter
from optparse import OptionParser, SUPPRESS_HELP
import os
import sys
try:
    from urllib.parse import unquote, urlsplit
    from urllib.request import urlretrieve
except ImportError:
    from urllib import unquote, urlretrieve
    from urlparse import urlsplit

from launchpadlib.launchpad import Launchpad
import pytz

import lputils


CONSUMER_KEY = "queue"


queue_names = (
    "New",
    "Unapproved",
    "Accepted",
    "Done",
    "Rejected",
)


now = datetime.now(pytz.timezone("UTC"))


def queue_item(options, queue_id):
    """Load a queue item by its numeric ID."""
    return options.launchpad.load('%s%s/%s/+upload/%s' % (
        options.launchpad._root_uri.ensureSlash(), options.distribution.name,
        options.series.name, queue_id))


def queue_item_allowed(options, item):
    # Rather than using item.contains_build, treat anything that isn't
    # sourceful as binaryful.  This allows us to treat copies of custom
    # uploads (which have none of contains_source, contains_copy, or
    # contains_build) as binaryful.  However, copies may contain both source
    # and binaries.
    sourceful = item.contains_source or item.contains_copy
    binaryful = not item.contains_source or item.contains_copy
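    # The two expressions above work out roughly as follows (derived from the
    # logic, shown here for illustration):
    #   source upload:  sourceful=True,  binaryful=False
    #   build upload:   sourceful=False, binaryful=True
    #   custom upload:  sourceful=False, binaryful=True
    #   copy:           sourceful=True,  binaryful=True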
    if options.source and sourceful:
        return True
    elif options.binary and binaryful:
        return True
    else:
        return False


def queue_items(options, args):
    if not args:
        args = ['']

    items = collections.OrderedDict()
    for arg in args:
        arg = arg.strip()
        if arg.isdigit():
            item = queue_item(options, arg)
            if item in items:
                continue
            if item.status != options.queue:
                raise ValueError(
                    "Item %s is in queue %s, not requested queue %s" %
                    (item.id, item.status, options.queue))
            if (item.distroseries != options.series or
                    item.pocket != options.pocket):
                if item.pocket == "Release":
                    item_suite = item.distroseries.name
                else:
                    item_suite = "%s-%s" % (
                        item.distroseries.name, item.pocket.lower())
                raise ValueError("Item %s is in %s/%s not in %s/%s" % (
                    item.id, item.distroseries.distribution.name,
                    item_suite, options.distribution.name,
                    options.suite))
            if queue_item_allowed(options, item):
                items[item] = 1
        else:
            kwargs = {}
            if "/" in arg:
                kwargs["name"], kwargs["version"] = arg.split("/")
            elif arg:
                kwargs["name"] = arg
            new_items = options.series.getPackageUploads(
                archive=options.archive, pocket=options.pocket,
                status=options.queue, exact_match=options.exact_match,
                **kwargs)
            for item in new_items:
                if queue_item_allowed(options, item):
                    items[item] = 1

    return items


#XXX cprov 2006-09-19: We need to use template engine instead of hardcoded
# format variables.
HEAD = "-" * 9 + "|----|" + "-" * 22 + "|" + "-" * 22 + "|" + "-" * 15
FOOT_MARGIN = " " * (9 + 6 + 1 + 22 + 1 + 22 + 2)


def make_tag(item):
    if item.contains_copy:
        return "X-"
    else:
        return (("S" if item.contains_source else "-") +
                ("B" if item.contains_build else "-"))


def approximate_age(time):
    """Return a nicely-formatted approximate age."""
    seconds = int((now - time).total_seconds())
    if seconds == 1:
        return "1 second"
    elif seconds < 60:
        return "%d seconds" % seconds

    minutes = int(round(seconds / 60.0))
    if minutes == 1:
        return "1 minute"
    elif minutes < 60:
        return "%d minutes" % minutes

    hours = int(round(minutes / 60.0))
    if hours == 1:
        return "1 hour"
    elif hours < 48:
        return "%d hours" % hours

    days = int(round(hours / 24.0))
    if days == 1:
        return "1 day"
    elif days < 14:
        return "%d days" % days

    weeks = int(round(days / 7.0))
    if weeks == 1:
        return "1 week"
    else:
        return "%d weeks" % weeks


def show_item_main(item):
    tag = make_tag(item)
    # TODO truncation sucks
    print("%8d | %s | %s | %s | %s" %
          (item.id, tag, item.display_name.ljust(20)[:20],
           item.display_version.ljust(20)[:20],
           approximate_age(item.date_created)))


def show_source(source):
    print("\t | * %s/%s Component: %s Section: %s" %
          (source.package_name, source.package_version,
           source.component_name, source.section_name))


def show_binary(binary):
    if "customformat" in binary:
        print("\t | * %s Format: %s" % (
            binary["name"], binary["customformat"]))
    else:
        if binary["is_new"]:
            status_flag = "N"
        else:
            status_flag = "*"
        print("\t | %s %s/%s/%s "
              "Component: %s Section: %s Priority: %s" % (
                  status_flag, binary["name"], binary["version"],
                  binary["architecture"], binary["component"],
                  binary["section"], binary["priority"]))


def show_item(item):
    show_item_main(item)
    if item.contains_copy or item.contains_source:
        show_source(item)
    if item.contains_build:
        for binary in item.getBinaryProperties():
            show_binary(binary)


def display_name(item):
    display = "%s/%s" % (item.display_name, item.display_version)
    if item.contains_build:
        display += " (%s)" % item.display_arches
    return display


def info(options, args):
    """Show information on queue items."""
    items = queue_items(options, args)
    print("Listing %s/%s (%s) %s" %
          (options.distribution.name, options.suite, options.queue,
           len(items)))
    print(HEAD)
    for item in items:
        show_item(item)
    print(HEAD)
    print(FOOT_MARGIN + str(len(items)))
    return 0


# Get librarian URLs for source_package_publishing_history or package_upload
# objects
def urls(options, item):
    try:
        if item.contains_copy:
            archive = item.copy_source_archive
            item = archive.getPublishedSources(
                exact_match=True, source_name=item.package_name,
                version=item.package_version)
            if item:
                return urls(options, item[0])
            else:
                print("Error: Can't find source package for copy")
                return []
    except AttributeError:
        # Not a package_upload
        pass

    ret = []
    try:
        ret.append(item.changes_file_url)
        ret.extend(item.customFileUrls())
    except AttributeError:  # Copies won't have this
        ret.append(item.changesFileUrl())
    if options.source:
        ret.extend(item.sourceFileUrls())
    if options.binary:
        ret.extend(item.binaryFileUrls())
    # On staging we may get None URLs due to missing library files; filter
    # these out.
    ret = list(filter(None, ret))
    return ret


def fetch(options, args):
    """Fetch the contents of a queue item."""
    ret = 1
    items = queue_items(options, args)
    for item in items:
        print("Fetching %s" % display_name(item))
        fetch_item(options, item)
        ret = 0
    return ret


def fetch_item(options, item):
    for url in urls(options, item):
        path = urlsplit(url)[2]
        filename = unquote(path.split("/")[-1])
        exists = os.path.exists(filename)
        if options.overwrite or not exists:
            print("Constructing %s (%s)" % (filename, url))
            urlretrieve(url, filename)
        elif exists:
            print("Not overwriting existing %s with %s" %
                  (filename, url))


def show_urls(options, args):
    """Show the URLs from which a queue item may be downloaded."""
    items = queue_items(options, args)
    for item in items:
        for url in urls(options, item):
            print(url)
    return 0 if items else 1


def accept(options, args):
    """Accept a queue item."""
    items = queue_items(options, args)
    for item in sorted(items, key=attrgetter("id")):
        if options.dry_run:
            print("Would accept %s" % display_name(item))
        else:
            print("Accepting %s" % display_name(item))
            item.acceptFromQueue()
    return 0 if items else 1


def reject(options, args):
    """Reject a queue item."""
    items = queue_items(options, args)
    for item in sorted(items, key=attrgetter("id")):
        if options.dry_run:
            print("Would reject %s" % display_name(item))
        else:
            print("Rejecting %s" % display_name(item))
            item.rejectFromQueue(comment=options.reject_comment)
    return 0 if items else 1


def override_source(options, item):
    """Override properties of source packages in a queue item."""
    kwargs = {}
    if options.component:
        kwargs["new_component"] = options.component
    if options.section:
        kwargs["new_section"] = options.section

    print("Overriding %s_%s (%s/%s)" % (
        item.package_name, item.package_version,
        item.component_name, item.section_name))
    item.overrideSource(**kwargs)
    show_item(options.launchpad.load(item.self_link))
    return set((item.package_name,))


def override_binary(options, args, item):
    """Override properties of binary packages in a queue item."""
    overridden = set()
    changes = []
    show_binaries = []
    for binary in item.getBinaryProperties():
        if binary["name"] in args:
            overridden.add(binary["name"])
            print("Overriding %s_%s (%s/%s/%s)" % (
                binary["name"], binary["version"],
                binary["component"], binary["section"], binary["priority"]))
            change = {"name": binary["name"]}
            if options.component is not None:
                change["component"] = options.component
            if options.section is not None:
                change["section"] = options.section
            if options.priority is not None:
                change["priority"] = options.priority
            changes.append(change)
            show_binaries.append(binary["name"])
    if changes:
        item.overrideBinaries(changes=changes)
    if show_binaries:
        show_item_main(item)
        for binary in item.getBinaryProperties():
            if binary["name"] in show_binaries:
                show_binary(binary)
    return overridden


def override(options, args):
    """Override properties of packages in the queue.

    You may override the component (-c) or the section (-x).  In the case of
    binary packages, you may also override the priority (-p).
    """
    overridden = set()
    items = queue_items(options, args)
    for item in items:
        if item.contains_source or item.contains_copy:
            overridden.update(override_source(options, item))
        if item.contains_build:
            overridden.update(override_binary(options, args, item))
    not_overridden = set(args) - overridden
    if not_overridden:
        print("No matches for %s" % ",".join(sorted(not_overridden)))
        return 1
    else:
        return 0


def report(options, args):
    """Show a report on the sizes of available queues."""
    print("Report for %s/%s" % (options.distribution.name, options.suite))
    for queue_name in queue_names:
        items = options.series.getPackageUploads(
            archive=options.archive, pocket=options.pocket, status=queue_name)
        print(" %s -> %s entries" % (queue_name, len(items)))
    return 0


queue_actions = {
    'info': info,
    'fetch': fetch,
    'show-urls': show_urls,
    'accept': accept,
    'reject': reject,
    'override': override,
    'report': report,
}


def main():
    parser = OptionParser(
        usage="usage: %prog [options] ACTION [...]",
        description=(
            "ACTION may be one of info, fetch, show-urls, accept, reject, "
            "override, or report."),
        epilog=lputils.ARCHIVE_REFERENCE_DESCRIPTION)
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option("-A", "--archive", help="look in ARCHIVE")
    parser.add_option(
        "-s", "--suite", dest="suite", metavar="SUITE",
        help="look in suite SUITE")
    parser.add_option(
        "-Q", "--queue", dest="queue", metavar="QUEUE", default="new",
        help="consider packages in QUEUE")
    parser.add_option(
        "-n", "--dry-run", dest="dry_run", default=False, action="store_true",
        help="don't make any modifications")
    parser.add_option(
        "-e", "--exact-match", dest="exact_match",
        default=True, action="store_true",
        help="treat name filter as an exact match")
    parser.add_option(
        "-E", "--no-exact-match", dest="exact_match", action="store_false",
        help="treat name filter as a prefix match")
    parser.add_option(
        "-c", "--component", dest="component", metavar="COMPONENT",
        help="when overriding, move package to COMPONENT")
    parser.add_option(
        "-x", "--section", dest="section", metavar="SECTION",
        help="when overriding, move package to SECTION")
    parser.add_option(
        "-p", "--priority", dest="priority", metavar="PRIORITY",
        help="when overriding, move package to PRIORITY")
    parser.add_option(
        "--source", dest="source", default=False, action="store_true",
        help="only operate on source packages")
    parser.add_option(
        "--binary", dest="binary", default=False, action="store_true",
        help="only operate on binary packages")
    parser.add_option(
        "--overwrite", dest="overwrite", default=False, action="store_true",
        help="when fetching, overwrite existing files")
    parser.add_option("-m", "--reject-comment", help="rejection comment")

    # Deprecated in favour of -A.
    parser.add_option(
        "-d", "--distribution", dest="distribution", default="ubuntu",
        help=SUPPRESS_HELP)
    parser.add_option("--ppa", help=SUPPRESS_HELP)
    parser.add_option("--ppa-name", help=SUPPRESS_HELP)
    parser.add_option(
        "-j", "--partner", default=False, action="store_true",
        help=SUPPRESS_HELP)
    options, args = parser.parse_args()

    if not args:
        parser.error("must select an action")
    action = args.pop(0)
    try:
        queue_action = queue_actions[action]
    except KeyError:
        parser.error("unknown action: %s" % action)

    if action == "reject" and options.reject_comment is None:
        parser.error("rejections must supply a rejection comment")

    options.launchpad = Launchpad.login_with(
        CONSUMER_KEY, options.launchpad_instance, version="devel")

    options.queue = options.queue.title()
    lputils.setup_location(options, default_pocket="Proposed")

    if not options.source and not options.binary:
        options.source = True
        options.binary = True

    try:
        sys.exit(queue_action(options, args))
    except ValueError as x:
        print(x)
        sys.exit(1)


if __name__ == '__main__':
    main()
@ -0,0 +1,258 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.
|
||||
# Copyright (C) 2010 Scott Kitterman <scott@kitterman.com>
|
||||
# Author: Martin Pitt <martin.pitt@canonical.com>
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
'''Show changes in an unapproved upload.
|
||||
|
||||
Generate a debdiff between current source package in a given release and the
|
||||
version in the unapproved queue.
|
||||
|
||||
USAGE:
|
||||
queuediff -s hardy -b hal | view -
|
||||
'''
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import gzip
|
||||
import optparse
|
||||
import re
|
||||
import sys
|
||||
try:
|
||||
from urllib.parse import quote
|
||||
from urllib.request import urlopen, urlretrieve
|
||||
except ImportError:
|
||||
from urllib import quote, urlopen, urlretrieve
|
||||
import webbrowser
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
|
||||
default_release = 'cosmic'
|
||||
|
||||
lp = None
|
||||
|
||||
queue_url = 'https://launchpad.net/ubuntu/%s/+queue?queue_state=1&batch=300'
|
||||
ppa_url = ('https://launchpad.net/~%s/+archive/ubuntu/%s/+packages?'
|
||||
'field.series_filter=%s')
|
||||
|
||||
|
||||
def parse_options():
|
||||
'''Parse command line arguments.
|
||||
|
||||
Return (options, source_package) tuple.
|
||||
'''
|
||||
parser = optparse.OptionParser(
|
||||
usage='Usage: %prog [options] source_package')
|
||||
parser.add_option(
|
||||
"-l", "--launchpad", dest="launchpad_instance", default="production")
|
||||
parser.add_option(
|
||||
"-s", dest="release", default=default_release, metavar="RELEASE",
|
||||
help="release (default: %s)" % default_release)
|
||||
parser.add_option(
|
||||
"-p", dest="ppa", metavar="LP_USER/PPA_NAME",
|
||||
help="Check a PPA instead of the Ubuntu unapproved queue")
|
||||
parser.add_option(
|
||||
"-b", "--browser", dest="browser", action="store_true",
|
||||
default=False, help="Open Launchpad bugs in browser")
|
||||
|
||||
(opts, args) = parser.parse_args()
|
||||
|
||||
if len(args) != 1:
|
||||
parser.error('Need to specify one source package name')
|
||||
|
||||
return (opts, args[0])
|
||||
|
||||
|
||||
def parse_changes(changes_url):
|
||||
'''Parse .changes file.
|
||||
|
||||
Return dictionary with interesting information: 'bugs' (list),
|
||||
    'distribution', and 'version'.
|
||||
'''
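    # Illustrative (hypothetical) return value:
    #   {'bugs': ['1234567', '1234890'],
    #    'distribution': 'bionic-proposed',
    #    'version': '1.2-0ubuntu1.1'}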
|
||||
info = {'bugs': []}
|
||||
for l in urlopen(changes_url):
|
||||
if l.startswith('Distribution:'):
|
||||
info['distribution'] = l.split()[1]
|
||||
if l.startswith('Launchpad-Bugs-Fixed:'):
|
||||
info['bugs'] = sorted(set(l.split()[1:]))
|
||||
if l.startswith('Version:'):
|
||||
info['version'] = l.split()[1]
|
||||
return info
|
||||
|
||||
|
||||
def from_queue(sourcepkg, release):
|
||||
'''Get .changes and debdiff from queue page.
|
||||
|
||||
Return (changes URL, debdiff URL) pair.
|
||||
'''
|
||||
oops_re = re.compile('class="oopsid">(OOPS[a-zA-Z0-9-]+)<')
|
||||
changes_re = re.compile(
|
||||
'href="(http://launchpadlibrarian.net/\d+/%s_[^"]+_source.changes)"' %
|
||||
re.escape(quote(sourcepkg)))
|
||||
debdiff_re = re.compile(
|
||||
'href="(http://launchpadlibrarian.net/'
|
||||
'\d+/%s_[^"_]+_[^_"]+\.diff\.gz)">\s*diff from' %
|
||||
re.escape(quote(sourcepkg)))
|
||||
|
||||
queue_html = urlopen(queue_url % release).read()
|
||||
|
||||
m = oops_re.search(queue_html)
|
||||
if m:
|
||||
print('ERROR: Launchpad failure:', m.group(1), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
changes_url = None
|
||||
for m in changes_re.finditer(queue_html):
|
||||
# ensure that there's only one upload
|
||||
if changes_url:
|
||||
print('ERROR: Queue has more than one upload of this source, '
|
||||
'please handle manually', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
changes_url = m.group(1)
|
||||
#print('changes URL:', changes_url, file=sys.stderr)
|
||||
|
||||
m = debdiff_re.search(queue_html)
|
||||
if not m:
|
||||
print('ERROR: queue does not have a debdiff', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
debdiff_url = m.group(1)
|
||||
#print('debdiff URL:', debdiff_url, file=sys.stderr)
|
||||
|
||||
return (changes_url, debdiff_url)
|
||||
|
||||
|
||||
def from_ppa(sourcepkg, release, user, ppaname):
|
||||
'''Get .changes and debdiff from a PPA.
|
||||
|
||||
Return (changes URL, debdiff URL) pair.
|
||||
'''
|
||||
changes_re = re.compile(
|
||||
'href="(https://launchpad.net/[^ "]+/%s_[^"]+_source.changes)"' %
|
||||
re.escape(quote(sourcepkg)))
|
||||
sourcepub_re = re.compile(
|
||||
'href="(\+sourcepub/\d+/\+listing-archive-extra)"')
|
||||
#debdiff_re = re.compile(
|
||||
# 'href="(https://launchpad.net/.[^ "]+.diff.gz)">diff from')
|
||||
|
||||
changes_url = None
|
||||
changes_sourcepub = None
|
||||
last_sourcepub = None
|
||||
|
||||
for line in urlopen(ppa_url % (user, ppaname, release)):
|
||||
m = sourcepub_re.search(line)
|
||||
if m:
|
||||
last_sourcepub = m.group(1)
|
||||
continue
|
||||
m = changes_re.search(line)
|
||||
if m:
|
||||
# ensure that there's only one upload
|
||||
if changes_url:
|
||||
print('ERROR: PPA has more than one upload of this source, '
|
||||
'please handle manually', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
changes_url = m.group(1)
|
||||
assert changes_sourcepub is None, (
|
||||
'got two sourcepubs before .changes')
|
||||
changes_sourcepub = last_sourcepub
|
||||
|
||||
#print('changes URL:', changes_url, file=sys.stderr)
|
||||
|
||||
# the code below works, but the debdiffs generated by Launchpad are rather
|
||||
# useless, as they are against the final version, not what is in
|
||||
# -updates/-security; so disable
|
||||
|
||||
## now open the sourcepub and get the URL for the debdiff
|
||||
#changes_sourcepub = changes_url.rsplit('+', 1)[0] + changes_sourcepub
|
||||
##print('sourcepub URL:', changes_sourcepub, file=sys.stderr)
|
||||
#sourcepub_html = urlopen(changes_sourcepub).read()
|
||||
|
||||
#m = debdiff_re.search(sourcepub_html)
|
||||
#if not m:
|
||||
# print('ERROR: PPA does not have a debdiff', file=sys.stderr)
|
||||
# sys.exit(1)
|
||||
#debdiff_url = m.group(1)
|
||||
##print('debdiff URL:', debdiff_url, file=sys.stderr)
|
||||
debdiff_url = None
|
||||
|
||||
return (changes_url, debdiff_url)
|
||||
#
|
||||
# main
|
||||
#
|
||||
|
||||
|
||||
(opts, sourcepkg) = parse_options()
|
||||
|
||||
if opts.ppa:
|
||||
(user, ppaname) = opts.ppa.split('/', 1)
|
||||
(changes_url, debdiff_url) = from_ppa(
|
||||
sourcepkg, opts.release, user, ppaname)
|
||||
else:
|
||||
(changes_url, debdiff_url) = from_queue(sourcepkg, opts.release)
|
||||
|
||||
# print diff
|
||||
if debdiff_url:
|
||||
print(gzip.open(urlretrieve(debdiff_url)[0]).read())
|
||||
else:
|
||||
print('No debdiff available')
|
||||
|
||||
# parse changes and open bugs
|
||||
changes = parse_changes(changes_url)
|
||||
|
||||
if opts.browser:
|
||||
for b in changes['bugs']:
|
||||
webbrowser.open('https://bugs.launchpad.net/bugs/' + b)
|
||||
|
||||
# print matching sru-accept command
|
||||
if changes['bugs']:
|
||||
# Check for existing version in proposed
|
||||
lp = Launchpad.login_anonymously('queuediff', opts.launchpad_instance)
|
||||
ubuntu = lp.distributions['ubuntu']
|
||||
series = ubuntu.getSeries(name_or_version=opts.release)
|
||||
if series != ubuntu.current_series:
|
||||
archive = ubuntu.main_archive
|
||||
existing = [
|
||||
pkg.source_package_version for pkg in archive.getPublishedSources(
|
||||
exact_match=True, distro_series=series, pocket='Proposed',
|
||||
source_name=sourcepkg, status='Published')]
|
||||
updates = [
|
||||
pkg.source_package_version for pkg in archive.getPublishedSources(
|
||||
exact_match=True, distro_series=series, pocket='Updates',
|
||||
source_name=sourcepkg, status='Published')]
|
||||
for pkg in existing:
|
||||
if pkg not in updates:
|
||||
msg = '''\
|
||||
*******************************************************
|
||||
*
|
||||
* WARNING: %s already published in Proposed (%s)
|
||||
*
|
||||
*******************************************************''' % (sourcepkg, pkg)
|
||||
# show it in the diff as well as in the terminal
|
||||
print(msg)
|
||||
print(msg, file=sys.stderr)
|
||||
|
||||
print('''After accepting this SRU from the queue, run:
|
||||
sru-accept -v %s -s %s -p %s %s''' %
|
||||
(changes['version'], changes['distribution'].split('-')[0],
|
||||
sourcepkg, ' '.join(changes['bugs'])), file=sys.stderr)
|
||||
|
||||
# for PPAs, print matching copy command
|
||||
if opts.ppa:
|
||||
print('\nTo copy from PPA to distribution, run:\n'
|
||||
' copy-package -b --from=~%s/ubuntu/%s -s %s --to=ubuntu '
|
||||
'--to-suite %s-proposed -y %s\n' %
|
||||
(user, ppaname, opts.release, opts.release, sourcepkg),
|
||||
file=sys.stderr)
|
@ -0,0 +1,142 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (C) 2012 Canonical, Ltd.
|
||||
# Author: Brian Murray <brian@canonical.com>
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Given a release, find all the packages published in -proposed, then search
# each package for apport-tagged and release-tagged bug tasks reported since
# the date the package was uploaded to -proposed, keeping those that mention
# the version of the package from -proposed.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import optparse
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
try:
|
||||
from urllib.request import urlopen
|
||||
except ImportError:
|
||||
from urllib import urlopen
|
||||
|
||||
|
||||
def bugs_from_changes(change_url):
|
||||
    '''Return the sorted list of Launchpad bugs fixed by a .changes file URL'''
|
||||
changelog = urlopen(change_url)
|
||||
|
||||
refs = []
|
||||
bugs = set()
|
||||
|
||||
for l in changelog:
|
||||
if l.startswith('Launchpad-Bugs-Fixed: '):
|
||||
refs = l.split()[1:]
|
||||
break
|
||||
|
||||
for b in refs:
|
||||
try:
|
||||
lpbug = lp.bugs[int(b)]
|
||||
except KeyError:
|
||||
continue
|
||||
bugs.add(lpbug)
|
||||
|
||||
return sorted(bugs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
APPORT_TAGS = (
|
||||
'apport-package',
|
||||
'apport-bug',
|
||||
'apport-crash',
|
||||
'apport-kerneloops',
|
||||
)
|
||||
|
||||
lp = Launchpad.login_with(
|
||||
'ubuntu-archive-tools', 'production', version='devel')
|
||||
|
||||
ubuntu = lp.distributions['ubuntu']
|
||||
archive = ubuntu.getArchive(name='primary')
|
||||
|
||||
parser = optparse.OptionParser(usage="usage: %prog --release RELEASE")
|
||||
    parser.add_option(
        '--release', dest='release',
        help='check only RELEASE (default: all active series)')
|
||||
|
||||
(opt, args) = parser.parse_args()
|
||||
|
||||
releases = {}
|
||||
if not opt.release:
|
||||
for series in ubuntu.series:
|
||||
if not series.supported:
|
||||
continue
|
||||
if series.active:
|
||||
releases[series.name] = series
|
||||
else:
|
||||
series = ubuntu.getSeries(name_or_version=opt.release)
|
||||
releases[series.name] = series
|
||||
|
||||
for release in sorted(releases):
|
||||
print('Release: %s' % release)
|
||||
for spph in archive.getPublishedSources(
|
||||
pocket='Proposed', status='Published',
|
||||
distro_series=releases[release]):
|
||||
package_name = spph.source_package_name
|
||||
# for langpack updates, only keep -en as a representative
|
||||
# cargo-culted from sru-report
|
||||
if (package_name.startswith('language-pack-') and
|
||||
package_name not in ('language-pack-en',
|
||||
'language-pack-en-base')):
|
||||
continue
|
||||
date_pub = spph.date_published
|
||||
version = spph.source_package_version
|
||||
change_url = spph.changesFileUrl()
|
||||
|
||||
if not change_url:
|
||||
print("Package %s has no changes file url")
|
||||
continue
|
||||
|
||||
package = ubuntu.getSourcePackage(name=package_name)
|
||||
tasks = []
|
||||
# search for bugs reported by apport
|
||||
for tag in APPORT_TAGS:
|
||||
for task in package.searchTasks(
|
||||
tags=[tag, release], created_since=date_pub,
|
||||
tags_combinator='All'):
|
||||
tasks.append(task)
|
||||
# also search for ones tagged regression-proposed
|
||||
for task in package.searchTasks(
|
||||
tags=['regression-proposed', release],
|
||||
created_since=date_pub, tags_combinator='All'):
|
||||
tasks.append(task)
|
||||
|
||||
for task in tasks:
|
||||
if version not in task.bug.description:
|
||||
continue
|
||||
sru_bugs = bugs_from_changes(change_url)
|
||||
# check to see if any of the sru bugs are already tagged
|
||||
# verification-failed
|
||||
v_failed = False
|
||||
for sru_bug in sru_bugs:
|
||||
if 'verification-failed' in sru_bug.tags:
|
||||
print(' The SRU for package %s already has a '
|
||||
'verification-failed bug in LP: #%s' %
|
||||
(package_name, sru_bug.id))
|
||||
v_failed = True
|
||||
bug = task.bug
|
||||
if not v_failed and set(APPORT_TAGS).intersection(bug.tags):
|
||||
print(' LP: #%s is regarding %s from -proposed' %
|
||||
(bug.id, package_name))
|
||||
elif not v_failed:
|
||||
print(' LP: #%s is regarding %s from -proposed and '
|
||||
'tagged regression-proposed' %
|
||||
(bug.id, package_name))
|
@ -0,0 +1,146 @@
|
||||
#! /usr/bin/python
|
||||
|
||||
# Copyright 2012 Canonical Ltd.
|
||||
# Author: Colin Watson <cjwatson@ubuntu.com>
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""Remove a package from the archive."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from optparse import OptionParser, SUPPRESS_HELP
|
||||
import sys
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
try:
|
||||
from ubuntutools.question import YesNoQuestion
|
||||
except ImportError:
|
||||
print("Could not find ubuntutools.question; run sudo apt-get install "
|
||||
"python-ubuntutools")
|
||||
    sys.exit(1)
|
||||
|
||||
import lputils
|
||||
|
||||
|
||||
def find_removables(options, package):
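    # Yield (publication, direct) pairs; direct is False for binaries
    # removed only as a consequence of removing their source.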
|
||||
if options.binaryonly:
|
||||
for binary in lputils.find_latest_published_binaries(options, package):
|
||||
if not binary.is_debug:
|
||||
yield binary, True
|
||||
else:
|
||||
source = lputils.find_latest_published_source(options, package)
|
||||
yield source, True
|
||||
for binary in source.getPublishedBinaries():
|
||||
if not binary.is_debug:
|
||||
yield binary, False
|
||||
|
||||
|
||||
def find_all_removables(options, packages):
|
||||
for package in packages:
|
||||
try:
|
||||
for removable in find_removables(options, package):
|
||||
yield removable
|
||||
except lputils.PackageMissing as message:
|
||||
print(message)
|
||||
if options.skip_missing:
|
||||
print("Skipping")
|
||||
else:
|
||||
print("Exiting")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def remove_package(options, packages):
|
||||
removables = []
|
||||
|
||||
print("Removing packages from %s:" % options.suite)
|
||||
for removable, direct in find_all_removables(options, packages):
|
||||
removables.append((removable, direct))
|
||||
print("\t%s%s" % ("" if direct else "\t", removable.display_name))
|
||||
print("Comment: %s" % options.removal_comment)
|
||||
|
||||
if options.dry_run:
|
||||
print("Dry run; no packages removed.")
|
||||
else:
|
||||
if not options.confirm_all:
|
||||
if YesNoQuestion().ask("Remove", "no") == "no":
|
||||
return
|
||||
|
||||
removals = []
|
||||
for removable, direct in removables:
|
||||
if direct:
|
||||
removable.requestDeletion(
|
||||
removal_comment=options.removal_comment)
|
||||
removals.append(removable)
|
||||
|
||||
print("%d %s successfully removed." %
|
||||
(len(removals), "package" if len(removals) == 1 else "packages"))
|
||||
|
||||
|
||||
def main():
|
||||
parser = OptionParser(
|
||||
usage='usage: %prog -m "comment" [options] package [...]',
|
||||
epilog=lputils.ARCHIVE_REFERENCE_DESCRIPTION)
|
||||
parser.add_option(
|
||||
"-l", "--launchpad", dest="launchpad_instance", default="production")
|
||||
parser.add_option(
|
||||
"-n", "--dry-run", default=False, action="store_true",
|
||||
help="only show removals that would be performed")
|
||||
parser.add_option(
|
||||
"-y", "--confirm-all", default=False, action="store_true",
|
||||
help="do not ask for confirmation")
|
||||
parser.add_option("-A", "--archive", help="remove from ARCHIVE")
|
||||
parser.add_option(
|
||||
"-s", "--suite", metavar="SUITE", help="remove from SUITE")
|
||||
parser.add_option(
|
||||
"-a", "--architecture", dest="architectures", action="append",
|
||||
metavar="ARCHITECTURE",
|
||||
help="architecture tag (may be given multiple times)")
|
||||
parser.add_option(
|
||||
"-e", "--version",
|
||||
metavar="VERSION", help="package version (default: current version)")
|
||||
parser.add_option(
|
||||
"-b", "--binary", dest="binaryonly",
|
||||
default=False, action="store_true", help="remove binaries only")
|
||||
parser.add_option("-m", "--removal-comment", help="removal comment")
|
||||
parser.add_option(
|
||||
"--skip-missing", default=False, action="store_true",
|
||||
help=(
|
||||
"When a package cannot be removed, normally this script exits "
|
||||
"with a non-zero status. With --skip-missing instead, the "
|
||||
"error is printed and removing continues"))
|
||||
|
||||
# Deprecated in favour of -A.
|
||||
parser.add_option(
|
||||
"-d", "--distribution", default="ubuntu", help=SUPPRESS_HELP)
|
||||
parser.add_option("-p", "--ppa", help=SUPPRESS_HELP)
|
||||
parser.add_option("--ppa-name", help=SUPPRESS_HELP)
|
||||
parser.add_option(
|
||||
"-j", "--partner", default=False, action="store_true",
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
options, args = parser.parse_args()
|
||||
|
||||
options.launchpad = Launchpad.login_with(
|
||||
"remove-package", options.launchpad_instance, version="devel")
|
||||
lputils.setup_location(options)
|
||||
|
||||
if options.removal_comment is None:
|
||||
parser.error(
|
||||
"You must provide a comment/reason for all package removals.")
|
||||
|
||||
remove_package(options, args)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@ -0,0 +1,42 @@
|
||||
#!/usr/bin/python
|
||||
# Rescore all builds in a PPA.
|
||||
#
|
||||
# Copyright (C) 2012 Canonical Ltd.
|
||||
# Author: Martin Pitt <martin.pitt@ubuntu.com>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
|
||||
parser = OptionParser(usage="usage: %prog <PPA owner> <PPA name>")
|
||||
parser.add_option(
|
||||
"-l", "--launchpad", dest="launchpad_instance", default="production")
|
||||
options, args = parser.parse_args()
|
||||
if len(args) != 2:
|
||||
parser.error("need both PPA owner and PPA name")
|
||||
|
||||
owner, name = args
|
||||
|
||||
launchpad = Launchpad.login_with(
|
||||
'rescore-ppa-builds', options.launchpad_instance)
|
||||
ppa = launchpad.people[owner].getPPAByName(name=name)
|
||||
|
||||
for build in ppa.getBuildRecords(build_state='Needs building'):
|
||||
if build.can_be_rescored:
|
||||
print('Rescoring %s' % build.title)
|
||||
build.rescore(score=5000)
|
@ -0,0 +1,192 @@
|
||||
#!/usr/bin/python3
|
||||
# Generate a list of autopkgtest request.cgi URLs to
|
||||
# re-run all autopkgtests which regressed
|
||||
# Copyright (C) 2015-2016 Canonical Ltd.
|
||||
# Author: Martin Pitt <martin.pitt@ubuntu.com>
|
||||
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
|
||||
# USA
|
||||
|
||||
from datetime import datetime
|
||||
import dateutil.parser
|
||||
from dateutil.tz import tzutc
|
||||
import urllib.request
|
||||
import urllib.parse
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import yaml
|
||||
import json
|
||||
|
||||
request_url = 'https://autopkgtest.ubuntu.com/request.cgi'
|
||||
default_series = 'disco'
|
||||
args = None
|
||||
|
||||
|
||||
def get_cache_dir():
|
||||
cache_dir = os.environ.get('XDG_CACHE_HOME',
|
||||
os.path.expanduser(os.path.join('~', '.cache')))
|
||||
uat_cache = os.path.join(cache_dir, 'ubuntu-archive-tools')
|
||||
os.makedirs(uat_cache, exist_ok=True)
|
||||
return uat_cache
|
||||
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser(
|
||||
'Generate %s URLs to re-run regressions' % request_url,
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
description='''Typical workflow:
|
||||
- export autopkgtest.ubuntu.com session cookie into ~/.cache/autopkgtest.cookie
|
||||
Use a browser plugin or get the value from the settings and create it with
|
||||
printf "autopkgtest.ubuntu.com\\tTRUE\\t/\\tTRUE\\t0\\tsession\\tVALUE\\n" > ~/.cache/autopkgtest.cookie
|
||||
(The cookie is valid for one month)
|
||||
|
||||
- retry-autopkgtest-regressions [opts...] | vipe | xargs -rn1 -P10 wget --load-cookies ~/.cache/autopkgtest.cookie -O-
|
||||
edit URL list to pick/remove requests as desired, then close editor to let it run
|
||||
''')
|
||||
parser.add_argument('-s', '--series', default=default_series,
|
||||
help='Ubuntu series (default: %(default)s)')
|
||||
parser.add_argument('--bileto', metavar='TICKETNUMBER',
|
||||
help='Run for bileto ticket')
|
||||
parser.add_argument('--all-proposed', action='store_true',
|
||||
                        help='run tests against all of proposed, i.e. with apt pinning disabled')
|
||||
parser.add_argument('--state', default='REGRESSION',
|
||||
help='generate commands for given test state (default: %(default)s)')
|
||||
parser.add_argument('--max-age', type=float, metavar='DAYS',
|
||||
                        help='only consider candidates which are at most '
|
||||
'this number of days old (float allowed)')
|
||||
parser.add_argument('--min-age', type=float, metavar='DAYS',
|
||||
                        help='only consider candidates which are at least '
|
||||
'this number of days old (float allowed)')
|
||||
parser.add_argument('--blocks',
|
||||
help='rerun only those tests that were triggered '
|
||||
'by the named package')
|
||||
parser.add_argument('--no-proposed', action='store_true',
|
||||
help='run tests against release+updates instead of '
|
||||
'against proposed, to re-establish a baseline for the '
|
||||
'test. This currently only works for packages that '
|
||||
'do not themselves have a newer version in proposed.')
|
||||
args = parser.parse_args()
|
||||
|
||||
return args
|
||||
|
||||
|
||||
def get_regressions(excuses_url, release, retry_state, min_age, max_age,
|
||||
blocks, no_proposed):
|
||||
'''Return dictionary with regressions
|
||||
|
||||
Return dict: release → pkg → arch → [trigger, ...]
|
||||
'''
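    # Illustrative (hypothetical) shape of the result:
    #   {'disco': {'glibc': {'amd64': ['glibc/2.29-0ubuntu2'],
    #                        'armhf': ['glibc/2.29-0ubuntu2']}}}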
|
||||
cache_file = None
|
||||
|
||||
# load YAML excuses
|
||||
|
||||
# ignore bileto urls wrt caching, they're usually too small to matter
|
||||
# and we don't do proper cache expiry
|
||||
m = re.search('people.canonical.com/~ubuntu-archive/proposed-migration/'
|
||||
'([^/]*)/([^/]*)',
|
||||
excuses_url)
|
||||
if m:
|
||||
cache_dir = get_cache_dir()
|
||||
cache_file = os.path.join(cache_dir, '%s_%s' % (m.group(1), m.group(2)))
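        # the cache file's mtime mirrors the server's Last-Modified
        # timestamp, so the (large) YAML is re-downloaded only when changed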
|
||||
try:
|
||||
prev_mtime = os.stat(cache_file).st_mtime
|
||||
except FileNotFoundError:
|
||||
prev_mtime = 0
|
||||
prev_timestamp = datetime.fromtimestamp(prev_mtime, tz=tzutc())
|
||||
new_timestamp = datetime.now(tz=tzutc()).timestamp()
|
||||
|
||||
f = urllib.request.urlopen(excuses_url)
|
||||
if cache_file:
|
||||
remote_ts = dateutil.parser.parse(f.headers['last-modified'])
|
||||
if remote_ts > prev_timestamp:
|
||||
with open('%s.new' % cache_file, 'wb') as new_cache:
|
||||
for line in f:
|
||||
new_cache.write(line)
|
||||
os.rename('%s.new' % cache_file, cache_file)
|
||||
os.utime(cache_file, times=(new_timestamp, new_timestamp))
|
||||
f.close()
|
||||
f = open(cache_file, 'rb')
|
||||
|
||||
excuses = yaml.load(f, Loader=yaml.CSafeLoader)
|
||||
f.close()
|
||||
regressions = {}
|
||||
for excuse in excuses['sources']:
|
||||
if blocks and blocks != excuse['source']:
|
||||
continue
|
||||
try:
|
||||
age = excuse['policy_info']['age']['current-age']
|
||||
except KeyError:
|
||||
age = None
|
||||
|
||||
# excuses are sorted by ascending age
|
||||
if min_age is not None and age is not None and age < min_age:
|
||||
continue
|
||||
if max_age is not None and age is not None and age > max_age:
|
||||
break
|
||||
for pkg, archinfo in excuse.get('policy_info', {}).get('autopkgtest', {}).items():
|
||||
            try:
                # split off the version (either / or space separated)
                pkg, pkg_ver = re.split('[ /]+', pkg, 1)
            except ValueError:
                # an "error" entry: the package version is unknown
                pkg_ver = None
            # fall back to the excuse's trigger if the version is unknown
            if no_proposed and pkg_ver:
                trigger = pkg + '/' + pkg_ver
|
||||
else:
|
||||
trigger = excuse['source'] + '/' + excuse['new-version']
|
||||
for arch, state in archinfo.items():
|
||||
if state[0] == retry_state:
|
||||
regressions.setdefault(release, {}).setdefault(
|
||||
pkg, {}).setdefault(arch, []).append(trigger)
|
||||
|
||||
return regressions
|
||||
|
||||
|
||||
args = parse_args()
|
||||
|
||||
extra_params = []
|
||||
if args.all_proposed:
|
||||
extra_params.append(('all-proposed', '1'))
|
||||
|
||||
if args.bileto:
|
||||
url_root = 'https://bileto.ubuntu.com'
|
||||
ticket_url = url_root + '/v2/ticket/%s' % args.bileto
|
||||
excuses_url = None
|
||||
with urllib.request.urlopen(ticket_url) as f:
|
||||
ticket = json.loads(f.read().decode('utf-8'))['tickets'][0]
|
||||
ppa_name = ticket.get('ppa', '')
|
||||
for line in ticket.get('autopkgtest', '').splitlines():
|
||||
if args.series in line:
|
||||
excuses_url = line
|
||||
break
|
||||
    if excuses_url is None:
        # guard: the ticket may have no autopkgtest results for this series
        raise SystemExit('ERROR: no autopkgtest results for %s on ticket %s'
                         % (args.series, args.bileto))
    if excuses_url.startswith('/'):
|
||||
excuses_url = url_root + excuses_url
|
||||
excuses_url = excuses_url.replace('.html', '.yaml')
|
||||
extra_params += [('ppa', 'ci-train-ppa-service/stable-phone-overlay'),
|
||||
('ppa', 'ci-train-ppa-service/%s' % ppa_name)]
|
||||
else:
|
||||
excuses_url = 'http://people.canonical.com/~ubuntu-archive/proposed-migration/%s/update_excuses.yaml' % args.series
|
||||
regressions = get_regressions(excuses_url, args.series, args.state,
|
||||
args.min_age, args.max_age, args.blocks,
|
||||
args.no_proposed)
|
||||
|
||||
for release, pkgmap in regressions.items():
|
||||
for pkg, archmap in pkgmap.items():
|
||||
for arch, triggers in archmap.items():
|
||||
params = [('release', release), ('arch', arch), ('package', pkg)]
|
||||
params += [('trigger', t) for t in triggers]
|
||||
params += extra_params
|
||||
url = request_url + '?' + urllib.parse.urlencode(params)
|
||||
print(url)
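# Each printed URL looks roughly like (hypothetical values):
# https://autopkgtest.ubuntu.com/request.cgi?release=disco&arch=amd64&package=glibc&trigger=glibc%2F2.29-0ubuntu2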
|
@ -0,0 +1,77 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (C) 2008, 2009, 2010, 2011, 2012 Canonical Ltd.
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""Adjust SRU bugs after accepting the corresponding update."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from optparse import OptionParser
|
||||
import re
|
||||
import sys
|
||||
|
||||
import launchpadlib.errors
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
from sru_workflow import process_bug
|
||||
|
||||
|
||||
CONSUMER_KEY = "sru-accept"
|
||||
|
||||
|
||||
def append_series(option, opt_str, value, parser):
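    # optparse callback: accept "SUITE" or "SUITE-proposed" and collect
    # the bare series name in options.targets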
|
||||
if value.endswith('-proposed'):
|
||||
value = value[:-9]
|
||||
parser.values.ensure_value(option.dest, []).append(value)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = OptionParser(
|
||||
usage="Usage: %prog [options] -v version [options] bug [bug ...]")
|
||||
|
||||
parser.add_option("-l", "--launchpad", dest="launchpad_instance",
|
||||
default="production")
|
||||
parser.add_option('-s', action='callback', callback=append_series,
|
||||
type='string', dest='targets',
|
||||
help='accept for SUITE(-proposed) instead of current '
|
||||
'stable release',
|
||||
metavar='SUITE')
|
||||
parser.add_option('-p', dest='package',
|
||||
help='only change tasks for a particular source package',
|
||||
default=None,
|
||||
metavar='SRCPACKAGE')
|
||||
parser.add_option('-v', dest='version',
|
||||
help='the version of the package being accepted',
|
||||
default=None,
|
||||
metavar='VERSION')
|
||||
|
||||
options, args = parser.parse_args()
|
||||
|
||||
if not options.version:
|
||||
print('A package version (-v) was not provided.')
|
||||
sys.exit(1)
|
||||
|
||||
launchpad = Launchpad.login_with(CONSUMER_KEY, options.launchpad_instance)
|
||||
if not options.targets:
|
||||
options.targets = [[
|
||||
series.name for series in launchpad.distributions["ubuntu"].series
|
||||
if series.status == "Current Stable Release"][0]]
|
||||
try:
|
||||
for num in args:
|
||||
for series in options.targets:
|
||||
process_bug(
|
||||
launchpad, options.package, options.version, series, num)
|
||||
except launchpadlib.errors.HTTPError as err:
|
||||
print("There was an error:")
|
||||
print(err.content)
|
@ -0,0 +1,395 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (C) 2011, 2012 Canonical Ltd.
|
||||
# Author: Martin Pitt <martin.pitt@canonical.com>
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
'''Release a proposed stable release update.
|
||||
|
||||
Copy packages from -proposed to -updates, and optionally to -security and the
|
||||
development release.
|
||||
|
||||
USAGE:
|
||||
sru-release [-s] [-d] <release> <package> [<package> ...]
|
||||
'''
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from collections import defaultdict
|
||||
from functools import partial
|
||||
import optparse
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
try:
|
||||
from urllib.request import urlopen
|
||||
except ImportError:
|
||||
from urllib import urlopen
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
|
||||
# Each entry in this list is a list of source packages that are known
|
||||
# to have inter-dependencies and must be released simultaneously.
|
||||
# If possible, each list should be ordered such that earlier
|
||||
# entries could be released slightly before subsequent entries.
|
||||
RELEASE_TOGETHER_PACKAGE_GROUPS = [
|
||||
['linux-hwe', 'linux-meta-hwe'],
|
||||
['linux', 'linux-meta'],
|
||||
['grub2', 'grub2-signed'],
|
||||
['shim', 'shim-signed'],
|
||||
]
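# For example, releasing just 'linux' is refused unless 'linux-meta' is
# part of the same invocation (enforced by check_package_sets below).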
|
||||
|
||||
MISSING_PACKAGES_FROM_GROUP = (
|
||||
"The set of packages requested for release are listed as dangerous \n"
|
||||
"to release without also releasing the following at the same time:\n"
|
||||
" {missing}\n\n"
|
||||
"For more information, see:\n"
|
||||
" https://lists.ubuntu.com/archives/ubuntu-devel/2018-June/040380.html\n\n"
|
||||
"To ignore this message, pass '--skip-package-group-check'.")
|
||||
|
||||
|
||||
def check_package_sets(packages):
|
||||
"""Return a re-ordered list of packages respecting the PACKAGE_SETS
|
||||
defined above. If any packages are missing, raise error."""
|
||||
|
||||
# pkg2group is a dict where each key is a pkg in a group and value is the
|
||||
# complete group.
|
||||
pkg2group = {}
|
||||
for pgroup in RELEASE_TOGETHER_PACKAGE_GROUPS:
|
||||
for pkg in pgroup:
|
||||
if pkg in pkg2group:
|
||||
raise RuntimeError(
|
||||
"Overlapping package groups. '%s' is in '%s' and '%s'." %
|
||||
(pkg, pgroup, pkg2group[pkg]))
|
||||
pkg2group[pkg] = pgroup
|
||||
|
||||
seen = set()
|
||||
new_pkgs = []
|
||||
for pkg in packages:
|
||||
if pkg not in pkg2group:
|
||||
add = [pkg]
|
||||
else:
|
||||
add = list(pkg2group[pkg])
|
||||
new_pkgs.extend([a for a in add if a not in seen])
|
||||
seen.update(add)
|
||||
|
||||
orig = set(packages)
|
||||
new = set(new_pkgs)
|
||||
if orig != new:
|
||||
raise ValueError(
|
||||
MISSING_PACKAGES_FROM_GROUP.format(
|
||||
missing=' '.join(new.difference(orig))))
|
||||
return new_pkgs
|
||||
|
||||
|
||||
class CheckPackageSets(unittest.TestCase):
|
||||
def test_expected_linux_order_fixed(self):
|
||||
self.assertEqual(
|
||||
['pkg1', 'linux', 'linux-meta', 'pkg2'],
|
||||
check_package_sets(['pkg1', 'linux-meta', 'linux', 'pkg2']))
|
||||
|
||||
def test_raises_value_error_on_missing(self):
|
||||
self.assertRaises(
|
||||
ValueError, check_package_sets, ['pkg1', 'linux'])
|
||||
|
||||
def test_single_item_with_missing(self):
|
||||
self.assertRaises(
|
||||
ValueError, check_package_sets, ['linux'])
|
||||
|
||||
def test_single_item_without_missing(self):
|
||||
self.assertEqual(
|
||||
check_package_sets(['pkg1']), ['pkg1'])
|
||||
|
||||
def test_multiple_package_groups(self):
|
||||
"""Just make sure that having multiple groups listed still errors."""
|
||||
self.assertRaises(
|
||||
ValueError, check_package_sets, ['pkg1', 'linux', 'grub2'])
|
||||
|
||||
|
||||
def match_srubugs(options, changesfileurl):
|
||||
    '''Return the Launchpad bug objects referenced by a .changes file URL'''
|
||||
|
||||
bugs = []
|
||||
|
||||
if changesfileurl is None:
|
||||
return bugs
|
||||
|
||||
# Load changesfile
|
||||
changelog = urlopen(changesfileurl)
|
||||
bugnums = []
|
||||
for l in changelog:
|
||||
if l.startswith('Launchpad-Bugs-Fixed: '):
|
||||
bugnums = l.split()[1:]
|
||||
break
|
||||
|
||||
for b in bugnums:
|
||||
if b in options.exclude_bug:
|
||||
continue
|
||||
try:
|
||||
bugs.append(launchpad.bugs[int(b)])
|
||||
        except Exception:
|
||||
print('%s: bug %s does not exist or is not accessible' %
|
||||
(changesfileurl, b))
|
||||
|
||||
return bugs
|
||||
|
||||
|
||||
def update_sru_bug(bug, pkg):
|
||||
'''Unsubscribe SRU team and comment on bug re: how to report regressions'''
|
||||
m_subjects = [m.subject for m in bug.messages]
|
||||
if 'Update Released' in m_subjects:
|
||||
print('LP: #%s was not commented on' % bug.id)
|
||||
return
|
||||
sru_team = launchpad.people['ubuntu-sru']
|
||||
bug.unsubscribe(person=sru_team)
|
||||
text = ("The verification of the Stable Release Update for %s has "
|
||||
"completed successfully and the package has now been released "
|
||||
"to -updates. Subsequently, the Ubuntu Stable Release Updates "
|
||||
"Team is being unsubscribed and will not receive messages "
|
||||
"about this bug report. In the event that you encounter "
|
||||
"a regression using the package from -updates please report "
|
||||
"a new bug using ubuntu-bug and tag the bug report "
|
||||
"regression-update so we can easily find any regressions." % pkg)
|
||||
bug.newMessage(subject="Update Released", content=text)
|
||||
bug.lp_save()
|
||||
|
||||
|
||||
def get_versions(options, sourcename):
|
||||
'''Get current package versions.
|
||||
|
||||
If options.pattern is True, return all versions for package names
|
||||
matching options.pattern.
|
||||
If options.pattern is False, only return one result.
|
||||
|
||||
Return map pkgname -> {'release': version, 'updates': version,
|
||||
'proposed': version, 'changesfile': url_of_proposed_changes,
|
||||
'published': proposed_date}
|
||||
'''
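    # Illustrative (hypothetical) result for one package:
    #   {'openssl': {'release': '1.1.1-1ubuntu1',
    #                'updates': '1.1.1-1ubuntu1.1',
    #                'proposed': '1.1.1-1ubuntu1.2',
    #                'devel': None,
    #                'changesfile': 'https://launchpad.net/.../..._source.changes'}}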
|
||||
versions = defaultdict(dict)
|
||||
if options.esm:
|
||||
pocket = 'Release'
|
||||
else:
|
||||
pocket = 'Proposed'
|
||||
|
||||
matches = src_archive.getPublishedSources(
|
||||
source_name=sourcename, exact_match=not options.pattern,
|
||||
status='Published', pocket=pocket, distro_series=series)
|
||||
for match in matches:
|
||||
# versions in all pockets
|
||||
for pub in src_archive.getPublishedSources(
|
||||
source_name=match.source_package_name, exact_match=True,
|
||||
status='Published', distro_series=series):
|
||||
key = pub.pocket.lower()
|
||||
# special case for ESM ppas, which don't have pockets but need
|
||||
# to be treated as -proposed
|
||||
if options.esm and key == 'release':
|
||||
key = 'proposed'
|
||||
versions[pub.source_package_name][key] = (
|
||||
pub.source_package_version)
|
||||
if pocket in pub.pocket:
|
||||
versions[pub.source_package_name]['changesfile'] = (
|
||||
pub.changesFileUrl())
|
||||
# When the destination archive differs from the source scan that too.
|
||||
if dst_archive != src_archive:
|
||||
for pub in dst_archive.getPublishedSources(
|
||||
source_name=match.source_package_name, exact_match=True,
|
||||
status='Published', distro_series=series):
|
||||
key = 'security' # pub.pocket.lower()
|
||||
versions[pub.source_package_name][key] = (
|
||||
pub.source_package_version)
|
||||
if pocket in pub.pocket:
|
||||
versions[pub.source_package_name]['changesfile'] = (
|
||||
pub.changesFileUrl())
|
||||
|
||||
# devel version
|
||||
if devel_series:
|
||||
for pub in src_archive.getPublishedSources(
|
||||
source_name=match.source_package_name, exact_match=True,
|
||||
status='Published', distro_series=devel_series):
|
||||
if pub.pocket in ('Release', 'Proposed'):
|
||||
versions[pub.source_package_name]['devel'] = (
|
||||
pub.source_package_version)
|
||||
else:
|
||||
versions[match.source_package_name]['devel'] = None
|
||||
|
||||
return versions
|
||||
|
||||
|
||||
def release_package(options, package):
|
||||
'''Release a package.'''
|
||||
|
||||
pkg_versions_map = get_versions(options, package)
|
||||
if not pkg_versions_map:
|
||||
message = 'ERROR: No such package, ' + package + ', in -proposed, aborting\n'
|
||||
sys.stderr.write(message)
|
||||
sys.exit(1)
|
||||
|
||||
    for pkg, versions in pkg_versions_map.items():
|
||||
print('--- Releasing %s ---' % pkg)
|
||||
print('Proposed: %s' % versions['proposed'])
|
||||
if 'security' in versions:
|
||||
print('Security: %s' % versions['security'])
|
||||
if 'updates' in versions:
|
||||
print('Updates: %s' % versions['updates'])
|
||||
else:
|
||||
print('Release: %s' % versions.get('release'))
|
||||
if options.devel and 'devel' in versions:
|
||||
print('Devel: %s' % versions['devel'])
|
||||
|
||||
copy = partial(
|
||||
dst_archive.copyPackage, from_archive=src_archive,
|
||||
include_binaries=True, source_name=pkg,
|
||||
version=versions['proposed'], auto_approve=True)
|
||||
|
||||
if options.devel:
|
||||
if ('devel' not in versions or
|
||||
versions['devel'] in (
|
||||
versions.get('updates', 'notexisting'),
|
||||
                versions.get('release'))):
|
||||
if not options.no_act:
|
||||
copy(to_pocket='Proposed', to_series=devel_series.name)
|
||||
print('Version in %s matches development series, '
|
||||
'copied to %s-proposed' % (release, devel_series.name))
|
||||
else:
|
||||
print('ERROR: Version in %s does not match development '
|
||||
'series, not copying' % release)
|
||||
|
||||
if options.no_act:
|
||||
if options.release:
|
||||
print('Would copy to %s' % release)
|
||||
else:
|
||||
print('Would copy to %s-updates' % release)
|
||||
else:
|
||||
if options.release:
|
||||
# -proposed -> release
|
||||
copy(to_pocket='Release', to_series=release)
|
||||
print('Copied to %s' % release)
|
||||
else:
|
||||
# -proposed -> -updates
|
||||
# only phasing updates for >=raring to start
|
||||
if (release not in ('lucid', 'precise') and
|
||||
package != 'linux' and
|
||||
not package.startswith('linux-') and
|
||||
not options.security):
|
||||
copy(to_pocket='Updates', to_series=release,
|
||||
phased_update_percentage=options.percentage)
|
||||
else:
|
||||
copy(to_pocket='Updates', to_series=release)
|
||||
print('Copied to %s-updates' % release)
|
||||
if not options.no_bugs:
|
||||
sru_bugs = match_srubugs(options, versions['changesfile'])
|
||||
tag = 'verification-needed-%s' % release
|
||||
for sru_bug in sru_bugs:
|
||||
if tag not in sru_bug.tags:
|
||||
update_sru_bug(sru_bug, pkg)
|
||||
|
||||
# -proposed -> -security
|
||||
if options.security:
|
||||
if options.no_act:
|
||||
print('Would copy to %s-security' % release)
|
||||
else:
|
||||
copy(to_pocket='Security', to_series=release)
|
||||
print('Copied to %s-security' % release)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if len(sys.argv) > 1 and sys.argv[1] == "run-tests":
|
||||
sys.exit(unittest.main(argv=[sys.argv[0]] + sys.argv[2:]))
|
||||
|
||||
parser = optparse.OptionParser(
|
||||
usage='usage: %prog [options] <release> <package> [<package> ...]')
|
||||
|
||||
parser.add_option(
|
||||
'-l', '--launchpad', dest='launchpad_instance', default='production')
|
||||
parser.add_option(
|
||||
'--security', action='store_true', default=False,
|
||||
help='Additionally copy to -security pocket')
|
||||
parser.add_option(
|
||||
'-d', '--devel', action='store_true', default=False,
|
||||
help='Additionally copy to development release (only works if that '
|
||||
'has the same version as <release>)')
|
||||
parser.add_option(
|
||||
'-r', '--release', action='store_true', default=False,
|
||||
help='Copy to release pocket instead of -updates (useful for staging '
|
||||
'uploads in development release)')
|
||||
parser.add_option(
|
||||
"-z", "--percentage", type="int", default=10,
|
||||
metavar="PERCENTAGE", help="set phased update percentage")
|
||||
parser.add_option(
|
||||
'-n', '--no-act', action='store_true', default=False,
|
||||
help='Only perform checks, but do not actually copy packages')
|
||||
parser.add_option(
|
||||
'-p', '--pattern', action='store_true', default=False,
|
||||
help='Treat package names as patterns, not exact matches')
|
||||
parser.add_option(
|
||||
'--no-bugs', action='store_true', default=False,
|
||||
help='Do not act on any bugs (helpful to avoid races).')
|
||||
parser.add_option(
|
||||
'--exclude-bug', action='append', default=[], metavar='BUG',
|
||||
help='Do not update BUG.')
|
||||
parser.add_option(
|
||||
'-E', '--esm', action='store_true', default=False,
|
||||
help='Copy from the kernel ESM proposed PPA to the ESM publication PPA')
|
||||
parser.add_option(
|
||||
'--skip-package-group-check', action='store_true', default=False,
|
||||
help=('Skip the package set checks that require some packages '
|
||||
'be released together'))
|
||||
|
||||
options, args = parser.parse_args()
|
||||
|
||||
if len(args) < 2:
|
||||
parser.error(
|
||||
'You must specify a release and source package(s), see --help')
|
||||
|
||||
if options.release and (options.security or options.devel):
|
||||
        parser.error(
            '--release and --security/--devel are mutually exclusive, '
            'see --help')
|
||||
|
||||
release = args.pop(0)
|
||||
packages = args
|
||||
|
||||
if not options.skip_package_group_check:
|
||||
try:
|
||||
packages = check_package_sets(packages)
|
||||
except ValueError as e:
|
||||
sys.stderr.write(e.args[0] + '\n')
|
||||
sys.exit(1)
|
||||
|
||||
launchpad = Launchpad.login_with(
|
||||
'ubuntu-archive-tools', options.launchpad_instance, version='devel')
|
||||
ubuntu = launchpad.distributions['ubuntu']
|
||||
series = ubuntu.getSeries(name_or_version=release)
|
||||
devel_series = ubuntu.current_series
|
||||
if not devel_series:
|
||||
sys.stderr.write(
|
||||
'WARNING: No current development series, -d will not work\n')
|
||||
devel_series = None
|
||||
if release == 'precise':
|
||||
sys.stdout.write(
|
||||
'Called for precise; assuming kernel ESM publication\n')
|
||||
options.esm = True
|
||||
|
||||
if options.esm:
|
||||
        # --security is meaningless for ESM; everything there is a security
        # update.
|
||||
options.security = False
|
||||
options.release = True
|
||||
src_archive = launchpad.archives.getByReference(
|
||||
reference='~canonical-kernel-esm/ubuntu/proposed')
|
||||
dst_archive = launchpad.archives.getByReference(
|
||||
reference='~ubuntu-esm/ubuntu/esm')
|
||||
else:
|
||||
src_archive = dst_archive = ubuntu.getArchive(name='primary')
|
||||
|
||||
for package in packages:
|
||||
release_package(options, package)
|
@ -0,0 +1,159 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (C) 2015 Brian Murray <brian.murray@canonical.com>
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
'''Remove an SRU from the -proposed pocket for a release.
|
||||
|
||||
Remove a package from the -proposed pocket for a release of Ubuntu and comment
|
||||
on bug reports regarding the removal of the package giving an explanation that
|
||||
it was removed due to a failure of the SRU bug(s) to be verified in a timely
|
||||
fashion.
|
||||
|
||||
USAGE:
|
||||
sru-remove -s trusty -p homerun 12345
|
||||
'''
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import optparse
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
|
||||
def parse_options():
|
||||
'''Parse command line arguments.
|
||||
|
||||
Return (options, [bugs]) tuple.
|
||||
'''
|
||||
|
||||
parser = optparse.OptionParser(
|
||||
usage='Usage: %prog [options]')
|
||||
parser.add_option(
|
||||
"-l", "--launchpad", dest="launchpad_instance", default="production")
|
||||
parser.add_option(
|
||||
"-s", dest="release", default=default_release, metavar="RELEASE",
|
||||
help="release (default: %s)" % default_release)
|
||||
parser.add_option(
|
||||
"-p", "--package", dest="sourcepkg")
|
||||
|
||||
opts, args = parser.parse_args()
|
||||
|
||||
return (opts, args)
|
||||
|
||||
|
||||
def process_bug(launchpad, distroseries, sourcepkg, num):
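    # Move the series' "Fix Committed" task to "Won't Fix", drop the
    # verification-needed tags, comment on the bug, and unsubscribe the
    # SRU teams once no other supported-series task remains open.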
|
||||
bug_target_re = re.compile(
|
||||
r'/ubuntu/(?:(?P<suite>[^/]+)/)?\+source/(?P<source>[^/]+)$')
|
||||
bug = launchpad.bugs[num]
|
||||
series_name = distroseries.name
|
||||
open_task = False
|
||||
|
||||
for task in bug.bug_tasks:
|
||||
# Ugly; we have to do URL-parsing to figure this out.
|
||||
# /ubuntu/+source/foo can be fed to launchpad.load() to get a
|
||||
# distribution_source_package, but /ubuntu/hardy/+source/foo can't.
|
||||
match = bug_target_re.search(task.target.self_link)
|
||||
if (not match or
|
||||
(sourcepkg and
|
||||
match.group('source') != sourcepkg)):
|
||||
print("Ignoring task %s in bug %s" % (task.web_link, num))
|
||||
continue
|
||||
if (match.group('suite') != series_name and
|
||||
match.group('suite') in supported_series and
|
||||
task.status == "Fix Committed"):
|
||||
open_task = True
|
||||
if (match.group('suite') == series_name and
|
||||
task.status == "Fix Committed"):
|
||||
task.status = "Won't Fix"
|
||||
task.lp_save()
|
||||
print("Success: task %s in bug %s" % (task.web_link, num))
|
||||
btags = bug.tags
|
||||
series_v_needed = 'verification-needed-%s' % series_name
|
||||
if series_v_needed in btags:
|
||||
# this dance is needed due to
|
||||
# https://bugs.launchpad.net/launchpadlib/+bug/254901
|
||||
tags = btags
|
||||
tags.remove(series_v_needed)
|
||||
bug.tags = tags
|
||||
bug.lp_save()
|
||||
|
||||
text = ('The version of %s in the proposed pocket of %s that was '
|
||||
'purported to fix this bug report has been removed because '
|
||||
'the bugs that were to be fixed by the upload were not '
|
||||
'verified in a timely (105 days) fashion.' %
|
||||
(sourcepkg, series_name.title()))
|
||||
bug.newMessage(content=text,
|
||||
subject='Proposed package removed from archive')
|
||||
|
||||
# remove verification-needed tag if there are no open tasks
|
||||
if open_task:
|
||||
return
|
||||
# only unsubscribe the teams if there are no open tasks left
|
||||
bug.unsubscribe(person=launchpad.people['ubuntu-sru'])
|
||||
bug.unsubscribe(person=launchpad.people['sru-verification'])
|
||||
if 'verification-needed' in btags:
|
||||
# this dance is needed due to
|
||||
# https://bugs.launchpad.net/launchpadlib/+bug/254901
|
||||
tags = btags
|
||||
tags.remove('verification-needed')
|
||||
bug.tags = tags
|
||||
bug.lp_save()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
default_release = 'cosmic'
|
||||
removal_comment = ('The package was removed due to its SRU bug(s) '
|
||||
'not being verified in a timely fashion.')
|
||||
|
||||
(opts, bugs) = parse_options()
|
||||
|
||||
launchpad = Launchpad.login_with('sru-remove', opts.launchpad_instance,
|
||||
version="devel")
|
||||
ubuntu = launchpad.distributions['ubuntu']
|
||||
# determine series for which we issue SRUs
|
||||
supported_series = []
|
||||
for serie in ubuntu.series:
|
||||
if serie.supported:
|
||||
supported_series.append(serie.name)
|
||||
|
||||
series = ubuntu.getSeries(name_or_version=opts.release)
|
||||
archive = ubuntu.main_archive
|
||||
|
||||
existing = [
|
||||
pkg for pkg in archive.getPublishedSources(
|
||||
exact_match=True, distro_series=series, pocket='Proposed',
|
||||
source_name=opts.sourcepkg, status='Published')]
|
||||
|
||||
if not existing:
|
||||
print("ERROR: %s was not found in -proposed for release %s." %
|
||||
(opts.sourcepkg, opts.release), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
rm_p_cmd = ["remove-package", "-m", removal_comment, "-y",
|
||||
"-l", opts.launchpad_instance, "-s",
|
||||
"%s-proposed" % opts.release, opts.sourcepkg]
|
||||
ret = subprocess.call(rm_p_cmd)
|
||||
if ret != 0:
|
||||
print("ERROR: There was an error removing %s from %s-proposed.\n"
|
||||
"The remove-package command returned %s." %
|
||||
(opts.sourcepkg, opts.release, ret), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
# only comment on the bugs after removing the package
|
||||
for bug in bugs:
|
||||
process_bug(launchpad, series, opts.sourcepkg, bug)
|
@ -0,0 +1,722 @@
#!/usr/bin/python

# Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.
# Authors:
#   Martin Pitt <martin.pitt@ubuntu.com>
#   Jean-Baptiste Lallement <jean-baptiste.lallement@canonical.com>
#   (initial conversion to launchpadlib)
#   Brian Murray <brian.murray@ubuntu.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# Generate a report of pending SRU
#
# TODO:
# - Add to report bug reports tagged with verification-* and not in -proposed

from __future__ import print_function

from collections import defaultdict
from operator import itemgetter

import datetime
import logging
import os
import time
try:
    from urllib.request import urlopen
except ImportError:
    from urllib import urlopen
import yaml

import apt_pkg
from launchpadlib.errors import HTTPError
from launchpadlib.launchpad import Launchpad as _Launchpad
from lazr.restfulclient.errors import ClientError


# Work around non-multiple-instance-safety of launchpadlib (bug #459418).
class Launchpad(_Launchpad):
    @classmethod
    def _get_paths(cls, service_root, launchpadlib_dir=None):
        service_root, launchpadlib_dir, cache_path, service_root_dir = (
            _Launchpad._get_paths(
                service_root, launchpadlib_dir=launchpadlib_dir))
        cache_path += "-sru-report"
        if not os.path.exists(cache_path):
            os.makedirs(cache_path, 0o700)
        return service_root, launchpadlib_dir, cache_path, service_root_dir


if os.getenv('DEBUG'):
    DEBUGLEVEL = logging.DEBUG
else:
    DEBUGLEVEL = logging.WARNING

lp = None
lp_url = None
ubuntu = None
archive = None
releases = {}  # name -> distro_series
series = []
broken_bugs = set()
ignored_commenters = []
excuses_url = ("http://people.canonical.com/~ubuntu-archive/proposed-migration"
               "/%s/update_excuses.yaml")


def current_versions(distro_series, sourcename):
    '''Get current package versions

    Return map {'release': version,
                'updates': version,
                'proposed': version,
                'creator': proposed_creator,
                'signer': proposed_signer,
                'changesfiles': [urls_of_proposed_changes],
                'published': proposed_date}
    '''
    global archive

    logging.debug(
        'Fetching publishing history for %s/%s' %
        (distro_series.name, sourcename))
    history = {
        'release': '', 'updates': '', 'proposed': '', 'changesfiles': [],
        'published': datetime.datetime.now()}
    pubs = archive.getPublishedSources(
        source_name=sourcename, exact_match=True, status='Published',
        distro_series=distro_series)
    base_version = None
    base_created = None
    for pub in pubs:
        p_srcpkg_version = pub.source_package_version
        p_date_pub = pub.date_published
        p_pocket = pub.pocket
        if pub.pocket in ('Release', 'Updates'):
            if (base_version is None or
                    apt_pkg.version_compare(
                        base_version, p_srcpkg_version) < 0):
                base_version = p_srcpkg_version
                base_created = pub.date_created
        elif p_pocket == 'Proposed':
            history['changesfiles'].append(pub.changesFileUrl())
            history['published'] = p_date_pub
            try:
                history['creator'] = str(pub.package_creator)
            except ClientError as error:
                if error.response['status'] == '410':
                    history['creator'] = ''
            try:
                history['signer'] = str(pub.package_signer)
            except ClientError as error:
                if error.response['status'] == '410':
                    history['signer'] = ''
        logging.debug(
            '%s=%s published to %s/%s on %s' %
            (sourcename, p_srcpkg_version,
             distro_series.name, p_pocket, p_date_pub))
        history[p_pocket.lower()] = p_srcpkg_version
    if base_version is not None:
        proposed = archive.getPublishedSources(
            source_name=sourcename, exact_match=True,
            distro_series=distro_series, pocket='Proposed',
            created_since_date=base_created)
        for pub in proposed:
            if pub.status == 'Deleted':
                continue
            if apt_pkg.version_compare(
                    base_version, pub.source_package_version) >= 0:
                continue
            changesfileurl = pub.changesFileUrl()
            if changesfileurl not in history['changesfiles']:
                history['changesfiles'].append(changesfileurl)
            if not history['published'].tzinfo:
                history['published'] = pub.date_published
    return history


def bug_open_js(bugs, title=None):
    '''Return JavaScript snippet for opening bug URLs'''
    if not bugs:
        return ''
    if not title:
        title = 'open bugs'

    js = ''
    for b in bugs:
        js += "window.open('%s/bugs/%d');" % (lp_url, b)
    return '<button onclick="%s">%s (%i)</button>' % (js, title, len(bugs))
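
# Illustration (not part of the report, hypothetical lp_url): with lp_url
# set to 'https://launchpad.net', bug_open_js([1, 2], 'open bugs') returns
#   <button onclick="window.open('https://launchpad.net/bugs/1');
#   window.open('https://launchpad.net/bugs/2');">open bugs (2)</button>
# (shown wrapped here; the real string is a single line).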


def print_report(srus):
    '''render the report'''
    global releases

    #
    # headers/CSS
    #

    print('''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
  "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
<head>
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
  <title>Pending Ubuntu SRUs</title>
  <style type="text/css">
    body { background: #CCCCB0; color: black; }
    a { text-decoration: none; }
    table { border-collapse: collapse; border-style: solid none;
            border-width: 3px; margin-bottom: 3ex; empty-cells: show; }
    table th { text-align: left; border-style: none none dotted none;
               border-width: 1px; padding-right: 10px; }
    table td { text-align: left; border-style: none none dotted none;
               border-width: 1px; padding-right: 10px; }
    .noborder { border-style: none; }
    a { color: blue; }
    a.messages { color: #999900; font-weight: bold; }
    a.incomplete { color: yellow; font-weight: bold; }
    a.verified { color: green; font-weight: bold; }
    a.verificationfailed { color: red; font-weight: bold; }
    a.kerneltracking { font-style: italic; }
    a.testing { color: blue; }
    a.broken { text-decoration: line-through; color: black; }
    a.removal { color: gray; font-weight: bold }
  </style>
</head>
<body>
<h1>Pending Ubuntu Stable Release Updates</h1>
''')
    print('<p>Generated: %s by <a href="http://bazaar.launchpad.net/'
          '~ubuntu-archive/ubuntu-archive-tools/trunk/annotate/head%%3A/'
          'sru-report">sru-report</a></p>' %
          time.strftime('%F %T UTC', time.gmtime()))

    print('<p>Jump to: ', end="")
    print('<a href="#superseded">security-superseded</a> '
          '<a href="#upload-queues">upload-queues</a> '
          '<a href="#cleanup">cleanup</a></p>')

    print('''<p>A <a href="https://wiki.ubuntu.com/StableReleaseUpdates">stable
release update</a> is currently in progress for the following packages, i. e.
they have a newer version in -proposed than in -updates. Note that there is a
separate <a href="http://kernel.ubuntu.com/sru/kernel-sru-workflow.html">report
for Kernel updates</a>. Once an update has been
verified and released to -updates it then proceeds through the phased update
process. The status of current updates undergoing phasing can be found in a
separate <a
href="http://people.canonical.com/~ubuntu-archive/phased-updates.html">
report</a>.</p>

<p>Bugs in <span style="color:green;">green</span> are verified,
bugs in <span style="color:red;">red</span> failed verification,
bugs in <span style="color:yellow;">yellow</span> are Incomplete,
bugs in <span style="color: #999900;">golden</span> have received a comment
since the package was accepted in -proposed,
bugs in <span style="color: gray;">gray</span> are candidates for removal
due to a lack of verification,
bugs in <span style="font-style: italic">italic</span> are kernel tracking
bugs and bugs that are
<span style="text-decoration: line-through;">struck through</span> are
duplicate bug reports or weren't accessible at the time the report was
generated.</p>''')

    #
    # pending SRUs
    #

    pkg_index = defaultdict(dict)
    pkgcleanup = []
    pkgcleanup_release = []
    pkgsuperseded = []
    # set of (series_name, srcpkg, [bugs])
    proposed_antique = []
    for release in sorted(srus):
        if not srus[release]:
            continue
        for pack in srus[release]:
            pkg_index[release][pack] = srus[release][pack]['published']
        for pkg, pub in sorted(pkg_index[release].items(),
                               key=itemgetter(1)):
            rpkg = srus[release][pkg]
            if cleanup(rpkg):
                pkgcleanup.append([release, pkg, rpkg])
                del pkg_index[release][pkg]
                continue
            if cleanup_release(rpkg):
                pkgcleanup_release.append([release, pkg, rpkg])
                del pkg_index[release][pkg]
                continue
            if security_superseded(rpkg):
                pkgsuperseded.append([release, pkg, rpkg])
                del pkg_index[release][pkg]
                continue

    for release in reversed(series):
        if releases[release].status == "Active Development":
            # Migrations in the development series are handled automatically.
            continue
        if not srus[release]:
            continue
        print('''<h3>%s</h3>
<table id='%s'>
  <tr><th>Package</th><th>-release</th><th>-updates</th>
  <th>-proposed (signer, creator)</th>
  <th>changelog bugs</th><th>days</th></tr>''' % (release, release))
        for pkg, pub in sorted(pkg_index[release].items(),
                               key=itemgetter(1)):
            # skip everything that shows up on the kernel SRU reports
            if (pkg in ('linux', 'linux-hwe', 'linux-hwe-edge',
                        'linux-kvm', 'linux-oem',
                        'linux-raspi2', 'linux-snapdragon',
                        'linux-keystone', 'linux-armadaxp', 'linux-ti-omap4',
                        'linux-aws', 'linux-aws-hwe', 'linux-aws-edge',
                        'linux-azure', 'linux-azure-edge',
                        'linux-gcp', 'linux-gcp-edge',
                        'linux-gke', 'linux-euclid', 'linux-oracle') or
                    pkg.startswith('linux-signed') or
                    pkg.startswith('linux-meta') or
                    pkg.startswith('linux-lts') or
                    pkg.startswith('linux-backports-modules')):
                continue
            # for langpack updates, only keep -en as a representative
            if (pkg.startswith('language-pack-') and
                    pkg not in ('language-pack-en', 'language-pack-en-base')):
                continue
            if (pkg.startswith('kde-l10n-') and pkg != 'kde-l10n-de'):
                continue

            rpkg = srus[release][pkg]
            pkgurl = '%s/ubuntu/+source/%s/' % (lp_url, pkg)
            age = (datetime.datetime.now() - rpkg['published'].replace(
                tzinfo=None)).days

            builds = ''
            for arch, (state, url) in rpkg['build_problems'].items():
                builds += '<br/>%s: <a href="%s">%s</a> ' % (arch, url, state)
            if builds:
                builds = '<span style="font-size: x-small">%s</span>' % builds

            autopkg_fails = ''
            for excuse in rpkg['autopkg_fails']:
                autopkg_fails += '<br/>%s' % excuse
            if autopkg_fails:
                autopkg_fails = '<span style="font-size: x-small">%s</span>' \
                    % autopkg_fails

            print(' <tr><td><a href="%s">%s</a>%s %s</td> ' %
                  (pkgurl, pkg, builds, autopkg_fails))
            print('  <td><a href="%s">%s</a></td> ' %
                  (pkgurl + rpkg['release'], rpkg['release']))
            print('  <td><a href="%s">%s</a></td> ' %
                  (pkgurl + rpkg['updates'], rpkg['updates']))
            signer = str(rpkg['signer']).split('~')[-1]
            uploaders = '<a href="%s/~%s">%s</a>' % \
                (lp_url, signer, signer)
            if rpkg['creator'] and rpkg['creator'] != rpkg['signer']:
                creator = str(rpkg['creator']).split('~')[-1]
                uploaders += ', <a href="%s/~%s">%s</a>' % \
                    (lp_url, creator, creator)
            print('  <td><a href="%s">%s</a> (%s)</td> ' %
                  (pkgurl + rpkg['proposed'], rpkg['proposed'], uploaders))
            print('  <td>')
            removable = True
            antique = False
            for b, t in sorted(rpkg['bugs'].items()):
                cls = ' class="'
                incomplete = False
                try:
                    bug = lp.bugs[b]
                    bug_title = bug.title.encode('UTF-8')
                    hover_text = bug_title
                    for task in bug.bug_tasks:
                        if task.self_link.split('/')[4] != 'ubuntu':
                            continue
                        if len(task.self_link.split('/')) != 10:
                            continue
                        if pkg == task.self_link.split('/')[7] \
                                and release == task.self_link.split('/')[5]:
                            if task.status == 'Incomplete':
                                incomplete = True
                                break
                except KeyError:
                    logging.debug(
                        'bug %d does not exist or is not accessible' % b)
                    broken_bugs.add(b)
                    hover_text = ''
                if ('kernel-tracking-bug' in t or
                        'kernel-release-tracking-bug' in t):
                    cls += 'kerneltracking '
                if incomplete:
                    cls += ' incomplete'
                elif ('verification-failed' in t or
                        'verification-failed-%s' % release in t):
                    cls += ' verificationfailed'
                elif 'verification-done-%s' % release in t:
                    cls += ' verified'
                    removable = False
                elif b in broken_bugs:
                    cls += ' broken'
                    removable = False
                elif bug:
                    if bug.duplicate_of:
                        cls += ' broken'
                    last_message_date = bug.date_last_message.replace(
                        minute=0, second=0, microsecond=0)
                    published_date = rpkg['published'].replace(
                        minute=0, second=0, microsecond=0)
                    today = datetime.datetime.utcnow()
                    if last_message_date > published_date:
                        for message in bug.messages:
                            m_date = message.date_created
                            if m_date <= rpkg['published']:
                                continue
                            m_owner = message.owner
                            if ('verification still needed'
                                    in message.subject.lower()):
                                if (m_date.replace(tzinfo=None) < today
                                        - datetime.timedelta(16)):
                                    cls += ' removal'
                                    antique = True
                                    continue
                                if 'messages' in cls:
                                    cls = cls.replace('messages', '')
                                continue
                            try:
                                if (m_owner not in ignored_commenters and
                                        'messages' not in cls):
                                    cls += ' messages'
                                if m_owner not in ignored_commenters:
                                    hover_text = '%s\n%s\n' % (
                                        bug_title,
                                        datetime.datetime.strftime(
                                            m_date, '%Y-%m-%d'))
                                    hover_text += message.content.encode(
                                        'UTF-8') + ' - '
                                    hover_text += m_owner.name.encode(
                                        'UTF-8')
                                    antique = False
                            except ClientError as error:
                                # people who don't use lp anymore
                                if error.response['status'] == '410':
                                    continue
                cls += '"'

                print('<a href="%s/bugs/%d" '
                      'title="%s" %s>%d%s</a>' %
                      (lp_url, b, hover_text.replace('"', ''), cls, b,
                       '(hw)' if 'hw-specific' in t else ''))
            if antique and removable:
                proposed_antique.append((releases[release].name, pkg,
                                         [str(b) for b in rpkg['bugs']]))
            print(' </td>')
            print(' <td>%i</td></tr>' % age)
        print('</table>')

    #
    # superseded by -security
    #

    print('<h2><a name="superseded">Superseded by -security</a></h2>')

    print('<p>The following SRUs have been shadowed by a security update and '
          'need to be re-merged:</p>')

    for pkg in pkgsuperseded:
        print('''<h3>%s</h3>
<table>
  <tr><th>Package</th><th>-proposed</th><th>-security</th></tr>''' % pkg[0])
        pkgurl = '%s/ubuntu/+source/%s/' % (lp_url, pkg[1])
        (vprop, vsec) = (pkg[2]['proposed'], pkg[2]['security'])
        print('  <tr><th><a href="%s">%s</a></th> \
<td><a href="%s">%s</a></td> \
<td><a href="%s">%s</a></td></tr>' % (
            pkgurl, pkg[1], pkgurl + vprop, vprop, pkgurl + vsec, vsec))
        print('</table>')

    print('''\
<h2><a name="upload-queues">Upload queue status at a glance:</a></h2>
<table class="noborder">
  <tr>
    <th class="noborder">Proposed</th>
    <th class="noborder">Updates</th>
    <th class="noborder">Backports</th>
    <th class="noborder">Security</th>
  </tr>
  <tr>''')
    for p in ['Proposed', 'Updates', 'Backports', 'Security']:
        print('''    <td class="noborder"><table>
      <tr><th>Release</th><th>Unapproved</th><th>New</th></tr>''')
        for r in sorted(releases):
            new_url = (
                '%s/ubuntu/%s/+queue?queue_state=0' % (lp_url, r))
            unapproved_url = (
                '%s/ubuntu/%s/+queue?queue_state=1' % (lp_url, r))
            print('      <tr><td>%s</td><td><a href="%s">%s</a></td>'
                  '<td><a href="%s">%s</a></tr>' %
                  (r, unapproved_url,
                   get_queue_count('Unapproved', releases[r], p),
                   new_url, get_queue_count('New', releases[r], p)))
        print('    </table></td>')

    print('  </tr>')
    print('</table>')

    #
    # -proposed cleanup
    #

    print('<h2><a name="cleanup">-proposed cleanup</a></h2>')
    print('<p>The following packages have an equal or higher version in '
          '-updates and should be removed from -proposed:</p>')

    print('<pre>')
    for r in releases:
        for pkg in sorted(pkgcleanup):
            if pkg[0].startswith(r):
                print(
                    'remove-package -y -m "moved to -updates" -s %s-proposed '
                    '-e %s %s' % (r, pkg[2]['proposed'], pkg[1]))
    print('</pre>')

    print('<p>The following packages have an equal or higher version in the '
          'release pocket and should be removed from -proposed:</p>')

    print('<pre>')
    for r in releases:
        for pkg in sorted(pkgcleanup_release):
            if pkg[0].startswith(r):
                print(
                    'remove-package -y -m "moved to release" -s %s-proposed '
                    '-e %s %s' % (r, pkg[2]['proposed'], pkg[1]))
    print('</pre>')

    print('<p>The following packages have not had their SRU bugs verified in '
          '105 days and should be removed from -proposed:</p>')

    print('<pre>')
    for r in releases:
        for pkg in sorted(proposed_antique):
            if pkg[0].startswith(r):
                print('sru-remove -s %s -p %s %s' %
                      (r, pkg[1], ' '.join(pkg[2])))
    print('</pre>')

    print('''</body>
</html>''')


def cleanup(pkgrecord):
    '''Return True if -updates is newer than or equal to -proposed'''
    if 'updates' in pkgrecord:
        return apt_pkg.version_compare(
            pkgrecord['proposed'], pkgrecord['updates']) <= 0
    return False


def cleanup_release(pkgrecord):
    '''Return True if the release pocket is newer than or equal to -proposed'''
    if 'release' in pkgrecord:
        return apt_pkg.version_compare(
            pkgrecord['proposed'], pkgrecord['release']) <= 0
    return False


def security_superseded(pkgrecord):
    '''Return True if -security is newer than -proposed'''
    if 'security' in pkgrecord:
        return apt_pkg.version_compare(
            pkgrecord['proposed'], pkgrecord['security']) < 0
    return False
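
# Illustration (hypothetical version strings): apt_pkg.version_compare()
# implements dpkg version ordering and returns <0, 0 or >0, e.g.
#   apt_pkg.version_compare('1.0-1', '1.0-1ubuntu1') < 0
#   apt_pkg.version_compare('1:0.9', '2.0') > 0   # the epoch dominates
# so cleanup() above is True exactly when -updates has caught up with
# -proposed.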


def match_srubugs(changesfileurls):
    '''match between bugs with verification- tag and bugs in changesfile'''
    global lp
    bugs = {}

    for changesfileurl in changesfileurls:
        if changesfileurl is None:
            continue

        # Load changesfile
        logging.debug("Fetching Changelog: %s" % changesfileurl)
        changelog = urlopen(changesfileurl)
        bugnums = []
        for l in changelog:
            if l.startswith('Launchpad-Bugs-Fixed: '):
                bugnums = [int(b) for b in l.split()[1:]]
                break

        for b in bugnums:
            if b in bugs:
                continue
            try:
                bug = lp.bugs[b]
                bugs[b] = bug.tags
            except KeyError:
                logging.debug(
                    '%s: bug %d does not exist or is not accessible' %
                    (changesfileurl, b))
                broken_bugs.add(b)
                bugs[b] = []

    logging.debug("%d bugs found: %s" % (len(bugs), " ".join(map(str, bugs))))
    return bugs
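
# Illustration (hypothetical .changes content): a line such as
#   Launchpad-Bugs-Fixed: 1234567 2345678
# yields {1234567: [<tags>], 2345678: [<tags>]}, with [] recorded for any
# bug that is private, deleted or otherwise inaccessible.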


def lpinit():
    '''Init LP credentials, archive, distro list and sru-team members'''
    global lp, lp_url, ubuntu, archive, releases, ignored_commenters, series
    logging.debug("Initializing LP Credentials")
    lp = Launchpad.login_anonymously('sru-report', 'production',
                                     version="devel")
    lp_url = str(lp._root_uri).replace('api.', '').strip('devel/')
    ubuntu = lp.distributions['ubuntu']
    archive = ubuntu.getArchive(name='primary')
    for s in ubuntu.series:
        if s.status in ('Current Stable Release', 'Supported'):
            releases[s.name] = s
            series.append(s.name)
    logging.debug('Active releases found: %s' % ' '.join(releases))
    # create a list of people for whom comments will be ignored when
    # displaying the last comment in the report
    ignored_commenters = []
    ubuntu_sru = lp.people['ubuntu-sru']
    for participant in ubuntu_sru.participants:
        ignored_commenters.append(participant)
    ignored_commenters.append(lp.people['janitor'])
    ignored_commenters.append(
        lp.people['bug-watch-updater'])


def get_queue_count(search_status, release, search_pocket):
    '''Return number of results of given queue page URL'''
    return len(release.getPackageUploads(
        status=search_status, archive=archive, pocket=search_pocket))


def get_srus():
    '''Generate SRU map.

    Return a dictionary release -> packagename -> {
        'release': version,
        'proposed': version,
        'updates': version,
        'published': proposed_date,
        'bugs': [buglist],
        'changesfiles': [changes_urls],
        'build_problems': { arch -> (state, URL) },
        'autopkg_fails': [excuses]
        }
    '''
    srus = defaultdict(dict)

    for release in releases:
        #if releases[release].status not in (
        #        "Active Development", "Pre-release Freeze"):
        #    continue  # for quick testing
        pkg_excuses = []
        if release != 'lucid':
            excuses_page = excuses_url % release
            excuses = urlopen(excuses_page)
            # the excuses YAML is plain data, so the safe loader suffices
            excuses_data = yaml.safe_load(excuses)
            pkg_excuses = [excuse['source']
                           for excuse in excuses_data['sources']
                           if 'autopkgtest' in excuse['reason']
                           or 'block' in excuse['reason']]
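
            # Illustration (abridged, hypothetical package names): each
            # entry of update_excuses.yaml roughly looks like
            #   sources:
            #   - source: hello
            #     reason: [autopkgtest]
            #     policy_info:
            #       autopkgtest:
            #         hello/2.10-1: {amd64: [REGRESSION, <log-url>, ...]}
            # which is the shape the filter above and the loop below rely on.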

        for published in archive.getPublishedSources(
                pocket='Proposed', status='Published',
                distro_series=releases[release]):
            pkg = published.source_package_name

            srus[release][pkg] = current_versions(releases[release], pkg)
            srus[release][pkg]['bugs'] = match_srubugs(
                srus[release][pkg]['changesfiles'])

            srus[release][pkg]['build_problems'] = {}
            try:
                for build in published.getBuilds():
                    if not build.buildstate.startswith('Success'):
                        srus[release][pkg]['build_problems'][build.arch_tag] \
                            = (build.buildstate, build.web_link)
            except HTTPError as e:
                if e.response['status'] == '401':
                    continue
                else:
                    raise e

            srus[release][pkg]['autopkg_fails'] = []
            if pkg in pkg_excuses:
                for excuse in excuses_data['sources']:
                    if excuse['source'] != pkg:
                        continue
                    if 'autopkgtest' not in excuse['policy_info']:
                        continue
                    autopkgtest = excuse['policy_info']['autopkgtest']
                    for testpkg in autopkgtest:
                        for arch in autopkgtest[testpkg]:
                            if autopkgtest[testpkg][arch][0] != 'REGRESSION':
                                continue
                            link = autopkgtest[testpkg][arch][1]
                            testpkg_name = testpkg.split('/')[0]
                            if testpkg_name.startswith('lib'):
                                testpkg_idx = testpkg_name[:3]
                            else:
                                testpkg_idx = testpkg_name[0]
                            autopkg_url = (
                                'http://autopkgtest.ubuntu.com/packages/'
                                '%s/%s/%s/%s' % (testpkg_idx, testpkg_name,
                                                 release, arch))
                            srus[release][pkg]['autopkg_fails'].append(
                                'Regression in autopkgtest for '
                                '<a href="%s">%s (%s)</a>: '
                                '<a href="%s">test log</a>' %
                                (autopkg_url, testpkg_name, arch, link))
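
            # Illustration (hypothetical trigger): for testpkg
            # 'libfoo/1.0-1' regressing on amd64 in bionic, testpkg_idx is
            # 'lib' and the generated link points at
            #   http://autopkgtest.ubuntu.com/packages/lib/libfoo/bionic/amd64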

    return srus


def bugs_from_changes(change_url):
    '''Return (bug_list, cve_list) from a .changes file URL'''
    changelog = urlopen(change_url)

    refs = []
    bugs = set()
    cves = set()

    for l in changelog:
        if l.startswith('Launchpad-Bugs-Fixed: '):
            refs = [int(b) for b in l.split()[1:]]
            break

    for b in refs:
        try:
            lpbug = lp.bugs[b]
        except KeyError:
            logging.debug('%s: bug %d does not exist or is not accessible' % (
                change_url, b))
            broken_bugs.add(b)
            continue
        if lpbug.title.startswith('CVE-'):
            cves.add(b)
        else:
            bugs.add(b)

    return (sorted(bugs), sorted(cves))


def main():
    logging.basicConfig(level=DEBUGLEVEL,
                        format="%(asctime)s - %(levelname)s - %(message)s")
    lpinit()
    apt_pkg.init_system()

    srus = get_srus()

    print_report(srus)


if __name__ == "__main__":
    main()
@ -0,0 +1,419 @@
#!/usr/bin/python3

# Copyright (C) 2009, 2010, 2011, 2012, 2018 Canonical Ltd.
# Copyright (C) 2010 Scott Kitterman <scott@kitterman.com>
# Author: Martin Pitt <martin.pitt@canonical.com>
# Author: Brian Murray <brian.murray@canonical.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

'''Show and approve changes in an unapproved upload.

Generate a debdiff between current source package in a given release and the
version in the unapproved queue, and ask whether or not to approve the upload.
Approve upload and then comment on the SRU bugs regarding verification process.

USAGE:
    sru-review -b -s precise isc-dhcp
'''

import gzip
import optparse
import os
import re
import subprocess
import sys
import tempfile
try:
    from urllib.request import urlopen, urlretrieve
except ImportError:
    from urllib import urlopen, urlretrieve
import webbrowser

from contextlib import ExitStack
from launchpadlib.launchpad import Launchpad
from lazr.restfulclient.errors import ServerError
from sru_workflow import process_bug
from time import sleep


def parse_options():
    '''Parse command line arguments.

    Return (options, source_package) tuple.
    '''
    parser = optparse.OptionParser(
        usage='Usage: %prog [options] source_package')
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    parser.add_option(
        "-s", dest="release", default=default_release, metavar="RELEASE",
        help="release (default: %s)" % default_release)
    parser.add_option(
        "-p", dest="ppa", metavar="LP_USER/PPA_NAME",
        help="Check a PPA instead of the Ubuntu unapproved queue")
    parser.add_option(
        "-b", "--browser", dest="browser", action="store_true",
        default=True, help="Open Launchpad bugs in browser")
    parser.add_option(
        "--no-browser", dest="browser", action="store_false",
        default=True, help="Don't open Launchpad bugs in browser")
    parser.add_option(
        "-v", "--view", dest="view", action="store_true",
        default=True, help="View debdiff in pager")
    parser.add_option(
        "-e", "--version", dest="version",
        help="Look at version VERSION of a package in the queue",
        metavar="VERSION")
    parser.add_option(
        "--no-diff", dest="diff", action="store_false", default=True,
        help=(
            "Don't fetch debdiff, assuming that it has been reviewed "
            "separately (useful for copies)"))
    parser.add_option(
        "--no-diffstat", dest="diffstat", action="store_false", default=True,
        help="Do not attach diffstat to the debdiff")
    parser.add_option(
        "-q", "--queue", dest='queue',
        help='Use a specific queue instead of Unapproved',
        default="Unapproved",
        choices=("Unapproved", "New", "Rejected",
                 "unapproved", "new", "rejected"),
        metavar='QUEUE')

    (opts, args) = parser.parse_args()

    if len(args) != 1:
        parser.error('Need to specify one source package name')

    return (opts, args[0])


def parse_changes(changes_url):
    '''Parse .changes file.

    Return dictionary with interesting information: 'bugs' (list),
    'distribution'.
    '''
    info = {'bugs': []}
    for line in urlopen(changes_url):
        line = line.decode('utf-8')
        if line.startswith('Distribution:'):
            info['distribution'] = line.split()[1]
        if line.startswith('Launchpad-Bugs-Fixed:'):
            info['bugs'] = sorted(set(line.split()[1:]))
        if line.startswith('Version:'):
            info['version'] = line.split()[1]
    return info
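
# Illustration (hypothetical .changes fields): given
#   Distribution: bionic
#   Version: 2.10-1ubuntu0.1
#   Launchpad-Bugs-Fixed: 1234567 1234567 2345678
# parse_changes() returns {'distribution': 'bionic',
# 'version': '2.10-1ubuntu0.1', 'bugs': ['1234567', '2345678']};
# note the bug numbers stay strings and duplicates are collapsed.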


def from_queue(options, archive, sourcepkg, series, version=None):
    '''Get package_upload from LP and debdiff from queue page.

    Return (package_upload, changes URL, debdiff URL) tuple.
    '''
    queues = {'New': 0, 'Unapproved': 1, 'Rejected': 4}
    queue = options.queue.title()
    queue_url = ('https://launchpad.net/ubuntu/%s/+queue?'
                 'queue_state=%s&batch=300&queue_text=%s' %
                 (series.name, queues[queue], sourcepkg))
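    # Illustration (hypothetical query): for series bionic, the default
    # Unapproved queue and source "hello" this is
    #   https://launchpad.net/ubuntu/bionic/+queue?queue_state=1&batch=300&queue_text=hello
    # where queue_state follows the numeric mapping above.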
    uploads = [upload for upload in
               series.getPackageUploads(archive=archive, exact_match=True,
                                        name=sourcepkg, pocket='Proposed',
                                        status=queue, version=version)]
    if len(uploads) == 0:
        print('ERROR: Queue does not have an upload of this source.',
              file=sys.stderr)
        sys.exit(1)
    if len(uploads) > 1:
        print('ERROR: Queue has more than one upload of this source, '
              'please handle manually', file=sys.stderr)
        sys.exit(1)
    upload = uploads[0]

    if upload.contains_copy:
        archive = upload.copy_source_archive
        pubs = archive.getPublishedSources(
            exact_match=True, source_name=upload.package_name,
            version=upload.package_version)
        if pubs:
            changes_url = pubs[0].changesFileUrl()
        else:
            print("ERROR: Can't find source package %s %s in %s" %
                  (upload.package_name, upload.package_version,
                   archive.web_link),
                  file=sys.stderr)
            sys.exit(1)
    else:
        changes_url = upload.changes_file_url

    if options.diff:
        oops_re = re.compile('class="oopsid">(OOPS[a-zA-Z0-9-]+)<')
        # raw strings so that \d, \s and \. reach the regex engine intact
        debdiff_re = re.compile(
            r'href="(http://launchpadlibrarian.net/'
            r'\d+/%s_[^"_]+_[^_"]+\.diff\.gz)">\s*diff from' %
            re.escape(sourcepkg))

        queue_html = urlopen(queue_url).read().decode('utf-8')

        m = oops_re.search(queue_html)
        if m:
            print('ERROR: Launchpad failure:', m.group(1), file=sys.stderr)
            sys.exit(1)

        m = debdiff_re.search(queue_html)
        if not m:
            print('ERROR: queue does not have a debdiff', file=sys.stderr)
            sys.exit(1)
        debdiff_url = m.group(1)
        #print('debdiff URL:', debdiff_url, file=sys.stderr)
    else:
        debdiff_url = None

    return (upload, changes_url, debdiff_url)


def from_ppa(options, sourcepkg, user, ppaname):
    '''Get .changes and debdiff from a PPA.

    Return (changes URL, debdiff URL) pair.
    '''
    changes_re = re.compile(
        r'href="(https://launchpad.net/[^ "]+/%s_[^"]+_source.changes)"' %
        re.escape(sourcepkg))
    sourcepub_re = re.compile(
        r'href="(\+sourcepub/\d+/\+listing-archive-extra)"')
    #debdiff_re = re.compile(
    #    'href="(https://launchpad.net/.[^ "]+.diff.gz)">diff from')

    changes_url = None
    changes_sourcepub = None
    last_sourcepub = None

    for line in urlopen(ppa_url % (user, ppaname, options.release)):
        line = line.decode('utf-8')  # urlopen yields bytes on Python 3
        m = sourcepub_re.search(line)
        if m:
            last_sourcepub = m.group(1)
            continue
        m = changes_re.search(line)
        if m:
            # ensure that there's only one upload
            if changes_url:
                print('ERROR: PPA has more than one upload of this source, '
                      'please handle manually', file=sys.stderr)
                sys.exit(1)
            changes_url = m.group(1)
            assert changes_sourcepub is None, (
                'got two sourcepubs before .changes')
            changes_sourcepub = last_sourcepub

    #print('changes URL:', changes_url, file=sys.stderr)

    # the code below works, but the debdiffs generated by Launchpad are rather
    # useless, as they are against the final version, not what is in
    # -updates/-security; so disable

    #if options.diff:
    #    # now open the sourcepub and get the URL for the debdiff
    #    changes_sourcepub = changes_url.rsplit('+', 1)[0] + changes_sourcepub
    #    #print('sourcepub URL:', changes_sourcepub, file=sys.stderr)
    #    sourcepub_html = urlopen(changes_sourcepub).read()

    #    m = debdiff_re.search(sourcepub_html)
    #    if not m:
    #        print('ERROR: PPA does not have a debdiff', file=sys.stderr)
    #        sys.exit(1)
    #    debdiff_url = m.group(1)
    #    #print('debdiff URL:', debdiff_url, file=sys.stderr)
    #else:
    debdiff_url = None

    return (changes_url, debdiff_url)


def reject_comment(launchpad, num, package, release, reason):
    text = ('An upload of %s to %s-proposed has been rejected from the upload '
            'queue for the following reason: "%s".' %
            (package, release, reason))
    try:
        bug = launchpad.bugs[num]
        bug.newMessage(content=text,
                       subject='Proposed package upload rejected')
    except KeyError:
        print("LP: #%s may be private or a typo" % num)


if __name__ == '__main__':

    default_release = 'cosmic'
    ppa_url = ('https://launchpad.net/~%s/+archive/ubuntu/%s/+packages?'
               'field.series_filter=%s')

    (opts, sourcepkg) = parse_options()

    launchpad = Launchpad.login_with('sru-review', opts.launchpad_instance,
                                     version="devel")
    ubuntu = launchpad.distributions['ubuntu']
    series = ubuntu.getSeries(name_or_version=opts.release)
    archive = ubuntu.main_archive
    version = opts.version

    if opts.ppa:
        (user, ppaname) = opts.ppa.split('/', 1)
        (changes_url, debdiff_url) = from_ppa(opts, sourcepkg, user, ppaname)
    else:
        (upload, changes_url, debdiff_url) = from_queue(
            opts, archive, sourcepkg, series, version)

    # Check for existing version in proposed
    if series != ubuntu.current_series:
        existing = [
            pkg for pkg in archive.getPublishedSources(
                exact_match=True, distro_series=series, pocket='Proposed',
                source_name=sourcepkg, status='Published')]
        updates = [
            pkg for pkg in archive.getPublishedSources(
                exact_match=True, distro_series=series, pocket='Updates',
                source_name=sourcepkg, status='Published')]
        for pkg in existing:
            if pkg not in updates:
                changesfile_url = pkg.changesFileUrl()
                changes = parse_changes(changesfile_url)
                msg = ('''\
*******************************************************
*
* WARNING: %s already published in Proposed (%s)
* SRU Bug: LP: #%s
*
*******************************************************''' %
                       (sourcepkg, pkg.source_package_version,
                        ' LP: #'.join(changes['bugs'])))
                print(msg, file=sys.stderr)
                print('View the debdiff anyway? [yN] ', end="")
                sys.stdout.flush()
                response = sys.stdin.readline()
                if response.strip().lower().startswith('y'):
                    continue
                else:
                    print('Exiting')
                    sys.exit(1)

    debdiff = None
    if debdiff_url:
        with tempfile.NamedTemporaryFile() as f:
            debdiff = gzip.open(urlretrieve(debdiff_url, f.name)[0])
    elif opts.diff:
        print('No debdiff available')

    # parse changes and open bugs first since we are using subprocess
    # to view the diff
    changes = parse_changes(changes_url)

    changelog_bugs = True
    if not changes['bugs']:
        changelog_bugs = False
        print('There are no Launchpad bugs in the changelog!',
              file=sys.stderr)
        print('View the debdiff anyway? [yN] ', end="")
        sys.stdout.flush()
        response = sys.stdin.readline()
        if response.strip().lower().startswith('n'):
            print('Exiting')
            sys.exit(1)

    if opts.browser and changes['bugs']:
        for b in changes['bugs']:
            # use a full url so the right task is highlighted
            webbrowser.open('https://bugs.launchpad.net/ubuntu/+source/'
                            '%s/+bug/%s' % (sourcepkg, b))
            sleep(1)
    # also open the source package page to review version numbers
    if opts.browser:
        webbrowser.open('https://launchpad.net/ubuntu/+source/'
                        '%s' % (sourcepkg))

    if debdiff and opts.view:
        with ExitStack() as resources:
            tfile = resources.enter_context(tempfile.NamedTemporaryFile())
            for line in debdiff:
                tfile.write(line)
            tfile.flush()
            if opts.diffstat:
                combinedfile = resources.enter_context(
                    tempfile.NamedTemporaryFile())
                subprocess.call('(cat %s; echo; echo "--"; diffstat %s) >%s' %
                                (tfile.name, tfile.name, combinedfile.name),
                                shell=True)
                tfile = combinedfile
            ret = subprocess.call(["sensible-pager", tfile.name])

    if opts.ppa:
        print('\nTo copy from PPA to distribution, run:\n'
              '  copy-package -b --from=~%s/ubuntu/%s -s %s --to=ubuntu '
              '--to-suite %s-proposed -y %s\n' %
              (user, ppaname, opts.release, opts.release, sourcepkg),
              file=sys.stderr)
        sys.exit(0)

    if not changelog_bugs:
        print("The SRU has no Launchpad bugs referenced!\n")
    print("Accept the package into -proposed? [yN] ", end="")
    sys.stdout.flush()
    response = sys.stdin.readline()
    if response.strip().lower().startswith('y'):
        upload.acceptFromQueue()
        print("Accepted")
        if changes['bugs']:
            for bug_num in changes['bugs']:
                success = False
                for i in range(3):
                    try:
                        process_bug(
                            launchpad, upload.package_name,
                            upload.package_version,
                            upload.distroseries.name, bug_num)
                    except ServerError:
                        # In some cases LP can time-out, so retry a few times.
                        continue
                    else:
                        success = True
                        break
                if not success:
                    print('\nFailed communicating with Launchpad to process '
                          'one of the SRU bugs.  Please retry manually by '
                          'running:\nsru-accept -p %s -s %s -v %s %s' %
                          (upload.package_name, upload.distroseries.name,
                           upload.package_version, bug_num))

    else:
        print("REJECT the package from -proposed? [yN] ", end="")
        sys.stdout.flush()
        response = sys.stdin.readline()
        if response.strip().lower().startswith('y'):
            print("Please give a reason for the rejection.")
            print("Be advised it will appear in the bug.")
            sys.stdout.flush()
            reason = sys.stdin.readline().strip()
            if reason == '':
                print("A reason must be provided.")
                sys.exit(1)
            upload.rejectFromQueue(comment=reason)
            if changelog_bugs:
                for bug_num in changes['bugs']:
                    reject_comment(launchpad, bug_num,
                                   sourcepkg, opts.release,
                                   reason)
            print("Rejected")
        else:
            print("Not accepted")
            sys.exit(1)
@ -0,0 +1,140 @@
#!/usr/bin/python3

# Copyright (C) 2017 Canonical Ltd.
# Author: Brian Murray <brian.murray@canonical.com>
# Author: Lukasz 'sil2100' Zemczak <lukasz.zemczak@canonical.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""Portions of SRU-related code that are re-used by various SRU tools."""

import re


def process_bug(launchpad, sourcepkg, version, release, num):
    bug_target_re = re.compile(
        r'/ubuntu/(?:(?P<suite>[^/]+)/)?\+source/(?P<source>[^/]+)$')
    bug = launchpad.bugs[num]
    sourcepkg_match = False
    distroseries_match = False
    for task in bug.bug_tasks:
        # Ugly; we have to do URL-parsing to figure this out.
        # /ubuntu/+source/foo can be fed to launchpad.load() to get a
        # distribution_source_package, but /ubuntu/hardy/+source/foo can't.
        match = bug_target_re.search(task.target.self_link)
        if (not match or
                (sourcepkg and
                 match.group('source') != sourcepkg)):
            print("Ignoring task %s in bug %s" % (task.web_link, num))
            continue
        sourcepkg_match = True
        if match.group('suite') == release:
            if task.status in ("Invalid", "Won't Fix", "Fix Released"):
                print("Matching task was set to %s before accepting the SRU, "
                      "please double-check if this bug is still liable for "
                      "fixing.  Switching to Fix Committed." % task.status)
            task.status = "Fix Committed"
            task.lp_save()
            print("Success: task %s in bug %s" % (task.web_link, num))
            distroseries_match = True

    if sourcepkg_match and not distroseries_match:
        # add a release task
        lp_url = launchpad._root_uri
        series_task_url = '%subuntu/%s/+source/%s' % \
                          (lp_url, release, sourcepkg)
        sourcepkg_target = launchpad.load(series_task_url)
        new_task = bug.addTask(target=sourcepkg_target)
        new_task.status = "Fix Committed"
        new_task.lp_save()
        print("LP: #%s added task for %s %s" % (num, sourcepkg, release))
    if not sourcepkg_match:
        # warn that the bug has no source package tasks
        print("LP: #%s has no %s tasks!" % (num, sourcepkg))

    # XXX: it might be useful if the package signer/sponsor was
    # subscribed to the bug report
    bug.subscribe(person=launchpad.people['ubuntu-sru'])
    bug.subscribe(person=launchpad.people['sru-verification'])

    # there may be something else to sponsor so just warn
    subscribers = [sub.person for sub in bug.subscriptions]
    if launchpad.people['ubuntu-sponsors'] in subscribers:
        print('ubuntu-sponsors is still subscribed to LP: #%s. '
              'Is there anything left to sponsor?' % num)

    if not sourcepkg or 'linux' not in sourcepkg:
        # this dance is needed due to
        # https://bugs.launchpad.net/launchpadlib/+bug/254901
        btags = bug.tags
        for t in ('verification-failed', 'verification-failed-%s' % release,
                  'verification-done', 'verification-done-%s' % release):
            if t in btags:
                tags = btags
                tags.remove(t)
                bug.tags = tags

        if 'verification-needed' not in btags:
            btags.append('verification-needed')
            bug.tags = btags

        needed_tag = 'verification-needed-%s' % release
        if needed_tag not in btags:
            btags.append(needed_tag)
            bug.tags = btags

        bug.lp_save()
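
        # Illustration of the dance (hypothetical tag): launchpadlib only
        # notices wholesale assignment to bug.tags, so
        #   bug.tags.append('verification-needed')   # change would be lost
        # whereas
        #   tags = bug.tags
        #   tags.append('verification-needed')
        #   bug.tags = tags                          # change is recorded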

    text = ('Hello %s, or anyone else affected,\n\n' %
            re.split(r'[,\s]', bug.owner.display_name)[0])

    if sourcepkg:
        text += 'Accepted %s into ' % sourcepkg
    else:
        text += 'Accepted into '
    if sourcepkg and release:
        text += ('%s-proposed. The package will build now and be available at '
                 'https://launchpad.net/ubuntu/+source/%s/%s in a few hours, '
                 'and then in the -proposed repository.\n\n' % (
                     release, sourcepkg, version))
    else:
        text += ('%s-proposed. The package will build now and be available in '
                 'a few hours in the -proposed repository.\n\n' % (
                     release))

    text += ('Please help us by testing this new package. ')

    if sourcepkg == 'casper':
        text += ('To properly test it you will need to obtain and boot '
                 'a daily build of a Live CD for %s.' % (release))
    else:
        text += ('See https://wiki.ubuntu.com/Testing/EnableProposed for '
                 'documentation on how to enable and use -proposed.')

    text += (' Your feedback will aid us getting this update out to other '
             'Ubuntu users.\n\nIf this package fixes the bug for you, '
             'please add a comment to this bug, mentioning the version of the '
             'package you tested and change the tag from '
             'verification-needed-%s to verification-done-%s. '
             'If it does not fix the bug for you, please add a comment '
             'stating that, and change the tag to verification-failed-%s. '
             'In either case, without details of your testing we will not '
             'be able to proceed.\n\nFurther information regarding the '
             'verification process can be found at '
             'https://wiki.ubuntu.com/QATeam/PerformingSRUVerification . '
             'Thank you in advance for helping!\n\n'
             'N.B. The updated package will be released to -updates after '
             'the bug(s) fixed by this package have been verified and '
             'the package has been in -proposed for a minimum of 7 days.' %
             (release, release, release))
    bug.newMessage(content=text, subject='Please test proposed package')
@ -0,0 +1,67 @@
#!/usr/bin/python

# Copyright (C) 2011 Iain Lane
# Copyright (C) 2011 Stefano Rivera

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function, unicode_literals

from optparse import OptionParser

from launchpadlib.launchpad import Launchpad


def main():
    parser = OptionParser(usage="usage: %prog [options] output-file")
    parser.add_option(
        "-l", "--launchpad", dest="launchpad_instance", default="production")
    options, args = parser.parse_args()
    if len(args) < 1:
        parser.error("requires output file as argument")
    output = args[0]

    lp = Launchpad.login_with(
        'sync-blacklist', options.launchpad_instance, version='devel')
    ubuntu = lp.distributions['ubuntu']

    devel_series = ubuntu.current_series

    blacklisted_always = devel_series.getDifferencesTo(
        status="Blacklisted always")

    with open(output, "w") as output_file:
        print("""# THIS IS AN AUTOGENERATED FILE
# BLACKLISTED SYNCS ARE NOW STORED IN LAUNCHPAD
# SEE <some URL> FOR THE CODE WHICH GENERATES THIS FILE""", file=output_file)

        authors = {}

        for dsd in blacklisted_always:
            pkg = dsd.sourcepackagename
            comments = devel_series.getDifferenceComments(
                source_package_name=pkg)
            for comment in comments:
                if comment.comment_author_link not in authors:
                    authors[comment.comment_author_link] = (
                        comment.comment_author.name)
                comment_author = authors[comment.comment_author_link]
                comment_text = [c for c in comment.body_text.splitlines()
                                if c and not c.startswith("Ignored")]
                print("# %s: %s" % (comment_author, "\n#".join(comment_text)),
                      file=output_file)
            print("%s\n" % pkg, file=output_file)


if __name__ == '__main__':
    main()
@ -0,0 +1,65 @@
#! /bin/sh

# Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

DIST="${DIST:-disco}"

MADISON="$(rmadison -a source -s "$DIST" "$1")"
[ "$MADISON" ] || exit 1

VER="$(echo "$MADISON" | cut -d'|' -f2 | tr -d ' ' | sed -r 's/^[0-9]+://')"
SECTION="$(echo "$MADISON" | cut -d'|' -f3 | tr -d ' ')"
case $SECTION in
    $DIST)
        SECTION=main
        ;;
    $DIST/*)
        SECTION="${SECTION#$DIST/}"
        ;;
esac
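
# Illustration (hypothetical rmadison output): for "hello" in disco,
#   hello | 2.10-1 | disco/universe | source
# gives VER=2.10-1 and SECTION=universe; a suite column of plain "disco"
# means the package lives in main.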
case $1 in
    lib?*)
        POOLINDEX="$(echo "$1" | cut -c 1-4)"
        ;;
    *)
        POOLINDEX="$(echo "$1" | cut -c 1)"
        ;;
esac

NL='
'
OLDIFS="$IFS"
IFS="$NL"
wget -q -O- http://changelogs.ubuntu.com/changelogs/pool/$SECTION/$POOLINDEX/$1/${1}_$VER/changelog | while read -r line; do
    IFS="$OLDIFS"
    case $line in
        [A-Za-z0-9]*)
            # changelog entry header
            target="$(echo "$line" | cut -d' ' -f3)"
            target="${target%;}"
            target="${target%%-*}"
            case $target in
                warty|hoary|breezy|dapper|edgy|feisty|gutsy|hardy|intrepid|jaunty|karmic|lucid|maverick|natty|oneiric|precise|quantal|raring|saucy|trusty|utopic|vivid|wily|xenial|yakkety|zesty|artful|bionic|cosmic|disco|devel)
                    ;;
                *)
                    exit 0
                    ;;
            esac
            ;;
    esac
    echo "$line"
    IFS="$NL"
done
IFS="$OLDIFS"