Mirror of https://github.com/lubuntu-team/ppa-britney.git

Commit 363622addf (parent 64dd1fc405): Update ubuntu-archive-tools.
@@ -1 +1 @@
-1209 cjwatson@canonical.com-20190220074107-dvkdscxl2y2ww9j6
+1324 steve.langasek@canonical.com-20200516195933-ymljxy32gq2m13p9
Binary file not shown. (repeated for 14 binary files in this commit)
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python2.7

 # Check for override mismatches between architectures
 # Copyright (C) 2005, 2008, 2009, 2010, 2011, 2012 Canonical Ltd.
@@ -1,4 +1,4 @@
-#! /usr/bin/python
+#! /usr/bin/python2.7
 # Copyright 2009-2012 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3.

@@ -1,4 +1,4 @@
-#! /usr/bin/python
+#! /usr/bin/python2.7

 # Copyright (C) 2016 Canonical Ltd.
 # Author: Colin Watson <cjwatson@ubuntu.com>
@@ -1,4 +1,4 @@
-#! /usr/bin/python
+#! /usr/bin/python3

 # Copyright (C) 2012 Canonical Ltd.
 # Author: Colin Watson <cjwatson@ubuntu.com>
@@ -19,67 +19,74 @@

 from __future__ import print_function

-from optparse import OptionParser
+import argparse

 from launchpadlib.launchpad import Launchpad
+import lazr.restfulclient.errors


-def branch_livefses(options, owner):
-    for livefs in list(options.launchpad.livefses):
+def branch_livefses(args, owner):
+    for livefs in list(args.launchpad.livefses):
         if (livefs.owner == owner and
-                livefs.distro_series == options.source_series):
+                livefs.distro_series == args.source_series):
             print("Branching %s for %s ..." % (
-                livefs.web_link, options.dest_series.name))
-            new_livefs = options.launchpad.livefses.new(
-                owner=owner, distro_series=options.dest_series,
-                name=livefs.name, metadata=livefs.metadata)
+                livefs.web_link, args.dest_series.name))
+            try:
+                new_livefs = args.launchpad.livefses.getByName(
+                    owner=owner, distro_series=args.dest_series,
+                    name=livefs.name)
+            except lazr.restfulclient.errors.NotFound:
+                new_livefs = args.launchpad.livefses.new(
+                    owner=owner, distro_series=args.dest_series,
+                    name=livefs.name, metadata=livefs.metadata)
             new_livefs.require_virtualized = livefs.require_virtualized
             new_livefs.relative_build_score = livefs.relative_build_score
-            new_livefs.lp_save()
+            try:
+                new_livefs.lp_save()
+            except lazr.restfulclient.errors.Unauthorized:
+                print("Could not devirt ppa, ask Launchpad team for support.")
+                pass
             print("  %s" % new_livefs.web_link)


 def main():
-    parser = OptionParser(usage="usage: %prog [options] OWNER")
-    parser.add_option(
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
         "-l", "--launchpad", dest="launchpad_instance", default="production")
-    parser.add_option(
+    parser.add_argument(
         "-d", "--distribution", default="ubuntu", metavar="DISTRIBUTION",
         help="branch live filesystems for DISTRIBUTION")
-    parser.add_option(
+    parser.add_argument(
         "--source-series",
         help="source series (default: current stable release)")
-    parser.add_option(
+    parser.add_argument(
         "--dest-series",
         help="destination series (default: series in pre-release freeze)")
-    options, args = parser.parse_args()
-    if not args:
-        parser.error(
-            "You must specify an owner whose live filesystems you want to "
-            "copy.")
+    parser.add_argument("owner", help="owner of live filesystems to copy")
+    args = parser.parse_args()

-    options.launchpad = Launchpad.login_with(
-        "branch-livefses", options.launchpad_instance, version="devel")
+    args.launchpad = Launchpad.login_with(
+        "branch-livefses", args.launchpad_instance, version="devel")

-    distro = options.launchpad.distributions[options.distribution]
-    if options.source_series is None:
-        options.source_series = [
+    distro = args.launchpad.distributions[args.distribution]
+    if args.source_series is None:
+        args.source_series = [
             series for series in distro.series
             if series.status == "Current Stable Release"][0]
     else:
-        options.source_series = distro.getSeries(
-            name_or_version=options.source_series)
-    if options.dest_series is None:
-        options.dest_series = [
+        args.source_series = distro.getSeries(
+            name_or_version=args.source_series)
+    if args.dest_series is None:
+        args.dest_series = [
             series for series in distro.series
             if series.status == "Pre-release Freeze"][0]
     else:
-        options.dest_series = distro.getSeries(
-            name_or_version=options.dest_series)
+        args.dest_series = distro.getSeries(
+            name_or_version=args.dest_series)

-    owner = options.launchpad.people[args[0]]
+    owner = args.launchpad.people[args.owner]

-    branch_livefses(options, owner)
+    branch_livefses(args, owner)


 if __name__ == '__main__':
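The hunk above is a straight optparse-to-argparse port: each parser.add_option() becomes parser.add_argument(), and the manual check on leftover positional arguments is replaced by a declared positional that argparse enforces itself. A minimal standalone sketch of the same pattern (the argv values here are illustrative, not part of the commit):

import argparse

parser = argparse.ArgumentParser()
# Options translate one-for-one from parser.add_option().
parser.add_argument("-l", "--launchpad", dest="launchpad_instance",
                    default="production")
# Declaring "owner" as a positional replaces the old
# "if not args: parser.error(...)" check; argparse errors out for us.
parser.add_argument("owner", help="owner of live filesystems to copy")
args = parser.parse_args(["-l", "staging", "some-owner"])  # example argv
print(args.launchpad_instance, args.owner)  # -> staging some-owner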
@@ -1,4 +1,4 @@
-#! /usr/bin/python
+#! /usr/bin/python2.7

 # Copyright (C) 2012 Canonical Ltd.
 # Author: Colin Watson <cjwatson@ubuntu.com>
@@ -22,6 +22,7 @@ from __future__ import print_function
 from optparse import OptionParser
 import os
 import re
+import time
 import subprocess
 try:
     from urllib.parse import urlparse
@@ -134,8 +135,18 @@ def branch(options, collection):
         if "git.launchpad.net" in remote:
             lp_git_repo = options.launchpad.git_repositories.getByPath(
                 path=urlparse(remote).path.lstrip("/"))
-            lp_git_repo.default_branch = options.dest_series
-            lp_git_repo.lp_save()
+            new_ref = "refs/heads/%s" % options.dest_series
+            # Sometimes it takes LP a while to notice the new ref
+            for i in range(10):
+                if lp_git_repo.getRefByPath(path=new_ref):
+                    lp_git_repo.default_branch = new_ref
+                    lp_git_repo.lp_save()
+                    break
+                time.sleep(1)
+            else:
+                raise Exception(
+                    "Was unable to set default_branch of %s after "
+                    "multiple retries - proceed manually." % remote)
         else:
             raise Exception(
                 "Git remote URL must be on git.launchpad.net.")
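The retry loop added above relies on Python's for/else: the else clause runs only when the loop finishes without hitting break, i.e. only when every poll failed. A generic sketch of the idiom (names are illustrative):

import time

def wait_for(predicate, attempts=10, delay=1.0):
    # Poll until predicate() is truthy; the else clause fires only if
    # the loop ran out of attempts without a break.
    for _ in range(attempts):
        if predicate():
            break
        time.sleep(delay)
    else:
        raise RuntimeError("condition not met after %d attempts" % attempts)

wait_for(lambda: True)  # returns immediately; no exception raised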
@@ -1,4 +1,4 @@
-#! /usr/bin/python
+#! /usr/bin/python3

 # Copyright 2012 Canonical Ltd.
 # Author: Colin Watson <cjwatson@ubuntu.com>
@@ -1,4 +1,4 @@
-#! /usr/bin/python
+#! /usr/bin/python2.7

 # Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.

@@ -17,18 +17,16 @@
 from __future__ import print_function

 from collections import defaultdict
-import gzip
 import optparse
 import os
 import re
 import sys
-import tempfile

 import apt_pkg
+from utils import read_tag_file


 default_base = '/home/ubuntu-archive/mirror/ubuntu'
-default_suite = 'disco'
+default_suite = 'groovy'
 components = ('main', 'restricted', 'universe', 'multiverse')

 # Cut-down RE from deb822.PkgRelation.
@@ -65,34 +63,6 @@ def ports_arches(suite):
     return ('arm64', 'armhf', 'ppc64el', 's390x')


-def read_tag_file(path):
-    tmp = tempfile.NamedTemporaryFile(prefix='checkrdepends.', delete=False)
-    try:
-        compressed = gzip.open(path)
-        try:
-            tmp.write(compressed.read())
-        finally:
-            compressed.close()
-        tmp.close()
-        with open(tmp.name) as uncompressed:
-            tag_file = apt_pkg.TagFile(uncompressed)
-            prev_name = None
-            prev_stanza = None
-            for stanza in tag_file:
-                try:
-                    name = stanza['package']
-                except KeyError:
-                    continue
-                if name != prev_name and prev_stanza is not None:
-                    yield prev_stanza
-                prev_name = name
-                prev_stanza = stanza
-            if prev_stanza is not None:
-                yield prev_stanza
-    finally:
-        os.unlink(tmp.name)
-
-
 def read_sources(path):
     ret = {
         'binary': {},
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3

 # Sync a suite with a Seed list.
 # Copyright (C) 2004, 2005, 2009, 2010, 2011, 2012 Canonical Ltd.
@@ -22,8 +22,6 @@
 # listed for promotion at once
 # i.e. to allow 'change-override -S' usage

-from __future__ import print_function
-
 __metaclass__ = type

 import atexit
@@ -36,6 +34,7 @@ try:
 except ImportError:
     from cgi import escape
+import json
 from operator import attrgetter
 from optparse import OptionParser
 import os
 import shutil
@@ -43,10 +42,7 @@ import sys
 import tempfile
 from textwrap import dedent
 import time
-try:
-    from urllib.parse import quote_plus
-except ImportError:
-    from urllib import quote_plus
+from urllib.parse import quote_plus

 import apt_pkg
 from launchpadlib.launchpad import Launchpad
@@ -128,7 +124,7 @@ def read_current_source(options):
                 archive_source[pkg] = (
                     version, component.split("/")[0])

-    for pkg, (version, component) in archive_source.items():
+    for pkg, (version, component) in list(archive_source.items()):
         if component in options.components:
             current_source[pkg] = (version, component)

@@ -140,9 +136,7 @@ def read_current_binary(options):
         components_with_di.append('%s/debian-installer' % component)
     for suite in options.suites:
         for component in components_with_di:
-            for arch in [
-                    "i386", "amd64", "armhf", "arm64", "ppc64el",
-                    "s390x"]:
+            for arch in options.architectures:
                 binaries_path = "%s/dists/%s/%s/binary-%s/Packages.gz" % (
                     options.archive_dir, suite, component, arch)
                 for section in apt_pkg.TagFile(decompress_open(binaries_path)):
@@ -161,7 +155,7 @@ def read_current_binary(options):
                         archive_binary[pkg][0], version) < 0:
                     archive_binary[pkg] = (version, component, src)

-    for pkg, (version, component, src) in archive_binary.items():
+    for pkg, (version, component, src) in list(archive_binary.items()):
         if component in options.components:
             current_binary[pkg] = (version, component, src)

@@ -183,8 +177,7 @@ def read_germinate(options):
             # ideally supported+build-depends too, but Launchpad's
             # cron.germinate doesn't save this

-            for arch in ["i386", "amd64", "armhf", "arm64", "ppc64el",
-                         "s390x"]:
+            for arch in options.architectures:
                 for seed in seeds:
                     filename = "%s/%s_%s_%s_%s" % (
                         options.germinate_path, seed, flavour, options.suite, arch)
@@ -264,13 +257,13 @@ def find_signer(options, source):
         exact_match=True)
     if not publications:
         return('no publications found', '')
-    sorted_pubs = sorted([(ps.date_published, ps)
-                          for ps in publications
-                          if ps.date_published is not None], reverse=True)
+    sorted_pubs = sorted(
+        [ps for ps in publications if ps.date_published is not None],
+        key=attrgetter('date_published'), reverse=True)
     for pub in sorted_pubs:
-        if pub[1].package_signer:
-            signer = pub[1].package_signer.name
-            web_link = pub[1].package_signer.web_link
+        if pub.package_signer:
+            signer = pub.package_signer.name
+            web_link = pub.package_signer.web_link
             return(signer, web_link)
         else:
             signer = ''
@@ -343,8 +336,8 @@ def do_dot(why, fd, mir_bugs, suite):

     fd.write(
         'digraph "component-mismatches: movements to main/restricted" {\n')
-    for s, binwhy in why.iteritems():
-        for binary, why in binwhy.iteritems():
+    for s, binwhy in why.items():
+        for binary, why in binwhy.items():
             # ignore binaries from this source, and "rescued"
             if why in binwhy or why.startswith('Rescued'):
                 continue
@@ -472,7 +465,7 @@ def get_teams(options, source):

     if os.path.exists(options.package_team_mapping):
         with open(options.package_team_mapping) as ptm_file:
-            for team, packages in json.load(ptm_file).items():
+            for team, packages in list(json.load(ptm_file).items()):
                 if team == "unsubscribed":
                     continue
                 for package in packages:
@@ -575,7 +568,7 @@ def do_output(options,
     package_team_mapping = defaultdict(set)
     if os.path.exists(options.package_team_mapping):
         with open(options.package_team_mapping) as ptm_file:
-            for team, packages in json.load(ptm_file).items():
+            for team, packages in (json.load(ptm_file).items()):
                 if team == "unsubscribed":
                     continue
                 for package in packages:
@@ -741,7 +734,7 @@ def do_output(options,
     }
     results["source demotions"] += len(output)

-    for title, output_spec in all_output.items():
+    for title, output_spec in list(all_output.items()):
         source_and_binary = output_spec.get("source_and_binary", False)
         binary_only = output_spec.get("binary_only", False)
         print_section_text(
@@ -882,6 +875,9 @@ def main():
     else:
         options.suites = [options.suite]

+    options.series = options.distro.getSeries(name_or_version=options.suites[0])
+    options.architectures = [a.architecture_tag for a in options.series.architectures]
+
     if options.output_file is not None:
         sys.stdout = open('%s.new' % options.output_file, 'w')
     if options.html_output_file is not None:
@@ -889,6 +885,11 @@ def main():
     else:
         options.html_output = None

+    # Force encoding to UTF-8 even in non-UTF-8 locales.
+    import io
+    sys.stdout = io.TextIOWrapper(
+        sys.stdout.detach(), encoding="UTF-8", line_buffering=True)
+
     options.time = time.time()
     options.timestamp = time.strftime(
         '%a %b %e %H:%M:%S %Z %Y', time.gmtime(options.time))
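The last hunk re-wraps sys.stdout so the report is emitted as UTF-8 even under a C/POSIX locale. Standalone, the same trick looks like this (Python 3 assumed; detach() hands over the underlying binary buffer):

import io
import sys

sys.stdout = io.TextIOWrapper(
    sys.stdout.detach(), encoding="UTF-8", line_buffering=True)
print("UTF-8 safe in any locale")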
@@ -1,4 +1,4 @@
-#! /usr/bin/python
+#! /usr/bin/python3

 # Copyright (C) 2012 Canonical Ltd.
 # Author: Colin Watson <cjwatson@ubuntu.com>
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3

 # Copyright (C) 2011, 2012 Canonical Ltd.
 # Author: Martin Pitt <martin.pitt@canonical.com>
@@ -24,87 +24,429 @@ USAGE:

 from __future__ import print_function

 import argparse
+from contextlib import contextmanager
+from copy import copy
+from io import StringIO
 import sys
+import unittest

 from launchpadlib.launchpad import Launchpad

+from kernel_series import KernelSeries
+
+
+class TestBase(unittest.TestCase):
+    class FakeArgs:
+        def __init__(self, **kwargs):
+            self.testing = True
+            self.series = None
+            self.source = None
+            self.ppa2 = False
+            self.security = False
+            self.security2 = False
+            self.esm = False
+            self.fips = False
+            self.ibmgt = False
+            self.to_signing = False
+            self.from_signing = False
+            self.no_auto = False
+
+            self.update(**kwargs)
+
+        def update(self, **kwargs):
+            for (key, value) in kwargs.items():
+                setattr(self, key, value)
+            return self
+
+    @contextmanager
+    def capture(self):
+        new_out, new_err = StringIO(), StringIO()
+        old_out, old_err = sys.stdout, sys.stderr
+        try:
+            sys.stdout, sys.stderr = new_out, new_err
+            yield sys.stdout, sys.stderr
+        finally:
+            sys.stdout, sys.stderr = old_out, old_err
+
+    @classmethod
+    def setUpClass(cls):
+        data = """
+        defaults:
+            routing-table:
+                default:
+                    security-build:
+                    - ['ppa:canonical-kernel-security-team/ubuntu/ppa', 'Release' ]
+                    - ['ppa:canonical-kernel-security-team/ubuntu/ppa2', 'Release' ]
+                    build:
+                    - ['ppa:canonical-kernel-team/ubuntu/ppa', 'Release' ]
+                    proposed:
+                    - ['ubuntu', 'Proposed' ]
+                esm:
+                    security-build:
+                    - ['ppa:canonical-kernel-security-team/ubuntu/esm', 'Release']
+                    build:
+                    - ['ppa:canonical-kernel-esm/ubuntu/ppa', 'Release']
+                    signing:
+                    - ['ppa:canonical-signing/ubuntu/esm', 'Release']
+                    proposed:
+                    - ['ppa:canonical-kernel-esm/ubuntu/proposed', 'Release']
+        14.04:
+            codename: trusty
+            supported: true
+            esm: true
+            sources:
+                linux:
+                    packages:
+                        linux:
+                        linux-signed:
+                            type: signed
+                        linux-meta:
+                            type: meta
+        16.04:
+            codename: xenial
+            supported: true
+            sources:
+                linux-fips:
+                    routing:
+                        security-build:
+                        - ['ppa:canonical-kernel-security-team/ubuntu/ppa', 'Release']
+                        - ['ppa:canonical-kernel-security-team/ubuntu/ppa2', 'Release']
+                        build:
+                        - ['ppa:fips-cc-stig/ubuntu/fips-build', 'Release']
+                        signing:
+                        - ['ppa:canonical-signing/ubuntu/fips', 'Release']
+                        proposed:
+                        - ['ppa:ubuntu-advantage/ubuntu/fips-proposed', 'Release']
+                    packages:
+                        linux-fips:
+                        linux-meta-fips:
+                            type: meta
+                        linux-signed-fips:
+                            type: signed
+        18.04:
+            codename: bionic
+            supported: true
+            sources:
+                linux:
+                    packages:
+                        linux:
+                        linux-signed:
+                            type: signed
+                        linux-meta:
+                            type: meta
+                linux-ibm-gt:
+                    routing:
+                        security-build:
+                        - ['ppa:canonical-kernel-security-team/ubuntu/ppa', 'Release']
+                        - ['ppa:canonical-kernel-security-team/ubuntu/ppa2', 'Release']
+                        build:
+                        - ['ppa:ibm-cloud/ubuntu/build', 'Release']
+                        proposed:
+                        - ['ppa:ibm-cloud/ubuntu/proposed', 'Release']
+                    packages:
+                        linux-ibm-gt:
+                        linux-meta-ibm-gt:
+                            type: meta
+        """
+        cls.ks = KernelSeries(data=data)
+
+
+class TestRouting(TestBase):
+    def test_default(self):
+        expected = (['ppa:canonical-kernel-team/ubuntu/ppa', 'Release'], ['ubuntu', 'Proposed'], False)
+        result = routing(self.FakeArgs(series='bionic', source='linux'), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_security(self):
+        expected = (['ppa:canonical-kernel-security-team/ubuntu/ppa', 'Release'], ['ubuntu', 'Proposed'], True)
+        result = routing(self.FakeArgs(series='bionic', source='linux', security=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_security2(self):
+        expected = (['ppa:canonical-kernel-security-team/ubuntu/ppa2', 'Release'], ['ubuntu', 'Proposed'], True)
+        result = routing(self.FakeArgs(series='bionic', source='linux', security2=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_to_signing(self):
+        expected = (['ppa:canonical-kernel-team/ubuntu/ppa', 'Release'], None, False)
+        result = routing(self.FakeArgs(series='bionic', source='linux', to_signing=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_from_signing(self):
+        expected = (None, ['ubuntu', 'Proposed'], False)
+        result = routing(self.FakeArgs(series='bionic', source='linux', from_signing=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_esm(self):
+        expected = (['ppa:canonical-kernel-esm/ubuntu/ppa', 'Release'], ['ppa:canonical-signing/ubuntu/esm', 'Release'], False)
+        result = routing(self.FakeArgs(series='trusty', source='linux'), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_esm_security(self):
+        expected = (['ppa:canonical-kernel-security-team/ubuntu/esm', 'Release'], ['ppa:canonical-signing/ubuntu/esm', 'Release'], False)
+        result = routing(self.FakeArgs(series='trusty', source='linux', security=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_esm_security2(self):
+        with self.assertRaises(SystemExit), self.capture() as (out, err):
+            expected = (None, ['ppa:canonical-kernel-esm/ubuntu/proposed', 'Release'], False)
+            result = routing(self.FakeArgs(series='trusty', source='linux', security2=True), self.ks)
+            self.assertEqual(expected, result)
+
+    def test_esm_to_signing(self):
+        expected = (['ppa:canonical-kernel-esm/ubuntu/ppa', 'Release'], ['ppa:canonical-signing/ubuntu/esm', 'Release'], False)
+        result = routing(self.FakeArgs(series='trusty', source='linux', esm=True, to_signing=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_esm_from_signing(self):
+        expected = (['ppa:canonical-signing/ubuntu/esm', 'Release'], ['ppa:canonical-kernel-esm/ubuntu/proposed', 'Release'], False)
+        result = routing(self.FakeArgs(series='trusty', source='linux', esm=True, from_signing=True), self.ks)
+        self.assertEqual(expected, result)
+
+    # Autorouting will enable to_signing, the user will then want to switch us
+    # to from_signing in order to perform phase two copies. To ensure this is
+    # simple we make from_signing take presidence over to_signing. Test this
+    # is honoured correctly.
+    def test_esm_from_signing_override_to_signing(self):
+        expected = (['ppa:canonical-signing/ubuntu/esm', 'Release'], ['ppa:canonical-kernel-esm/ubuntu/proposed', 'Release'], False)
+        result = routing(self.FakeArgs(series='trusty', source='linux', esm=True, to_signing=True, from_signing=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_fips(self):
+        expected = (['ppa:fips-cc-stig/ubuntu/fips-build', 'Release'], ['ppa:canonical-signing/ubuntu/fips', 'Release'], False)
+        result = routing(self.FakeArgs(series='xenial', source='linux-fips'), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_fips_security(self):
+        expected = (['ppa:canonical-kernel-security-team/ubuntu/ppa', 'Release'], ['ppa:canonical-signing/ubuntu/fips', 'Release'], False)
+        result = routing(self.FakeArgs(series='xenial', source='linux-fips', security=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_fips_security2(self):
+        expected = (['ppa:canonical-kernel-security-team/ubuntu/ppa2', 'Release'], ['ppa:canonical-signing/ubuntu/fips', 'Release'], False)
+        result = routing(self.FakeArgs(series='xenial', source='linux-fips', security2=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_fips_to_signing(self):
+        expected = (['ppa:fips-cc-stig/ubuntu/fips-build', 'Release'], ['ppa:canonical-signing/ubuntu/fips', 'Release'], False)
+        result = routing(self.FakeArgs(series='xenial', source='linux-fips', to_signing=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_fips_from_signing(self):
+        expected = (['ppa:canonical-signing/ubuntu/fips', 'Release'], ['ppa:ubuntu-advantage/ubuntu/fips-proposed', 'Release'], False)
+        result = routing(self.FakeArgs(series='xenial', source='linux-fips', from_signing=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_ibmgt(self):
+        expected = (['ppa:ibm-cloud/ubuntu/build', 'Release'], ['ppa:ibm-cloud/ubuntu/proposed', 'Release'], False)
+        result = routing(self.FakeArgs(series='bionic', source='linux-ibm-gt'), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_ibmgt_security(self):
+        expected = (['ppa:canonical-kernel-security-team/ubuntu/ppa', 'Release'], ['ppa:ibm-cloud/ubuntu/proposed', 'Release'], False)
+        result = routing(self.FakeArgs(series='bionic', source='linux-ibm-gt', security=True), self.ks)
+        self.assertEqual(expected, result)
+
+    def test_ibmgt_security2(self):
+        expected = (['ppa:canonical-kernel-security-team/ubuntu/ppa2', 'Release'], ['ppa:ibm-cloud/ubuntu/proposed', 'Release'], False)
+        result = routing(self.FakeArgs(series='bionic', source='linux-ibm-gt', security2=True), self.ks)
+        self.assertEqual(expected, result)
+
+
+def routing(args, ks):
+    series_name = args.series
+    package_name = args.source
+
+    series = ks.lookup_series(codename=series_name)
+    if series is None:
+        print("ERROR: {} -- series unknown".format(series_name))
+        sys.exit(1)
+
+    package = None
+    package_signed = None
+    for source_srch in series.sources:
+        package_signed = None
+        for package_srch in source_srch.packages:
+            if package_srch.name == package_name:
+                package = package_srch
+            if (package_srch.name.startswith('linux-signed-') or
+                    package_srch.name == 'linux-signed'):
+                package_signed = package_srch
+        if package is not None:
+            break
+    if package is None:
+        print("ERROR: {}/{} -- package unknown".format(series_name, package_name))
+        sys.exit(1)
+
+    source = package.source
+    routing = source.routing
+    if routing is None:
+        print("ERROR: {}/{} -- package has no routing".format(series_name, package_name))
+        sys.exit(1)
+
+    build_archives = routing.lookup_destination('build')
+    security_archives = routing.lookup_destination('security-build')
+    proposed_archive = routing.lookup_destination('proposed', primary=True)
+    signing_archive = routing.lookup_destination('signing', primary=True)
+
+    if build_archives is None or len(build_archives) < 1:
+        print("ERROR: {}/{} -- package has no primary build archive".format(series_name, package_name))
+        sys.exit(1)
+    if args.ppa2 and (build_archives is None or len(build_archives) < 2):
+        print("ERROR: {}/{} -- package has no secondary build archive".format(series_name, package_name))
+        sys.exit(1)
+    if build_archives is None:
+        print("ERROR: {}/{} -- package has no build archive".format(series_name, package_name))
+        sys.exit(1)
+    if proposed_archive is None:
+        print("ERROR: {}/{} -- package has no proposed archive".format(series_name, package_name))
+        sys.exit(1)
+    if args.security and (security_archives is None or len(security_archives) < 1):
+        print("ERROR: {}/{} -- package has no primary security archive".format(series_name, package_name))
+        sys.exit(1)
+    if args.security2 and (security_archives is None or len(security_archives) < 2):
+        print("ERROR: {}/{} -- package has no secondary security archive".format(series_name, package_name))
+        sys.exit(1)
+
+    # Default route build -> proposed
+    if args.ppa2:
+        from_archive = build_archives[1]
+    else:
+        from_archive = build_archives[0]
+    to_archive = proposed_archive
+
+    unembargo = False
+
+    # Handle security routing.
+    if args.security:
+        from_archive = security_archives[0]
+    if args.security2:
+        from_archive = security_archives[1]
+
+    # Allow us to unembargo when releasing from security to ubuntu.
+    if (args.security or args.security2) and to_archive[0] == 'ubuntu':
+        unembargo = True
+
+    # Handle signing routing.
+    if args.from_signing:
+        from_archive = signing_archive
+    elif args.to_signing:
+        to_archive = signing_archive
+    # Automatically route to signing by default.
+    elif args.no_auto is False and signing_archive is not None and package_signed is not None:
+        to_archive = signing_archive
+
+    # Announce the routing if needed.
+    if (args.testing is False and (routing.name != 'default' or from_archive == signing_archive or to_archive == signing_archive)):
+        msg = "NOTE: directing copy using {} routes".format(routing.name)
+        if from_archive == signing_archive:
+            msg += ' from signing'
+        elif to_archive == signing_archive:
+            msg += ' to signing'
+        print(msg)
+
+    return (from_archive, to_archive, unembargo)
+
+
+# SELF-TESTS:
+if len(sys.argv) >= 2 and sys.argv[1] == '--self-test':
+    unittest.main(argv=[sys.argv[0]] + sys.argv[2:])
+    sys.exit(0)

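The block just above wires the unit tests into the script itself: passing --self-test as the first argument hands the remaining argv to unittest and exits. A minimal sketch of this self-test pattern (TestExample stands in for the TestRouting suite in the diff):

import sys
import unittest

class TestExample(unittest.TestCase):
    def test_truth(self):
        self.assertTrue(True)

if len(sys.argv) >= 2 and sys.argv[1] == '--self-test':
    # Strip '--self-test' so unittest sees only its own options.
    unittest.main(argv=[sys.argv[0]] + sys.argv[2:])
    sys.exit(0)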
 parser = argparse.ArgumentParser(description='Copy a proposed kernel to the apropriate archive pocket')
+parser.set_defaults(testing=False)
 parser.add_argument('--dry-run', action='store_true', help='Do everything but actually copy the package')
 parser.add_argument('--ppa2', action='store_true', help='Copy from the kernel build PPA2')
 parser.add_argument('--security', '-S', action='store_true', help='Copy from the kernel security PPA')
 parser.add_argument('--security2', action='store_true', help='Copy from the kernel security PPA2')
 parser.add_argument('--esm', '-E', action='store_true', help='Copy from the kernel ESM PPA and to the kernel ESM proposed PPA')
 parser.add_argument('--fips', action='store_true', help='Copy from the kernel FIPS PPA and to the kernel FIPS proposed PPA')
 parser.add_argument('--ibmgt', action='store_true', help='Copy from the kernel IBM-GT build PPA to the corresponding proposed PPA')
 parser.add_argument('--no-auto', action='store_true', help='Turn off automatic detection of ESM et al based on series')
 parser.add_argument('--to-signing', action='store_true', help='Copy from the kernel ESM/FIPS PPA to the ESM/FIPS signing PPA')
 parser.add_argument('--from-signing', action='store_true', help='Copy from the ESM/FIPS signing PPA to the ESM/FIPS proposed PPA')
 parser.add_argument('series', action='store', help='The series the source package is in')
-parser.add_argument('source', action='store', help='The source package name')
+parser.add_argument('source', action='store', nargs='+', help='The source package name')

 args = parser.parse_args()

-to = 'ubuntu'
-ppa_name = '~canonical-kernel-team/ubuntu/ppa'
-security = False
+if args.esm or args.fips or args.ibmgt:
+    print("NOTE: flags --esm, --fips, and --ibmgt are now deprecated")

-# If we are allowed to intuit destinations do so:
-# 1) precise is now destined for the ESM PPAs
-if not args.no_auto:
-    if args.series == 'precise' and not args.esm:
-        print("NOTE: directing copy from and to ESM for precise")
-        args.esm = True
-
-if args.esm:
-    ppa_name = '~canonical-kernel-esm/ubuntu/ppa'
-    to = '~canonical-kernel-esm/ubuntu/proposed'
-    to_pocket = 'release'
-if args.security:
-    ppa_name = '~canonical-kernel-security-team/ubuntu/ppa'
-    if not args.esm:
-        security = True
-    else:
-        ppa_name = '~canonical-kernel-security-team/ubuntu/esm'
-if args.security2:
-    ppa_name = '~canonical-kernel-security-team/ubuntu/ppa2'
-    if not args.esm:
-        security = True
-
-(release, pkg) = (args.series, args.source)
+release = args.series
+ks = KernelSeries()

 launchpad = Launchpad.login_with(
     'ubuntu-archive-tools', 'production', version='devel')
 ubuntu = launchpad.distributions['ubuntu']
 distro_series = ubuntu.getSeries(name_or_version=release)
-kernel_ppa = launchpad.archives.getByReference(
-    reference=ppa_name)

-# get current version in PPA for that series
-versions = kernel_ppa.getPublishedSources(
-    source_name=pkg, exact_match=True, status='Published', pocket='Release',
-    distro_series=distro_series)
-assert versions.total_size == 1
-version = versions[0].source_package_version
-
-include_binaries = (pkg not in ('debian-installer')
-                    and not pkg.startswith('linux-signed'))
-
-# Grab a reference to the 'to' archive and select a pocket.
-to_archive = launchpad.archives.getByReference(reference=to)
-if to == 'ubuntu':
-    to_pocket = 'proposed'
-else:
-    to_pocket = 'release'
-
-print("""Copying {}/{}:
- From: {} release
-   To: {} {}""".format(pkg, version, kernel_ppa, to_archive, to_pocket))
-
-to_archive.copyPackage(
-    from_archive=kernel_ppa, include_binaries=include_binaries,
-    source_name=pkg, to_series=release, to_pocket=to_pocket, version=version,
-    auto_approve=True, unembargo=security)
+copies = []
+for pkg in list(args.source):
+    # BODGE: routing should just take release/pkg.
+    args.source = pkg
+
+    (from_archive, to_archive, security) = routing(args, ks)
+    ##print("from_archive<{}> to_archive<{}>".format(from_archive, to_archive))
+    if from_archive is None:
+        print("ERROR: bad source PPA")
+        sys.exit(1)
+    if to_archive is None:
+        print("ERROR: bad destination")
+        sys.exit(1)
+
+    (from_reference, from_pocket) = from_archive
+    (to_reference, to_pocket) = to_archive
+
+    # Grab a reference to the 'from' archive.
+    from_archive = launchpad.archives.getByReference(
+        reference=from_reference)
+
+    # Grab a reference to the 'to' archive.
+    to_archive = launchpad.archives.getByReference(reference=to_reference)
+
+    # get current version in PPA for that series
+    versions = from_archive.getPublishedSources(
+        source_name=pkg, exact_match=True, status='Published', pocket=from_pocket,
+        distro_series=distro_series)
+    version = None
+    if versions.total_size == 1:
+        version = versions[0].source_package_version
+
+    include_binaries = (pkg not in ('debian-installer')
+                        and not pkg.startswith('linux-signed'))
+    if args.from_signing:
+        include_binaries = True
+
+    print("""Copying {}/{}:
+     From: {} {}
+       To: {} {}
+ Binaries: {}""".format(pkg, version, from_archive.reference, from_pocket, to_archive.reference, to_pocket, include_binaries))
+
+    if not version:
+        print("ERROR: no version to copy")
+        sys.exit(1)
+
+    copies.append({
+        'from_archive': from_archive,
+        'include_binaries': include_binaries,
+        'source_name': pkg,
+        'to_series': release,
+        'to_pocket': to_pocket,
+        'version': version,
+        'auto_approve': True,
+        'unembargo': security,
+        })

 if args.dry_run:
     print("Dry run; no packages copied.")
     sys.exit(0)

+for copy in copies:
+    # We found valid packages for each requested element, actually copy them.
+    to_archive.copyPackage(**copy)

 # TODO: adjust this script to use find-bin-overrides or rewrite
 # find-bin-overrides to use lpapi and use it here.
@@ -1,10 +1,10 @@
-#! /usr/bin/env python
+#! /usr/bin/env python2.7

 from __future__ import print_function

 import atexit
+import bz2
 from collections import namedtuple
 import gzip
 import optparse
 import os
 import re
@@ -13,12 +13,13 @@ import subprocess
 import tempfile
 try:
     from urllib.parse import unquote
-    from urllib.request import urlretrieve
 except ImportError:
-    from urllib import unquote, urlretrieve
+    from urllib import unquote

 import apt_pkg
 from launchpadlib.launchpad import Launchpad
+import lzma
+import requests


 # from dak, more or less
@@ -44,18 +45,42 @@ def ensure_tempdir():


 def decompress_open(tagfile):
     if tagfile.startswith('http:') or tagfile.startswith('ftp:'):
-        url = tagfile
-        tagfile = urlretrieve(url)[0]
-
-    if tagfile.endswith('.gz'):
-        ensure_tempdir()
-        decompressed = tempfile.mktemp(dir=tempdir)
-        fin = gzip.GzipFile(filename=tagfile)
-        with open(decompressed, 'wb') as fout:
-            fout.write(fin.read())
-        return open(decompressed, 'r')
+        response = requests.get(tagfile, stream=True)
+        if response.status_code == 404:
+            response.close()
+            tagfile = tagfile.replace('.xz', '.bz2')
+            response = requests.get(tagfile, stream=True)
+        response.raise_for_status()
+        if '.' in tagfile:
+            suffix = '.' + tagfile.rsplit('.', 1)[1]
+        else:
+            suffix = ''
+        fd, tagfile = tempfile.mkstemp(suffix=suffix, dir=tempdir)
+        with os.fdopen(fd, 'wb') as f:
+            f.write(response.raw.read())
+        response.close()
+    elif not os.path.exists(tagfile):
+        tagfile = tagfile.replace('.xz', '.bz2')

+    if tagfile.endswith('.xz'):
+        decompressor = lzma.LZMAFile
+    elif tagfile.endswith('.bz2'):
+        decompressor = bz2.BZ2File
     else:
-        return open(tagfile, 'r')
+        decompressor = None
+
+    if decompressor is not None:
+        fd, decompressed = tempfile.mkstemp(dir=tempdir)
+        dcf = decompressor(tagfile)
+        try:
+            with os.fdopen(fd, 'wb') as f:
+                f.write(dcf.read())
+        finally:
+            dcf.close()
+        return open(decompressed, 'rb')
+    else:
+        return open(tagfile, 'rb')


 Section = namedtuple("Section", ["version", "directory", "files"])
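The rewritten decompress_open() above fetches with requests and then picks a decompressor from the file suffix, falling back from .xz to .bz2 on a 404. The suffix-to-decompressor dispatch can be sketched in isolation like so (lzma and bz2 are standard library; the path handling is illustrative):

import bz2
import lzma

def opener_for(path):
    # LZMAFile and BZ2File share the same file-object constructor shape,
    # so the caller can treat whichever one is chosen uniformly.
    if path.endswith('.xz'):
        return lzma.LZMAFile(path)
    elif path.endswith('.bz2'):
        return bz2.BZ2File(path)
    return open(path, 'rb')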
@@ -84,7 +109,11 @@ def find_dsc(options, pkg, section):
     filenames = []
     for url in spph.sourceFileUrls():
         filename = os.path.join(outdir, unquote(os.path.basename(url)))
-        urlretrieve(url, filename)
+        response = requests.get(url, stream=True)
+        response.raise_for_status()
+        with open(filename, 'wb') as f:
+            f.write(response.raw.read())
+        response.close()
         filenames.append(filename)
     yield [s for s in filenames if s.endswith('.dsc')][0]
@@ -132,7 +161,8 @@ def descended_from(options, pkg, section1, section2):
     for dsc in find_dsc(options, pkg, section1):
         try:
             versions = get_changelog_versions(pkg, dsc, section1.version)
-        except BrokenSourcePackage as exception:
+        except BrokenSourcePackage as e:
+            exception = e
             continue
         return section1.version in versions
     raise exception
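This hunk works around a Python 3 semantic change: the name bound by "except ... as exception" is deleted when the except block ends, so re-raising it after the loop requires rebinding it to an outer variable. A tiny illustration of the same fix (names are illustrative):

def first_success(attempts):
    for attempt in attempts:
        try:
            return attempt()
        except ValueError as e:
            exception = e  # rebind: 'e' itself is unbound after this block
            continue
    raise exception

print(first_success([lambda: (_ for _ in ()).throw(ValueError), lambda: 42]))  # -> 42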
@@ -223,9 +253,9 @@ def main():

     for suite in suites:
         for component in 'main', 'restricted', 'universe', 'multiverse':
-            tagfile1 = '%s/dists/%s-security/%s/source/Sources.gz' % (
+            tagfile1 = '%s/dists/%s-security/%s/source/Sources.xz' % (
                 options.mirrors[0], suite, component)
-            tagfile2 = '%s/dists/%s-updates/%s/source/Sources.gz' % (
+            tagfile2 = '%s/dists/%s-updates/%s/source/Sources.xz' % (
                 options.mirrors[0], suite, component)
             name1 = '%s-security' % suite
             name2 = '%s-updates' % suite
@@ -20,7 +20,7 @@ set -e

 MIRROR=$HOME/mirror
 DISTRIBUTION="${DISTRIBUTION:-ubuntu}"
-RELEASE="${RELEASE:-disco}"
+RELEASE="${RELEASE:-groovy}"
 OUTDIR="${OUTDIR:-$HOME/public_html/NBS}"
 OUTFILE="${OUTFILE:-$HOME/public_html/nbs.html}"

@@ -42,6 +42,6 @@ checkrdepends -B "$MIRROR/$DISTRIBUTION" -s $RELEASE -b -d "$D" $CHECK

 rsync -a --delete "$D/" "$OUTDIR/"

-nbs-report -d "$DISTRIBUTION" -s "$RELEASE" --csv "${OUTFILE%.html}.csv" \
+nbs-report -B "$MIRROR/$DISTRIBUTION" -d "$DISTRIBUTION" -s "$RELEASE" --csv "${OUTFILE%.html}.csv" \
     "$OUTDIR/" >"$OUTFILE.new" && \
     mv "$OUTFILE.new" "$OUTFILE"
@@ -1,4 +1,4 @@
-#! /usr/bin/python
+#! /usr/bin/python3

 # Copyright (C) 2013 Canonical Ltd.
 # Author: Colin Watson <cjwatson@ubuntu.com>
@@ -1,4 +1,4 @@
-#! /usr/bin/python
+#! /usr/bin/python2.7

 # Copyright (C) 2014 Canonical Ltd.
 # Author: Colin Watson <cjwatson@ubuntu.com>
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
 # -*- coding: utf-8 -*-

 # Copyright (C) 2008, 2009, 2010, 2011, 2012 Canonical Ltd.
@@ -24,7 +24,7 @@

 from __future__ import print_function

-from optparse import OptionParser, SUPPRESS_HELP
+import argparse
 import sys

 import launchpadlib.errors
@@ -74,44 +74,45 @@ def multiline_input(prompt):
         buf.append(line)


-def get_archive(options, launchpad):
+def get_archive(args, launchpad):
     # We default to looking up by archive reference (ubuntu,
     # ubuntu/partner or ~owner/ubuntu/ppa).
-    if options.archive is not None:
-        archive = launchpad.archives.getByReference(reference=options.archive)
+    if args.archive is not None:
+        archive = launchpad.archives.getByReference(reference=args.archive)
         if archive is not None:
             return archive

     # But we also still support combining a distro name in -d and an
     # archive name or old PPA reference in -A (-d ubuntu,
     # -d ubuntu -A partner, or -d ubuntu -A owner/ppa).
-    distro = launchpad.distributions[options.distro]
-    if options.archive is None:
+    distro = launchpad.distributions[args.distro]
+    if args.archive is None:
         return distro.main_archive
     else:
-        if '/' in options.archive:
-            owner, ppa_name = options.archive.split('/')
+        if '/' in args.archive:
+            owner, ppa_name = args.archive.split('/')
             return launchpad.people[owner].getPPAByName(
                 distribution=distro, name=ppa_name)
         for archive in distro.archives:
-            if archive.name == options.archive:
+            if archive.name == args.archive:
                 return archive
-        raise AssertionError("No such archive in Ubuntu: %s" % options.archive)
+        raise AssertionError("No such archive in Ubuntu: %s" % args.archive)


-def get_source_components(options, launchpad, archive, source):
+def get_source_components(args, launchpad, archive, source):
     try:
         from debian import debian_support
     except ImportError:
         from debian_bundle import debian_support

-    args = {}
-    if options.series:
-        args['distro_series'] = options.series
+    kwargs = {}
+    if args.series:
+        kwargs['distro_series'] = args.series

     newest = {}
     for spph in archive.getPublishedSources(
-            source_name=source, exact_match=True, status='Published', **args):
+            source_name=source, exact_match=True, status='Published',
+            **kwargs):
         if not spph.distro_series.active:
             continue
         new_version = debian_support.Version(spph.source_package_version)
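The args-to-kwargs rename in get_source_components() avoids shadowing: once the function's first parameter is called args, reusing args as the local keyword dictionary would clobber it. A compact, self-contained illustration (names are illustrative):

from types import SimpleNamespace

def build_filters(args):
    # Renamed local dict: calling it 'args' would shadow the parameter.
    kwargs = {}
    if args.series:
        kwargs['distro_series'] = args.series
    return kwargs

print(build_filters(SimpleNamespace(series='focal')))  # {'distro_series': 'focal'}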
@@ -128,15 +129,15 @@ permission_names = dict(upload='Archive Upload Rights',
                         admin='Queue Administration Rights')


-def do_query(options):
+def do_query(args):
     """Query existing permissions and show on stdout."""
-    if options.archive.self_link == options.distro.main_archive_link:
-        archives = options.distro.archives
+    if args.archive.self_link == args.distro.main_archive_link:
+        archives = args.distro.archives
     else:
-        archives = [options.archive]
+        archives = [args.archive]

-    if options.person:
-        for person in options.person:
+    if args.person:
+        for person in args.person:
             if '@' in person:
                 lp_person = launchpad.people.getByEmail(email=person)
             else:
@@ -149,29 +150,29 @@ def do_query(args):
         for archive in archives:
             perms.extend(archive.getPermissionsForPerson(
                 person=lp_person))
-        if options.acl_type:
-            perm_name = permission_names[options.acl_type]
+        if args.acl_type:
+            perm_name = permission_names[args.acl_type]
             perms = [p for p in perms if p.permission == perm_name]
         print("== All rights for %s ==" % lp_person.name)
-        print_perms(perms, options.series)
+        print_perms(perms, args.series)

-    if options.component:
+    if args.component:
         perms = []
-        if not options.acl_type or options.acl_type == 'upload':
+        if not args.acl_type or args.acl_type == 'upload':
             for archive in archives:
                 perms.extend(archive.getUploadersForComponent(
-                    component_name=options.component))
-        if not options.acl_type or options.acl_type == 'admin':
+                    component_name=args.component))
+        if not args.acl_type or args.acl_type == 'admin':
             for archive in archives:
                 perms.extend(archive.getQueueAdminsForComponent(
-                    component_name=options.component))
-        print("== All rights for component '%s' ==" % options.component)
-        print_perms(perms, options.series)
+                    component_name=args.component))
+        print("== All rights for component '%s' ==" % args.component)
+        print_perms(perms, args.series)

-    if options.packageset:
-        for packageset in options.packageset:
+    if args.packageset:
+        for packageset in args.packageset:
             lp_set = launchpad.packagesets.getByName(
-                name=packageset, distroseries=options.series)
+                name=packageset, distroseries=args.series)

             perms = []
             for archive in archives:
@@ -179,27 +180,27 @@ def do_query(args):
                     packageset=lp_set))
             print(("== All uploaders for package set '%s' in '%s' "
                    "(owned by '%s') ==" %
-                   (packageset, options.series.name,
+                   (packageset, args.series.name,
                     lp_set.owner.display_name)))
-            print_perms(perms, options.series)
+            print_perms(perms, args.series)

             sources = sorted(lp_set.getSourcesIncluded(direct_inclusion=True))
             if sources:
                 print()
                 print("== All source packages in package set '%s' "
-                      "in '%s' ==" % (packageset, options.series.name))
+                      "in '%s' ==" % (packageset, args.series.name))
                 for source in sources:
                     print(source)
             child_sets = list(lp_set.setsIncluded(direct_inclusion=True))
             if child_sets:
                 print()
                 print("== All package sets in package set '%s' in '%s' ==" %
-                      (packageset, options.series.name))
+                      (packageset, args.series.name))
                 for child_set in child_sets:
                     print(child_set.name)

-    if options.source:
-        for source in options.source:
+    if args.source:
+        for source in args.source:
             perms = []
             perms_set = []
             for archive in archives:
@@ -208,63 +209,62 @@ def do_query(args):
                 perms_set.extend(archive.getPackagesetsForSource(
                     sourcepackagename=source))
             print("== All uploaders for package '%s' ==" % source)
-            print_perms(perms, options.series)
-            print_perms(perms_set, options.series)
+            print_perms(perms, args.series)
+            print_perms(perms_set, args.series)
             for archive in archives:
                 for series, component in get_source_components(
-                        options, launchpad, archive, source):
+                        args, launchpad, archive, source):
                     perms_component = archive.getUploadersForComponent(
                         component_name=component)
                     print_perms(perms_component, series=series)

-    if options.pocket:
+    if args.pocket:
         perms = []
-        if not options.acl_type or options.acl_type == 'upload':
+        if not args.acl_type or args.acl_type == 'upload':
             for archive in archives:
-                perms.extend(archive.getUploadersForPocket(
-                    pocket=options.pocket))
-        if not options.acl_type or options.acl_type == 'admin':
+                perms.extend(archive.getUploadersForPocket(pocket=args.pocket))
+        if not args.acl_type or args.acl_type == 'admin':
             for archive in archives:
                 perms.extend(archive.getQueueAdminsForPocket(
-                    pocket=options.pocket))
-        print("== All rights for pocket '%s' ==" % options.pocket)
-        print_perms(perms, options.series)
+                    pocket=args.pocket))
+        print("== All rights for pocket '%s' ==" % args.pocket)
+        print_perms(perms, args.series)

-    if (not options.person and not options.component and
-            not options.packageset and not options.source and
-            not options.pocket):
+    if (not args.person and not args.component and
+            not args.packageset and not args.source and
+            not args.pocket):
         perms = []
         for archive in archives:
             perms.extend(archive.getAllPermissions())
-        if options.acl_type:
-            perm_name = permission_names[options.acl_type]
+        if args.acl_type:
+            perm_name = permission_names[args.acl_type]
             perms = [p for p in perms if p.permission == perm_name]
         print("== All rights ==")
-        print_perms(perms, options.series)
+        print_perms(perms, args.series)


-def validate_add_delete_options(options, requires_person=True):
-    if options.packageset and options.source:
+def validate_add_delete_options(args, requires_person=True):
+    if args.packageset and args.source:
         # Special options to manage package sets, bodged into this tool
         # since they aren't entirely inconvenient here.
-        if options.component or options.person:
+        if args.component or args.person:
             print("-P <packageset> -s <source> cannot be used with a "
                   "component or person as well")
             return False
         return True

-    if requires_person and not options.person:
+    if requires_person and not args.person:
         print("You must specify at least one person to (de-)authorise.")
         return False

     count = 0
-    if options.component:
+    if args.component:
         count += 1
-    if options.packageset:
+    if args.packageset:
         count += 1
-    if options.source:
+    if args.source:
         count += 1
-    if options.pocket:
+    if args.pocket:
         count += 1
     if count > 1:
         print("You can only specify one of package set, source, component, "
@@ -279,94 +279,94 @@ def validate_add_delete_options(args, requires_person=True):
     return True


-def do_add(options):
+def do_add(args):
     """Add a new permission."""
-    if not validate_add_delete_options(options):
+    if not validate_add_delete_options(args):
         return False

-    if options.packageset and options.source:
-        for packageset in options.packageset:
+    if args.packageset and args.source:
+        for packageset in args.packageset:
             lp_set = launchpad.packagesets.getByName(
-                name=packageset, distroseries=options.series)
-            lp_set.addSources(names=options.source)
+                name=packageset, distroseries=args.series)
+            lp_set.addSources(names=args.source)
             print("Added:")
-            for source in options.source:
+            for source in args.source:
                 print(source)
         return

-    people = [launchpad.people[person] for person in options.person]
+    people = [launchpad.people[person] for person in args.person]

-    if options.source:
-        for source in options.source:
+    if args.source:
+        for source in args.source:
             for person in people:
-                perm = options.archive.newPackageUploader(
+                perm = args.archive.newPackageUploader(
                     person=person, source_package_name=source)
                 print("Added:")
                 print_perms([perm])
         return

-    if options.packageset:
-        for packageset in options.packageset:
+    if args.packageset:
+        for packageset in args.packageset:
             lp_set = launchpad.packagesets.getByName(
-                name=packageset, distroseries=options.series)
+                name=packageset, distroseries=args.series)
             for person in people:
-                perm = options.archive.newPackagesetUploader(
+                perm = args.archive.newPackagesetUploader(
                     person=person, packageset=lp_set)
                 print("Added:")
                 print_perms([perm])
         return

-    if options.component:
+    if args.component:
         for person in people:
-            if not options.acl_type or options.acl_type == 'upload':
-                perm = options.archive.newComponentUploader(
-                    person=person, component_name=options.component)
+            if not args.acl_type or args.acl_type == 'upload':
+                perm = args.archive.newComponentUploader(
+                    person=person, component_name=args.component)
             else:
-                perm = options.archive.newQueueAdmin(
-                    person=person, component_name=options.component)
+                perm = args.archive.newQueueAdmin(
+                    person=person, component_name=args.component)
             print("Added:")
             print_perms([perm])
         return

-    if options.pocket:
+    if args.pocket:
         admin_kwargs = {}
-        if options.series:
-            admin_kwargs["distroseries"] = options.series
+        if args.series:
+            admin_kwargs["distroseries"] = args.series
         for person in people:
-            if not options.acl_type or options.acl_type == 'upload':
-                perm = options.archive.newPocketUploader(
-                    person=person, pocket=options.pocket)
+            if not args.acl_type or args.acl_type == 'upload':
+                perm = args.archive.newPocketUploader(
+                    person=person, pocket=args.pocket)
             else:
-                perm = options.archive.newPocketQueueAdmin(
-                    person=person, pocket=options.pocket, **admin_kwargs)
+                perm = args.archive.newPocketQueueAdmin(
+                    person=person, pocket=args.pocket, **admin_kwargs)
             print("Added:")
             print_perms([perm])
         return


-def do_delete(options):
+def do_delete(args):
     """Delete a permission."""
     # We kind of hacked packageset management into here.
     # Deleting packagesets doesn't require a person...
-    requires_person = not (options.packageset and not options.source)
-    if not validate_add_delete_options(options, requires_person):
+    requires_person = not (args.packageset and not args.source)
+    if not validate_add_delete_options(args, requires_person):
         return False

-    if options.packageset and options.source:
-        for packageset in options.packageset:
+    if args.packageset and args.source:
+        for packageset in args.packageset:
             lp_set = launchpad.packagesets.getByName(
-                name=packageset, distroseries=options.series)
-            lp_set.removeSources(names=options.source)
+                name=packageset, distroseries=args.series)
+            lp_set.removeSources(names=args.source)
             print("Deleted:")
-            for source in options.source:
+            for source in args.source:
                 print(source)
         return

-    if options.packageset and not options.person:
-        for packageset in options.packageset:
+    if args.packageset and not args.person:
+        for packageset in args.packageset:
             lp_set = launchpad.packagesets.getByName(
-                name=packageset, distroseries=options.series)
-            uploaders = options.archive.getUploadersForPackageset(
+                name=packageset, distroseries=args.series)
+            uploaders = args.archive.getUploadersForPackageset(
                 direct_permissions=True, packageset=lp_set)
             if len(uploaders) > 0:
                 print("Cannot delete packageset with defined uploaders")
@@ -386,82 +386,82 @@ def do_delete(args):
             ack = input("Remove? (y/N): ")
             if ack.lower() == 'y':
                 lp_set.lp_delete()
-                print("Deleted %s/%s" % (lp_set.name, options.series.name))
+                print("Deleted %s/%s" % (lp_set.name, args.series.name))
         return

-    lp_people = [launchpad.people[person] for person in options.person]
+    lp_people = [launchpad.people[person] for person in args.person]

-    if options.source:
-        for source in options.source:
+    if args.source:
+        for source in args.source:
             for lp_person in lp_people:
                 try:
-                    options.archive.deletePackageUploader(
+                    args.archive.deletePackageUploader(
                         person=lp_person, source_package_name=source)
                     print("Deleted %s/%s" % (lp_person.name, source))
                 except Exception:
                     print("Failed to delete %s/%s" % (lp_person.name, source))
         return

-    if options.packageset:
-        for packageset in options.packageset:
+    if args.packageset:
+        for packageset in args.packageset:
             lp_set = launchpad.packagesets.getByName(
-                name=packageset, distroseries=options.series)
+                name=packageset, distroseries=args.series)
             for lp_person in lp_people:
-                options.archive.deletePackagesetUploader(
+                args.archive.deletePackagesetUploader(
                     person=lp_person, packageset=lp_set)
                 print("Deleted %s/%s/%s" % (lp_person.name, packageset,
-                                            options.series.name))
+                                            args.series.name))
         return

-    if options.component:
+    if args.component:
         for lp_person in lp_people:
-            if not options.acl_type or options.acl_type == 'upload':
-                options.archive.deleteComponentUploader(
-                    person=lp_person, component_name=options.component)
-                print("Deleted %s/%s" % (lp_person.name, options.component))
+            if not args.acl_type or args.acl_type == 'upload':
+                args.archive.deleteComponentUploader(
+                    person=lp_person, component_name=args.component)
+                print("Deleted %s/%s" % (lp_person.name, args.component))
             else:
-                options.archive.deleteQueueAdmin(
-                    person=lp_person, component_name=options.component)
+                args.archive.deleteQueueAdmin(
+                    person=lp_person, component_name=args.component)
                 print("Deleted %s/%s (admin)" % (lp_person.name,
-                                                 options.component))
+                                                 args.component))
         return

-    if options.pocket:
+    if args.pocket:
         admin_kwargs = {}
-        if options.series:
-            admin_kwargs["distroseries"] = options.series
+        if args.series:
+            admin_kwargs["distroseries"] = args.series
         for lp_person in lp_people:
-            if not options.acl_type or options.acl_type == 'upload':
-                options.archive.deletePocketUploader(
-                    person=lp_person, pocket=options.pocket)
-                print("Deleted %s/%s" % (lp_person.name, options.pocket))
+            if not args.acl_type or args.acl_type == 'upload':
+                args.archive.deletePocketUploader(
+                    person=lp_person, pocket=args.pocket)
+                print("Deleted %s/%s" % (lp_person.name, args.pocket))
             else:
-                options.archive.deletePocketQueueAdmin(
-                    person=lp_person, pocket=options.pocket, **admin_kwargs)
-                if options.series:
+                args.archive.deletePocketQueueAdmin(
+                    person=lp_person, pocket=args.pocket, **admin_kwargs)
+                if args.series:
                     print(
                         "Deleted %s/%s/%s (admin)" %
-                        (lp_person.name, options.pocket, options.series.name))
+                        (lp_person.name, args.pocket, args.series.name))
                 else:
                     print("Deleted %s/%s (admin)" %
-                          (lp_person.name, options.pocket))
+                          (lp_person.name, args.pocket))
         return


-def do_create(options):
-    if not options.packageset:
+def do_create(args):
+    if not args.packageset:
         print("You can only create a package set, not something else.")
         return False

-    if not options.person or len(options.person) != 1:
+    if not args.person or len(args.person) != 1:
         print("You must specify exactly one person to own the new package "
               "set.")
         return False

-    distro_series = options.series or options.distro.current_series
-    lp_person = launchpad.people[options.person[0]]
+    distro_series = args.series or args.distro.current_series
+    lp_person = launchpad.people[args.person[0]]

-    for packageset in options.packageset:
+    for packageset in args.packageset:
         try:
             if launchpad.packagesets.getByName(
                     name=packageset, distroseries=distro_series):
@ -477,22 +477,22 @@ def do_create(options):
|
||||
print(ps)
|
||||
|
||||
|
||||
def do_modify(options):
|
||||
if not options.packageset:
|
||||
def do_modify(args):
|
||||
if not args.packageset:
|
||||
print("You can only modify a package set, not something else.")
|
||||
return False
|
||||
|
||||
if options.person and len(options.person) > 1:
|
||||
if args.person and len(args.person) > 1:
|
||||
print("You can only specify one person as the new packageset owner.")
|
||||
return False
|
||||
|
||||
distro_series = options.series or options.distro.current_series
|
||||
distro_series = args.series or args.distro.current_series
|
||||
|
||||
lp_person = None
|
||||
if options.person:
|
||||
lp_person = launchpad.people[options.person[0]]
|
||||
if args.person:
|
||||
lp_person = launchpad.people[args.person[0]]
|
||||
|
||||
for packageset in options.packageset:
|
||||
for packageset in args.packageset:
|
||||
lp_set = launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=distro_series)
|
||||
if lp_person:
|
||||
@ -521,22 +521,22 @@ def do_modify(options):
|
||||
continue
|
||||
|
||||
|
||||
def do_copy(options):
|
||||
if options.archive.self_link == options.distro.main_archive_link:
|
||||
archives = options.distro.archives
|
||||
def do_copy(args):
|
||||
if args.archive.self_link == args.distro.main_archive_link:
|
||||
archives = args.distro.archives
|
||||
else:
|
||||
archives = [options.archive]
|
||||
archives = [args.archive]
|
||||
|
||||
if not options.packageset:
|
||||
if not args.packageset:
|
||||
print("You can only copy a package set, not something else.")
|
||||
return False
|
||||
|
||||
distro_series = options.series or options.distro.current_series
|
||||
distro_series = args.series or args.distro.current_series
|
||||
|
||||
dst = input("Name of the destination series: ")
|
||||
dst_series = options.distro.getSeries(name_or_version=dst)
|
||||
dst_series = args.distro.getSeries(name_or_version=dst)
|
||||
|
||||
for packageset in options.packageset:
|
||||
for packageset in args.packageset:
|
||||
src_pkgset = launchpad.packagesets.getByName(
|
||||
name=packageset, distroseries=distro_series)
|
||||
if not src_pkgset:
|
||||
@ -560,27 +560,27 @@ def do_copy(options):
|
||||
person=perm.person_link, packageset=ps)
|
||||
|
||||
|
||||
def do_check(options):
|
||||
def do_check(args):
|
||||
"""Check if a person can upload a package."""
|
||||
if not options.person:
|
||||
if not args.person:
|
||||
print("A person needs to be specified to check.")
|
||||
return False
|
||||
if not options.source:
|
||||
if not args.source:
|
||||
print("A source package needs to be specified to check.")
|
||||
return False
|
||||
|
||||
people = [launchpad.people[person] for person in options.person]
|
||||
distro_series = options.series or options.distro.current_series
|
||||
people = [launchpad.people[person] for person in args.person]
|
||||
distro_series = args.series or args.distro.current_series
|
||||
|
||||
if options.pocket:
|
||||
pocket = options.pocket
|
||||
if args.pocket:
|
||||
pocket = args.pocket
|
||||
else:
|
||||
pocket = 'Release'
|
||||
|
||||
for person in people:
|
||||
for srcpkg in options.source:
|
||||
for srcpkg in args.source:
|
||||
try:
|
||||
spph = options.archive.getPublishedSources(
|
||||
spph = args.archive.getPublishedSources(
|
||||
distro_series=distro_series,
|
||||
exact_match=True,
|
||||
pocket=pocket,
|
||||
@ -588,10 +588,10 @@ def do_check(options):
|
||||
status='Published',
|
||||
)[0]
|
||||
except IndexError:
|
||||
if not options.pocket:
|
||||
if not args.pocket:
|
||||
raise
|
||||
# Not yet in options.pocket, but maybe in Release?
|
||||
spph = options.archive.getPublishedSources(
|
||||
# Not yet in args.pocket, but maybe in Release?
|
||||
spph = args.archive.getPublishedSources(
|
||||
distro_series=distro_series,
|
||||
exact_match=True,
|
||||
pocket='Release',
|
||||
@ -599,7 +599,7 @@ def do_check(options):
|
||||
status='Published',
|
||||
)[0]
|
||||
try:
|
||||
options.archive.checkUpload(
|
||||
args.archive.checkUpload(
|
||||
component=spph.component_name,
|
||||
distroseries=distro_series,
|
||||
person=person,
|
||||
@ -619,99 +619,98 @@ def do_check(options):
|
||||
print(e.content)
|
||||
|
||||
|
||||
def main(options, action):
|
||||
def main(args):
|
||||
|
||||
if action == "query":
|
||||
do_query(options)
|
||||
elif action == "add":
|
||||
do_add(options)
|
||||
elif action in ("delete", "remove"):
|
||||
do_delete(options)
|
||||
elif action == "create":
|
||||
do_create(options)
|
||||
elif action == "modify":
|
||||
do_modify(options)
|
||||
elif action == "copy":
|
||||
do_copy(options)
|
||||
elif action == "check":
|
||||
do_check(options)
|
||||
if args.action == "query":
|
||||
do_query(args)
|
||||
elif args.action == "add":
|
||||
do_add(args)
|
||||
elif args.action in ("delete", "remove"):
|
||||
do_delete(args)
|
||||
elif args.action == "create":
|
||||
do_create(args)
|
||||
elif args.action == "modify":
|
||||
do_modify(args)
|
||||
elif args.action == "copy":
|
||||
do_copy(args)
|
||||
elif args.action == "check":
|
||||
do_check(args)
|
||||
else:
|
||||
raise AssertionError("Invalid action %s" % action)
|
||||
raise AssertionError("Invalid action %s" % args.action)
|
||||
|
||||
return
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = OptionParser(
|
||||
usage="usage: %prog [options] "
|
||||
parser = argparse.ArgumentParser(
|
||||
usage="%(prog)s [options] "
|
||||
"query|add|delete|create|modify|copy|check",
|
||||
epilog=lputils.ARCHIVE_REFERENCE_DESCRIPTION)
|
||||
|
||||
parser.add_option(
|
||||
parser.add_argument(
|
||||
"-l", "--launchpad", dest="launchpad_instance", default="production")
|
||||
parser.add_option("-A", "--archive", dest="archive")
|
||||
parser.add_option("-S", "--series", dest="series")
|
||||
parser.add_option("-p", "--person", dest="person", action="append")
|
||||
parser.add_option("-c", "--component", dest="component")
|
||||
parser.add_option("-P", "--packageset", dest="packageset", action="append")
|
||||
parser.add_option("-s", "--source", dest="source", action="append")
|
||||
parser.add_option("--pocket", dest="pocket")
|
||||
parser.add_option("-t", "--acl-type", dest="acl_type",
|
||||
help="ACL type: upload or admin")
|
||||
parser.add_option("--anon", dest="anon_login", action="store_true",
|
||||
default=False, help="Login anonymously to Launchpad")
|
||||
parser.add_argument("-A", "--archive", dest="archive")
|
||||
parser.add_argument("-S", "--series", dest="series")
|
||||
parser.add_argument("-p", "--person", dest="person", action="append")
|
||||
parser.add_argument("-c", "--component", dest="component")
|
||||
parser.add_argument(
|
||||
"-P", "--packageset", dest="packageset", action="append")
|
||||
parser.add_argument("-s", "--source", dest="source", action="append")
|
||||
parser.add_argument("--pocket", dest="pocket")
|
||||
parser.add_argument(
|
||||
"-t", "--acl-type", dest="acl_type", help="ACL type: upload or admin")
|
||||
parser.add_argument(
|
||||
"--anon", dest="anon_login", action="store_true", default=False,
|
||||
help="Login anonymously to Launchpad")
|
||||
parser.add_argument(
|
||||
"action",
|
||||
choices=(
|
||||
"query", "add", "delete", "create", "modify", "copy", "check"),
|
||||
help="action to perform")
|
||||
|
||||
# Deprecated in favour of -A.
|
||||
parser.add_option(
|
||||
parser.add_argument(
|
||||
"-d", "--distribution", dest="distro", default="ubuntu",
|
||||
help=SUPPRESS_HELP)
|
||||
help=argparse.SUPPRESS)
|
||||
|
||||
options, args = parser.parse_args()
|
||||
args = parser.parse_args()
|
||||
|
||||
possible_actions = ('query', 'add', 'delete', 'create', 'copy', 'check')
|
||||
|
||||
if len(args) != 1:
|
||||
parser.error(
|
||||
"You must specify an action, one of:\n%s" %
|
||||
", ".join(possible_actions))
|
||||
|
||||
if options.anon_login and args[0] not in ('query', 'check'):
|
||||
if args.anon_login and args.action not in ('query', 'check'):
|
||||
print("E: Anonymous login not supported for this action.")
|
||||
sys.exit(1)
|
||||
|
||||
if (args[0] != 'query' and
|
||||
not options.person and not options.component and
|
||||
not options.packageset and not options.source and
|
||||
not options.pocket):
|
||||
if (args.action != 'query' and
|
||||
not args.person and not args.component and
|
||||
not args.packageset and not args.source and
|
||||
not args.pocket):
|
||||
parser.error("Provide at least one of "
|
||||
"person/component/packageset/source/pocket")
|
||||
if options.packageset and not options.series:
|
||||
if args.packageset and not args.series:
|
||||
parser.error("Package set requires an associated series")
|
||||
if options.acl_type and options.acl_type not in ('upload', 'admin'):
|
||||
if args.acl_type and args.acl_type not in ('upload', 'admin'):
|
||||
parser.error("Invalid ACL type '%s' (valid: 'upload', 'admin')" %
|
||||
options.acl_type)
|
||||
if options.acl_type == 'admin' and options.packageset:
|
||||
args.acl_type)
|
||||
if args.acl_type == 'admin' and args.packageset:
|
||||
parser.error("ACL type admin not allowed for package sets")
|
||||
if options.acl_type == 'admin' and options.source:
|
||||
if args.acl_type == 'admin' and args.source:
|
||||
parser.error("ACL type admin not allowed for source packages")
|
||||
if options.pocket:
|
||||
options.pocket = options.pocket.title()
|
||||
if args.pocket:
|
||||
args.pocket = args.pocket.title()
|
||||
|
||||
if options.anon_login:
|
||||
if args.anon_login:
|
||||
launchpad = Launchpad.login_anonymously(
|
||||
CONSUMER_KEY, options.launchpad_instance, version="devel")
|
||||
CONSUMER_KEY, args.launchpad_instance, version="devel")
|
||||
else:
|
||||
launchpad = Launchpad.login_with(
|
||||
CONSUMER_KEY, options.launchpad_instance, version="devel")
|
||||
CONSUMER_KEY, args.launchpad_instance, version="devel")
|
||||
|
||||
options.archive = get_archive(options, launchpad)
|
||||
options.distro = options.archive.distribution
|
||||
if options.series is not None:
|
||||
options.series = options.distro.getSeries(
|
||||
name_or_version=options.series)
|
||||
args.archive = get_archive(args, launchpad)
|
||||
args.distro = args.archive.distribution
|
||||
if args.series is not None:
|
||||
args.series = args.distro.getSeries(name_or_version=args.series)
|
||||
|
||||
try:
|
||||
main(options, args[0])
|
||||
main(args)
|
||||
except launchpadlib.errors.HTTPError as err:
|
||||
print("There was a %s error:" % err.response.status)
|
||||
print(err.content)
|
||||
|
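Editor's note: the optparse-to-argparse port above replaces the manual positional handling (`args[0]`, the `len(args) != 1` check) with a declared `action` argument, so validation comes for free. A minimal standalone sketch of the new shape, not part of the script itself:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--anon", dest="anon_login", action="store_true",
                        default=False)
    parser.add_argument(
        "action",
        choices=("query", "add", "delete", "create", "modify", "copy", "check"))

    # argparse validates the action itself; no possible_actions check needed.
    args = parser.parse_args(["--anon", "query"])
    assert args.action == "query" and args.anon_login

    # An invalid action now exits with a usage error before reaching main():
    # parser.parse_args(["frob"])  -> SystemExit
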
@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python2.7
# Copyright 2012 Canonical Ltd.
#
# This script will write update metrics for a given Ubuntu release in CSV

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7

# Copyright (C) 2011 Iain Lane
# Copyright (C) 2011 Stefano Rivera

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7
# -*- coding: utf-8 -*-

# Copyright (C) 2010, 2012 Canonical Ltd.

@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python2.7

# Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.

@ -46,20 +46,18 @@ def release_task_callback(lp, bugnum, task, context):
                % bugnum)
            return {}
        return {'proposed': task}
    if subtask == 'promote-to-updates':
        if task.status != 'Confirmed':
            raise KernelWorkflowError(
                "Ignoring bug %s, not ready to promote-to-updates"
                % bugnum)
            return {}
    if subtask == 'promote-to-updates' and task.status in ('Confirmed', 'Fix Released'):
        return {'updates': task}
    if (subtask == 'promote-to-security' and task.status == 'Confirmed'):
    if subtask == 'promote-to-security' and task.status == 'Confirmed':
        return {'security': task}
    return {}


def release_source_callback(lp, bugnum, tasks, full_packages, release, context):
    if not 'proposed' in tasks or not 'updates' in tasks:
    if 'proposed' not in tasks or 'updates' not in tasks:
        raise KernelWorkflowError()
    if (tasks['updates'].status == 'Fix Released' and
            'security' not in tasks):
        raise KernelWorkflowError()
    cmd = ['sru-release', '--no-bugs', release]
    cmd.extend(full_packages)
@ -71,10 +69,11 @@ def release_source_callback(lp, bugnum, tasks, full_packages, release, context):
        print("Failed to run sru-release for %s" % bugnum)
        raise

    tasks['updates'].status = 'Fix Committed'
    tasks['updates'].assignee = lp.me
    tasks['updates'].lp_save()
    if 'security' in tasks:
    if 'updates' in tasks and tasks['updates'].status != 'Fix Released':
        tasks['updates'].status = 'Fix Committed'
        tasks['updates'].assignee = lp.me
        tasks['updates'].lp_save()
    if 'security' in tasks and tasks['security'].status != 'Fix Released':
        tasks['security'].status = 'Fix Committed'
        tasks['security'].assignee = lp.me
        tasks['security'].lp_save()

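Editor's note: the reworked callbacks tolerate tasks that are already 'Fix Released' and skip the lp_save() round-trip for them. A minimal sketch of that guard, using a stand-in object rather than a real launchpadlib task:

    class FakeTask(object):
        def __init__(self, status):
            self.status = status
            self.saved = False

        def lp_save(self):
            self.saved = True

    def close_remaining(tasks):
        # Mirror of the new guards: only touch tasks not yet Fix Released.
        for name in ('updates', 'security'):
            if name in tasks and tasks[name].status != 'Fix Released':
                tasks[name].status = 'Fix Committed'
                tasks[name].lp_save()

    tasks = {'updates': FakeTask('Fix Released'),
             'security': FakeTask('Confirmed')}
    close_remaining(tasks)
    assert not tasks['updates'].saved    # already released: left alone
    assert tasks['security'].saved       # committed and saved
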
@ -118,8 +118,16 @@ def generate_diff_from_master(me, archive, master_source, master_version,
        sys.stdin.readline()
        return

    # we need to pull in the master kernel into a separate directory as
    # it might have the same name (flavor) as the one we are reviewing
    master_dir = os.path.join(work_dir, 'master')
    os.mkdir(master_dir)
    # this is a bit ugly, since we actually have to chdir for a moment
    # because dget has no option of declaring the output directory
    os.chdir(master_dir)

    fetch_tarball_from_cache(
        work_dir, tardir, master_source, master_upstream, start_dir)
        master_dir, tardir, master_source, master_upstream, start_dir)

    # grab the old source first
    dget_cmd = ['dget', '-u', master_dsc]
@ -130,11 +138,14 @@ def generate_diff_from_master(me, archive, master_source, master_version,
              (master_source, master_version))
        raise e

    os.chdir(work_dir)

    # generate the diff
    master_path = os.path.join(master_dir, master_source)
    print("Generating brief diff between new kernel and master (%s) to %s" %
          (master_version, os.path.join(work_dir, 'master_diff')))
    diff_cmd = ('diff -rq "{}-{}" "{}-{}" >master_diff').format(
        master_source, master_upstream, new_source, new_upstream)
    diff_cmd = ('diff -rq --label master "{}-{}" "{}-{}" >master_diff').format(
        master_path, master_upstream, new_source, new_upstream)
    subprocess.call(diff_cmd, shell=True)

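Editor's note: the `>master_diff` redirection is why `diff_cmd` is built as a single string and run with `shell=True`; `diff -rq` also exits non-zero when trees differ, so the return code is deliberately ignored. A hedged, standalone illustration of the same pattern (the directory names are invented):

    import subprocess

    # Redirection is interpreted by the shell, so pass one string.
    cmd = 'diff -rq --label master "linux-5.4.0" "linux-aws-5.4.0" >master_diff'
    subprocess.call(cmd, shell=True)   # returns 1 when trees differ; fine here

    # A shell-free alternative with the same effect:
    with open('master_diff', 'w') as out:
        subprocess.call(['diff', '-rq', '--label', 'master',
                         'linux-5.4.0', 'linux-aws-5.4.0'], stdout=out)
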
@ -168,12 +179,13 @@ def review_source_callback(lp, bugnum, tasks, full_packages, release, context):
    master_version = None
    if context['diff']:
        master_source, master_version = get_master_kernel(lp, bugnum)
    should_sign = any('-signed' in pkg for pkg in full_packages)
    for source in full_packages:
        process_source_package(
            source, release, me, context['archive'], context['ppa'],
            context['ubuntu'], context['startdir'], context['workdir'],
            context['tardir'], context['esm'], context['tarcache'],
            master_source, master_version)
            master_source, master_version, should_sign)
    tasks['proposed'].status = 'Fix Committed'
    tasks['proposed'].assignee = me
    tasks['proposed'].lp_save()
@ -222,7 +234,8 @@ def save_tarball_to_cache(directory, tardir, source, version):
def process_source_package(source, release, me, archive, ppa, ubuntu,
                           start_dir, work_dir, tardir,
                           esm=False, tar_cache=False,
                           master_source=None, master_version=None):
                           master_source=None, master_version=None,
                           should_sign=False):
    series = ubuntu.getSeries(name_or_version=release)

    ppa_src = ppa.getPublishedSources(order_by_date=True,
@ -316,7 +329,8 @@ def process_source_package(source, release, me, archive, ppa, ubuntu,
        save_tarball_to_cache(work_dir, tardir, source, new_upstream)

    if (master_source and master_version and
            '-meta' not in source and '-signed' not in source):
            '-meta' not in source and '-signed' not in source and
            '-restricted-modules' not in source):
        # if requested, we also generate a brief diff between the new kernel
        # and its 'master' kernel
        generate_diff_from_master(
@ -349,35 +363,55 @@ def process_source_package(source, release, me, archive, ppa, ubuntu,
              (source, ppa_ver))
        raise e
    print("Accepted")
    # we only care about accepting signed bits if there is a -signed
    # package in the handled sources and when we're not working with
    # ESM (as those don't go through the queue)
    if not should_sign or esm:
        return
    # we know this isn't a kernel package containing signed bits,
    # so don't subject ourselves to extra delays
    if '-meta' in source or '-signed' in source:
    if ('-meta' in source or '-signed' in source or
            '-restricted-modules' in source):
        return
    print("Checking for UEFI binaries")
    # Arbitrary 10 second delay, maybe enough to let uefi binaries hit
    # the unapproved queue.
    time.sleep(10)
    # accept any related uefi binaries. We filter as closely as possible
    # on name without hard-coding architecture, and we also filter to
    # only include uefi binaries that have appeared since we started the
    # copy to avoid accepting something that might have been improperly
    # copied into the queue by an "attacker" with upload rights.
    print("Checking for UEFI binaries in the Unapproved queue")
    uefis = []
    for signed_type in ('uefi', 'signing'):
        uefis.extend(series.getPackageUploads(
            archive=archive['release'],
            pocket='Proposed',
            status='Unapproved',
            custom_type=signed_type,
            name='{}_{}_'.format(source, ppa_ver),
            created_since_date=copy_time))
    for uefi in uefis:
        print("Accepting {}".format(uefi))
        uefi.acceptFromQueue()
    # we try looking for signed bits a few times after short, constant
    # delays. The binaries nowadays appear after some seconds, but
    # having a constant delay is suboptimal.
    for n in range(5):
        time.sleep(3)
        # accept any related uefi binaries. We filter as closely as
        # possible on name without hard-coding architecture, and we also
        # filter to only include uefi binaries that have appeared since we
        # started the copy to avoid accepting something that might have
        # been improperly copied into the queue by an "attacker" with
        # upload rights.
        for signed_type in ('uefi', 'signing'):
            uefis.extend(series.getPackageUploads(
                archive=archive['release'],
                pocket='Proposed',
                status='Unapproved',
                custom_type=signed_type,
                name='{}_{}_'.format(source, ppa_ver),
                created_since_date=copy_time))

        if uefis:
            for uefi in uefis:
                print("Accepting {}".format(uefi))
                uefi.acceptFromQueue()
            break
    else:
        print("No UEFI binaries found after %s tries. Please manually "
              "check for their existance and approve before accepting the "
              "signed sources." % n)
        print("Press enter to continue.")
        sys.stdout.flush()
        sys.stdin.readline()

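Editor's note: the replacement polling loop leans on Python's for/else, where the else branch runs only when the loop finished without hitting break, i.e. every poll came up empty. A compact, self-contained sketch of the same shape:

    import time

    def poll_queue(fetch, tries=5, delay=3):
        """Collect uploads, retrying a few times with a short constant delay."""
        uploads = []
        for n in range(tries):
            time.sleep(delay)
            uploads = fetch()
            if uploads:
                break
        else:
            # for/else: reached only when no iteration hit `break`.
            print("nothing found after %s tries" % tries)
        return uploads

    # e.g. poll_queue(lambda: []) prints the message and returns [] after ~15s.
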
if __name__ == '__main__':

    default_release = 'cosmic'
    default_release = 'focal'

    parser = OptionParser(
        usage="Usage: %prog [options] bug [bug ...]")
@ -399,6 +433,9 @@ if __name__ == '__main__':
    parser.add_option(
        "-d", "--diff-against-master", dest="diff_master",
        action="store_true")
    parser.add_option(
        "--skip-name-check", dest="nonamecheck",
        action="store_true")

    opts, bugs = parser.parse_args()

@ -440,7 +477,8 @@ if __name__ == '__main__':
    context = {
        'archive': archive, 'ppa': ppa, 'ubuntu': ubuntu,
        'tardir': tardir, 'tarcache': opts.caching, 'startdir': start_dir,
        'esm': opts.esm, 'diff': opts.diff_master
        'esm': opts.esm, 'diff': opts.diff_master,
        'skipnamecheck': opts.nonamecheck
    }
    for bugnum in bugs:
        with ExitStack() as resources:

657	ubuntu-archive-tools/kernel_series.py	Normal file
@ -0,0 +1,657 @@
#!/usr/bin/env python2.7
#

try:
    from urllib.request import urlopen
except ImportError:
    from urllib2 import urlopen

import os
import yaml

class KernelRoutingEntry:
    def __init__(self, ks, source, data):
        name = "{}:{}".format(source.series.codename, source.name)
        if isinstance(data, str):
            name = data
            table = source.series.routing_table
            if table is None:
                raise ValueError("unable to map routing alias {}, "
                                 "no series routing table".format(data))
            if data not in table:
                raise ValueError("unable to map routing alias {}, "
                                 "not listed in series routing table".format(data))
            data = table[data]

        # Clear out any entries that have been overriden to None.
        for entry in dict(data):
            if data[entry] is None:
                del data[entry]

        self._ks = ks
        self._source = source
        self._name = name
        self._data = data if data else {}

    @property
    def source(self):
        return self._source

    @property
    def name(self):
        return self._name

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return list(self) == list(other)
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __iter__(self):
        return iter(self._data.items())

    def __getitem__(self, which):
        return self._data[which]

    def lookup_destination(self, dest, primary=False):
        data = self._data.get(dest, None)
        if primary is False or data is None:
            return data
        return data[0]

    def __str__(self):
        return str(self._data)


class KernelRepoEntry:
    def __init__(self, ks, owner, data):
        if isinstance(data, list):
            new_data = {'url': data[0]}
            if len(data) == 1:
                new_data['branch'] = 'master'
            elif len(data) == 2:
                new_data['branch'] = data[1]
            data = new_data

        self._ks = ks
        self._owner = owner
        self._data = data if data else {}

    @property
    def owner(self):
        return self._owner

    # XXX: should this object have a name ?

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return self.url == other.url and self.branch == other.branch
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def url(self):
        return self._data['url']

    @property
    def branch(self):
        return self._data.get('branch', None)

    def __str__(self):
        return "{} {}".format(self.url, self.branch)


class KernelSnapEntry:
    def __init__(self, ks, source, name, data):
        self._ks = ks
        self._source = source
        self._name = name
        self._data = data if data else {}

        # Convert arches/track to publish-to form.
        if 'publish-to' not in self._data:
            if 'arches' in self._data:
                publish_to = {}
                for arch in self._data['arches']:
                    publish_to[arch] = [self._data.get('track', 'latest')]
                self._data['publish-to'] = publish_to

        # Convert stable to promote-to form.
        if 'promote-to' not in self._data and 'stable' in self._data:
            if self._data['stable'] is True:
                self._data['promote-to'] = 'stable'
            else:
                self._data['promote-to'] = 'candidate'
        # Assume no promote-to data to mean just to edge.
        promote_to = self._data.get('promote-to', 'edge')
        if isinstance(promote_to, str):
            expand_promote_to = []
            for risk in ('edge', 'beta', 'candidate', 'stable'):
                expand_promote_to.append(risk)
                if risk == promote_to:
                    break
            self._data['promote-to'] = expand_promote_to
        # Ensure we have stable when promote-to is present.
        if 'promote-to' in self._data and 'stable' not in self._data:
            if 'stable' in self._data['promote-to']:
                self._data['stable'] = True
            else:
                self._data['stable'] = False

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return self.name == other.name and self.source == other.source
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def series(self):
        return self._source.series

    @property
    def source(self):
        return self._source

    @property
    def name(self):
        return self._name

    @property
    def repo(self):
        data = self._data.get('repo', None)
        if not data:
            return None
        return KernelRepoEntry(self._ks, self, data)

    @property
    def primary(self):
        return self._data.get('primary', False)

    @property
    def gated(self):
        return self._data.get('gated', False)

    @property
    def stable(self):
        return self._data.get('stable', False)

    @property
    def qa(self):
        return self._data.get('qa', False)

    @property
    def hw_cert(self):
        return self._data.get('hw-cert', False)

    @property
    def arches(self):
        # XXX: should this be []
        return self._data.get('arches', None)

    @property
    def track(self):
        return self._data.get('track', None)

    @property
    def publish_to(self):
        return self._data.get('publish-to', None)

    @property
    def promote_to(self):
        return self._data.get('promote-to', None)

    def promote_to_risk(self, risk):
        return risk in self._data.get('promote-to', [])

    def __str__(self):
        return "{} {}".format(str(self.source), self.name)

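Editor's note: KernelSnapEntry normalises several historical YAML spellings into one canonical publish-to/promote-to form; in particular a scalar promote-to expands to every risk level up to and including it. A small standalone sketch of that expansion rule:

    def expand_promote_to(promote_to):
        # Scalar risk -> cumulative list, mirroring KernelSnapEntry.__init__.
        expanded = []
        for risk in ('edge', 'beta', 'candidate', 'stable'):
            expanded.append(risk)
            if risk == promote_to:
                break
        return expanded

    assert expand_promote_to('edge') == ['edge']
    assert expand_promote_to('candidate') == ['edge', 'beta', 'candidate']
    assert expand_promote_to('stable') == ['edge', 'beta', 'candidate', 'stable']
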
class KernelPackageEntry:
    def __init__(self, ks, source, name, data):
        self._ks = ks
        self._source = source
        self._name = name
        self._data = data if data else {}

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return self.name == other.name and self.source == other.source
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def series(self):
        return self._source.series

    @property
    def source(self):
        return self._source

    @property
    def name(self):
        return self._name

    @property
    def type(self):
        return self._data.get('type', None)

    @property
    def repo(self):
        data = self._data.get('repo', None)
        if not data:
            return None
        return KernelRepoEntry(self._ks, self, data)

    def __str__(self):
        return "{} {} {}".format(str(self.source), self.name, self.type)


class KernelSourceEntry:
    def __init__(self, ks, series, name, data):
        self._ks = ks
        self._series = series
        self._name = name
        self._data = data if data else {}

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return self.name == other.name and self.series == other.series
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def name(self):
        return self._name

    @property
    def series(self):
        return self._series

    @property
    def versions(self):
        if 'versions' in self._data:
            return self._data['versions']

        derived_from = self.derived_from
        if derived_from is not None:
            return derived_from.versions

        copy_forward = self.copy_forward
        if copy_forward is not None:
            return copy_forward.versions

        # XXX: should this be []
        return None

    @property
    def version(self):
        versions = self.versions
        if not versions:
            return None
        return versions[-1]

    @property
    def development(self):
        return self._data.get('development', self.series.development)

    @property
    def supported(self):
        return self._data.get('supported', self.series.supported)

    @property
    def severe_only(self):
        return self._data.get('severe-only', False)

    @property
    def stakeholder(self):
        return self._data.get('stakeholder', None)

    @property
    def packages(self):
        # XXX: should this return None when empty
        result = []
        packages = self._data.get('packages')
        if packages:
            for package_key, package in packages.items():
                result.append(KernelPackageEntry(self._ks, self, package_key, package))
        return result

    def lookup_package(self, package_key):
        packages = self._data.get('packages')
        if not packages or package_key not in packages:
            return None
        return KernelPackageEntry(self._ks, self, package_key, packages[package_key])

    @property
    def snaps(self):
        # XXX: should this return None when empty
        result = []
        snaps = self._data.get('snaps')
        if snaps:
            for snap_key, snap in snaps.items():
                result.append(KernelSnapEntry(self._ks, self, snap_key, snap))
        return result

    def lookup_snap(self, snap_key):
        snaps = self._data.get('snaps')
        if not snaps or snap_key not in snaps:
            return None
        return KernelSnapEntry(self._ks, self, snap_key, snaps[snap_key])

    @property
    def derived_from(self):
        if 'derived-from' not in self._data:
            return None

        (series_key, source_key) = self._data['derived-from']

        series = self._ks.lookup_series(series_key)
        source = series.lookup_source(source_key)

        return source

    @property
    def testable_flavours(self):
        retval = []
        if (self._data.get('testing') is not None and
                self._data['testing'].get('flavours') is not None
                ):
            for flavour in self._data['testing']['flavours'].keys():
                fdata = self._data['testing']['flavours'][flavour]
                # If we have neither arches nor clouds we represent a noop
                if not fdata:
                    continue
                arches = fdata.get('arches', None)
                arches = arches if arches is not None else []
                clouds = fdata.get('clouds', None)
                clouds = clouds if clouds is not None else []
                retval.append(KernelSourceTestingFlavourEntry(flavour, arches, clouds))
        return retval

    @property
    def invalid_tasks(self):
        retval = self._data.get('invalid-tasks', [])
        if retval is None:
            retval = []
        return retval

    @property
    def copy_forward(self):
        if 'copy-forward' not in self._data:
            return None

        # XXX: backwards compatibility.
        if self._data['copy-forward'] is False:
            return None
        if self._data['copy-forward'] is True:
            derived_from = self.derived_from
            if derived_from is None:
                return True
            return self.derived_from

        (series_key, source_key) = self._data['copy-forward']

        series = self._ks.lookup_series(series_key)
        source = series.lookup_source(source_key)

        return source

    @property
    def backport(self):
        return self._data.get('backport', False)

    @property
    def routing(self):
        default = 'default'
        if self.series.development:
            default = 'devel'
        if self.series.esm:
            default = 'esm'
        data = self._data.get('routing', default)
        if data is None:
            return data
        return KernelRoutingEntry(self._ks, self, data)

    @property
    def swm_data(self):
        return self._data.get('swm')

    @property
    def private(self):
        return self._data.get('private', False)

    def __str__(self):
        return "{} {}".format(self.series.name, self.name)


class KernelSourceTestingFlavourEntry:
    def __init__(self, name, arches, clouds):
        self._name = name
        self._arches = arches
        self._clouds = clouds

    @property
    def name(self):
        return self._name

    @property
    def arches(self):
        return self._arches

    @property
    def clouds(self):
        return self._clouds


class KernelSeriesEntry:
    def __init__(self, ks, name, data, defaults=None):
        self._ks = ks
        self._name = name
        self._data = {}
        if defaults is not None:
            self._data.update(defaults)
        if data is not None:
            self._data.update(data)

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return self.name == other.name
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def name(self):
        return self._name

    @property
    def codename(self):
        return self._data.get('codename', None)

    @property
    def opening(self):
        if 'opening' in self._data:
            if self._data['opening'] is not False:
                return True
        return False

    def opening_ready(self, *flags):
        if 'opening' not in self._data:
            return True
        allow = self._data['opening']
        if allow is None:
            return False
        if allow in (True, False):
            return not allow
        for flag in flags:
            flag_allow = allow.get(flag, False)
            if flag_allow is None or flag_allow is False:
                return False
        return True
    opening_allow = opening_ready

    @property
    def development(self):
        return self._data.get('development', False)

    @property
    def supported(self):
        return self._data.get('supported', False)

    @property
    def lts(self):
        return self._data.get('lts', False)

    @property
    def esm(self):
        return self._data.get('esm', False)

    def __str__(self):
        return "{} ({})".format(self.name, self.codename)

    @property
    def sources(self):
        result = []
        sources = self._data.get('sources')
        if sources:
            for source_key, source in sources.items():
                result.append(KernelSourceEntry(
                    self._ks, self, source_key, source))
        return result

    @property
    def routing_table(self):
        return self._data.get('routing-table', None)

    def lookup_source(self, source_key):
        sources = self._data.get('sources')
        if not sources or source_key not in sources:
            return None
        return KernelSourceEntry(self._ks, self, source_key, sources[source_key])


# KernelSeries
#
class KernelSeries:
    _url = 'https://git.launchpad.net/~canonical-kernel/' \
           '+git/kteam-tools/plain/info/kernel-series.yaml'
    _url_local = 'file://' + os.path.realpath(os.path.join(os.path.dirname(__file__),
                                                           '..', 'info', 'kernel-series.yaml'))
    #_url = 'file:///home/apw/git2/kteam-tools/info/kernel-series.yaml'
    #_url = 'file:///home/work/kteam-tools/info/kernel-series.yaml'
    _data_txt = {}

    @classmethod
    def __load_once(cls, url):
        if url not in cls._data_txt:
            response = urlopen(url)
            data = response.read()
            if not isinstance(data, str):
                data = data.decode('utf-8')
            cls._data_txt[url] = data
        return cls._data_txt[url]

    def __init__(self, url=None, data=None, use_local=os.getenv("USE_LOCAL_KERNEL_SERIES_YAML", False)):
        if data or url:
            if url:
                response = urlopen(url)
                data = response.read()
                if not isinstance(data, str):
                    data = data.decode('utf-8')
        else:
            data = self.__load_once(self._url_local if use_local else self._url)
        self._data = yaml.safe_load(data)

        self._development_series = None
        self._codename_to_series = {}
        for series_key, series in self._data.items():
            if not series:
                continue
            if series.get('development', False):
                self._development_series = series_key
            if 'codename' in series:
                self._codename_to_series[series['codename']] = series_key

        # Pull out the defaults.
        self._defaults_series = {}
        if 'defaults' in self._data:
            self._defaults_series = self._data['defaults']
            del self._data['defaults']

    @staticmethod
    def key_series_name(series):
        return [int(x) for x in series.name.split('.')]

    @property
    def series(self):
        return [KernelSeriesEntry(self, series_key, series,
                                  defaults=self._defaults_series)
                for series_key, series in self._data.items()]

    def lookup_series(self, series=None, codename=None, development=False):
        if not series and not codename and not development:
            raise ValueError("series/codename/development required")
        if not series and codename:
            if codename not in self._codename_to_series:
                return None
            series = self._codename_to_series[codename]
        if not series and development:
            if not self._development_series:
                return None
            series = self._development_series
        if series and series not in self._data:
            return None
        return KernelSeriesEntry(self, series, self._data[series],
                                 defaults=self._defaults_series)


if __name__ == '__main__':
    db = KernelSeries()

    series = db.lookup_series('16.04')
    if series.name != '16.04':
        print('series.name != 16.04')
    if series.codename != 'xenial':
        print('series.codename != xenial')

    series2 = db.lookup_series(codename='xenial')
    if series2.name != '16.04':
        print('series2.name != 16.04')
    if series2.codename != 'xenial':
        print('series2.codename != xenial')

    series3 = db.lookup_series(development=True)
    if series3.name != '18.04':
        print('series3.name != 18.04')
    if series3.codename != 'bionic':
        print('series3.codename != bionic')

    print(str(series), str(series2), str(series3))

    for series2 in sorted(db.series, key=db.key_series_name):
        print(series2)

    for source in series.sources:
        print(str(source), source.series.name, source.name)

        print(source.derived_from)
        print(source.versions)

        for package in source.packages:
            print("PACKAGE", str(package))

        for snap in source.snaps:
            print("SNAP", str(snap), snap.arches)


# vi:set ts=4 sw=4 expandtab:

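Editor's note: for interactive use the class can be pointed at a local checkout via the USE_LOCAL_KERNEL_SERIES_YAML environment variable, or fed data directly. A hedged sketch, assuming kernel_series.py is importable; the inline YAML is invented for illustration, not real kernel-series data:

    yaml_text = """
    18.04:
        codename: bionic
        development: true
        sources:
            linux:
                packages:
                    linux: {}
    """

    db = KernelSeries(data=yaml_text)
    series = db.lookup_series(codename='bionic')
    assert series.name == '18.04'
    source = series.lookup_source('linux')
    print(source)          # -> "18.04 linux"
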
@ -27,7 +27,8 @@ class KernelWorkflowError(Exception):

def get_name_and_version_from_bug(bug):
    title_re = re.compile(
        r'^(?P<package>[a-z0-9.-]+): (?P<version>[0-9.-]+[0-9a-z.~-]*)'
        r'^([a-z]+\/)?(?P<package>[a-z0-9.-]+): '
        + '(?P<version>[0-9.-]+[0-9a-z.~-]*)'
        + ' -proposed tracker$')
    match = title_re.search(bug.title)
    if not match:
@ -93,7 +94,8 @@ def process_sru_bug(lp, bugnum, task_callback, source_callback, context=None):
    if source_name != package:
        print("Cannot determine base package for %s, %s vs. %s"
              % (bugnum, source_name, package))
        return
        if context['skipnamecheck']:
            return

    if not packages:
        print("No packages in the prepare list, don't know what to do")
@ -101,7 +103,7 @@ def process_sru_bug(lp, bugnum, task_callback, source_callback, context=None):

    if not '' in packages:
        print("No kernel package in prepare list, only meta packages. "
              "Continue review? [yN] ", end="")
              "Continue? [yN] ", end="")
        sys.stdout.flush()
        response = sys.stdin.readline()
        if not response.strip().lower().startswith('y'):
@ -110,7 +112,9 @@ def process_sru_bug(lp, bugnum, task_callback, source_callback, context=None):
    full_packages = []
    for pkg in packages:
        if pkg == '-lbm':
            pkg = '-backports-modules-3.2.0'
            pkg = '-backports-modules-3.2.0'
        elif pkg == '-lrm':
            pkg = '-restricted-modules'

        real_package = re.sub(r'^linux', 'linux' + pkg, package)
        full_packages.append(real_package)

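Editor's note: the widened title regex accepts an optional series/ prefix that newer tracker bug titles carry. A quick check of both title shapes (the titles below are illustrative):

    import re

    title_re = re.compile(
        r'^([a-z]+\/)?(?P<package>[a-z0-9.-]+): '
        + '(?P<version>[0-9.-]+[0-9a-z.~-]*)'
        + ' -proposed tracker$')

    for title in ('linux: 4.15.0-20.21 -proposed tracker',
                  'focal/linux-aws: 5.4.0-1009.9 -proposed tracker'):
        m = title_re.search(title)
        print(m.group('package'), m.group('version'))
    # -> linux 4.15.0-20.21
    # -> linux-aws 5.4.0-1009.9
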
@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7
# Copyright (C) 2012 Canonical Ltd.

# This program is free software: you can redistribute it and/or modify

Binary file not shown.
@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python3
# Manage the Launchpad build farm.
#
# Copyright 2012-2014 Canonical Ltd.
@ -83,6 +83,9 @@ parser.add_argument(
parser.add_argument(
    "-d", "--disabled", action="store_const", dest="ok_filter", const=False,
    help="update only disabled builders")
parser.add_argument(
    "--idle", action="store_const", dest="idle_filter", const=True,
    help="update only idle builders")
parser.add_argument(
    "--cleaning", action="store_const", dest="cleaning_filter", const=True,
    help="update only builders that are stuck cleaning")
@ -173,6 +176,12 @@ def get_processor_name(processor_link):
def get_clean_status_duration(builder):
    return datetime.now(pytz.UTC) - builder.date_clean_status_changed

def is_idle(builder):
    return (
        builder.builderok
        and builder.current_build_link is None
        and builder.clean_status == 'Clean')

def is_cleaning(builder):
    return (
        builder.builderok
@ -186,6 +195,8 @@ for builder in lp.builders:
        continue
    if args.ok_filter is not None and builder.builderok != args.ok_filter:
        continue
    if args.idle_filter is not None and is_idle(builder) != args.idle_filter:
        continue
    if (args.cleaning_filter is not None
            and is_cleaning(builder) != args.cleaning_filter):
        continue

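Editor's note: the new --idle and --cleaning switches reuse the same store_const idiom as -d/--disabled: the dest stays None unless a flag is given, so None doubles as "don't filter on this axis". A minimal sketch (the --enabled flag here is hypothetical, added for symmetry):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--enabled", action="store_const", dest="ok_filter", const=True)
    parser.add_argument(
        "--disabled", action="store_const", dest="ok_filter", const=False)

    args = parser.parse_args([])
    assert args.ok_filter is None        # tri-state: no filter requested

    args = parser.parse_args(["--disabled"])
    assert args.ok_filter is False       # keep only disabled builders
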
@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python3

# Copyright 2013-2019 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>
@ -22,28 +22,54 @@ from __future__ import print_function
__metaclass__ = type

import argparse
from collections import OrderedDict
import hashlib
import subprocess
import sys
try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

from launchpadlib.launchpad import Launchpad
from launchpadlib.uris import web_root_for_service_root
from six.moves import shlex_quote
from six.moves.urllib.parse import (
    unquote,
    urlparse,
)
from ubuntutools.question import YesNoQuestion

import lputils


# Convenience aliases.
image_types = {
    "chroot": "Chroot tarball",
    "lxd": "LXD image",
image_types = OrderedDict([
    ("chroot", "Chroot tarball"),
    ("lxd", "LXD image"),
])


# Affordance for --from-livefs.
image_types_by_name = {
    "livecd.ubuntu-base.rootfs.tar.gz": "Chroot tarball",
    "livecd.ubuntu-base.lxd.tar.gz": "LXD image",
}


def adjust_lp_url(parser, args, url):
    parsed_url = urlparse(url)
    if parsed_url.scheme != "":
        root_uri = args.launchpad._root_uri
        service_host = root_uri.host
        web_host = urlparse(web_root_for_service_root(str(root_uri))).hostname
        if parsed_url.hostname == service_host:
            return url
        elif parsed_url.hostname == web_host:
            return parsed_url.path
        else:
            parser.error(
                "%s is not on this Launchpad instance (%s)" % (url, web_host))
    else:
        return url

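Editor's note: adjust_lp_url normalises whatever the operator pastes: API URLs pass through, web URLs are reduced to their path, anything else aborts via parser.error(). A standalone analogue of the decision logic; the hostnames are assumptions for the production instance, since the real ones come from the Launchpad root URI:

    from urllib.parse import urlparse

    def classify(url, service_host="api.launchpad.net", web_host="launchpad.net"):
        parsed = urlparse(url)
        if parsed.scheme == "":
            return url              # already a relative API path
        if parsed.hostname == service_host:
            return url              # API URL: usable as-is
        if parsed.hostname == web_host:
            return parsed.path      # web URL: keep just the path
        raise ValueError("not on this Launchpad instance")

    assert classify("~owner/+livefs/ubuntu/focal/base").startswith("~")
    assert classify("https://launchpad.net/~owner/+livefs/ubuntu/focal/base") \
        == "/~owner/+livefs/ubuntu/focal/base"
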
def describe_image_type(image_type):
    if image_type == "Chroot tarball":
        return "base chroot tarball"
@ -104,7 +130,87 @@ def remove_chroot(args):
    return 0


def get_last_livefs_builds(livefs, architectures):
    """Get the most recent build for each of `architectures` in `livefs`."""
    arch_tags = {das.self_link: das.architecture_tag for das in architectures}
    builds = {}
    for build in livefs.completed_builds:
        arch_tag = arch_tags.get(build.distro_arch_series_link)
        if arch_tag is not None and arch_tag not in builds:
            builds[arch_tag] = build
        if set(builds) == set(arch_tags.values()):
            break
    return [build for _, build in sorted(builds.items())]


def set_chroots_from_livefs(args):
    """Set a whole batch of base images at once, for convenience."""
    if args.image_type is None:
        image_types = [args.image_type]
    else:
        image_types = list(image_types.values())

    livefs = args.launchpad.load(args.livefs_url)
    builds = get_last_livefs_builds(livefs, args.architectures)
    todo = []
    for build in builds:
        das = build.distro_arch_series
        suite_arch = "%s/%s" % (args.suite, das.architecture_tag)
        for image_url in build.getFileUrls():
            image_name = unquote(urlparse(image_url).path).split('/')[-1]
            image_type = image_types_by_name.get(image_name)
            if image_type is not None:
                previous_url = das.getChrootURL(
                    pocket=args.pocket, image_type=image_type)
                if previous_url is not None:
                    print("Previous %s for %s: %s" % (
                        describe_image_type(image_type), suite_arch,
                        previous_url))
                print("New %s for %s: %s" % (
                    describe_image_type(image_type), suite_arch, image_url))
                todo.append(
                    (das, build.self_link, image_name, args.pocket,
                     image_type))

    if todo:
        if args.dry_run:
            print("Not setting base images in dry-run mode.")
        else:
            if not args.confirm_all:
                if YesNoQuestion().ask("Set these base images", "no") == "no":
                    return 0
            for das, build_url, image_name, pocket, image_type in todo:
                das.setChrootFromBuild(
                    livefsbuild=build_url, filename=image_name, pocket=pocket,
                    image_type=image_type)

        print()
        print(
            "The following commands will roll back to these images if a "
            "future set is broken:")
        base_command = [
            "manage-chroot",
            "-l", args.launchpad_instance,
            "-d", args.distribution.name,
            "-s", args.suite,
        ]
        for das, build_url, image_name, _, image_type in todo:
            command = base_command + [
                "-a", das.architecture_tag,
                "-i", image_type,
                "--from-build", build_url,
                "-f", image_name,
                "set",
            ]
            print(" ".join(shlex_quote(arg) for arg in command))

    return 0


def set_chroot(args):
    if args.livefs_url is not None:
        return set_chroots_from_livefs(args)

    das = args.architectures[0]
    previous_url = das.getChrootURL(
        pocket=args.pocket, image_type=args.image_type)
@ -128,7 +234,7 @@ def set_chroot(args):
        return 0
    if args.build_url:
        das.setChrootFromBuild(
            livefsbuild=urlparse(args.build_url).path,
            livefsbuild=args.build_url,
            filename=args.filepath,
            pocket=args.pocket, image_type=args.image_type)
    else:
@ -165,11 +271,16 @@ def main():
        "-s", "--suite", "--series", dest="suite", metavar="SUITE",
        help="manage base images for SUITE")
    parser.add_argument(
        "-a", "--architecture", metavar="ARCHITECTURE", required=True,
        "-a", "--architecture", metavar="ARCHITECTURE",
        help="manage base images for ARCHITECTURE")
    parser.add_argument(
        "-i", "--image-type", metavar="TYPE", default="Chroot tarball",
        "-i", "--image-type", metavar="TYPE",
        help="manage base images of type TYPE")
    parser.add_argument(
        "--from-livefs", dest="livefs_url", metavar="URL",
        help=(
            "Live filesystem to set base images from (sets base images for "
            "all available architectures and image types)"))
    parser.add_argument(
        "--from-build", dest="build_url", metavar="URL",
        help="Live filesystem build URL to set base image from")
@ -179,15 +290,23 @@ def main():
    parser.add_argument("command", choices=sorted(commands.keys()))
    args = parser.parse_args()

    if args.command == "set" and args.filepath is None:
        parser.error("The set command requires a base image file path (-f).")
    if args.command == "set" and args.livefs_url is None:
        if args.architecture is None:
            parser.error("The set command requires an architecture (-a).")

    if args.image_type not in image_types.values():
        image_type = image_types.get(args.image_type.lower())
        if image_type is not None:
            args.image_type = image_type
        else:
            parser.error("Unknown image type '%s'." % args.image_type)
        if args.filepath is None:
            parser.error(
                "The set command requires a base image file path (-f).")

    if args.command != "set" or args.livefs_url is None:
        if args.image_type is None:
            args.image_type = "Chroot tarball"
        if args.image_type not in image_types.values():
            image_type = image_types.get(args.image_type.lower())
            if image_type is not None:
                args.image_type = image_type
            else:
                parser.error("Unknown image type '%s'." % args.image_type)

    if args.command in ("get", "info"):
        login_method = Launchpad.login_anonymously
@ -197,16 +316,11 @@ def main():
        "manage-chroot", args.launchpad_instance, version="devel")
    lputils.setup_location(args)

    if args.command == "set" and args.build_url:
        parsed_build_url = urlparse(args.build_url)
        if parsed_build_url.scheme != "":
            service_host = args.launchpad._root_uri.host
            web_host = urlparse(web_root_for_service_root(
                str(args.launchpad._root_uri))).hostname
            if parsed_build_url.hostname not in (service_host, web_host):
                parser.error(
                    "%s is not on this Launchpad instance (%s)" % (
                        args.build_url, web_host))
    if args.command == "set":
        if args.livefs_url is not None:
            args.livefs_url = adjust_lp_url(parser, args, args.livefs_url)
        if args.build_url is not None:
            args.build_url = adjust_lp_url(parser, args, args.build_url)

    return commands[args.command](args)

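Editor's note: get_last_livefs_builds relies on completed_builds being ordered newest-first, so the first build seen per architecture wins and the scan stops once every architecture is covered. The same first-hit-wins reduction in miniature:

    def latest_per_key(items, wanted_keys):
        """Keep the first item seen for each key, scanning newest-first."""
        found = {}
        for key, value in items:
            if key in wanted_keys and key not in found:
                found[key] = value
            if set(found) == set(wanted_keys):
                break
        return [value for _, value in sorted(found.items())]

    builds = [('amd64', 'build-9'), ('arm64', 'build-8'), ('amd64', 'build-7')]
    assert latest_per_key(builds, {'amd64', 'arm64'}) == ['build-9', 'build-8']
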
@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python3

# Copyright (C) 2017 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7

# Copyright (C) 2011, 2012 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@ubuntu.com>

@ -1,7 +1,8 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python2.7
|
||||
|
||||
# Copyright (C) 2011, 2012 Canonical Ltd.
|
||||
# Author: Martin Pitt <martin.pitt@ubuntu.com>
|
||||
# Author: Brian Murray <brian@ubuntu.com>
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@ -28,6 +29,10 @@ import sys
|
||||
import time
|
||||
|
||||
from charts import make_chart, make_chart_header
|
||||
from utils import read_tag_file
|
||||
|
||||
default_base = '/home/ubuntu-archive/mirror/ubuntu'
|
||||
rdeps_with_alternates = []
|
||||
|
||||
|
||||
def parse_checkrdepends_file(path, pkgmap):
|
||||
@ -48,24 +53,70 @@ def parse_checkrdepends_file(path, pkgmap):
|
||||
pkgmap.setdefault(rdep, (cur_component, []))[1].append(cur_arch)
|
||||
|
||||
|
||||
def _pkg_removable(pkg, nbs, checked_v):
    '''Recursively check if pakcage is removable.
def _pkg_removable(options, pkg, nbs, checked_v):
    '''Recursively check if package is removable.

    checked_v is the working set of already checked vertices, to avoid infinite
    loops.
    '''
    checked_v.add(pkg)
    packages = {}
    for rdep in nbs.get(pkg, []):
        if rdep in checked_v:
            continue
        #checked_v.add(rdep)
        if not rdep in nbs:
            global rdeps_with_alternates
            # utilize a copy of the arches as nbs will be modified
            arches = list(nbs[pkg][rdep][1])
            for arch in arches:
                alternate_available = False
                if arch == 'build':
                    ptype = 'source'
                    file = 'Sources'
                else:
                    ptype = 'binary-%s' % arch
                    file = 'Packages'
                key = '%s/dists/%s/%s/%s/%s.gz' % \
                    (options.archive_base, options.suite, nbs[pkg][rdep][0],
                     ptype, file)
                if key not in packages:
                    packages[key] = read_tag_file(key, rdep)
                stanzas = packages[key]
                for stanza in stanzas:
                    if 'binary' in ptype:
                        fields = ('Pre-Depends', 'Depends', 'Recommends')
                    else:
                        fields = ('Build-Depends', 'Build-Depends-Indep')
                    for field in fields:
                        if field not in stanza:
                            continue
                        if '|' not in stanza[field]:
                            continue
                        for or_dep in stanza[field].split(','):
                            if '|' not in or_dep:
                                continue
                            alternatives = [dep.strip()
                                            for dep in or_dep.split('|')]
                            if pkg not in alternatives:
                                continue
                            for dep in alternatives:
                                if dep == pkg:
                                    continue
                                if dep not in nbs:
                                    alternate_available = True
                                    break
                if alternate_available:
                    nbs[pkg][rdep][1].remove(arch)

            if len(nbs[pkg][rdep][1]) == 0:
                rdeps_with_alternates.append(rdep)

        if rdep not in nbs and rdep not in rdeps_with_alternates:
            try:
                checked_v.remove(rdep)
            except KeyError:
                pass
            return False
        if not _pkg_removable(rdep, nbs, checked_v):
        if not _pkg_removable(options, rdep, nbs, checked_v):
            try:
                checked_v.remove(rdep)
            except KeyError:
@@ -74,7 +125,7 @@ def _pkg_removable(pkg, nbs, checked_v):
    return True


def get_removables(nbs):
def get_removables(options, nbs):
    '''Get set of removable packages.

    This includes packages with no rdepends and disconnected subgraphs, i. e.
@@ -86,10 +137,9 @@ def get_removables(nbs):
        if p in removable:
            continue
        checked_v = set()
        if _pkg_removable(p, nbs, checked_v):
            # we can add the entire cluster here, not just p; avoids
            # re-checking the other vertices in that cluster
            removable.update(checked_v)
        if _pkg_removable(options, p, nbs, checked_v):
            # we only add packages which are nbs to removable
            removable.update([p for p in checked_v if p in nbs])

    return removable

@@ -97,6 +147,7 @@ def get_removables(nbs):
def html_report(options, nbs, removables):
    '''Generate HTML report from NBS map.'''

    global rdeps_with_alternates
    print('''\
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
  "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
@@ -148,9 +199,11 @@ def html_report(options, nbs, removables):
            cls = 'removable'
        else:
            cls = 'normal'
        print('<tr><th colspan="4"><span class="%s">%s</span></td></tr>' %
        print('<tr><th colspan="4"><span class="%s">%s</span></th></tr>\n' %
              (cls, pkg), end="")
        for rdep in sorted(nbsmap):
            if rdep in rdeps_with_alternates:
                continue
            (component, arches) = nbsmap[rdep]

            if component in ('main', 'restricted'):
@@ -208,8 +261,12 @@ def main():
    parser = OptionParser(
        usage="%prog <checkrdepends output directory>",
        description="Generate an HTML report of current NBS binary packages.")
    parser.add_option('-B', '--archive-base', dest='archive_base',
                      help=('archive base directory (default: %s)' %
                            default_base),
                      default=default_base)
    parser.add_option('-d', '--distribution', default='ubuntu')
    parser.add_option('-s', '--suite', default='disco')
    parser.add_option('-s', '--suite', default='groovy')
    parser.add_option(
        '--csv-file', help='record CSV time series data in this file')
    options, args = parser.parse_args()
@@ -246,7 +303,7 @@ def main():
    # rd.replace('-', '').replace('.', '')), file=dot)
    # print('}', file=dot)

    removables = get_removables(nbs)
    removables = get_removables(options, nbs)

    html_report(options, nbs, removables)
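The new alternates handling above reduces to one question: for an OR-dependency ("a | b") that names the NBS package, does some other alternative exist outside the NBS set? A standalone sketch of that test on Debian-style dependency strings (the function name and sample data are illustrative only):

    def has_alternative(field_value, pkg, nbs):
        # True if an OR-group naming pkg also offers a non-NBS alternative.
        for or_dep in field_value.split(','):
            if '|' not in or_dep:
                continue
            alternatives = [dep.strip() for dep in or_dep.split('|')]
            if pkg not in alternatives:
                continue
            for dep in alternatives:
                if dep != pkg and dep not in nbs:
                    return True
        return False

    # 'foo' is NBS, but 'bar' can satisfy the dependency instead.
    print(has_alternative('baz, foo | bar', 'foo', {'foo': {}}))  # True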
@@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python3

# Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.
# Authors:
@@ -32,15 +32,10 @@ import shutil
import subprocess
import sys
import tempfile
try:
    from urllib.parse import unquote, urlsplit
    from urllib.request import urlopen, urlretrieve
except ImportError:
    from urllib import unquote, urlretrieve
    from urllib2 import urlopen
    from urlparse import urlsplit

from launchpadlib.launchpad import Launchpad
from six.moves.urllib.parse import unquote, urlsplit
from six.moves.urllib.request import urlopen, urlretrieve
from ubuntutools.question import YesNoQuestion

import lputils
@@ -81,7 +76,7 @@ def find_matching_uploads(options, explicit_suite):
            continue
        changes_file = urlopen(upload.changes_file_url)
        try:
            changes = changes_file.read()
            changes = changes_file.read().decode("UTF-8", errors="replace")
        finally:
            changes_file.close()
        if (" unstable; urgency=" not in changes and
ubuntu-archive-tools/oem-metapackage-mir-check (new executable file, 78 lines)
@@ -0,0 +1,78 @@
#!/bin/bash

# Copyright (C) 2020 Canonical Ltd.
# Author: Iain Lane <iain.lane@canonical.com>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# Compare a given source package with the oem-qemu-meta reference package, to
# see if it complies with the MIR exception granted in
# https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM

set -e
set -u

shopt -s nullglob

THIS="$(basename "${0}")"

ensure_programs() {
    if [ ${#} -gt 0 ] && ! type "${1}" >/dev/null 2>/dev/null; then
        echo "Required program $1 not found." >&2
        exit 1
    fi

    shift

    if [ ${#} -gt 0 ]; then
        ensure_programs "${@}"
    fi
}

if [ ${#} -ne 1 ]; then
    echo -e "Usage: ${THIS} <dsc>\\n" >&2
    cat <<EOM >&2
Compare the given package against the oem-qemu-meta reference package. Check
that all the differences are inconsequential or expected (different modalias,
different package name), and then promote or NEW the package directly to main.

https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM
EOM
    exit 1
fi

ensure_programs pull-lp-source debdiff

if ! [ -e "${1}" ]; then
    echo "${THIS}: ${1} not found" >&2
    exit 1
fi

DSC="$(realpath -e "${1}")"

WORKINGDIR=$(mktemp -d)

trap 'rm -rf ${WORKINGDIR}' EXIT HUP INT QUIT TERM

pushd "${WORKINGDIR}" >/dev/null

# Download the reference package
pull-lp-source oem-qemu-meta -d 2>/dev/null

if [ -t 1 ] && type colordiff >/dev/null 2>/dev/null; then
    debdiff oem-qemu-meta_*.dsc "${DSC}" 2>/dev/null | colordiff
else
    debdiff oem-qemu-meta_*.dsc "${DSC}" 2>/dev/null
fi
@@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python2.7

from __future__ import print_function

@@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python2.7

from __future__ import print_function

@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7
# -*- coding: utf-8 -*-

# Copyright (C) 2013 Canonical Ltd.

@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7
# -*- coding: utf-8 -*-

# Copyright (C) 2013 Canonical Ltd.

@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7

# Copyright (C) 2013 Canonical Ltd.
# Author: Brian Murray <brian.murray@canonical.com>
@@ -261,6 +261,8 @@ def create_email_notifications(releases, spph_buckets):
            signer_name = signer.name
    # use the changes file as a backup method for determining email addresses
    changes_file_url = spph.changesFileUrl()
    changer_name = ''
    changer_email = ''
    try:
        changes_file = urlopen(changes_file_url)
        for line in changes_file.readlines():

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python2.7

# Check for override mismatches between pockets
# Copyright (C) 2005, 2008, 2011, 2012 Canonical Ltd.

@@ -1,4 +1,4 @@
#! /usr/bin/env python
#! /usr/bin/env python2.7

from __future__ import print_function

@@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python2.7

# Copyright (C) 2010, 2011, 2012 Canonical Ltd.

@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7

# Copyright (C) 2011 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python2.7

# Synchronise package priorities with germinate output
# Copyright (C) 2005, 2009, 2010, 2011, 2012 Canonical Ltd.

@@ -99,8 +99,12 @@ def parse_removals_file(options, removals_file):
        # Removed the following packages from unstable:
        elif state == "removed from":
            # complete processing of the date from the preceding line
            removal["date"] = time.mktime(time.strptime(
                date, "%a, %d %b %Y %H:%M:%S %z"))
            try:
                removal["date"] = time.mktime(time.strptime(
                    date, "%a, %d %b %Y %H:%M:%S %z"))
            except ValueError:
                removal["date"] = time.mktime(time.strptime(
                    date, "%a %d %b %H:%M:%S %Z %Y"))
            removal["ftpmaster"] = ftpmaster.replace("] [ftpmaster: ", "")

            prefix = "Removed the following packages from "
@@ -173,7 +177,7 @@ def show_reverse_depends(options, package):
    series_name = options.series.name
    commands = (
        ["reverse-depends", "-r", series_name, "src:%s" % package],
        ["reverse-depends", "-r", series_name, "-b", "src:%s" % package],
        ["reverse-depends", "-r", series_name, "-a", "source", "src:%s" % package],
    )
    for command in commands:
        subp = subprocess.Popen(command, stdout=subprocess.PIPE)
@@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python2.7

# Copyright (C) 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>
@@ -34,16 +34,18 @@ def promote(options, name, version, architecture):
    else:
        display = "%s/%s/%s" % (name, version, architecture)

    # check to see if the package and version has already been published or is
    # pending publication
    if architecture is None:
        try:
            release_sources = options.archive.getPublishedSources(
                source_name=name, version=version,
                distro_series=options.series, pocket="Release",
                distro_series=options.series, pocket=options.pocket.title(),
                exact_match=True, status="Published")
        except IndexError:
            release_sources = options.archive.getPublishedSources(
                source_name=name, version=version,
                distro_series=options.series, pocket="Release",
                distro_series=options.series, pocket=options.pocket.title(),
                exact_match=True, status="Pending")
        except HTTPError as e:
            print("getPublishedSources %s: %s" % (display, e.content),
@@ -60,12 +62,16 @@ def promote(options, name, version, architecture):
    sys.stdout.flush()

    try:
        percentage = None
        if options.pocket == 'updates':
            percentage = 10
        options.archive.copyPackage(
            source_name=name, version=version,
            from_archive=options.archive,
            from_series=options.series.name, from_pocket="Proposed",
            to_series=options.series.name, to_pocket="Release",
            to_series=options.series.name, to_pocket=options.pocket.title(),
            include_binaries=True, sponsored=options.requestor,
            phased_update_percentage=percentage,
            auto_approve=True)
    except HTTPError as e:
        print("copyPackage %s: %s" % (display, e.content), file=sys.stderr)
@@ -87,7 +93,8 @@ def promote(options, name, version, architecture):

    if architecture is None:
        try:
            proposed_source.requestDeletion(removal_comment="moved to release")
            proposed_source.requestDeletion(removal_comment="moved to %s" %
                                            options.pocket)
        except HTTPError as e:
            print("requestDeletion %s: %s" % (display, e.content),
                  file=sys.stderr)
@@ -102,7 +109,8 @@ def promote(options, name, version, architecture):
            if architecture != "i386":
                continue
            try:
                bpph.requestDeletion(removal_comment="moved to release")
                bpph.requestDeletion(removal_comment="moved to %s" %
                                     options.pocket)
            except HTTPError as e:
                print("requestDeletion %s/%s/%s: %s" %
                      (bpph.binary_package_name, bpph.binary_package_version,
@@ -173,9 +181,15 @@ def main():
    parser.add_option(
        "-s", "--series", dest="suite",
        metavar="SERIES", help="promote from SERIES-proposed to SERIES")
    parser.add_option(
        "-p", "--pocket", default="release",
        help="the pocket to which the package should be copied")
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.error("need britney output delta file")
    if options.pocket not in ('updates', 'release'):
        parser.error("pocket must be updates or release not %s" %
                     options.pocket)

    options.launchpad = Launchpad.login_with(
        "promote-to-release", options.launchpad_instance, version="devel")
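The new -p/--pocket option is validated against 'updates' and 'release' and then title-cased wherever Launchpad expects a pocket name; str.title() does the mapping:

    # 'updates' -> 'Updates', 'release' -> 'Release'
    for pocket in ('updates', 'release'):
        print(pocket.title())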
@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7
# -*- coding: utf-8 -*-

# Copyright (C) 2010, 2011, 2012 Canonical Ltd.
@@ -54,8 +54,8 @@ product_re = re.compile(
    '(alternate|desktop|dvd|server(?: subiquity)?|mobile|base|active|wubi)(?: preinstalled)? '
    '(i386|amd64$|amd64\+mac|armel$|armel\+dove|armel\+omap$|armel\+omap4|'
    'armel\+ac100|armel\+mx5|armhf$|armhf\+omap$|armhf\+omap4|armhf\+ac100|'
    'armhf\+mx5|armhf\+nexus7|armhf\+raspi2|armhf\+raspi3|arm64$|arm64\+raspi3|'
    'powerpc|ppc64el|s390x)', re.I)
    'armhf\+mx5|armhf\+nexus7|armhf\+raspi$|armhf\+raspi2|armhf\+raspi3|'
    'arm64$|arm64\+raspi$|arm64\+raspi3|powerpc|ppc64el|s390x)', re.I)

# map an image type from the ISO tracker to a source directory for
# publish-release
@@ -67,6 +67,7 @@ type_map = {
    'mobile': 'daily-live',
    'active': 'daily-live',
    'server': 'daily',
    'legacy-server': 'daily',
    'base': 'daily',
    'wubi': 'wubi',
    'preinstalled-desktop': 'daily-preinstalled',
@@ -171,11 +172,19 @@ def parse_iso_tracker(opts):
            type = 'preinstalled-%s' % type
        if 'Server arm64+raspi3' in product:
            type = 'preinstalled-%s' % type
        if 'Server armhf+raspi' in product:
            type = 'preinstalled-%s' % type
        if 'Server arm64+raspi' in product:
            type = 'preinstalled-%s' % type
        if 'Server Subiquity' in product:
            type = 'live-server'
            project = 'ubuntu'
        if 'Preinstalled' in product:
            type = 'preinstalled-%s' % type
        if (ms.series_string == u'Focal' and
                project == 'ubuntu' and type == 'server'):
            project = 'ubuntu-server'
            type = 'legacy-server'
        if project == 'kubuntu' and type == 'mobile':
            project = 'kubuntu-mobile'
        if project == 'kubuntu' and type == 'active':
@@ -77,7 +77,11 @@ class QATrackerRPCObject():
            except ValueError:
                setattr(self, key, None)
        else:
            setattr(self, key, str(rpc_dict[key]))
            import sys
            if sys.version_info > (3,):
                setattr(self, key, str(rpc_dict[key]))
            else:
                setattr(self, key, unicode(rpc_dict[key]))

        self.tracker = tracker


@@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python3

# Copyright (C) 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>
@@ -80,12 +80,12 @@ def queue_items(options, args):
    if not args:
        args = ['']

    items = collections.OrderedDict()
    items = []
    for arg in args:
        arg = arg.strip()
        if arg.isdigit():
            item = queue_item(options, arg)
            if item in items:
            if item.self_link in {item.self_link for item in items}:
                continue
            if item.status != options.queue:
                raise ValueError(
@@ -103,7 +103,7 @@ def queue_items(options, args):
                        item_suite, options.distribution.name,
                        options.suite))
            if queue_item_allowed(options, item):
                items[item] = 1
                items.append(item)
        else:
            kwargs = {}
            if "/" in arg:
@@ -116,7 +116,7 @@ def queue_items(options, args):
                                              **kwargs)
            for item in new_items:
                if queue_item_allowed(options, item):
                    items[item] = 1
                    items.append(item)

    return items
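Replacing the OrderedDict with a plain list changes how duplicate queue items are detected: identity is now the Launchpad self_link rather than object equality. A small sketch with stand-in objects (the Item class and links here are hypothetical):

    class Item:
        def __init__(self, self_link):
            self.self_link = self_link

    items = [Item('.../ubuntu/+upload/1')]
    candidate = Item('.../ubuntu/+upload/1')  # same upload fetched again
    if candidate.self_link not in {item.self_link for item in items}:
        items.append(candidate)
    print(len(items))  # 1 -- the duplicate was skipped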
@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7

# Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.
# Copyright (C) 2010 Scott Kitterman <scott@kitterman.com>
@@ -41,7 +41,7 @@ import webbrowser
from launchpadlib.launchpad import Launchpad


default_release = 'cosmic'
default_release = 'focal'

lp = None


@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7

# Copyright (C) 2012 Canonical, Ltd.
# Author: Brian Murray <brian@canonical.com>

@@ -1,4 +1,4 @@
#! /usr/bin/python
#! /usr/bin/python3

# Copyright 2012 Canonical Ltd.
# Author: Colin Watson <cjwatson@ubuntu.com>

@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7
# Rescore all builds in a PPA.
#
# Copyright (C) 2012 Canonical Ltd.

@@ -31,7 +31,7 @@ import yaml
import json

request_url = 'https://autopkgtest.ubuntu.com/request.cgi'
default_series = 'disco'
default_series = 'groovy'
args = None


@@ -65,10 +65,10 @@ def parse_args():
    parser.add_argument('--state', default='REGRESSION',
                        help='generate commands for given test state (default: %(default)s)')
    parser.add_argument('--max-age', type=float, metavar='DAYS',
                        help='only consider candiates which are at most '
                        help='only consider candidates which are at most '
                        'this number of days old (float allowed)')
    parser.add_argument('--min-age', type=float, metavar='DAYS',
                        help='only consider candiates which are at least '
                        help='only consider candidates which are at least '
                        'this number of days old (float allowed)')
    parser.add_argument('--blocks',
                        help='rerun only those tests that were triggered '
@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python3

# Copyright (C) 2011, 2012 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@canonical.com>
@@ -28,27 +28,32 @@ from __future__ import print_function

from collections import defaultdict
from functools import partial
import datetime
import optparse
import os
import subprocess
import sys
import time
import unittest

try:
    from urllib.request import urlopen
except ImportError:
    from urllib import urlopen
from six.moves.urllib.request import urlopen
from io import TextIOWrapper

from launchpadlib.launchpad import Launchpad

from kernel_series import KernelSeries


# Each entry in this list is a list of source packages that are known
# to have inter-dependencies and must be released simultaneously.
# If possible, each list should be ordered such that earlier
# entries could be released slightly before subsequent entries.
RELEASE_TOGETHER_PACKAGE_GROUPS = [
    ['linux-hwe', 'linux-meta-hwe'],
    ['linux', 'linux-meta'],
    ['linux-hwe', 'linux-signed-hwe', 'linux-meta-hwe'],
    ['linux', 'linux-signed', 'linux-meta'],
    ['grub2', 'grub2-signed'],
    ['shim', 'shim-signed'],
    ['libreoffice', 'libreoffice-l10n'],
]

MISSING_PACKAGES_FROM_GROUP = (
@@ -59,6 +64,8 @@ MISSING_PACKAGES_FROM_GROUP = (
    " https://lists.ubuntu.com/archives/ubuntu-devel/2018-June/040380.html\n\n"
    "To ignore this message, pass '--skip-package-group-check'.")

BZR_HINT_BRANCH = "lp:~ubuntu-sru/britney/hints-ubuntu-%s"


def check_package_sets(packages):
    """Return a re-ordered list of packages respecting the PACKAGE_SETS
@@ -97,8 +104,8 @@ def check_package_sets(packages):
class CheckPackageSets(unittest.TestCase):
    def test_expected_linux_order_fixed(self):
        self.assertEqual(
            ['pkg1', 'linux', 'linux-meta', 'pkg2'],
            check_package_sets(['pkg1', 'linux-meta', 'linux', 'pkg2']))
            ['pkg1', 'linux', 'linux-signed', 'linux-meta', 'pkg2'],
            check_package_sets(['pkg1', 'linux-meta', 'linux', 'linux-signed', 'pkg2']))

    def test_raises_value_error_on_missing(self):
        self.assertRaises(
@@ -127,7 +134,7 @@ def match_srubugs(options, changesfileurl):
        return bugs

    # Load changesfile
    changelog = urlopen(changesfileurl)
    changelog = TextIOWrapper(urlopen(changesfileurl), encoding='utf-8')
    bugnums = []
    for l in changelog:
        if l.startswith('Launchpad-Bugs-Fixed: '):
@@ -155,7 +162,7 @@ def update_sru_bug(bug, pkg):
    sru_team = launchpad.people['ubuntu-sru']
    bug.unsubscribe(person=sru_team)
    text = ("The verification of the Stable Release Update for %s has "
            "completed successfully and the package has now been released "
            "completed successfully and the package is now being released "
            "to -updates. Subsequently, the Ubuntu Stable Release Updates "
            "Team is being unsubscribed and will not receive messages "
            "about this bug report. In the event that you encounter "
@@ -178,10 +185,10 @@ def get_versions(options, sourcename):
                    'published': proposed_date}
    '''
    versions = defaultdict(dict)
    if options.esm:
        pocket = 'Release'
    else:
    if src_archive.reference == 'ubuntu':
        pocket = 'Proposed'
    else:
        pocket = 'Release'

    matches = src_archive.getPublishedSources(
        source_name=sourcename, exact_match=not options.pattern,
@@ -192,9 +199,9 @@ def get_versions(options, sourcename):
            source_name=match.source_package_name, exact_match=True,
            status='Published', distro_series=series):
        key = pub.pocket.lower()
        # special case for ESM ppas, which don't have pockets but need
        # special case for ppas, which don't have pockets but need
        # to be treated as -proposed
        if options.esm and key == 'release':
        if pocket == 'Release' and key == 'release':
            key = 'proposed'
        versions[pub.source_package_name][key] = (
            pub.source_package_version)
@@ -212,7 +219,6 @@ def get_versions(options, sourcename):
        if pocket in pub.pocket:
            versions[pub.source_package_name]['changesfile'] = (
                pub.changesFileUrl())

    # devel version
    if devel_series:
        for pub in src_archive.getPublishedSources(
@@ -227,81 +233,142 @@ def get_versions(options, sourcename):
    return versions

def release_package(options, package):
    '''Release a package.'''
def release_packages(options, packages):
    '''Release the packages listed in the packages argument.'''

    pkg_versions_map = get_versions(options, package)
    if not pkg_versions_map:
        message = 'ERROR: No such package, ' + package + ', in -proposed, aborting\n'
        sys.stderr.write(message)
        sys.exit(1)
    pkg_versions_map = {}
    # Dictionary of packages and their versions that need copying by britney.
    # Those packages have unblock hints added.
    packages_to_britney = {}

    for pkg, versions in pkg_versions_map.iteritems():
        print('--- Releasing %s ---' % pkg)
        print('Proposed: %s' % versions['proposed'])
        if 'security' in versions:
            print('Security: %s' % versions['security'])
        if 'updates' in versions:
            print('Updates: %s' % versions['updates'])
        else:
            print('Release: %s' % versions.get('release'))
        if options.devel and 'devel' in versions:
            print('Devel: %s' % versions['devel'])
    for package in packages:
        pkg_versions_map[package] = get_versions(options, package)
        if not pkg_versions_map[package]:
            message = ('ERROR: No such package, ' + package + ', in '
                       '-proposed, aborting\n')
            sys.stderr.write(message)
            sys.exit(1)

        copy = partial(
            dst_archive.copyPackage, from_archive=src_archive,
            include_binaries=True, source_name=pkg,
            version=versions['proposed'], auto_approve=True)

        if options.devel:
            if ('devel' not in versions or
                    versions['devel'] in (
                        versions.get('updates', 'notexisting'),
                        versions['release'])):
                if not options.no_act:
                    copy(to_pocket='Proposed', to_series=devel_series.name)
                print('Version in %s matches development series, '
                      'copied to %s-proposed' % (release, devel_series.name))
        for pkg, versions in pkg_versions_map[package].items():
            print('--- Releasing %s ---' % pkg)
            print('Proposed: %s' % versions['proposed'])
            if 'security' in versions:
                print('Security: %s' % versions['security'])
            if 'updates' in versions:
                print('Updates: %s' % versions['updates'])
            else:
                print('ERROR: Version in %s does not match development '
                      'series, not copying' % release)
                print('Release: %s' % versions.get('release'))
            if options.devel and 'devel' in versions:
                print('Devel: %s' % versions['devel'])

        if options.no_act:
            if options.release:
                print('Would copy to %s' % release)
            else:
                print('Would copy to %s-updates' % release)
        else:
            if options.release:
                # -proposed -> release
                copy(to_pocket='Release', to_series=release)
                print('Copied to %s' % release)
            else:
                # -proposed -> -updates
                # only phasing updates for >=raring to start
                if (release not in ('lucid', 'precise') and
                        package != 'linux' and
                        not package.startswith('linux-') and
                        not options.security):
                    copy(to_pocket='Updates', to_series=release,
                         phased_update_percentage=options.percentage)
            copy = partial(
                dst_archive.copyPackage, from_archive=src_archive,
                include_binaries=True, source_name=pkg,
                version=versions['proposed'], auto_approve=True)

            if options.devel and not options.britney:
                if ('devel' not in versions or
                        versions['devel'] in (
                            versions.get('updates', 'notexisting'),
                            versions['release'])):
                    if not options.no_act:
                        copy(to_pocket='Proposed', to_series=devel_series.name)
                    print('Version in %s matches development series, '
                          'copied to %s-proposed' %
                          (release, devel_series.name))
                else:
                    copy(to_pocket='Updates', to_series=release)
                    print('Copied to %s-updates' % release)
        if not options.no_bugs:
            sru_bugs = match_srubugs(options, versions['changesfile'])
            tag = 'verification-needed-%s' % release
            for sru_bug in sru_bugs:
                if tag not in sru_bug.tags:
                    update_sru_bug(sru_bug, pkg)
                    print('ERROR: Version in %s does not match development '
                          'series, not copying' % release)

    # -proposed -> -security
    if options.security:
        if options.no_act:
            print('Would copy to %s-security' % release)
            if options.no_act:
                if options.release:
                    print('Would copy to %s' % release)
                else:
                    print('Would copy to %s-updates' % release)
        else:
            copy(to_pocket='Security', to_series=release)
            print('Copied to %s-security' % release)
                if options.release:
                    # -proposed -> release
                    copy(to_pocket='Release', to_series=release)
                    print('Copied to %s' % release)
                else:
                    # -proposed -> -updates
                    if (package != 'linux' and
                            not package.startswith('linux-') and
                            not options.security):
                        if options.britney:
                            # We can opt in to use britney for the package copy
                            # instead of doing direct pocket copies.
                            packages_to_britney[pkg] = versions['proposed']
                        else:
                            copy(to_pocket='Updates', to_series=release,
                                 phased_update_percentage=options.percentage)
                            print('Copied to %s-updates' % release)
                    else:
                        copy(to_pocket='Updates', to_series=release)
                        print('Copied to %s-updates' % release)

            # -proposed -> -security
            if options.security:
                if options.no_act:
                    print('Would copy to %s-security' % release)
                else:
                    copy(to_pocket='Security', to_series=release)
                    print('Copied to %s-security' % release)

    # Write hints for britney to copy the selected packages
    if options.britney and packages_to_britney:
        release_package_via_britney(options, packages_to_britney)
    # If everything went well, update the bugs
    if not options.no_bugs:
        for pkg_versions in pkg_versions_map.values():
            for pkg, versions in pkg_versions.items():
                sru_bugs = match_srubugs(options, versions['changesfile'])
                tag = 'verification-needed-%s' % release
                for sru_bug in sru_bugs:
                    if tag not in sru_bug.tags:
                        update_sru_bug(sru_bug, pkg)


def release_package_via_britney(options, packages):
    '''Release selected packages via britney unblock hints.'''

    hints_path = os.path.join(options.cache, 'hints-ubuntu-%s' % release)
    hints_file = os.path.join(hints_path, 'sru-release')
    # Checkout the hints branch
    if not os.path.exists(hints_path):
        cmd = ['bzr', 'checkout', '--lightweight',
               BZR_HINT_BRANCH % release, hints_path]
    else:
        cmd = ['bzr', 'update', hints_path]
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError:
        sys.stderr.write("Failed bzr %s for the hints branch at %s\n" %
                         (cmd[1], hints_path))
        sys.exit(1)
    # Create the hint with a timestamp comment
    timestamp = time.time()  # In python2 we can't use datetime.timestamp()
    date = datetime.datetime.now().ctime()
    unblock_string = '# %s %s\n' % (timestamp, date)
    unblock_string += ''.join(['unblock %s/%s\n' % (pkg, ver)
                               for pkg, ver in packages.items()])
    unblock_string += '\n'
    # Update and commit the hint
    with open(hints_file, 'a+') as f:
        f.write(unblock_string)
    cmd = ['bzr', 'commit', '-m', 'sru-release %s %s' %
           (release, ' '.join(packages.keys()))]
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError:
        sys.stderr.write('Failed to bzr commit to the hints file %s\n'
                         'Please investigate the local hint branch and '
                         'commit the required unblock entries as otherwise '
                         'your changes will be lost.\n' %
                         hints_file)
        sys.exit(1)
    print('Added hints for promotion in release %s of packages %s' %
          (release, ' '.join(packages.keys())))

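For reference, the entries appended to the 'sru-release' hints file are a timestamp comment followed by one unblock line per package; the package/version pairs in this sketch are hypothetical:

    import datetime
    import time

    packages = {'hello': '2.10-2ubuntu1', 'curl': '7.68.0-1ubuntu2.2'}  # hypothetical
    entry = '# %s %s\n' % (time.time(), datetime.datetime.now().ctime())
    entry += ''.join('unblock %s/%s\n' % (pkg, ver)
                     for pkg, ver in packages.items())
    print(entry)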
if __name__ == '__main__':
@@ -346,6 +413,13 @@ if __name__ == '__main__':
        '--skip-package-group-check', action='store_true', default=False,
        help=('Skip the package set checks that require some packages '
              'be released together'))
    parser.add_option(
        '--britney', action='store_true', default=False,
        help='Use britney for copying the packages over to -updates (only '
             'works for regular package releases into updates)')
    parser.add_option(
        '-C', '--cache', default='~/.cache/sru-release',
        help='Cache directory to be used for the britney hints checkout')

    options, args = parser.parse_args()

@@ -359,6 +433,13 @@ if __name__ == '__main__':
    release = args.pop(0)
    packages = args

    # XXX: we only want to instantiate KernelSeries if we suspect this is
    # a kernel package, this is necessarily dirty, dirty, dirty.
    kernel_checks = False
    for package in packages:
        if package.startswith('linux-') or package == 'linux':
            kernel_checks = True

    if not options.skip_package_group_check:
        try:
            packages = check_package_sets(packages)
@@ -366,6 +447,14 @@ if __name__ == '__main__':
            sys.stderr.write(e.args[0] + '\n')
            sys.exit(1)

    options.cache = os.path.expanduser(options.cache)
    if not os.path.isdir(options.cache):
        if os.path.exists(options.cache):
            print('Cache path %s already exists and is not a directory.'
                  % options.cache)
            sys.exit(1)
        os.makedirs(options.cache)

    launchpad = Launchpad.login_with(
        'ubuntu-archive-tools', options.launchpad_instance, version='devel')
    ubuntu = launchpad.distributions['ubuntu']
@@ -375,12 +464,57 @@ if __name__ == '__main__':
        sys.stderr.write(
            'WARNING: No current development series, -d will not work\n')
        devel_series = None
    if release == 'precise':

    ks_source = None
    if kernel_checks:
        kernel_series = KernelSeries()

        # See if we have a kernel-series record for this package. If we do
        # then we are going to pivot to the routing therein.
        ks_series = kernel_series.lookup_series(codename=release)
        for ks_source_find in ks_series.sources:
            for ks_package in ks_source_find.packages:
                if ks_package.name == packages[0]:
                    ks_source = ks_source_find
                    break

        # First confirm everything in this set we are attempting to release
        # are indeed listed as valid for this kernel.
        if ks_source is not None:
            for package in packages:
                if ks_source.lookup_package(package) is None:
                    sys.stderr.write(
                        'WARNING: {} not found in packages for kernel {}\n'.format(
                            package, ks_source.name))

    if ks_source is None and release in ('precise', 'trusty'):
        sys.stdout.write(
            'Called for precise; assuming kernel ESM publication\n')
            'Called for {}; assuming kernel ESM publication\n'.format(release))
        options.esm = True

    if options.esm:
        # If we found a KernelSeries entry this has accurate routing information
        # attached use that.
        if ks_source is not None:
            src_archive_ref, src_archive_pocket = ks_source.routing.lookup_destination('proposed', primary=True)
            src_archive = launchpad.archives.getByReference(
                reference=src_archive_ref)
            dst_archive_ref, dst_archive_pocket = ks_source.routing.lookup_destination('updates', primary=True)
            if dst_archive_ref == src_archive_ref:
                dst_archive = src_archive
            else:
                dst_archive = launchpad.archives.getByReference(
                    reference=dst_archive_ref)

            # Announce any non-standard archive routing.
            if src_archive_ref != 'ubuntu':
                print("Src Archive: {}".format(src_archive_ref))
            if dst_archive_ref != 'ubuntu':
                print("Dst Archive: {}".format(dst_archive_ref))
            # --security is meaningless for private PPA publishing (XXX: currently true)
            options.security = False
            options.release = True

    elif options.esm:
        # --security is meaningless for ESM everything is a security update.
        options.security = False
        options.release = True
@@ -391,5 +525,4 @@ if __name__ == '__main__':
    else:
        src_archive = dst_archive = ubuntu.getArchive(name='primary')

    for package in packages:
        release_package(options, package)
    release_packages(options, packages)
@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7

# Copyright (C) 2015 Brian Murray <brian.murray@canonical.com>

@@ -50,13 +50,19 @@ def parse_options():
        help="release (default: %s)" % default_release)
    parser.add_option(
        "-p", "--package", dest="sourcepkg")
    parser.add_option(
        "--reason", dest="reason", default="ancient",
        help="reason for removal: ancient, failed (default: ancient)")

    opts, args = parser.parse_args()

    if opts.reason not in ('ancient', 'failed'):
        parser.error('Reason can only be "ancient" or "failed".')

    return (opts, args)


def process_bug(launchpad, distroseries, sourcepkg, num):
def process_bug(launchpad, distroseries, sourcepkg, num, reason):
    bug_target_re = re.compile(
        r'/ubuntu/(?:(?P<suite>[^/]+)/)?\+source/(?P<source>[^/]+)$')
    bug = launchpad.bugs[num]
@@ -92,11 +98,19 @@ def process_bug(launchpad, distroseries, sourcepkg, num):
    bug.tags = tags
    bug.lp_save()

    text = ('The version of %s in the proposed pocket of %s that was '
            'purported to fix this bug report has been removed because '
            'the bugs that were to be fixed by the upload were not '
            'verified in a timely (105 days) fashion.' %
            (sourcepkg, series_name.title()))
    if reason == 'failed':
        text = ('The version of %s in the proposed pocket of %s that was '
                'purported to fix this bug report has been removed '
                'because one or more bugs that were to be fixed by the '
                'upload have failed verification and been in this state '
                'for more than 10 days.' %
                (sourcepkg, series_name.title()))
    else:  # 'ancient'
        text = ('The version of %s in the proposed pocket of %s that was '
                'purported to fix this bug report has been removed '
                'because the bugs that were to be fixed by the upload '
                'were not verified in a timely (105 days) fashion.' %
                (sourcepkg, series_name.title()))
    bug.newMessage(content=text,
                   subject='Proposed package removed from archive')

@@ -117,12 +131,17 @@ def process_bug(launchpad, distroseries, sourcepkg, num):

if __name__ == '__main__':

    default_release = 'cosmic'
    removal_comment = ('The package was removed due to its SRU bug(s) '
                       'not being verified in a timely fashion.')
    default_release = 'focal'

    (opts, bugs) = parse_options()

    if opts.reason == 'failed':
        removal_comment = ('The package was removed due to one or more of '
                           'its SRU bugs having failed verification.')
    else:  # 'ancient'
        removal_comment = ('The package was removed due to its SRU bug(s) '
                           'not being verified in a timely fashion.')

    launchpad = Launchpad.login_with('sru-remove', opts.launchpad_instance,
                                     version="devel")
    ubuntu = launchpad.distributions['ubuntu']
@@ -156,4 +175,4 @@ if __name__ == '__main__':
        sys.exit(1)
    # only comment on the bugs after removing the package
    for bug in bugs:
        process_bug(launchpad, series, opts.sourcepkg, bug)
        process_bug(launchpad, series, opts.sourcepkg, bug, opts.reason)
@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python3

# Copyright (C) 2009, 2010, 2011, 2012 Canonical Ltd.
# Authors:
@@ -159,6 +159,20 @@ def bug_open_js(bugs, title=None):
    return '<button onclick="%s">%s (%i)</button>' % (js, title, len(bugs))


def verification_failed_check_for_removal(activities, release):
    '''Helper function, checking if the verification-failed bug qualifies the
    upload for removal'''
    for activity in reversed(activities):
        if (activity.whatchanged == 'tags' and
                'verification-failed-%s' % release in activity.newvalue):
            age = (datetime.datetime.now() - activity.datechanged.replace(
                tzinfo=None)).days
            if age >= 10:
                return True
            break
    return False


def print_report(srus):
    '''render the report'''
    global releases
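The helper's 10-day threshold is computed from the bug's activity timestamp; a compact sketch of the same age test with a fabricated tag-change date twelve days in the past:

    import datetime

    changed = datetime.datetime.now() - datetime.timedelta(days=12)  # fabricated
    age = (datetime.datetime.now() - changed.replace(tzinfo=None)).days
    print(age >= 10)  # True -> the upload qualifies for removal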
@@ -192,6 +206,7 @@ def print_report(srus):
    a.testing { color: blue; }
    a.broken { text-decoration: line-through; color: black; }
    a.removal { color: gray; font-weight: bold }
    a.blockproposed:after { content: "\\1F6A7"; font-weight: normal; }
  </style>
</head>
<body>
@@ -229,7 +244,10 @@ bugs in <span style="font-style: italic">italic</span> are kernel tracking
bugs and bugs that are
<span style="text-decoration: line-through;">struck through</span> are
duplicate bug reports or weren't accessible at the time the report was
generated.</p>''')
generated. Bugs with the 🚧 character next to their number are bugs with
a block-proposed-SERIES tag, indicating that they should not be released
without double-checking the bug contents. Those can be staged in -proposed,
even when verified, for a reason.</p>''')

    #
    # pending SRUs
@@ -240,13 +258,14 @@ generated.</p>''')
    pkgcleanup_release = []
    pkgsuperseded = []
    # set of (series_name, srcpkg, [bugs])
    proposed_antique = []
    proposed_ancient = []
    proposed_failed = []
    for release in sorted(srus):
        if not srus[release]:
            continue
        for pack in srus[release]:
            pkg_index[release][pack] = srus[release][pack]['published']
        for pkg, pub in sorted(pkg_index[release].iteritems(),
        for pkg, pub in sorted(pkg_index[release].items(),
                               key=itemgetter(1)):
            rpkg = srus[release][pkg]
            if cleanup(rpkg):
@@ -273,20 +292,26 @@ generated.</p>''')
<tr><th>Package</th><th>-release</th><th>-updates</th>
<th>-proposed (signer, creator)</th>
<th>changelog bugs</th><th>days</th></tr>''' % (release, release))
        for pkg, pub in sorted(pkg_index[release].iteritems(),
        for pkg, pub in sorted(pkg_index[release].items(),
                               key=itemgetter(1)):
            # skip everything that shows up on the kernel SRU reports
            if (pkg in ('linux', 'linux-hwe', 'linux-hwe-edge',
                        'linux-kvm', 'linux-oem',
                        'linux-raspi2', 'linux-snapdragon',
            if (pkg in ('linux', 'linux-hwe', 'linux-hwe-edge', 'linux-hwe-5.0',
                        'linux-kvm', 'linux-oem', 'linux-oem-osp1',
                        'linux-raspi2', 'linux-raspi2-5.3',
                        'linux-snapdragon', 'linux-bluefield',
                        'linux-keystone', 'linux-armadaxp', 'linux-ti-omap4',
                        'linux-aws', 'linux-aws-hwe', 'linux-aws-edge',
                        'linux-azure', 'linux-azure-edge',
                        'linux-gcp', 'linux-gcp-edge',
                        'linux-gke', 'linux-euclid', 'linux-oracle') or
                        'linux-aws', 'linux-aws-5.0', 'linux-aws-5.3',
                        'linux-aws-hwe', 'linux-aws-edge',
                        'linux-azure', 'linux-azure-edge', 'linux-azure-5.3',
                        'linux-gcp', 'linux-gcp-5.3', 'linux-gcp-edge',
                        'linux-gke', 'linux-gke-4.15', 'linux-gke-5.0',
                        'linux-gke-5.3',
                        'linux-euclid', 'linux-oracle', 'linux-oracle-5.0',
                        'linux-oracle-5.3') or
                    pkg.startswith('linux-signed') or
                    pkg.startswith('linux-meta') or
                    pkg.startswith('linux-lts') or
                    pkg.startswith('linux-restricted-modules') or
                    pkg.startswith('linux-backports-modules')):
                continue
            # for langpack updates, only keep -en as a representative
@@ -331,14 +356,17 @@ generated.</p>''')
                  (pkgurl + rpkg['proposed'], rpkg['proposed'], uploaders))
            print(' <td>')
            removable = True
            antique = False
            for b, t in sorted(rpkg['bugs'].iteritems()):
            ancient = False
            failed_and_removable = False
            for b, t in sorted(rpkg['bugs'].items()):
                cls = ' class="'
                incomplete = False
                activities = None
                try:
                    bug = lp.bugs[b]
                    bug_title = bug.title.encode('UTF-8')
                    bug_title = bug.title
                    hover_text = bug_title
                    activities = bug.activity
                    for task in bug.bug_tasks:
                        if task.self_link.split('/')[4] != 'ubuntu':
                            continue
@@ -362,6 +390,12 @@ generated.</p>''')
                        elif ('verification-failed' in t or
                              'verification-failed-%s' % release in t):
                            cls += ' verificationfailed'
                            # Check if the SRU was in verification-failed for long
                            # enough to be considered for removal.
                            if not failed_and_removable and activities:
                                failed_and_removable = \
                                    verification_failed_check_for_removal(
                                        activities, release)
                        elif 'verification-done-%s' % release in t:
                            cls += ' verified'
                            removable = False
@@ -387,7 +421,7 @@ generated.</p>''')
                        if (m_date.replace(tzinfo=None) < today
                                - datetime.timedelta(16)):
                            cls += ' removal'
                            antique = True
                            ancient = True
                            continue
                        if 'messages' in cls:
                            cls = cls.replace('messages', '')
@@ -401,23 +435,29 @@ generated.</p>''')
                                bug_title, \
                                datetime.datetime.strftime(
                                    m_date, '%Y-%m-%d'))
                            hover_text += message.content.encode(
                                'UTF-8') + ' - '
                            hover_text += m_owner.name.encode(
                                'UTF-8')
                            antique = False
                            hover_text += message.content + ' - '
                            hover_text += m_owner.name
                            ancient = False
                except ClientError as error:
                    # people who don't use lp anymore
                    if error.response['status'] == '410':
                        continue
                # We now also try handling block-proposed tags for updates
                # that can be verified but should not be released yet for
                # some reasons.
                if 'block-proposed-%s' % release in t:
                    cls += ' blockproposed'
                cls += '"'

                print('<a href="%s/bugs/%d" '
                      'title="%s" %s>%d%s</a>' %
                      (lp_url, b, hover_text.replace('"', ''), cls, b,
                       '(hw)' if 'hw-specific' in t else ''))
            if antique and removable:
                proposed_antique.append((releases[release].name, pkg,
            if failed_and_removable:
                proposed_failed.append((releases[release].name, pkg,
                                        [str(b) for b in rpkg['bugs']]))
            elif ancient and removable:
                proposed_ancient.append((releases[release].name, pkg,
                                         [str(b) for b in rpkg['bugs']]))
            print(' </td>')
            print(' <td>%i</td></tr>' % age)
@@ -501,12 +541,24 @@ generated.</p>''')
                  '-e %s %s' % (r, pkg[2]['proposed'], pkg[1]))
    print('</pre>')

    print('<p>The following packages have bugs that have failed '
          'verification for more than 10 days and should be removed from '
          '-proposed:</p>')

    print('<pre>')
    for r in releases:
        for pkg in sorted(proposed_failed):
            if pkg[0].startswith(r):
                print('sru-remove --reason=failed -s %s -p %s %s' %
                      (r, pkg[1], ' '.join(pkg[2])))
    print('</pre>')

    print('<p>The following packages have not had their SRU bugs verified in '
          '105 days and should be removed from -proposed:</p>')

    print('<pre>')
    for r in releases:
        for pkg in sorted(proposed_antique):
        for pkg in sorted(proposed_ancient):
            if pkg[0].startswith(r):
                print('sru-remove -s %s -p %s %s' %
                      (r, pkg[1], ' '.join(pkg[2])))
@@ -554,7 +606,7 @@ def match_srubugs(changesfileurls):
        changelog = urlopen(changesfileurl)
        bugnums = []
        for l in changelog:
            if l.startswith('Launchpad-Bugs-Fixed: '):
            if l.startswith(b'Launchpad-Bugs-Fixed: '):
                bugnums = [int(b) for b in l.split()[1:]]
                break

@@ -630,7 +682,7 @@ def get_srus():
        if release != 'lucid':
            excuses_page = excuses_url % release
            excuses = urlopen(excuses_page)
            excuses_data = yaml.load(excuses)
            excuses_data = yaml.load(excuses, Loader=yaml.CSafeLoader)
            pkg_excuses = [excuse['source']
                           for excuse in excuses_data['sources']
                           if 'autopkgtest' in excuse['reason']
@@ -708,6 +760,10 @@ def bugs_from_changes(change_url):


def main():
    # Force encoding to UTF-8 even in non-UTF-8 locales.
    import io, sys
    sys.stdout = io.TextIOWrapper(
        sys.stdout.detach(), encoding="UTF-8", line_buffering=True)
    logging.basicConfig(level=DEBUGLEVEL,
                        format="%(asctime)s - %(levelname)s - %(message)s")
    lpinit()
@@ -35,9 +35,10 @@ import subprocess
import sys
import tempfile
try:
    from urllib.parse import quote
    from urllib.request import urlopen, urlretrieve
except ImportError:
    from urllib import urlopen, urlretrieve
    from urllib import urlopen, urlretrieve, quote
import webbrowser

from contextlib import ExitStack
@@ -126,7 +127,7 @@ def from_queue(options, archive, sourcepkg, series, version=None):
    queue = options.queue.title()
    queue_url = ('https://launchpad.net/ubuntu/%s/+queue?'
                 'queue_state=%s&batch=300&queue_text=%s' %
                 (series.name, queues[queue], sourcepkg))
                 (series.name, queues[queue], quote(sourcepkg)))
    uploads = [upload for upload in
               series.getPackageUploads(archive=archive, exact_match=True,
                                        name=sourcepkg, pocket='Proposed',
@@ -255,7 +256,7 @@ def reject_comment(launchpad, num, package, release, reason):

if __name__ == '__main__':

    default_release = 'cosmic'
    default_release = 'focal'
    ppa_url = ('https://launchpad.net/~%s/+archive/ubuntu/%s/+packages?'
               'field.series_filter=%s')

@@ -74,6 +74,13 @@ def process_bug(launchpad, sourcepkg, version, release, num):
              'Is there anything left to sponsor?' % num)

    if not sourcepkg or 'linux' not in sourcepkg:
        block_proposed_series = 'block-proposed-%s' % release
        if block_proposed_series in bug.tags:
            print('The %s tag is still set on bug LP: #%s. '
                  'Should the package continue to be blocked in proposed? '
                  'Please investigate and adjust the tags accordingly.'
                  % (block_proposed_series, num))

    # this dance is needed due to
    # https://bugs.launchpad.net/launchpadlib/+bug/254901
    btags = bug.tags
@@ -124,7 +131,8 @@ def process_bug(launchpad, sourcepkg, version, release, num):
    text += (' Your feedback will aid us getting this update out to other '
             'Ubuntu users.\n\nIf this package fixes the bug for you, '
             'please add a comment to this bug, mentioning the version of the '
             'package you tested and change the tag from '
             'package you tested, what testing has been performed on the '
             'package and change the tag from '
             'verification-needed-%s to verification-done-%s. '
             'If it does not fix the bug for you, please add a comment '
             'stating that, and change the tag to verification-failed-%s. '
@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python2.7

# Copyright (C) 2011 Iain Lane
# Copyright (C) 2011 Stefano Rivera

@@ -14,7 +14,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

DIST="${DIST:-disco}"
DIST="${DIST:-groovy}"

MADISON="$(rmadison -a source -s "$DIST" "$1")"
[ "$MADISON" ] || exit 1
@@ -51,7 +51,7 @@ wget -q -O- http://changelogs.ubuntu.com/changelogs/pool/$SECTION/$POOLINDEX/$1/
target="${target%;}"
target="${target%%-*}"
case $target in
    warty|hoary|breezy|dapper|edgy|feisty|gutsy|hardy|intrepid|jaunty|karmic|lucid|maverick|natty|oneiric|precise|quantal|raring|saucy|trusty|utopic|vivid|wily|xenial|yakkety|zesty|artful|bionic|cosmic|disco|devel)
    warty|hoary|breezy|dapper|edgy|feisty|gutsy|hardy|intrepid|jaunty|karmic|lucid|maverick|natty|oneiric|precise|quantal|raring|saucy|trusty|utopic|vivid|wily|xenial|yakkety|zesty|artful|bionic|cosmic|disco|eoan|focal|groovy|devel)
        ;;
    *)
        exit 0
139
ubuntu-archive-tools/update-i386-whitelist
Executable file
139
ubuntu-archive-tools/update-i386-whitelist
Executable file
@ -0,0 +1,139 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
# Copyright (C) 2020 Canonical Ltd.
|
||||
# Author: Steve Langasek <steve.langasek@canonical.com>
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 3 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
'''Synchronize the i386 source package whitelist in Launchpad with the output
|
||||
of germinate.
|
||||
|
||||
USAGE:
|
||||
update-i386-whitelist [--dry-run] https://people.canonical.com/~ubuntu-archive/germinate-output/i386.focal/i386+build-depends.sources
|
||||
'''
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
import optparse
|
||||
from urllib.request import urlopen
|
||||
import sys
|
||||
|
||||
def get_sources_from_url(url):
|
||||
'''Download the germinate output and parse out the list of sources.
|
||||
|
||||
Returns list of source package names.
|
||||
'''
|
||||
sources = []
|
||||
|
||||
file = urlopen(url)
|
||||
for i in file:
|
||||
if i.startswith(b'Source') or i.startswith(b'---'):
|
||||
continue
|
||||
sources.append(i.decode('utf-8').split(' ',maxsplit=1)[0])
|
||||
return sources
|
||||
|
||||
def parse_options():
    '''Parse command line arguments.

    Return (options, url) tuple.
    '''
    parser = optparse.OptionParser(
        usage='Usage: %prog [--dry-run] https://people.canonical.com/~ubuntu-archive/germinate-output/i386.focal/i386+build-depends.sources')
    parser.add_option(
        "--dry-run", help="don't change launchpad, just report the delta",
        action="store_true")
    # default_release is a global set in __main__ before parse_options() runs.
    parser.add_option(
        "-s", dest="release", default=default_release, metavar="RELEASE",
        help="release (default: %s)" % default_release)

    (opts, args) = parser.parse_args()

    if len(args) != 1:
        parser.error('Need to specify a URL to sync from')

    return (opts, args[0])


if __name__ == '__main__':

    default_release = 'focal'

    (opts, url) = parse_options()

    launchpad = Launchpad.login_with('update-i386-whitelist',
                                     'production',
                                     version="devel")
    ubuntu = launchpad.distributions['ubuntu']
    series = ubuntu.getSeries(name_or_version=opts.release)
    archive = ubuntu.main_archive

    sources = get_sources_from_url(url)

    packageset = launchpad.packagesets.getByName(name='i386-whitelist',
                                                 distroseries=series)
    currentSet = set(packageset.getSourcesIncluded())
    newSet = set(sources)

    # hard-coded list of ppa-only additions; can maybe go away when
    # https://bugs.launchpad.net/launchpad/+bug/1855069 is fixed, but this is
    # also potentially useful for bootstrapping any additional packages into
    # the archive if needed.

    # bootstrap new spdlog dep
    newSet.update(['fmtlib'])
    # for new lintian
    newSet.update(['libdevel-size-perl', 'libcpanel-json-xs-perl',
                   'libsereal-decoder-perl', 'libsereal-encoder-perl',
                   'libjson-xs-perl'])

    # needed to bootstrap openjdk-N
    newSet.update(['openjdk-12'])
    newSet.update(['openjdk-13'])
    newSet.update(['openjdk-14'])
    newSet.update(['openjdk-15'])
    newSet.update(['openjdk-8'])

    # we get the wrong answer from germinate about a source package's
    # whitelisting when the package provides both Arch: any and Arch: all
    # binaries but we actually only want the Arch: all ones. Rather than
    # fix this in germinate, for now just manually exclude the packages
    # we've found that have this problem.
    for pkg in ('frei0r', 'xorg', 'ubuntu-drivers-common'):
        try:
            newSet.remove(pkg)
        except KeyError:
            pass

    print("Additions:")
    additions = sorted(newSet - currentSet)
    for i in additions:
        print(" * %s" % i)
    print("Removals:")
    removals = sorted(currentSet - newSet)
    for i in removals:
        print(" * %s" % i)
    if opts.dry_run:
        print("--dry-run is set, doing nothing.")
        sys.exit(0)

    if additions or removals:
        print("Commit changes to the packageset? [yN] ", end="")
        sys.stdout.flush()
        response = sys.stdin.readline()
        if not response.strip().lower().startswith('y'):
            sys.exit(1)

    if additions:
        packageset.addSources(names=additions)
    if removals:
        packageset.removeSources(names=removals)
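
For illustration, a dry run against the focal germinate output might report something like the following (the package names here are hypothetical); in a non-dry run nothing is committed unless you answer 'y' at the prompt:

$ ./update-i386-whitelist --dry-run https://people.canonical.com/~ubuntu-archive/germinate-output/i386.focal/i386+build-depends.sources
Additions:
 * somelib
Removals:
 * otherlib
--dry-run is set, doing nothing.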
55
ubuntu-archive-tools/utils.py
Normal file
@ -0,0 +1,55 @@
#!/usr/bin/python2.7

# Copyright (C) 2019 Canonical Ltd.
# Author: Brian Murray <brian.murray@canonical.com>

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Portions of archive-related code that are re-used by various tools."""

import gzip
import os
import tempfile

import apt_pkg


def read_tag_file(path, pkg=None):
    # Decompress the gzipped tag file (e.g. Packages.gz or Sources.gz) to a
    # temporary file, then yield one stanza per package name; consecutive
    # stanzas for the same package collapse to the last one seen. If pkg is
    # given, only stanzas for that package are considered.
    tmp = tempfile.NamedTemporaryFile(prefix='checkrdepends.', delete=False)
    try:
        compressed = gzip.open(path)
        try:
            tmp.write(compressed.read())
        finally:
            compressed.close()
        tmp.close()
        with open(tmp.name) as uncompressed:
            tag_file = apt_pkg.TagFile(uncompressed)
            prev_name = None
            prev_stanza = None
            for stanza in tag_file:
                try:
                    name = stanza['package']
                except KeyError:
                    continue
                if pkg:
                    if name != pkg:
                        continue
                if name != prev_name and prev_stanza is not None:
                    yield prev_stanza
                prev_name = name
                prev_stanza = stanza
            if prev_stanza is not None:
                yield prev_stanza
    finally:
        os.unlink(tmp.name)
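
A minimal usage sketch for read_tag_file(), assuming a locally downloaded index (the path is illustrative, and apt_pkg must be installed):

import apt_pkg
from utils import read_tag_file

apt_pkg.init()  # initialize apt_pkg before parsing tag files
# Hypothetical path; any gzipped Packages or Sources index works.
for stanza in read_tag_file('Packages.gz'):
    # read_tag_file() only yields stanzas that carry a 'Package' field.
    print(stanza['Package'])

Passing pkg='somepackage' instead restricts the iteration to stanzas for that single package.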