Mirror of https://git.launchpad.net/~ubuntu-release/britney/+git/britney2-ubuntu (synced 2025-03-12 03:41:08 +00:00)
Refactoring the existing test (autopkgtest) so that its features can be re-used by tests for other criteria (e.g. boottest).
This commit is contained in:
parent 7959019916
commit 46281510e3
britney.py (68 lines changed)
@@ -28,7 +28,7 @@ to always be fully installable and close to being a release candidate.

Britney's source code is split between two different but related tasks:
the first one is the generation of the update excuses, while the
second tries to update testing with the valid candidates; first
each package alone, then larger and even larger sets of packages
together. Each try is accepted if testing is not more uninstallable
after the update than before.

@@ -53,7 +53,7 @@ Other than source and binary packages, Britney loads the following data:
* BugsV, which contains the list of release-critical bugs for a given
version of a source or binary package (see Britney.read_bugs).

* Dates, which contains the date of the upload of a given version
of a source package (see Britney.read_dates).

* Urgencies, which contains the urgency of the upload of a given

@@ -72,9 +72,9 @@ instead explained in the chapter "Excuses Generation".
= Excuses =

An excuse is a detailed explanation of why a package can or cannot
be updated in the testing distribution from a newer package in
another distribution (like for example unstable). The main purpose
of the excuses is to be written in an HTML file which will be
published over HTTP. The maintainers will be able to parse it manually
or automatically to find the explanation of why their packages have
been updated or not.
@@ -232,9 +232,9 @@ __version__ = '2.0'

class Britney(object):
"""Britney, the Debian testing updater script

This is the script that updates the testing distribution. It is executed
each day after the installation of the updated packages. It generates the
`Packages' files for the testing distribution, but it does so in an
intelligent manner; it tries to avoid any inconsistency and to use only
non-buggy packages.

@@ -384,7 +384,7 @@ class Britney(object):
parser.add_option("", "--series", action="store", dest="series", default=None,
help="set distribution series name")
(self.options, self.args) = parser.parse_args()

# integrity checks
if self.options.nuninst_cache and self.options.print_uninst:
self.__log("nuninst_cache and print_uninst are mutually exclusive!", type="E")

@@ -425,7 +425,7 @@ class Britney(object):

def __log(self, msg, type="I"):
"""Print info messages according to verbosity level

An easy-and-simple log method which prints messages to the standard
output. The type parameter controls the urgency of the message, and
can be equal to `I' for `Information', `W' for `Warning' and `E' for

@@ -528,7 +528,7 @@ class Britney(object):

def read_sources(self, basedir, intern=intern):
"""Read the list of source packages from the specified directory

The source packages are read from the `Sources' file within the
directory specified as `basedir' parameter. Considering the
large amount of memory needed, not all the fields are loaded
@@ -567,14 +567,14 @@ class Britney(object):

def read_binaries(self, basedir, distribution, arch, intern=intern):
"""Read the list of binary packages from the specified directory

The binary packages are read from the `Packages_${arch}' files
within the directory specified as `basedir' parameter, replacing
${arch} with the value of the arch parameter. Considering the
large amount of memory needed, not all the fields are loaded
in memory. The available fields are Version, Source, Multi-Arch,
Depends, Conflicts, Provides and Architecture.

After reading the packages, reverse dependencies are computed
and saved in the `rdepends' keys, and the `Provides' field is
used to populate the virtual packages list.

@@ -756,7 +756,7 @@ class Britney(object):

def read_bugs(self, basedir):
"""Read the release critial bug summary from the specified directory

The RC bug summaries are read from the `BugsV' file within the
directory specified in the `basedir' parameter. The file contains
rows with the format:

@@ -784,7 +784,7 @@ class Britney(object):

def __maxver(self, pkg, dist):
"""Return the maximum version for a given package name

This method returns None if the specified source package
is not available in the `dist' distribution. If the package
exists, then it returns the maximum version between the

@@ -802,7 +802,7 @@ class Britney(object):

def normalize_bugs(self):
"""Normalize the release critical bug summaries for testing and unstable

The method doesn't return any value: it directly modifies the
object attribute `bugs'.
"""
@@ -828,7 +828,7 @@ class Britney(object):

def read_dates(self, basedir):
"""Read the upload date for the packages from the specified directory

The upload dates are read from the `Dates' file within the directory
specified as `basedir' parameter. The file contains rows with the
format:

@@ -874,7 +874,7 @@ class Britney(object):

def read_urgencies(self, basedir):
"""Read the upload urgency of the packages from the specified directory

The upload urgencies are read from the `Urgency' file within the
directory specified as `basedir' parameter. The file contains rows
with the format:

@@ -922,12 +922,12 @@ class Britney(object):

def read_hints(self, basedir):
"""Read the hint commands from the specified directory

The hint commands are read from the files contained in the `Hints'
directory within the directory specified as `basedir' parameter.
The names of the files have to be the same as the authorized users
for the hints.

The file contains rows with the format:

<command> <package-name>[/<version>]
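
As an illustration of the `<command> <package-name>[/<version>]' row format quoted above, one hint row can be split like this (hypothetical helper written for this write-up, not code from britney.py; britney's real hint parsing is richer):

def parse_hint_row(line):
    # Split one hint row of the form '<command> <package-name>[/<version>]'
    # into the command and a list of (package, version-or-None) pairs.
    parts = line.split()
    command, args = parts[0], parts[1:]
    packages = []
    for arg in args:
        pkg, _, ver = arg.partition('/')
        packages.append((pkg, ver or None))
    return command, packages

# e.g. parse_hint_row('remove somepkg/1.0-1') -> ('remove', [('somepkg', '1.0-1')])
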
@@ -1146,7 +1146,7 @@ class Britney(object):

def should_remove_source(self, pkg):
"""Check if a source package should be removed from testing

This method checks if a source package should be removed from the
testing distribution; this happens if the source package is not
present in the unstable distribution anymore.

@@ -1187,7 +1187,7 @@ class Britney(object):
This method checks if the binary packages produced by the source
package on the given architecture should be upgraded; this can
happen also if the migration is a binary-NMU for the given arch.

It returns False if the given packages don't need to be upgraded,
True otherwise. In the former case, a new excuse is appended to
the object attribute excuses.

@@ -1205,7 +1205,7 @@ class Britney(object):
excuse.set_vers(source_t[VERSION], source_t[VERSION])
source_u[MAINTAINER] and excuse.set_maint(source_u[MAINTAINER].strip())
source_u[SECTION] and excuse.set_section(source_u[SECTION].strip())

# if there is a `remove' hint and the requested version is the same as the
# version in testing, then stop here and return False
# (as a side effect, a removal may generate such excuses for both the source

@@ -1335,9 +1335,9 @@ class Britney(object):
"""Check if source package should be upgraded

This method checks if a source package should be upgraded. The analysis
is performed for the source package specified by the `src' parameter,
for the distribution `suite'.

It returns False if the given package doesn't need to be upgraded,
True otherwise. In the former case, a new excuse is appended to
the object attribute excuses.

@@ -1366,7 +1366,7 @@ class Britney(object):
# the starting point is that we will update the candidate and run autopkgtests
update_candidate = True
run_autopkgtest = True

# if the version in unstable is older, then stop here with a warning in the excuse and return False
if source_t and apt_pkg.version_compare(source_u[VERSION], source_t[VERSION]) < 0:
excuse.addhtml("ALERT: %s is newer in testing (%s %s)" % (src, source_t[VERSION], source_u[VERSION]))
@@ -1483,13 +1483,13 @@ class Britney(object):
for arch in self.options.architectures:
if src not in self.sources["testing"]:
continue

# if the package in testing has no binaries on this
# architecture, it can't be out-of-date
if not any(x for x in self.sources["testing"][src][BINARIES]
if x.endswith("/"+arch) and self.binaries["testing"][arch][0][x.split("/")[0]][ARCHITECTURE] != 'all'):
continue

# if the (t-)p-u package has produced any binaries on
# this architecture then we assume it's ok. this allows for
# uploads to (t-)p-u which intentionally drop binary

@@ -1619,7 +1619,7 @@ class Britney(object):
bugs_t.extend(self.bugs['testing'][spkg])
if spkg in self.bugs['unstable']:
bugs_u.extend(self.bugs['unstable'][spkg])

new_bugs = sorted(set(bugs_u).difference(bugs_t))
old_bugs = sorted(set(bugs_t).difference(bugs_u))

@@ -1720,7 +1720,7 @@ class Britney(object):
exclookup[x].addreason("depends")
exclookup[x].is_valid = False
i = i + 1

def write_excuses(self, same_source=same_source):
"""Produce and write the update excuses

@@ -2173,7 +2173,7 @@ class Britney(object):
suite != 'unstable' and \
binaries_t[parch][0][binary][ARCHITECTURE] == 'all':
continue
else:
rms.add((binary, version, parch))

# single binary removal; used for clearing up after smooth

@@ -2623,7 +2623,7 @@ class Britney(object):
return None
selected.append(x)
upgrade_me.remove(x)

self.output_write("start: %s\n" % self.eval_nuninst(nuninst_start))
if not force:
self.output_write("orig: %s\n" % self.eval_nuninst(nuninst_start))

@@ -2782,7 +2782,7 @@ class Britney(object):
if len(removals) > 0:
self.output_write("Removing obsolete source packages from testing (%d):\n" % (len(removals)))
self.do_all(actions=removals)

# smooth updates
if self.options.smooth_updates:
self.__log("> Removing old packages left in testing from smooth updates", type="I")

@@ -2977,7 +2977,7 @@ class Britney(object):
def auto_hinter(self):
"""Auto-generate "easy" hints.

This method attempts to generate "easy" hints for sets of packages which
must migrate together. Beginning with a package which does not depend on
any other package (in terms of excuses), a list of dependencies and
reverse dependencies is recursively created.

@@ -3100,7 +3100,7 @@ class Britney(object):

def main(self):
"""Main method

This is the entry point for the class: it includes the list of calls
for the member methods which will produce the output files.
"""

tests/__init__.py (new file, 159 lines)

@@ -0,0 +1,159 @@
# (C) 2015 Canonical Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

import os
import shutil
import subprocess
import tempfile
import unittest

PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

architectures = ['amd64', 'arm64', 'armhf', 'i386', 'powerpc', 'ppc64el']


class TestData:

    def __init__(self):
        '''Construct local test package indexes.

        The archive is initially empty. You can create new packages with
        create_deb(). self.path contains the path of the archive, and
        self.apt_source provides an apt source "deb" line.

        It is kept in a temporary directory which gets removed when the Archive
        object gets deleted.
        '''
        self.path = tempfile.mkdtemp(prefix='testarchive.')
        self.apt_source = 'deb file://%s /' % self.path
        self.series = 'series'
        self.dirs = {False: os.path.join(self.path, 'data', self.series),
                     True: os.path.join(
                         self.path, 'data', '%s-proposed' % self.series)}
        os.makedirs(self.dirs[False])
        os.mkdir(self.dirs[True])
        self.added_sources = {False: set(), True: set()}
        self.added_binaries = {False: set(), True: set()}

        # pre-create all files for all architectures
        for arch in architectures:
            for dir in self.dirs.values():
                with open(os.path.join(dir, 'Packages_' + arch), 'w'):
                    pass
        for dir in self.dirs.values():
            for fname in ['Dates', 'Blocks']:
                with open(os.path.join(dir, fname), 'w'):
                    pass
            for dname in ['Hints']:
                os.mkdir(os.path.join(dir, dname))

        os.mkdir(os.path.join(self.path, 'output'))

        # create temporary home dir for proposed-migration autopktest status
        self.home = os.path.join(self.path, 'home')
        os.environ['HOME'] = self.home
        os.makedirs(os.path.join(self.home, 'proposed-migration',
                                 'autopkgtest', 'work'))
    def __del__(self):
        shutil.rmtree(self.path)

    def add(self, name, unstable, fields={}, add_src=True):
        '''Add a binary package to the index file.

        You need to specify at least the package name and in which list to put
        it (unstable==True for unstable/proposed, or False for
        testing/release). fields specifies all additional entries, e. g.
        {'Depends': 'foo, bar', 'Conflicts: baz'}. There are defaults for most
        fields.

        Unless add_src is set to False, this will also automatically create a
        source record, based on fields['Source'] and name.
        '''
        assert (name not in self.added_binaries[unstable])
        self.added_binaries[unstable].add(name)

        fields.setdefault('Architecture', architectures[0])
        fields.setdefault('Version', '1')
        fields.setdefault('Priority', 'optional')
        fields.setdefault('Section', 'devel')
        fields.setdefault('Description', 'test pkg')
        if fields['Architecture'] == 'all':
            for a in architectures:
                self._append(name, unstable, 'Packages_' + a, fields)
        else:
            self._append(name, unstable, 'Packages_' + fields['Architecture'],
                         fields)

        if add_src:
            src = fields.get('Source', name)
            if src not in self.added_sources[unstable]:
                self.add_src(src, unstable, {'Version': fields['Version'],
                                             'Section': fields['Section']})

    def add_src(self, name, unstable, fields={}):
        '''Add a source package to the index file.

        You need to specify at least the package name and in which list to put
        it (unstable==True for unstable/proposed, or False for
        testing/release). fields specifies all additional entries, which can be
        Version (default: 1), Section (default: devel), and Extra-Source-Only.
        '''
        assert (name not in self.added_sources[unstable])
        self.added_sources[unstable].add(name)

        fields.setdefault('Version', '1')
        fields.setdefault('Section', 'devel')
        self._append(name, unstable, 'Sources', fields)

    def _append(self, name, unstable, file_name, fields):
        with open(os.path.join(self.dirs[unstable], file_name), 'a') as f:
            f.write('''Package: %s
Maintainer: Joe <joe@example.com>
''' % name)

            for k, v in fields.items():
                f.write('%s: %s\n' % (k, v))
            f.write('\n')


class TestBase(unittest.TestCase):

    def setUp(self):
        super(TestBase, self).setUp()
        self.data = TestData()
        self.britney = os.path.join(PROJECT_DIR, 'britney.py')
        self.britney_conf = os.path.join(PROJECT_DIR, 'britney.conf')
        assert os.path.exists(self.britney)
        assert os.path.exists(self.britney_conf)

    def tearDown(self):
        del self.data

    def run_britney(self, args=[]):
        '''Run britney.

        Assert that it succeeds and does not produce anything on stderr.
        Return (excuses.html, britney_out).
        '''
        britney = subprocess.Popen([self.britney, '-v', '-c', self.britney_conf,
                                    '--distribution=ubuntu',
                                    '--series=%s' % self.data.series],
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   cwd=self.data.path,
                                   universal_newlines=True)
        (out, err) = britney.communicate()
        self.assertEqual(britney.returncode, 0, out + err)
        self.assertEqual(err, '')

        with open(os.path.join(self.data.path, 'output', self.data.series,
                               'excuses.html')) as f:
            excuses = f.read()

        return (excuses, out)
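
With the archive construction and the britney invocation factored out into TestData and TestBase above, a test for another promotion criterion only needs to subclass TestBase and populate the indexes. A minimal sketch in the spirit of the boottest re-use mentioned in the commit message (hypothetical example, not part of this commit; a real policy test would also stub out its external tool, the way the autopkgtest test below fakes adt-britney):

from tests import TestBase


class TestBootTestReuse(TestBase):

    def test_candidate_appears_in_excuses(self):
        # 'green' exists only in the -proposed index, so britney treats it
        # as a migration candidate and mentions it in excuses.html
        self.data.add('libc6', False)
        self.data.add('green', True, {'Version': '2', 'Depends': 'libc6'})
        (excuses, out) = self.run_britney()
        self.assertIn('green', excuses)
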
@@ -6,133 +6,40 @@
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

import tempfile
import shutil
import apt_pkg
import operator
import os
import sys
import subprocess
import unittest
import apt_pkg
import operator

apt_pkg.init()
architectures = ['amd64', 'arm64', 'armhf', 'i386', 'powerpc', 'ppc64el']
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, PROJECT_DIR)

my_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
from autopkgtest import ADT_EXCUSES_LABELS
from tests import TestBase

NOT_CONSIDERED = False
VALID_CANDIDATE = True

sys.path.insert(0, my_dir)
from autopkgtest import ADT_EXCUSES_LABELS

apt_pkg.init()

class TestData:
def __init__(self):
'''Construct local test package indexes.
class TestAutoPkgTest(TestBase):

The archive is initially empty. You can create new packages with
create_deb(). self.path contains the path of the archive, and
self.apt_source provides an apt source "deb" line.

It is kept in a temporary directory which gets removed when the Archive
object gets deleted.
'''
self.path = tempfile.mkdtemp(prefix='testarchive.')
self.apt_source = 'deb file://%s /' % self.path
self.series = 'series'
self.dirs = {False: os.path.join(self.path, 'data', self.series),
True: os.path.join(self.path, 'data', '%s-proposed' % self.series)}
os.makedirs(self.dirs[False])
os.mkdir(self.dirs[True])
self.added_sources = {False: set(), True: set()}
self.added_binaries = {False: set(), True: set()}

# pre-create all files for all architectures
for arch in architectures:
for dir in self.dirs.values():
with open(os.path.join(dir, 'Packages_' + arch), 'w'):
pass
for dir in self.dirs.values():
for fname in ['Dates', 'Blocks']:
with open(os.path.join(dir, fname), 'w'):
pass
for dname in ['Hints']:
os.mkdir(os.path.join(dir, dname))

os.mkdir(os.path.join(self.path, 'output'))

# create temporary home dir for proposed-migration autopktest status
self.home = os.path.join(self.path, 'home')
os.environ['HOME'] = self.home
os.makedirs(os.path.join(self.home, 'proposed-migration',
'autopkgtest', 'work'))
def __del__(self):
shutil.rmtree(self.path)

def add(self, name, unstable, fields={}, add_src=True):
'''Add a binary package to the index file.

You need to specify at least the package name and in which list to put
it (unstable==True for unstable/proposed, or False for
testing/release). fields specifies all additional entries, e. g.
{'Depends': 'foo, bar', 'Conflicts: baz'}. There are defaults for most
fields.

Unless add_src is set to False, this will also automatically create a
source record, based on fields['Source'] and name.
'''
assert (name not in self.added_binaries[unstable])
self.added_binaries[unstable].add(name)

fields.setdefault('Architecture', architectures[0])
fields.setdefault('Version', '1')
fields.setdefault('Priority', 'optional')
fields.setdefault('Section', 'devel')
fields.setdefault('Description', 'test pkg')
if fields['Architecture'] == 'all':
for a in architectures:
self._append(name, unstable, 'Packages_' + a, fields)
else:
self._append(name, unstable, 'Packages_' + fields['Architecture'],
fields)

if add_src:
src = fields.get('Source', name)
if src not in self.added_sources[unstable]:
self.add_src(src, unstable, {'Version': fields['Version'],
'Section': fields['Section']})

def add_src(self, name, unstable, fields={}):
'''Add a source package to the index file.

You need to specify at least the package name and in which list to put
it (unstable==True for unstable/proposed, or False for
testing/release). fields specifies all additional entries, which can be
Version (default: 1), Section (default: devel), and Extra-Source-Only.
'''
assert (name not in self.added_sources[unstable])
self.added_sources[unstable].add(name)

fields.setdefault('Version', '1')
fields.setdefault('Section', 'devel')
self._append(name, unstable, 'Sources', fields)

def _append(self, name, unstable, file_name, fields):
with open(os.path.join(self.dirs[unstable], file_name), 'a') as f:
f.write('''Package: %s
Maintainer: Joe <joe@example.com>
''' % name)

for k, v in fields.items():
f.write('%s: %s\n' % (k, v))
f.write('\n')

class Test(unittest.TestCase):
def setUp(self):
self.data = TestData()
super(TestAutoPkgTest, self).setUp()

# fake adt-britney script
self.adt_britney = os.path.join(
self.data.home, 'auto-package-testing', 'jenkins', 'adt-britney')
os.makedirs(os.path.dirname(self.adt_britney))

with open(self.adt_britney, 'w') as f:
f.write('''#!/bin/sh -e
echo "$@" >> /%s/adt-britney.log ''' % self.data.path)
os.chmod(self.adt_britney, 0o755)

# add a bunch of packages to testing to avoid repetition
self.data.add('libc6', False)
@@ -146,24 +53,6 @@ class Test(unittest.TestCase):
'Conflicts': 'green'})
self.data.add('justdata', False, {'Architecture': 'all'})

self.britney = os.path.join(my_dir, 'britney.py')
self.britney_conf = os.path.join(my_dir, 'britney.conf')
assert os.path.exists(self.britney)
assert os.path.exists(self.britney_conf)

# fake adt-britney script
self.adt_britney = os.path.join(self.data.home, 'auto-package-testing',
'jenkins', 'adt-britney')
os.makedirs(os.path.dirname(self.adt_britney))

with open(self.adt_britney, 'w') as f:
f.write('''#!/bin/sh -e
echo "$@" >> /%s/adt-britney.log ''' % self.data.path)
os.chmod(self.adt_britney, 0o755)

def tearDown(self):
del self.data

def __merge_records(self, results, history=""):
'''Merges a list of results with records in history.
@@ -235,28 +124,29 @@ args.func()
'rq': request,
'res': self.__merge_records(request, history)})

def run_britney(self, args=[]):
'''Run britney.
def do_test(self, unstable_add, adt_request, considered, expect=None,
no_expect=None, history=""):
for (pkg, fields) in unstable_add:
self.data.add(pkg, True, fields)

Assert that it succeeds and does not produce anything on stderr.
Return (excuses.html, britney_out).
'''
britney = subprocess.Popen([self.britney, '-v', '-c', self.britney_conf,
'--distribution=ubuntu',
'--series=%s' % self.data.series],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=self.data.path,
universal_newlines=True)
(out, err) = britney.communicate()
self.assertEqual(britney.returncode, 0, out + err)
self.assertEqual(err, '')
self.make_adt_britney(adt_request, history)

with open(os.path.join(self.data.path, 'output', self.data.series,
'excuses.html')) as f:
excuses = f.read()
(excuses, out) = self.run_britney()
#print('-------\nexcuses: %s\n-----' % excuses)
#print('-------\nout: %s\n-----' % out)
#print('run:\n%s -c %s\n' % (self.britney, self.britney_conf))
#subprocess.call(['bash', '-i'], cwd=self.data.path)
if considered:
self.assertIn('Valid candidate', excuses)
else:
self.assertIn('Not considered', excuses)

return (excuses, out)
if expect:
for re in expect:
self.assertRegexpMatches(excuses, re)
if no_expect:
for re in no_expect:
self.assertNotRegexpMatches(excuses, re)

def test_no_request_for_uninstallable(self):
'''Does not request a test for an uninstallable package'''
@@ -557,30 +447,6 @@ args.func()
history="lightgreen 1 PASS lightgreen 1"
)

def do_test(self, unstable_add, adt_request, considered, expect=None,
no_expect=None, history=""):
for (pkg, fields) in unstable_add:
self.data.add(pkg, True, fields)

self.make_adt_britney(adt_request, history)

(excuses, out) = self.run_britney()
#print('-------\nexcuses: %s\n-----' % excuses)
#print('-------\nout: %s\n-----' % out)
#print('run:\n%s -c %s\n' % (self.britney, self.britney_conf))
#subprocess.call(['bash', '-i'], cwd=self.data.path)
if considered:
self.assertIn('Valid candidate', excuses)
else:
self.assertIn('Not considered', excuses)

if expect:
for re in expect:
self.assertRegexpMatches(excuses, re)
if no_expect:
for re in no_expect:
self.assertNotRegexpMatches(excuses, re)

def shell(self):
# uninstallable unstable version
self.data.add('yellow', True, {'Version': '1.1~beta',
|
||||
subprocess.call(['bash', '-i'], cwd=self.data.path)
|
||||
|
||||
|
||||
unittest.main()
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
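
The module now guards unittest.main() behind __main__, so its tests can also be collected programmatically. A sketch of such a runner (hypothetical script; it assumes the modified test module, whose file name is not shown in this capture but is presumably tests/autopkgtest.py, lives under tests/ next to the new tests/__init__.py):

# run_tests.py -- hypothetical runner, for illustration only
import unittest

# pattern='*.py' is needed because the test module is not named test_*.py
suite = unittest.defaultTestLoader.discover('tests', pattern='*.py')
unittest.TextTestRunner(verbosity=2).run(suite)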