mirror of https://git.launchpad.net/~ubuntu-release/britney/+git/britney2-ubuntu
synced 2025-02-13 15:37:02 +00:00
Merge lp:~pitti/britney/britney2-ubuntu-swiftresults
commit 13a3ec2593

autopkgtest.py: 203 changed lines
@@ -24,8 +24,14 @@ import subprocess
import tempfile
from textwrap import dedent
import time
import apt_pkg
import json
import tarfile
import io
import copy
import itertools
from urllib import urlencode, urlopen

import apt_pkg
import kombu

from consts import (AUTOPKGTEST, BINARIES, RDEPENDS, SOURCE, VERSION)
@@ -42,6 +48,28 @@ ADT_EXCUSES_LABELS = {
}


def srchash(src):
    '''archive hash prefix for source package'''

    if src.startswith('lib'):
        return src[:4]
    else:
        return src[0]


def merge_triggers(trigs1, trigs2):
    '''Merge two (pkg, ver) trigger iterables

    Return [(pkg, ver), ...] list with only the highest version for each
    package.
    '''
    pkgvers = {}
    for pkg, ver in itertools.chain(trigs1, trigs2):
        if apt_pkg.version_compare(ver, pkgvers.setdefault(pkg, '0')) >= 0:
            pkgvers[pkg] = ver
    return list(pkgvers.items())


class AutoPackageTest(object):
    """autopkgtest integration

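A quick sketch of how the two new helpers behave, with made-up inputs (not part of the commit):

# Illustration only: expected behaviour of srchash() and merge_triggers() above.
import apt_pkg
from autopkgtest import merge_triggers, srchash

apt_pkg.init_system()  # version_compare() needs apt_pkg to be initialised

assert srchash('libpng') == 'libp'   # 'lib*' sources use a four-character prefix
assert srchash('green') == 'g'       # everything else uses the first letter

# merge_triggers() keeps only the highest version per triggering source
merged = dict(merge_triggers([('green', '1'), ('lightgreen', '1')], [('green', '2')]))
assert merged == {'green': '2', 'lightgreen': '1'}
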
@@ -71,6 +99,28 @@ class AutoPackageTest(object):
            os.mkdir(self.test_state_dir)
        self.read_pending_tests()

        # results map: src -> arch -> [latest_stamp, ver -> (passed, triggers)]
        # - "passed" is a bool
        # - It's tempting to just use a global "latest" time stamp, but due to
        #   swift's "eventual consistency" we might miss results with older time
        #   stamps from other packages that we don't see in the current run, but
        #   will in the next one. This doesn't hurt for older results of the same
        #   package.
        # - triggers is a list of (source, version) pairs which unstable
        #   packages triggered this test run. We need to track this to avoid
        #   unnecessarily re-running tests.
        self.test_results = {}
        self.results_cache_file = os.path.join(self.test_state_dir, 'results.cache')

        # read the cached results that we collected so far
        if os.path.exists(self.results_cache_file):
            with open(self.results_cache_file) as f:
                self.test_results = json.load(f)
            self.log_verbose('Read previous results from %s' % self.results_cache_file)
        else:
            self.log_verbose('%s does not exist, re-downloading all results '
                             'from swift' % self.results_cache_file)

    def log_verbose(self, msg):
        if self.britney.options.verbose:
            print('I: [%s] - %s' % (time.asctime(), msg))
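Serialized to results.cache, the structure described in the comment above looks roughly like this (example values borrowed from the test assertions further down):

# Shape of self.test_results / results.cache:
# src -> arch -> [latest_stamp, {ver: [passed, [[trigger_src, trigger_ver], ...]]}]
example_results = {
    'green': {
        'i386': ['20150101_100200@',
                 {'1': [False, []],                  # version in testing failed
                  '2': [True, [['green', '2']]]}],   # unstable version passed, triggered by green/2
    },
    'lightgreen': {
        'amd64': ['20150101_100101@',
                  {'1': [True, [['green', '2']]]}],
    },
}
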
@@ -175,16 +225,124 @@ class AutoPackageTest(object):
        '''Add one test request to the local self.requested_tests queue

        This will only be done if that test wasn't already requested in a
        previous run, i. e. it is already in self.pending_tests.
        previous run (i. e. not already in self.pending_tests) or there already
        is a result for it.
        '''
        try:
            for (tsrc, tver) in self.test_results[src][arch][1][ver][1]:
                if tsrc == trigsrc and apt_pkg.version_compare(tver, trigver) >= 0:
                    self.log_verbose('There already is a result for %s/%s/%s triggered by %s/%s' %
                                     (src, ver, arch, tsrc, tver))
                    return
        except KeyError:
            pass

        if (trigsrc, trigver) in self.pending_tests.get(src, {}).get(
            ver, {}).get(arch, set()):
                ver, {}).get(arch, set()):
            self.log_verbose('test %s/%s/%s for %s/%s is already pending, not queueing' %
                             (src, ver, arch, trigsrc, trigver))
            return
        self.requested_tests.setdefault(src, {}).setdefault(
            ver, {}).setdefault(arch, set()).add((trigsrc, trigver))

    def fetch_swift_results(self, swift_url, src, arch):
        '''Download new results for source package/arch from swift'''

        # prepare query: get all runs with a timestamp later than latest_stamp
        # for this package/arch; '@' is at the end of each run timestamp, to
        # mark the end of a test run directory path
        # example: <autopkgtest-wily>wily/amd64/libp/libpng/20150630_054517@/result.tar
        query = {'delimiter': '@',
                 'prefix': '%s/%s/%s/%s/' % (self.series, arch, srchash(src), src)}
        try:
            # don't include the last run again, so make the marker
            # "infinitesimally later" by appending 'zz'
            query['marker'] = self.test_results[src][arch][0] + 'zz'
        except KeyError:
            # no stamp yet, download all results
            pass

        # request new results from swift
        url = os.path.join(swift_url, 'autopkgtest-' + self.series)
        url += '?' + urlencode(query)
        try:
            f = urlopen(url)
            if f.getcode() == 200:
                result_paths = f.read().strip().splitlines()
            else:
                self.log_error('Failure to fetch swift results from %s: %u' %
                               (url, f.getcode()))
                f.close()
                return
            f.close()
        except IOError as e:
            self.log_error('Failure to fetch swift results from %s: %s' % (url, str(e)))
            return

        for p in result_paths:
            self.fetch_one_result(os.path.join(
                swift_url, 'autopkgtest-' + self.series, p, 'result.tar'), src, arch)

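A hedged sketch of the container listing this query produces; the host comes from the sample ADT_SWIFT_URL in the config further down, the path from the libpng example in the comment above, and the timestamps are illustrative only:

# Sketch (not from the commit): the URL fetch_swift_results() builds and the
# kind of listing Swift returns for it.
from urllib import urlencode  # Python 2, as in the module above

swift_url = 'https://objectstorage.mycloud.example.com/v1/AUTH_autopkgtest'
query = {'delimiter': '@',
         'prefix': 'wily/amd64/libp/libpng/',
         'marker': 'wily/amd64/libp/libpng/20150630_054517@zz'}
url = swift_url + '/autopkgtest-wily?' + urlencode(query)
# A 200 response body is a newline-separated object listing, one run per line,
# truncated at the '@' delimiter, e.g.:
#   wily/amd64/libp/libpng/20150701_123456@
# fetch_one_result() then downloads <run>/result.tar for each listed run.
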
    def fetch_one_result(self, url, src, arch):
        '''Download one result URL for source/arch

        Remove matching pending_tests entries.
        '''
        try:
            f = urlopen(url)
            if f.getcode() == 200:
                tar_bytes = io.BytesIO(f.read())
                f.close()
            else:
                self.log_error('Failure to fetch %s: %u' % (url, f.getcode()))
                return
        except IOError as e:
            self.log_error('Failure to fetch %s: %s' % (url, str(e)))
            return

        try:
            with tarfile.open(None, 'r', tar_bytes) as tar:
                exitcode = int(tar.extractfile('exitcode').read().strip())
                srcver = tar.extractfile('testpkg-version').read().decode().strip()
            (ressrc, ver) = srcver.split()
        except (KeyError, ValueError, tarfile.TarError) as e:
            self.log_error('%s is damaged: %s' % (url, str(e)))
            return

        if src != ressrc:
            self.log_error('%s is a result for package %s, but expected package %s' %
                           (url, ressrc, src))
            return

        stamp = os.path.basename(os.path.dirname(url))
        # allow some skipped tests, but nothing else
        passed = exitcode in [0, 2]

        self.log_verbose('Fetched test result for %s/%s on %s: %s' % (
            src, ver, arch, passed and 'pass' or 'fail'))

        # remove matching test requests, remember triggers
        satisfied_triggers = set()
        for pending_ver, pending_archinfo in self.pending_tests.get(src, {}).copy().items():
            # don't consider newer requested versions
            if apt_pkg.version_compare(pending_ver, ver) <= 0:
                try:
                    t = pending_archinfo[arch]
                    self.log_verbose('-> matches pending request for triggers %s' % str(t))
                    satisfied_triggers.update(t)
                    del self.pending_tests[src][pending_ver][arch]
                except KeyError:
                    self.log_error('-> does not match any pending request!')
                    pass

        # add this result
        src_arch_results = self.test_results.setdefault(src, {}).setdefault(arch, [stamp, {}])
        src_arch_results[1][ver] = (passed, merge_triggers(
            src_arch_results[1].get(ver, (None, []))[1], satisfied_triggers))
        # update latest_stamp
        if stamp > src_arch_results[0]:
            src_arch_results[0] = stamp

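The result.tar parsed here only needs two members, exitcode and testpkg-version. A minimal sketch of producing one, mirroring what tests/mock_swift.py below does:

# Sketch: build a minimal result.tar that fetch_one_result() would accept.
import io
import tarfile

def make_result_tar(exitcode, testpkg_version):
    '''Return result.tar bytes with the two members fetch_one_result() reads.'''
    buf = io.BytesIO()
    with tarfile.open('result.tar', 'w', buf) as tar:
        for name, contents in (('exitcode', ('%i' % exitcode).encode()),
                               ('testpkg-version', testpkg_version.encode())):
            ti = tarfile.TarInfo(name)
            ti.size = len(contents)
            tar.addfile(ti, io.BytesIO(contents))
    return buf.getvalue()

tar_bytes = make_result_tar(0, 'green 2')  # exit code 0 or 2 counts as "passed"
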
    #
    # obsolete adt-britney helpers
    #
@@ -397,6 +555,38 @@ class AutoPackageTest(object):
        self._adt_britney("submit", request_path)

    def collect(self):
        # fetch results from swift
        try:
            swift_url = self.britney.options.adt_swift_url
        except AttributeError:
            self.log_error('ADT_SWIFT_URL not set, cannot collect results')
            swift_url = None
        try:
            self.britney.options.adt_amqp
        except AttributeError:
            self.log_error('ADT_AMQP not set, not collecting results from swift')
            swift_url = None

        if swift_url:
            # update results from swift for all packages that we are waiting
            # for, and remove pending tests that we have results for on all
            # arches
            for pkg, verinfo in copy.deepcopy(self.pending_tests.items()):
                for archinfo in verinfo.values():
                    for arch in archinfo:
                        self.fetch_swift_results(swift_url, pkg, arch)

            # update the results cache
            with open(self.results_cache_file + '.new', 'w') as f:
                json.dump(self.test_results, f, indent=2)
            os.rename(self.results_cache_file + '.new', self.results_cache_file)
            self.log_verbose('Updated results cache')

            # new results remove pending requests, update the on-disk cache
            self.update_pending_tests()

        # deprecated results for old Jenkins/lp:auto-package-testing, will go
        # away
        self._ensure_rc_file()
        result_path = self._result_path
        self._adt_britney("collect", "-O", result_path)
@@ -413,6 +603,13 @@ class AutoPackageTest(object):
                             (src, ver, trigsrc, trigver, status))

    def results(self, trigsrc, trigver):
        '''Return test results for triggering package

        Return (ALWAYSFAIL|PASS|FAIL, src, ver) iterator for all package tests
        that got triggered by trigsrc/trigver.
        '''
        # deprecated results for old Jenkins/lp:auto-package-testing, will go
        # away
        for status, src, ver in self.pkgcauses[trigsrc][trigver]:
            # Check for regression
            if status == 'FAIL':
@@ -67,6 +67,8 @@ ADT_DEBUG = no
ADT_ARCHES = amd64 i386
# comment this to disable autopkgtest requests
ADT_AMQP = ampq://user:pwd@amqp.example.com
# Swift base URL with the results (must be publicly readable and browsable)
ADT_SWIFT_URL = https://objectstorage.mycloud.example.com/v1/AUTH_autopkgtest

BOOTTEST_ENABLE = yes
BOOTTEST_DEBUG = yes
britney.py: 30 changed lines
@@ -225,7 +225,7 @@ from britney_util import (old_libraries_format, same_source, undo_changes,
from consts import (VERSION, SECTION, BINARIES, MAINTAINER, FAKESRC,
                    SOURCE, SOURCEVER, ARCHITECTURE, DEPENDS, CONFLICTS,
                    PROVIDES, RDEPENDS, RCONFLICTS, MULTIARCH, ESSENTIAL)
from autopkgtest import AutoPackageTest, ADT_PASS, ADT_EXCUSES_LABELS
from autopkgtest import AutoPackageTest, ADT_PASS, ADT_EXCUSES_LABELS, srchash
from boottest import BootTest


@@ -1853,6 +1853,7 @@ class Britney(object):
            jenkins_private = (
                "http://d-jenkins.ubuntu-ci:8080/view/%s/view/AutoPkgTest/job" %
                self.options.series.title())
            cloud_url = "http://autopkgtest.ubuntu.com/packages/%(h)s/%(s)s/%(r)s/%(a)s"
            for e in autopkgtest_excuses:
                adtpass = True
                for status, adtsrc, adtver in autopkgtest.results(
@@ -1883,6 +1884,33 @@ class Britney(object):
                                   "%s" % (adtsrc, adtver, forces[0].user))
                    else:
                        adtpass = False

                # temporary: also show results from cloud based tests,
                # until that becomes the primary mechanism
                for testsrc, testver in autopkgtest.tests_for_source(e.name, e.ver[1]):
                    msg = '(informational) cloud autopkgtest for %s %s: ' % (testsrc, testver)
                    archmsg = []
                    for arch in self.options.adt_arches.split():
                        url = cloud_url % {'h': srchash(testsrc), 's': testsrc,
                                           'r': self.options.series, 'a': arch}
                        try:
                            r = autopkgtest.test_results[testsrc][arch][1][testver][0]
                            status = r and 'PASS' or 'REGRESSION'
                        except KeyError:
                            try:
                                autopkgtest.pending_tests[testsrc][testver][arch]
                                status = 'RUNNING'
                            except KeyError:
                                # neither done nor pending -> exclusion, or disabled
                                continue

                        archmsg.append('<a href="%s">%s: %s</a>' %
                                       (url, arch, ADT_EXCUSES_LABELS[status]))

                    if archmsg:
                        e.addhtml(msg + ', '.join(archmsg))
                # end of temporary code

                if not adtpass and e.is_valid:
                    hints = self.hints.search('force-skiptest', package=e.name)
                    hints.extend(self.hints.search('force', package=e.name))
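For reference, the cloud_url substitution above expands like this (package and series names taken from the test suite, the URL pattern from the code above):

# Illustration: how the informational result link is built for one arch.
from autopkgtest import srchash

cloud_url = "http://autopkgtest.ubuntu.com/packages/%(h)s/%(s)s/%(r)s/%(a)s"
url = cloud_url % {'h': srchash('green'), 's': 'green', 'r': 'series', 'a': 'amd64'}
assert url == 'http://autopkgtest.ubuntu.com/packages/g/green/series/amd64'
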
@@ -67,4 +67,5 @@ ADT_DEBUG = no
ADT_ARCHES = amd64 i386
# comment this to disable autopkgtest requests
ADT_AMQP = ampq://user:pwd@amqp.example.com

# Swift base URL with the results (must be publicly readable and browsable)
ADT_SWIFT_URL = https://objectstorage.mycloud.example.com/v1/AUTH_autopkgtest
tests/mock_swift.py: 139 lines (new file)
@@ -0,0 +1,139 @@
# Mock a Swift server with autopkgtest results
# Author: Martin Pitt <martin.pitt@ubuntu.com>

import os
import tarfile
import io
import sys
import socket
import time
import tempfile

try:
    from http.server import HTTPServer, BaseHTTPRequestHandler
    from urllib.parse import urlparse, parse_qs
except ImportError:
    # Python 2
    from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
    from urlparse import urlparse, parse_qs


class SwiftHTTPRequestHandler(BaseHTTPRequestHandler):
    '''Mock swift container with autopkgtest results

    This accepts retrieving a particular result.tar (e. g.
    /container/path/result.tar) or listing the container contents
    (/container/?prefix=foo&delimiter=@&marker=foo/bar).
    '''
    # map container -> result.tar path -> (exitcode, testpkg-version)
    results = {}

    def do_GET(self):
        p = urlparse(self.path)
        path_comp = p.path.split('/')
        container = path_comp[1]
        path = '/'.join(path_comp[2:])
        if path:
            self.serve_file(container, path)
        else:
            self.list_container(container, parse_qs(p.query))

    def serve_file(self, container, path):
        if os.path.basename(path) != 'result.tar':
            self.send_error(404, 'File not found (only result.tar supported)')
            return
        try:
            (exitcode, pkgver) = self.results[container][os.path.dirname(path)]
        except KeyError:
            self.send_error(404, 'File not found')
            return

        self.send_response(200)
        self.send_header('Content-type', 'application/octet-stream')
        self.end_headers()

        tar = io.BytesIO()
        with tarfile.open('result.tar', 'w', tar) as results:
            # add exitcode
            contents = ('%i' % exitcode).encode()
            ti = tarfile.TarInfo('exitcode')
            ti.size = len(contents)
            results.addfile(ti, io.BytesIO(contents))
            # add testpkg-version
            contents = pkgver.encode()
            ti = tarfile.TarInfo('testpkg-version')
            ti.size = len(contents)
            results.addfile(ti, io.BytesIO(contents))
        self.wfile.write(tar.getvalue())

    def list_container(self, container, query):
        try:
            objs = set(['%s/result.tar' % r for r in self.results[container]])
        except KeyError:
            self.send_error(404, 'Container does not exist')
            return
        if 'prefix' in query:
            p = query['prefix'][-1]
            objs = set([o for o in objs if o.startswith(p)])
        if 'marker' in query:
            m = query['marker'][-1]
            objs = set([o for o in objs if o >= m])
        if 'delimiter' in query:
            d = query['delimiter'][-1]
            # if find() returns a value, we want to include the delimiter, thus
            # bump its result; for "not found" return None
            find_adapter = lambda i: (i >= 0) and (i + 1) or None
            objs = set([o[:find_adapter(o.find(d))] for o in objs])

        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write(('\n'.join(sorted(objs)) + '\n').encode('UTF-8'))


class AutoPkgTestSwiftServer:
    def __init__(self, port=8080):
        self.port = port
        self.server_pid = None
        self.log = None

    def __del__(self):
        if self.server_pid:
            self.stop()

    @classmethod
    def set_results(klass, results):
        '''Set served results.

        results is a map: container -> result.tar path ->
        (exitcode, testpkg-version)
        '''
        SwiftHTTPRequestHandler.results = results

    def start(self):
        assert self.server_pid is None, 'already started'
        self.log = tempfile.TemporaryFile()
        p = os.fork()
        if p:
            # parent: wait until server starts
            self.server_pid = p
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            while True:
                try:
                    s.connect_ex(('127.0.0.1', self.port))
                    break
                except OSError:
                    time.sleep(0.1)
            return

        # child; quiesce logging on stderr
        os.dup2(self.log.fileno(), sys.stderr.fileno())
        srv = HTTPServer(('', self.port), SwiftHTTPRequestHandler)
        srv.serve_forever()
        sys.exit(0)

    def stop(self):
        assert self.server_pid, 'not running'
        os.kill(self.server_pid, 15)
        os.waitpid(self.server_pid, 0)
        self.server_pid = None
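Typical use of this mock server, as exercised by the test suite below (port and container name taken from the tests):

# Sketch of driving the mock server the way tests/autopkgtest does.
from tests import mock_swift

server = mock_swift.AutoPkgTestSwiftServer(port=18085)
server.set_results({'autopkgtest-series': {
    # container -> run directory -> (exitcode, 'source version')
    'series/amd64/g/green/20150101_100201@': (0, 'green 2'),
}})
server.start()
# ... run britney, which lists the container and downloads result.tar files ...
server.stop()
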
@@ -13,12 +13,13 @@ import sys
import subprocess
import fileinput
import unittest
import json

PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, PROJECT_DIR)

from autopkgtest import ADT_EXCUSES_LABELS
from tests import TestBase
from tests import TestBase, mock_swift

NOT_CONSIDERED = False
VALID_CANDIDATE = True
@@ -34,12 +35,14 @@ class TestAutoPkgTest(TestBase):
        super(TestAutoPkgTest, self).setUp()
        self.fake_amqp = os.path.join(self.data.path, 'amqp')

        # Disable boottests and set fake AMQP server
        # Disable boottests and set fake AMQP and Swift server
        for line in fileinput.input(self.britney_conf, inplace=True):
            if line.startswith('BOOTTEST_ENABLE'):
                print('BOOTTEST_ENABLE = no')
            elif line.startswith('ADT_AMQP'):
                print('ADT_AMQP = file://%s' % self.fake_amqp)
            elif line.startswith('ADT_SWIFT_URL'):
                print('ADT_SWIFT_URL = http://localhost:18085')
            else:
                sys.stdout.write(line)

@@ -70,11 +73,22 @@ echo "$@" >> /%s/adt-britney.log ''' % self.data.path)
                      testsuite='specialtest')
        self.data.add('justdata', False, {'Architecture': 'all'})

        # create mock Swift server (but don't start it yet, as tests first need
        # to poke in results)
        self.swift = mock_swift.AutoPkgTestSwiftServer(port=18085)
        self.swift.set_results({})

    def tearDown(self):
        del self.swift

    def do_test(self, unstable_add, considered, excuses_expect=None, excuses_no_expect=None):
        for (pkg, fields, testsuite) in unstable_add:
            self.data.add(pkg, True, fields, True, testsuite)

        self.swift.start()
        (excuses, out) = self.run_britney()
        self.swift.stop()

        #print('-------\nexcuses: %s\n-----' % excuses)
        #print('-------\nout: %s\n-----' % out)
        #print('run:\n%s -c %s\n' % (self.britney, self.britney_conf))
@@ -105,6 +119,8 @@ echo "$@" >> /%s/adt-britney.log ''' % self.data.path)
        except IOError:
            self.pending_requests = None

        return out

    def test_multi_rdepends_with_tests_all_running(self):
        '''Multiple reverse dependencies with tests (all running)'''

@@ -113,7 +129,10 @@ echo "$@" >> /%s/adt-britney.log ''' % self.data.path)
            # FIXME: while we only submit requests through AMQP, but don't consider
            # their results, we don't expect this to hold back stuff.
            VALID_CANDIDATE,
            [r'\bgreen\b.*>1</a> to .*>2<'])
            [r'\bgreen\b.*>1</a> to .*>2<',
             r'autopkgtest for green 2: .*amd64.*in progress.*i386.*in progress',
             r'autopkgtest for lightgreen 1: .*amd64.*in progress.*i386.*in progress',
             r'autopkgtest for darkgreen 1: .*amd64.*in progress.*i386.*in progress'])

        # we expect the package's and its reverse dependencies' tests to get
        # triggered
@@ -121,8 +140,7 @@ echo "$@" >> /%s/adt-britney.log ''' % self.data.path)
            self.amqp_requests,
            set(['debci-series-i386:green', 'debci-series-amd64:green',
                 'debci-series-i386:lightgreen', 'debci-series-amd64:lightgreen',
                 'debci-series-i386:darkgreen', 'debci-series-amd64:darkgreen',
                 ]))
                 'debci-series-i386:darkgreen', 'debci-series-amd64:darkgreen']))
        os.unlink(self.fake_amqp)

        # ... and that they get recorded as pending
@@ -141,6 +159,108 @@ lightgreen 1 i386 green 2
        # but the set of pending tests doesn't change
        self.assertEqual(self.pending_requests, expected_pending)

    def test_multi_rdepends_with_tests_all_pass(self):
        '''Multiple reverse dependencies with tests (all pass)'''

        # first run requests tests and marks them as pending
        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'green', 'Depends': 'libc6'}, 'autopkgtest')],
            # FIXME: while we only submit requests through AMQP, but don't consider
            # their results, we don't expect this to hold back stuff.
            VALID_CANDIDATE,
            [r'\bgreen\b.*>1</a> to .*>2<',
             r'autopkgtest for green 2: .*amd64.*in progress.*i386.*in progress',
             r'autopkgtest for lightgreen 1: .*amd64.*in progress.*i386.*in progress',
             r'autopkgtest for darkgreen 1: .*amd64.*in progress.*i386.*in progress'])

        # second run collects the results
        self.swift.set_results({'autopkgtest-series': {
            'series/i386/d/darkgreen/20150101_100000@': (0, 'darkgreen 1'),
            'series/amd64/d/darkgreen/20150101_100001@': (0, 'darkgreen 1'),
            'series/i386/l/lightgreen/20150101_100100@': (0, 'lightgreen 1'),
            'series/amd64/l/lightgreen/20150101_100101@': (0, 'lightgreen 1'),
            # version in testing fails
            'series/i386/g/green/20150101_020000@': (4, 'green 1'),
            'series/amd64/g/green/20150101_020000@': (4, 'green 1'),
            # version in unstable succeeds
            'series/i386/g/green/20150101_100200@': (0, 'green 2'),
            'series/amd64/g/green/20150101_100201@': (0, 'green 2'),
        }})

        out = self.do_test(
            [],
            VALID_CANDIDATE,
            [r'\bgreen\b.*>1</a> to .*>2<',
             r'autopkgtest for green 2: .*amd64.*Pass.*i386.*Pass',
             r'autopkgtest for lightgreen 1: .*amd64.*Pass.*i386.*Pass',
             r'autopkgtest for darkgreen 1: .*amd64.*Pass.*i386.*Pass'])

        # all tests ran, there should be no more pending ones
        self.assertEqual(self.pending_requests, '')

        # not expecting any failures to retrieve from swift
        self.assertNotIn('Failure', out, out)

        # caches the results and triggers
        with open(os.path.join(self.data.path, 'data/series-proposed/autopkgtest/results.cache')) as f:
            res = json.load(f)
        self.assertEqual(res['green']['i386'],
                         ['20150101_100200@', {'1': [False, []],
                                               '2': [True, [['green', '2']]]}])
        self.assertEqual(res['lightgreen']['amd64'],
                         ['20150101_100101@', {'1': [True, [['green', '2']]]}])

        # third run should not trigger any new tests, should all be in the
        # cache
        os.unlink(self.fake_amqp)
        self.swift.set_results({})
        out = self.do_test(
            [],
            VALID_CANDIDATE,
            [r'\bgreen\b.*>1</a> to .*>2<',
             r'autopkgtest for green 2: .*amd64.*Pass.*i386.*Pass',
             r'autopkgtest for lightgreen 1: .*amd64.*Pass.*i386.*Pass',
             r'autopkgtest for darkgreen 1: .*amd64.*Pass.*i386.*Pass'])
        self.assertEqual(self.amqp_requests, set())
        self.assertEqual(self.pending_requests, '')
        self.assertNotIn('Failure', out, out)

    def test_multi_rdepends_with_tests_mixed(self):
        '''Multiple reverse dependencies with tests (mixed results)'''

        # first run requests tests and marks them as pending
        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'green', 'Depends': 'libc6'}, 'autopkgtest')],
            # FIXME: while we only submit requests through AMQP, but don't consider
            # their results, we don't expect this to hold back stuff.
            VALID_CANDIDATE,
            [r'\bgreen\b.*>1</a> to .*>2<',
             r'autopkgtest for green 2: .*amd64.*in progress.*i386.*in progress',
             r'autopkgtest for lightgreen 1: .*amd64.*in progress.*i386.*in progress',
             r'autopkgtest for darkgreen 1: .*amd64.*in progress.*i386.*in progress'])

        # second run collects the results
        self.swift.set_results({'autopkgtest-series': {
            'series/i386/d/darkgreen/20150101_100000@': (0, 'darkgreen 1'),
            'series/amd64/l/lightgreen/20150101_100101@': (4, 'lightgreen 1'),
            'series/i386/g/green/20150101_100200@': (0, 'green 2'),
            'series/amd64/g/green/20150101_100201@': (4, 'green 2'),
        }})

        self.do_test(
            [],
            # FIXME: while we only submit requests through AMQP, but don't consider
            # their results, we don't expect this to hold back stuff.
            VALID_CANDIDATE,
            [r'\bgreen\b.*>1</a> to .*>2<',
             r'autopkgtest for green 2: .*amd64.*Regression.*i386.*Pass',
             r'autopkgtest for lightgreen 1: .*amd64.*Regression.*i386.*in progress',
             r'autopkgtest for darkgreen 1: .*amd64.*in progress.*i386.*Pass'])

        # there should be some pending ones
        self.assertIn('darkgreen 1 amd64 green 2', self.pending_requests)
        self.assertIn('lightgreen 1 i386 green 2', self.pending_requests)

    def test_package_pair_running(self):
        '''Two packages in unstable that need to go in together (running)'''

@@ -159,8 +279,7 @@ lightgreen 1 i386 green 2
            self.amqp_requests,
            set(['debci-series-i386:green', 'debci-series-amd64:green',
                 'debci-series-i386:lightgreen', 'debci-series-amd64:lightgreen',
                 'debci-series-i386:darkgreen', 'debci-series-amd64:darkgreen',
                 ]))
                 'debci-series-i386:darkgreen', 'debci-series-amd64:darkgreen']))
        os.unlink(self.fake_amqp)

        # ... and that they get recorded as pending
@@ -180,7 +299,7 @@ lightgreen 2 i386 lightgreen 2

        # Disable AMQP server config
        for line in fileinput.input(self.britney_conf, inplace=True):
            if not line.startswith('ADT_AMQP'):
            if not line.startswith('ADT_AMQP') and not line.startswith('ADT_SWIFT_URL'):
                sys.stdout.write(line)

        self.do_test(