Reject packages if entire source ppa won't migrate

wip/linux-policy
Robert Bruce Park 8 years ago committed by Iain Lane
parent 8c3b9e5893
commit fea7d283a8

britney.py
@@ -217,6 +217,7 @@ from britney2.policies.policy import (AgePolicy,
LPBlockBugPolicy,
)
from britney2.policies.autopkgtest import AutopkgtestPolicy
from britney2.policies.sourceppa import SourcePPAPolicy
from britney2.utils import (log_and_format_old_libraries,
read_nuninst, write_nuninst, write_heidi,
format_and_log_uninst, newly_uninst,

britney2/policies/rest.py
@@ -0,0 +1,64 @@
import json
import socket
import urllib.request
import urllib.parse
from collections import defaultdict
from urllib.error import HTTPError
LAUNCHPAD_URL = "https://api.launchpad.net/1.0/"
class Rest:
"""Wrap common REST APIs with some retry logic."""
def query_rest_api(self, obj, query):
"""Do a REST request
Request <obj>?<query>.
Returns string received from web service.
Raises HTTPError, ValueError, or ConnectionError based on different
transient failures connecting.
"""
for retry in range(5):
url = "%s?%s" % (obj, urllib.parse.urlencode(query))
try:
with urllib.request.urlopen(url, timeout=30) as req:
code = req.getcode()
if 200 <= code < 300:
return req.read().decode("UTF-8")
raise ConnectionError(
"Failed to reach launchpad, HTTP %s" % code
)
except socket.timeout as e:
self.logger.info(
"Timeout downloading '%s', will retry %d more times."
% (url, 5 - retry - 1)
)
exc = e
except HTTPError as e:
if e.code not in (503, 502):
raise
self.logger.info(
"Caught error %d downloading '%s', will retry %d more times."
% (e.code, url, 5 - retry - 1)
)
exc = e
else:
raise exc
def query_lp_rest_api(self, obj, query):
"""Do a Launchpad REST request
Request <LAUNCHPAD_URL><obj>?<query>.
Returns dict of parsed json result from launchpad.
Raises HTTPError, ValueError, or ConnectionError based on different
transient failures connecting to launchpad.
"""
if not obj.startswith(LAUNCHPAD_URL):
obj = LAUNCHPAD_URL + obj
return json.loads(self.query_rest_api(obj, query))
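Illustrative only, not part of this commit: a minimal sketch of how a consumer is expected to use the Rest mixin, assuming it provides a self.logger for the retry messages. The ExampleConsumer name and logger setup are invented for illustration; the query mirrors the one SourcePPAPolicy.lp_get_source_ppa makes further down.

import logging

from britney2.policies.rest import Rest  # module added by this commit


class ExampleConsumer(Rest):
    def __init__(self):
        # Rest's retry logging expects self.logger on the consuming object.
        self.logger = logging.getLogger("example")

    def proposed_sources(self, pkg, version):
        # Same Launchpad operation used by SourcePPAPolicy below; returns the
        # parsed JSON dict from the primary archive's getPublishedSources.
        return self.query_lp_rest_api(
            "ubuntu/+archive/primary",
            {
                "ws.op": "getPublishedSources",
                "pocket": "Proposed",
                "source_name": pkg,
                "version": version,
                "exact_match": "true",
            },
        )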

britney2/policies/sourceppa.py
@@ -0,0 +1,182 @@
import os
import json
import socket
import urllib.request
import urllib.parse
from collections import defaultdict
from urllib.error import HTTPError
from britney2 import SuiteClass
from britney2.policies.rest import Rest
from britney2.policies.policy import BasePolicy, PolicyVerdict
LAUNCHPAD_URL = "https://api.launchpad.net/1.0/"
PRIMARY = LAUNCHPAD_URL + "ubuntu/+archive/primary"
INCLUDE = ["~bileto-ppa-service/", "~ci-train-ppa-service/"]
class SourcePPAPolicy(BasePolicy, Rest):
"""Migrate packages copied from same source PPA together
This policy will query launchpad to determine what source PPA packages
were copied from, and ensure that all packages from the same PPA migrate
together.
"""
def __init__(self, options, suite_info):
super().__init__(
"source-ppa", options, suite_info, {SuiteClass.PRIMARY_SOURCE_SUITE}
)
self.filename = os.path.join(options.unstable, "SourcePPA")
# Dict of dicts; maps pkg name -> pkg version -> source PPA URL
self.source_ppas_by_pkg = defaultdict(dict)
# Dict of sets; maps source PPA URL -> set of excuses collected so far
# for packages copied from that PPA
self.excuses_by_source_ppa = defaultdict(set)
self.source_ppa_info_by_source_ppa = defaultdict(set)
self.britney = None
# self.cache contains self.source_ppas_by_pkg from previous run
self.cache = {}
def lp_get_source_ppa(self, pkg, version):
"""Ask LP what source PPA pkg was copied from"""
cached = self.cache.get(pkg, {}).get(version)
if cached is not None:
return cached
data = self.query_lp_rest_api(
"%s/+archive/primary" % self.options.distribution,
{
"ws.op": "getPublishedSources",
"pocket": "Proposed",
"source_name": pkg,
"version": version,
"exact_match": "true",
"distro_series": "/%s/%s"
% (self.options.distribution, self.options.series),
},
)
try:
sourcepub = data["entries"][0]["self_link"]
# IndexError means no packages in -proposed matched this name/version,
# which is expected to happen when bileto runs britney.
except IndexError:
self.logger.info(
"SourcePPA getPackageUploads IndexError (%s %s)"
% (pkg, version)
)
return "IndexError"
data = self.query_lp_rest_api(
sourcepub, {"ws.op": "getPublishedBinaries"}
)
for binary in data["entries"]:
link = binary["build_link"] or ""
if "/+archive/" in link:
ppa, _, buildid = link.partition("/+build/")
return ppa
return ""
def initialise(self, britney):
"""Load cached source ppa data"""
super().initialise(britney)
self.britney = britney
if os.path.exists(self.filename):
with open(self.filename, encoding="utf-8") as data:
self.cache = json.load(data)
self.logger.info(
"Loaded cached source ppa data from %s", self.filename
)
def apply_src_policy_impl(
self,
sourceppa_info,
item,
source_data_tdist,
source_data_srcdist,
excuse,
):
"""Reject package if any other package copied from same PPA is invalid"""
source_name = item.package
accept = excuse.is_valid
version = source_data_srcdist.version
sourceppa = self.lp_get_source_ppa(source_name, version) or ""
verdict = excuse.policy_verdict
self.source_ppas_by_pkg[source_name][version] = sourceppa
if not [team for team in INCLUDE if team in sourceppa]:
return PolicyVerdict.PASS
# check for a force hint; we have to check here in addition to
# checking in britney.py, otherwise /this/ package will later be
# considered valid candidate but all the /others/ from the ppa will
# be invalidated via this policy and not fixed by the force hint.
forces = self.hints.search(
"force", package=source_name, version=source_data_srcdist.version
)
if forces:
excuse.dontinvalidate = True
changed_state = excuse.force()
if changed_state:
excuse.addhtml(
"Should ignore, but forced by %s" % (forces[0].user)
)
accept = True
shortppa = sourceppa.replace(LAUNCHPAD_URL, "")
sourceppa_info[source_name] = shortppa
if not excuse.is_valid:
self.logger.info(
"sourceppa: processing %s, which is invalid, will invalidate set",
source_name,
)
else:
# Check for other packages that might invalidate this one
for friend_exc in self.excuses_by_source_ppa[sourceppa]:
sourceppa_info[friend_exc.item.package] = shortppa
if not friend_exc.is_valid:
self.logger.info(
"sourceppa: processing %s, found invalid grouped package %s, will invalidate set"
% (source_name, friend_exc.name)
)
accept = False
break
self.excuses_by_source_ppa[sourceppa].add(excuse)
if not accept:
# Invalidate all packages in this source ppa
for friend_exc in self.excuses_by_source_ppa[sourceppa]:
self.logger.info("friend: %s", friend_exc.name)
sourceppa_info[friend_exc.item.package] = shortppa
if friend_exc.is_valid:
if friend_exc == excuse:
verdict = PolicyVerdict.REJECTED_WAITING_FOR_ANOTHER_ITEM
else:
friend_exc.policy_verdict = (
PolicyVerdict.REJECTED_WAITING_FOR_ANOTHER_ITEM
)
friend_exc.addreason("source-ppa")
self.logger.info(
"sourceppa: ... invalidating %s due to the above (ppa: %s), %s"
% (friend_exc.name, shortppa, sourceppa_info)
)
friend_exc.addinfo("Grouped with PPA %s" % shortppa)
for friend_exc in self.excuses_by_source_ppa[sourceppa]:
try:
friend_exc.policy_info["source-ppa"].update(sourceppa_info)
except KeyError:
friend_exc.policy_info["source-ppa"] = sourceppa_info.copy()
return verdict
def save_state(self, britney):
"""Write source ppa data to disk"""
tmp = self.filename + ".tmp"
with open(tmp, "w", encoding="utf-8") as data:
json.dump(self.source_ppas_by_pkg, data)
os.rename(tmp, self.filename)
self.logger.info("Wrote source ppa data to %s" % self.filename)

tests/data/sourceppa.json
@@ -0,0 +1,7 @@
{
"pal": {"2.0": "https://api.launchpad.net/1.0/~ci-train-ppa-service/+archive/NNNN"},
"buddy": {"2.0": "https://api.launchpad.net/1.0/~ci-train-ppa-service/+archive/NNNN"},
"friend": {"2.0": "https://api.launchpad.net/1.0/~ci-train-ppa-service/+archive/NNNN"},
"noppa": {"2.0": ""},
"unused": {"2.0": ""}
}
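For reference, not part of the commit: the fixture above uses the same on-disk format that SourcePPAPolicy.save_state writes and initialise reloads, i.e. source package -> version -> source PPA URL, with an empty string for packages that were not copied from a PPA. A minimal round-trip check, assuming it is run from the project root:

import json

with open("tests/data/sourceppa.json", encoding="utf-8") as f:
    cache = json.load(f)

# "pal" was copied from a ci-train PPA; "noppa" was not copied from any PPA.
assert cache["pal"]["2.0"].endswith("~ci-train-ppa-service/+archive/NNNN")
assert cache["noppa"]["2.0"] == ""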

tests/test_autopkgtest.py
@@ -2315,61 +2315,6 @@ class AT(TestAutopkgtestBase):
with open(shared_path) as f:
self.assertEqual(orig_contents, f.read())
################################################################
# Tests for source ppa grouping
################################################################
# # def test_sourceppa_policy(self):
# # '''Packages from same source PPA get rejected for failed peer policy'''
# #
# # self.data.add_default_packages(green=False)
# #
# # ppa = 'devel/~ci-train-ppa-service/+archive/NNNN'
# # self.sourceppa_cache['green'] = {'2': ppa}
# # self.sourceppa_cache['red'] = {'2': ppa}
# # with open(os.path.join(self.data.path, 'data/unstable/Blocks'), 'w') as f:
# # f.write('green 12345 1471505000\ndarkgreen 98765 1471500000\n')
# #
# # exc = self.run_it(
# # [('green', {'Version': '2'}, 'autopkgtest'),
# # ('red', {'Version': '2'}, 'autopkgtest'),
# # ('gcc-5', {}, 'autopkgtest')],
# # {'green': (False, {'green': {'i386': 'RUNNING-ALWAYSFAIL', 'amd64': 'RUNNING-ALWAYSFAIL'}}),
# # 'red': (False, {'red': {'i386': 'RUNNING-ALWAYSFAIL', 'amd64': 'RUNNING-ALWAYSFAIL'}}),
# # 'gcc-5': (True, {}),
# # },
# # {'green': [('reason', 'block')],
# # 'red': [('reason', 'source-ppa')]}
# # )[1]
# # self.assertEqual(exc['red']['policy_info']['source-ppa'], {'red': ppa, 'green': ppa})
# #
# # with open(os.path.join(self.data.path, 'data/unstable/SourcePPA')) as f:
# # res = json.load(f)
# # self.assertEqual(res, {'red': {'2': ppa},
# # 'green': {'2': ppa},
# # 'gcc-5': {'1': ''}})
# # def test_sourceppa_missingbuild(self):
# # '''Packages from same source PPA get rejected for failed peer FTBFS'''
# #
# # self.data.add_default_packages(green=False)
# #
# # ppa = 'devel/~ci-train-ppa-service/+archive/ZZZZ'
# # self.sourceppa_cache['green'] = {'2': ppa}
# # self.sourceppa_cache['red'] = {'2': ppa}
# #
# # self.data.add_src('green', True, {'Version': '2', 'Testsuite': 'autopkgtest'})
# # self.data.add('libgreen1', True, {'Version': '2', 'Source': 'green', 'Architecture': 'i386'}, add_src=False)
# # self.data.add('green', True, {'Version': '2', 'Source': 'green'}, add_src=False)
# #
# # exc = self.run_it(
# # [('red', {'Version': '2'}, 'autopkgtest')],
# # {'green': (False, {}), 'red': (False, {})},
# # {'green': [('missing-builds', {'on-architectures': ['amd64', 'arm64', 'armhf', 'powerpc', 'ppc64el'],
# # 'on-unimportant-architectures': []})],
# # 'red': [('reason', 'source-ppa')]}
# # )[1]
# # self.assertEqual(exc['red']['policy_info']['source-ppa'], {'red': ppa, 'green': ppa})
def test_swift_url_is_file(self):
'''Run without swift but with debci file (as Debian does)'''

tests/test_sourceppa.py
@@ -0,0 +1,404 @@
#!/usr/bin/python3
# (C) 2016 Canonical Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import json
import os
import sys
import unittest
from unittest.mock import DEFAULT, patch
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, PROJECT_DIR)
from britney2 import Suite, SuiteClass
from britney2.excuse import Excuse
from britney2.hints import HintCollection
from britney2.migrationitem import MigrationItem
from britney2.policies.policy import PolicyEngine, PolicyVerdict
from britney2.policies.sourceppa import LAUNCHPAD_URL, SourcePPAPolicy
# We want to reuse run_it
from tests.test_autopkgtest import TestAutopkgtestBase, tr
CACHE_FILE = os.path.join(PROJECT_DIR, "tests", "data", "sourceppa.json")
class FakeOptions:
distribution = "testbuntu"
series = "zazzy"
unstable = "/tmp"
verbose = False
class FakeExcuse(Excuse):
def __init__(self, name, suite):
self.item = MigrationItem(package=name, version="2.0", suite=suite)
Excuse.__init__(self, self.item)
self.policy_verdict = PolicyVerdict.PASS
SOURCE_SUITE = Suite(SuiteClass.PRIMARY_SOURCE_SUITE, "fakename", "fakepath")
PAL = FakeExcuse("pal", SOURCE_SUITE)
BUDDY = FakeExcuse("buddy", SOURCE_SUITE)
FRIEND = FakeExcuse("friend", SOURCE_SUITE)
NOPPA = FakeExcuse("noppa", SOURCE_SUITE)
class FakeBritney:
def __init__(self):
self._policy = SourcePPAPolicy(FakeOptions, {})
self._policy.filename = CACHE_FILE
self._policy_engine = PolicyEngine()
self._policy_engine.add_policy(self._policy)
self._policy_engine.initialise(self, HintCollection())
class FakeData:
version = "2.0"
class T(unittest.TestCase):
maxDiff = None
@patch("britney2.policies.sourceppa.urllib.request.urlopen")
def test_lp_rest_api_no_entries(self, urlopen):
"""Don't explode if LP reports no entries match pkg/version"""
context = urlopen.return_value.__enter__.return_value
context.getcode.return_value = 200
context.read.return_value = b'{"entries": []}'
pol = SourcePPAPolicy(FakeOptions, {})
self.assertEqual(pol.lp_get_source_ppa("hello", "1.0"), "IndexError")
@patch("britney2.policies.sourceppa.urllib.request.urlopen")
def test_lp_rest_api_no_source_ppa(self, urlopen):
"""Identify when package has no source PPA"""
context = urlopen.return_value.__enter__.return_value
context.getcode.return_value = 200
context.read.return_value = b'{"entries": [{"self_link": "https://api.launchpad.net/1.0/ubuntu/+archive/primary/+sourcepub/12345", "build_link": "https://api.launchpad.net/1.0/ubuntu/+source/gcc-5/5.4.1-7ubuntu1/+build/12066956", "other_stuff": "ignored"}]}'
pol = SourcePPAPolicy(FakeOptions, {})
self.assertEqual(pol.lp_get_source_ppa("hello", "1.0"), "")
@patch("britney2.policies.sourceppa.urllib.request.urlopen")
def test_lp_rest_api_with_source_ppa(self, urlopen):
"""Identify source PPA"""
context = urlopen.return_value.__enter__.return_value
context.getcode.return_value = 200
context.read.return_value = b'{"entries": [{"self_link": "https://api.launchpad.net/1.0/ubuntu/+archive/primary/+sourcepub/12345", "build_link": "https://api.launchpad.net/1.0/~ci-train-ppa-service/+archive/ubuntu/2516/+build/12063031", "other_stuff": "ignored"}]}'
pol = SourcePPAPolicy(FakeOptions, {})
self.assertEqual(
pol.lp_get_source_ppa("hello", "1.0"),
"https://api.launchpad.net/1.0/~ci-train-ppa-service/+archive/ubuntu/2516",
)
@patch("britney2.policies.sourceppa.urllib.request.urlopen")
def test_lp_rest_api_errors(self, urlopen):
"""Report errors instead of swallowing them"""
context = urlopen.return_value.__enter__.return_value
context.getcode.return_value = 500
context.read.return_value = b""
pol = SourcePPAPolicy(FakeOptions, {})
with self.assertRaisesRegex(ConnectionError, "HTTP 500"):
pol.lp_get_source_ppa("hello", "1.0")
# Yes, I have really seen "success with no json returned" in the wild
context.getcode.return_value = 200
context.read.return_value = b""
with self.assertRaisesRegex(ValueError, "Expecting value"):
pol.lp_get_source_ppa("hello", "1.0")
@patch("britney2.policies.sourceppa.urllib.request.urlopen")
def test_lp_rest_api_timeout(self, urlopen):
"""If we get a timeout connecting to LP, we try 5 times"""
import socket
# test that we're retried 5 times on timeout
urlopen.side_effect = socket.timeout
pol = SourcePPAPolicy(FakeOptions, {})
with self.assertRaises(socket.timeout):
pol.lp_get_source_ppa("hello", "1.0")
self.assertEqual(urlopen.call_count, 5)
@patch("britney2.policies.sourceppa.urllib.request.urlopen")
def test_lp_rest_api_unavailable(self, urlopen):
"""If we get a 503 connecting to LP, we try 5 times"""
from urllib.error import HTTPError
# test that we're retried 5 times on 503
urlopen.side_effect = HTTPError(
None, 503, "Service Temporarily Unavailable", None, None
)
pol = SourcePPAPolicy(FakeOptions, {})
with self.assertRaises(HTTPError):
pol.lp_get_source_ppa("hello", "1.0")
self.assertEqual(urlopen.call_count, 5)
@patch("britney2.policies.sourceppa.urllib.request.urlopen")
def test_lp_rest_api_flaky(self, urlopen):
"""If we get a 503, then a 200, we get the right result"""
from urllib.error import HTTPError
def fail_for_a_bit():
for i in range(3):
yield HTTPError(
None, 503, "Service Temporarily Unavailable", None, None
)
while True:
yield DEFAULT
context = urlopen.return_value.__enter__.return_value
context.getcode.return_value = 200
context.read.return_value = b'{"entries": [{"self_link": "https://api.launchpad.net/1.0/ubuntu/+archive/primary/+sourcepub/12345", "build_link": "https://api.launchpad.net/1.0/~ci-train-ppa-service/+archive/ubuntu/2516/+build/12063031", "other_stuff": "ignored"}]}'
urlopen.side_effect = fail_for_a_bit()
pol = SourcePPAPolicy(FakeOptions, {})
pol.lp_get_source_ppa("hello", "1.0")
self.assertEqual(urlopen.call_count, 5)
self.assertEqual(
pol.lp_get_source_ppa("hello", "1.0"),
"https://api.launchpad.net/1.0/~ci-train-ppa-service/+archive/ubuntu/2516",
)
def test_approve_ppa(self):
"""Approve packages by their PPA."""
shortppa = "~ci-train-ppa-service/+archive/NNNN"
brit = FakeBritney()
for excuse in (PAL, BUDDY, FRIEND, NOPPA):
brit._policy_engine.apply_src_policies(
excuse.item, FakeData, FakeData, excuse
)
self.assertEqual(excuse.policy_verdict, PolicyVerdict.PASS)
output = FRIEND.policy_info["source-ppa"]
self.assertDictContainsSubset(
dict(pal=shortppa, buddy=shortppa, friend=shortppa), output
)
def test_ignore_ppa(self):
"""Ignore packages in non-bileto PPAs."""
shortppa = "~kernel-or-whatever/+archive/ppa"
brit = FakeBritney()
for name, versions in brit._policy.cache.items():
for version in versions:
brit._policy.cache[name][version] = shortppa
for excuse in (PAL, BUDDY, FRIEND, NOPPA):
brit._policy_engine.apply_src_policies(
excuse.item, FakeData, FakeData, excuse
)
self.assertEqual(excuse.policy_verdict, PolicyVerdict.PASS)
output = FRIEND.policy_info["source-ppa"]
self.assertEqual(output, {"verdict": "PASS"})
def test_reject_ppa(self):
"""Reject packages by their PPA."""
shortppa = "~ci-train-ppa-service/+archive/NNNN"
brit = FakeBritney()
excuse = BUDDY
excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
# Just buddy is invalid but whole ppa fails
# This one passes because the rejection isn't known yet
excuse = PAL
brit._policy_engine.apply_src_policies(
excuse.item, FakeData, FakeData, excuse
)
self.assertEqual(excuse.policy_verdict, PolicyVerdict.PASS)
# This one fails because it is itself invalid.
excuse = BUDDY
brit._policy_engine.apply_src_policies(
excuse.item, FakeData, FakeData, excuse
)
self.assertEqual(
excuse.policy_verdict, PolicyVerdict.REJECTED_PERMANENTLY
)
# This one fails because buddy failed before it.
excuse = FRIEND
brit._policy_engine.apply_src_policies(
excuse.item, FakeData, FakeData, excuse
)
self.assertEqual(
excuse.policy_verdict,
PolicyVerdict.REJECTED_WAITING_FOR_ANOTHER_ITEM,
)
# 'noppa' not from PPA so not rejected
excuse = NOPPA
brit._policy_engine.apply_src_policies(
excuse.item, FakeData, FakeData, excuse
)
self.assertEqual(excuse.policy_verdict, PolicyVerdict.PASS)
# All are rejected however
for excuse in (PAL, BUDDY, FRIEND):
self.assertFalse(excuse.is_valid)
self.assertDictEqual(
brit._policy.excuses_by_source_ppa,
{LAUNCHPAD_URL + shortppa: {PAL, BUDDY, FRIEND}},
)
output = FRIEND.policy_info["source-ppa"]
self.assertDictEqual(
dict(
pal=shortppa,
buddy=shortppa,
friend=shortppa,
verdict="REJECTED_WAITING_FOR_ANOTHER_ITEM",
),
output,
)
output = BUDDY.policy_info["source-ppa"]
self.assertDictEqual(
dict(
pal=shortppa,
buddy=shortppa,
friend=shortppa,
verdict="REJECTED_PERMANENTLY"
),
output,
)
class AT(TestAutopkgtestBase):
""" Integration tests for source ppa grouping """
def test_sourceppa_policy(self):
"""Packages from same source PPA get rejected for failed peer policy"""
self.data.add_default_packages(green=False)
ppa = "devel/~ci-train-ppa-service/+archive/NNNN"
self.sourceppa_cache["green"] = {"2": ppa}
self.sourceppa_cache["red"] = {"2": ppa}
with open(
os.path.join(self.data.path, "data/unstable/Blocks"), "w"
) as f:
f.write("green 12345 1471505000\ndarkgreen 98765 1471500000\n")
exc = self.run_it(
[
("green", {"Version": "2"}, "autopkgtest"),
("red", {"Version": "2"}, "autopkgtest"),
("gcc-5", {}, "autopkgtest"),
],
{
"green": (
False,
{
"green": {
"i386": "RUNNING-ALWAYSFAIL",
"amd64": "RUNNING-ALWAYSFAIL",
}
},
),
"red": (
False,
{
"red": {
"i386": "RUNNING-ALWAYSFAIL",
"amd64": "RUNNING-ALWAYSFAIL",
}
},
),
"gcc-5": (True, {}),
},
{"green": [("reason", "block")], "red": [("reason", "source-ppa")]},
)[1]
self.assertEqual(
exc["red"]["policy_info"]["source-ppa"],
{
"red": ppa,
"green": ppa,
"verdict": "REJECTED_WAITING_FOR_ANOTHER_ITEM",
},
)
with open(os.path.join(self.data.path, "data/unstable/SourcePPA")) as f:
res = json.load(f)
self.assertEqual(
res,
{"red": {"2": ppa}, "green": {"2": ppa}, "gcc-5": {"1": ""}},
)
def test_sourceppa_missingbuild(self):
"""Packages from same source PPA get rejected for failed peer FTBFS"""
self.data.add_default_packages(green=False)
ppa = "devel/~ci-train-ppa-service/+archive/ZZZZ"
self.sourceppa_cache["green"] = {"2": ppa}
self.sourceppa_cache["red"] = {"2": ppa}
self.data.add_src(
"green", True, {"Version": "2", "Testsuite": "autopkgtest"}
)
self.data.add(
"libgreen1",
True,
{"Version": "2", "Source": "green", "Architecture": "i386"},
add_src=False,
)
self.data.add(
"green",
True,
{"Depends": "libc6 (>= 0.9), libgreen1", "Conflicts": "blue"},
testsuite="autopkgtest",
add_src=False,
)
self.swift.set_results(
{
"autopkgtest-testing": {
"testing/i386/d/darkgreen/20150101_100000@": (
0,
"darkgreen 1",
tr("green/2"),
),
"testing/i386/l/lightgreen/20150101_100100@": (
0,
"lightgreen 1",
tr("green/2"),
),
"testing/i386/g/green/20150101_100200@": (
0,
"green 2",
tr("green/2"),
),
}
}
)
exc = self.run_it(
[("red", {"Version": "2"}, "autopkgtest")],
{"green": (False, {}), "red": (False, {})},
{
"green": [
(
"missing-builds",
{
"on-architectures": [
"amd64",
"arm64",
"armhf",
"powerpc",
"ppc64el",
],
"on-unimportant-architectures": [],
},
)
],
"red": [("reason", "source-ppa")],
},
)[1]
self.assertEqual(
exc["red"]["policy_info"]["source-ppa"],
{
"red": ppa,
"green": ppa,
"verdict": "REJECTED_WAITING_FOR_ANOTHER_ITEM",
},
)
if __name__ == "__main__":
unittest.main()
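Usage note, not part of the commit: because the file inserts PROJECT_DIR into sys.path and ends with unittest.main(), the new suite can be run directly, for example with python3 tests/test_sourceppa.py from a checkout; running it via unittest discovery should also work, assuming tests/ is importable as a package.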