Performance improvements.

Branch: master
Author: Fabio Tranchitella, 18 years ago
Parent: b6b677620b
Commit: 1df9db222d
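
The hunks below apply two recurring micro-optimizations rather than any behavioural change: attribute and global lookups that sit inside hot loops are hoisted into local names once (get_field = Packages.Section.get, parse_depends = apt_pkg.ParseDepends, sources = self.sources), and dict.has_key() tests are rewritten as `k in d`. As a rough illustration of the first pattern, here is a minimal, self-contained timing sketch; it is not code from britney.py, and the paths and iteration counts are invented purely for the benchmark:

import timeit

setup = """
import os
paths = ['usr', 'lib', 'share', 'doc'] * 1000
"""

# attribute chain resolved again on every iteration
slow = """
out = []
for p in paths:
    out.append(os.path.join('/', p))
"""

# the pattern used in this commit: bind the callables to locals once, outside the loop
fast = """
out = []
join = os.path.join
append = out.append
for p in paths:
    append(join('/', p))
"""

print("lookup inside the loop:", min(timeit.repeat(slow, setup, number=200)))
print("hoisted local aliases :", min(timeit.repeat(fast, setup, number=200)))

On CPython a local-variable load is cheaper than a global plus attribute lookup on every pass, which is the same reasoning behind caching Packages.Section.get in the package-parsing loops below.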

@@ -314,13 +314,14 @@ class Britney:
         package = None
         filename = os.path.join(basedir, "Sources")
         self.__log("Loading source packages from %s" % filename)
-        packages = apt_pkg.ParseTagFile(open(filename))
-        while packages.Step():
-            pkg = packages.Section.get('Package')
+        Packages = apt_pkg.ParseTagFile(open(filename))
+        get_field = Packages.Section.get
+        while Packages.Step():
+            pkg = get_field('Package')
             sources[pkg] = {'binaries': [],
-                            'version': packages.Section.get('Version'),
-                            'maintainer': packages.Section.get('Maintainer'),
-                            'section': packages.Section.get('Section'),
+                            'version': get_field('Version'),
+                            'maintainer': get_field('Maintainer'),
+                            'section': get_field('Section'),
                             }
         return sources
@@ -350,41 +351,43 @@ class Britney:
         packages = {}
         provides = {}
+        sources = self.sources
         package = None
         filename = os.path.join(basedir, "Packages_%s" % arch)
         self.__log("Loading binary packages from %s" % filename)
         Packages = apt_pkg.ParseTagFile(open(filename))
+        get_field = Packages.Section.get
         while Packages.Step():
-            pkg = Packages.Section.get('Package')
-            version = Packages.Section.get('Version')
+            pkg = get_field('Package')
+            version = get_field('Version')
             dpkg = {'version': version,
                     'source': pkg,
                     'source-ver': version,
-                    'architecture': Packages.Section.get('Architecture'),
+                    'architecture': get_field('Architecture'),
                     'rdepends': [],
                     'rconflicts': [],
                     }
             for k in ('Pre-Depends', 'Depends', 'Provides', 'Conflicts'):
-                v = Packages.Section.get(k)
+                v = get_field(k)
                 if v: dpkg[k.lower()] = v
             # retrieve the name and the version of the source package
-            source = Packages.Section.get('Source')
+            source = get_field('Source')
             if source:
                 dpkg['source'] = source.split(" ")[0]
                 if "(" in source:
                     dpkg['source-ver'] = source.split("(")[1].split(")")[0]
             # if the source package is available in the distribution, then register this binary package
-            if dpkg['source'] in self.sources[distribution]:
-                self.sources[distribution][dpkg['source']]['binaries'].append(pkg + "/" + arch)
+            if dpkg['source'] in sources[distribution]:
+                sources[distribution][dpkg['source']]['binaries'].append(pkg + "/" + arch)
             # if the source package doesn't exist, create a fake one
             else:
-                self.sources[distribution][dpkg['source']] = {'binaries': [pkg + "/" + arch],
+                sources[distribution][dpkg['source']] = {'binaries': [pkg + "/" + arch],
                     'version': dpkg['source-ver'], 'maintainer': None, 'section': None, 'fake': True}
             # register virtual packages and real packages that provide them
-            if dpkg.has_key('provides'):
+            if 'provides' in dpkg:
                 parts = map(string.strip, dpkg['provides'].split(","))
                 for p in parts:
                     try:
@@ -397,12 +400,13 @@ class Britney:
             packages[pkg] = dpkg
         # loop again on the list of packages to register reverse dependencies
+        parse_depends = apt_pkg.ParseDepends
         for pkg in packages:
             dependencies = []
-            if packages[pkg].has_key('depends'):
-                dependencies.extend(apt_pkg.ParseDepends(packages[pkg]['depends']))
-            if packages[pkg].has_key('pre-depends'):
-                dependencies.extend(apt_pkg.ParseDepends(packages[pkg]['pre-depends']))
+            if 'depends' in packages[pkg]:
+                dependencies.extend(parse_depends(packages[pkg]['depends']))
+            if 'pre-depends' in packages[pkg]:
+                dependencies.extend(parse_depends(packages[pkg]['pre-depends']))
             # register the list of the dependencies for the depending packages
             for p in dependencies:
                 for a in p:
@@ -412,8 +416,8 @@ class Britney:
                         for i in provides[a[0]]:
                             packages[i]['rdepends'].append((pkg, a[1], a[2]))
             # register the list of the conflicts for the conflicting packages
-            if packages[pkg].has_key('conflicts'):
-                for p in apt_pkg.ParseDepends(packages[pkg]['conflicts']):
+            if 'conflicts' in packages[pkg]:
+                for p in parse_depends(packages[pkg]['conflicts']):
                     if a[0] in packages:
                         packages[a[0]]['rconflicts'].append((pkg, a[1], a[2]))
                     elif a[0] in provides:
@@ -456,10 +460,10 @@ class Britney:
         source package and its binary packages.
         """
         maxver = None
-        if self.sources[dist].has_key(pkg):
+        if pkg in self.sources[dist]:
             maxver = self.sources[dist][pkg]['version']
         for arch in self.options.architectures:
-            if not self.binaries[dist][arch][0].has_key(pkg): continue
+            if pkg not in self.binaries[dist][arch][0]: continue
             pkgv = self.binaries[dist][arch][0][pkg]['version']
             if maxver == None or apt_pkg.VersionCompare(pkgv, maxver) > 0:
                 maxver = pkgv
@@ -475,9 +479,9 @@ class Britney:
         for pkg in set(self.bugs['testing'].keys() + self.bugs['unstable'].keys()):
             # make sure that the key is present in both dictionaries
-            if not self.bugs['testing'].has_key(pkg):
+            if pkg not in self.bugs['testing']:
                 self.bugs['testing'][pkg] = 0
-            elif not self.bugs['unstable'].has_key(pkg):
+            elif pkg not in self.bugs['unstable']:
                 self.bugs['unstable'][pkg] = 0
             # retrieve the maximum version of the package in testing:
@@ -637,7 +641,7 @@ class Britney:
         for x in ["block", "block-all", "unblock", "force", "urgent", "remove"]:
             z = {}
             for a, b in hints[x]:
-                if z.has_key(a):
+                if a in z:
                     self.__log("Overriding %s[%s] = %s with %s" % (x, a, z[a], b), type="W")
                 z[a] = b
             hints[x] = z
@@ -714,7 +718,7 @@ class Britney:
             # loop on the list of packages which provides it
             for prov in self.binaries[distribution][arch][1][name]:
                 if prov in excluded or \
-                   not self.binaries[distribution][arch][0].has_key(prov): continue
+                   prov not in self.binaries[distribution][arch][0]: continue
                 package = self.binaries[distribution][arch][0][prov]
                 # check the versioned dependency (if present)
                 # TODO: this is forbidden by the debian policy, which says that versioned
@@ -743,7 +747,7 @@ class Britney:
         # analyze the dependency fields (if present)
         for type in ('Pre-Depends', 'Depends'):
             type_key = type.lower()
-            if not binary_u.has_key(type_key):
+            if type_key not in binary_u:
                 continue
             # for every block of dependency (which is formed as conjunction of disconjunction)
@@ -783,13 +787,13 @@ class Britney:
     def check_conflicts(self, pkg, arch, system, conflicts):
         # if we are talking about a virtual package, skip it
-        if not self.binaries['testing'][arch][0].has_key(pkg):
+        if pkg not in self.binaries['testing'][arch][0]:
             return True
         binary_u = self.binaries['testing'][arch][0][pkg]
         # check the conflicts
-        if conflicts.has_key(pkg):
+        if pkg in conflicts:
             name, version, op = conflicts[pkg]
             if op == '' and version == '' or apt_pkg.CheckDep(binary_u['version'], op, version):
                 return False
@@ -799,7 +803,7 @@ class Britney:
         system.append(pkg)
         # register conflicts
-        if binary_u.has_key('conflicts'):
+        if 'conflicts' in binary_u:
             for block in map(operator.itemgetter(0), apt_pkg.ParseDepends(binary_u['conflicts'])):
                 if block[0] != pkg and block[0] in system:
                     name, version, op = block
@@ -812,7 +816,7 @@ class Britney:
         dependencies = []
         for type in ('Pre-Depends', 'Depends'):
             type_key = type.lower()
-            if not binary_u.has_key(type_key): continue
+            if type_key not in binary_u: continue
             dependencies.extend(apt_pkg.ParseDepends(binary_u[type_key]))
         # go through them
@@ -843,7 +847,7 @@ class Britney:
         attribute excuses.
         """
         # if the soruce package is available in unstable, then do nothing
-        if self.sources['unstable'].has_key(pkg):
+        if pkg in self.sources['unstable']:
             return False
         # otherwise, add a new excuse for its removal and return True
         src = self.sources['testing'][pkg]
@@ -881,7 +885,7 @@ class Britney:
         # if there is a `remove' hint and the requested version is the same of the
         # version in testing, then stop here and return False
-        if self.hints["remove"].has_key(src) and \
+        if src in self.hints["remove"] and \
            self.same_source(source_t['version'], self.hints["remove"][src][0]):
             excuse.addhtml("Removal request by %s" % (self.hints["remove"][src][1]))
             excuse.addhtml("Trying to remove package, not update it")
@@ -941,7 +945,7 @@ class Britney:
         # if there is nothing wrong and there is something worth doing or the source
         # package is not fake, then check what packages shuold be removed
-        if not anywrongver and (anyworthdoing or self.sources[suite][src].has_key('fake')):
+        if not anywrongver and (anyworthdoing or 'fake' in self.sources[suite][src]):
             srcv = self.sources[suite][src]['version']
             ssrc = self.same_source(source_t['version'], srcv)
             # for every binary package produced by this source in testing for this architecture
@@ -951,7 +955,7 @@ class Britney:
                     excuse.addhtml("Ignoring removal of %s as it is arch: all" % (pkg))
                     continue
                 # if the package is not produced by the new source package, then remove it from testing
-                if not self.binaries[suite][arch][0].has_key(pkg):
+                if pkg not in self.binaries[suite][arch][0]:
                     tpkgv = self.binaries['testing'][arch][0][pkg]['version']
                     excuse.addhtml("Removed binary: %s %s" % (pkg, tpkgv))
                     if ssrc: anyworthdoing = True
@@ -1008,7 +1012,7 @@ class Britney:
             return False
         # check if the source package really exists or if it is a fake one
-        if source_u.has_key('fake'):
+        if 'fake' in source_u:
             excuse.addhtml("%s source package doesn't exist" % (src))
             update_candidate = False
@@ -1020,7 +1024,7 @@ class Britney:
         # if there is a `remove' hint and the requested version is the same of the
         # version in testing, then stop here and return False
-        if self.hints["remove"].has_key(src):
+        if src in self.hints["remove"]:
             if source_t and self.same_source(source_t['version'], self.hints['remove'][src][0]) or \
                self.same_source(source_u['version'], self.hints['remove'][src][0]):
                 excuse.addhtml("Removal request by %s" % (self.hints["remove"][src][1]))
@@ -1029,9 +1033,9 @@ class Britney:
         # check if there is a `block' hint for this package or a `block-all source' hint
         blocked = None
-        if self.hints["block"].has_key(src):
+        if src in self.hints["block"]:
             blocked = self.hints["block"][src]
-        elif self.hints["block-all"].has_key("source"):
+        elif 'source' in self.hints["block-all"]:
             blocked = self.hints["block-all"]["source"]
         # if the source is blocked, then look for an `unblock' hint; the unblock request
@@ -1051,7 +1055,7 @@ class Britney:
         # permanence in unstable before updating testing; if the source package is too young,
         # the check fails and we set update_candidate to False to block the update
         if suite == 'unstable':
-            if not self.dates.has_key(src):
+            if src not in self.dates:
                 self.dates[src] = (source_u['version'], self.date_now)
             elif not self.same_source(self.dates[src][0], source_u['version']):
                 self.dates[src] = (source_u['version'], self.date_now)
@@ -1060,7 +1064,7 @@ class Britney:
             min_days = self.MINDAYS[urgency]
             excuse.setdaysold(days_old, min_days)
             if days_old < min_days:
-                if self.hints["urgent"].has_key(src) and self.same_source(source_u['version'], self.hints["urgent"][src][0]):
+                if src in self.hints["urgent"] and self.same_source(source_u['version'], self.hints["urgent"][src][0]):
                     excuse.addhtml("Too young, but urgency pushed by %s" % (self.hints["urgent"][src][1]))
                 else:
                     update_candidate = False
@@ -1072,7 +1076,7 @@ class Britney:
             oodbins = {}
             # for every binary package produced by this source in the suite for this architecture
             for pkg in sorted([x.split("/")[0] for x in self.sources[suite][src]['binaries'] if x.endswith("/"+arch)]):
-                if not pkgs.has_key(pkg): pkgs[pkg] = []
+                if pkg not in pkgs: pkgs[pkg] = []
                 pkgs[pkg].append(arch)
                 # retrieve the binary package and its source version
@@ -1081,7 +1085,7 @@ class Britney:
                 # if it wasn't builded by the same source, it is out-of-date
                 if not self.same_source(source_u['version'], pkgsv):
-                    if not oodbins.has_key(pkgsv):
+                    if pkgsv not in oodbins:
                         oodbins[pkgsv] = []
                     oodbins[pkgsv].append(pkg)
                     continue
@@ -1123,9 +1127,9 @@ class Britney:
         # one, the check fails and we set update_candidate to False to block the update
         if suite == 'unstable':
             for pkg in pkgs.keys():
-                if not self.bugs['testing'].has_key(pkg):
+                if pkg not in self.bugs['testing']:
                     self.bugs['testing'][pkg] = 0
-                if not self.bugs['unstable'].has_key(pkg):
+                if pkg not in self.bugs['unstable']:
                     self.bugs['unstable'][pkg] = 0
                 if self.bugs['unstable'][pkg] > self.bugs['testing'][pkg]:
@@ -1141,7 +1145,7 @@ class Britney:
                     (pkg, ", ".join(pkgs[pkg]), pkg, self.bugs['unstable'][pkg], self.bugs['testing'][pkg]))
         # check if there is a `force' hint for this package, which allows it to go in even if it is not updateable
-        if not update_candidate and self.hints["force"].has_key(src) and \
+        if not update_candidate and src in self.hints["force"] and \
            self.same_source(source_u['version'], self.hints["force"][src][0]):
             excuse.dontinvalidate = 1
             excuse.addhtml("Should ignore, but forced by %s" % (self.hints["force"][src][1]))
@@ -1149,8 +1153,9 @@ class Britney:
         # if the suite is testing-proposed-updates, the package needs an explicit approval in order to go in
         if suite == "tpu":
-            if self.approvals.has_key("%s_%s" % (src, source_u['version'])):
-                excuse.addhtml("Approved by %s" % approvals["%s_%s" % (src, source_u['version'])])
+            key = "%s_%s" % (src, source_u['version'])
+            if key in self.approvals:
+                excuse.addhtml("Approved by %s" % approvals[key])
             else:
                 excuse.addhtml("NEEDS APPROVAL BY RM")
                 update_candidate = False
@@ -1174,7 +1179,7 @@ class Britney:
         res = {}
         for exc in self.excuses:
             for d in exc.deps:
-                if not res.has_key(d): res[d] = []
+                if d not in res: res[d] = []
                 res[d].append(exc.name)
         return res
@@ -1197,7 +1202,7 @@ class Britney:
         i = 0
         while i < len(invalid):
             # if there is no reverse dependency, skip the item
-            if not revdeps.has_key(invalid[i]):
+            if invalid[i] not in revdeps:
                 i += 1
                 continue
             # if there dependency can be satisfied by a testing-proposed-updates excuse, skip the item
@@ -1243,7 +1248,7 @@ class Britney:
         for pkg in self.sources['unstable']:
             # if the source package is already present in testing,
             # check if it should be upgraded for every binary package
-            if self.sources['testing'].has_key(pkg):
+            if pkg in self.sources['testing']:
                 for arch in self.options.architectures:
                     if self.should_upgrade_srcarch(pkg, arch, 'unstable'):
                         upgrade_me.append("%s/%s" % (pkg, arch))
@@ -1256,7 +1261,7 @@ class Britney:
         for pkg in self.sources['tpu']:
             # if the source package is already present in testing,
             # check if it should be upgraded for every binary package
-            if self.sources['testing'].has_key(pkg):
+            if pkg in self.sources['testing']:
                 for arch in self.options.architectures:
                     if self.should_upgrade_srcarch(pkg, arch, 'tpu'):
                         upgrade_me.append("%s/%s_tpu" % (pkg, arch))
@@ -1269,7 +1274,7 @@ class Britney:
         for src in self.hints["remove"].keys():
             if src in upgrade_me: continue
             if ("-"+src) in upgrade_me: continue
-            if not self.sources['testing'].has_key(src): continue
+            if src not in self.sources['testing']: continue
             # check if the version specified in the hint is the same of the considered package
             tsrcv = self.sources['testing'][src]['version']
@@ -1326,7 +1331,7 @@ class Britney:
     def newlyuninst(self, nuold, nunew):
         res = {}
         for arch in self.options.architectures:
-            if not nuold.has_key(arch) or not nunew.has_key(arch):
+            if arch not in nuold or arch not in nunew:
                 continue
             res[arch] = \
                 self.slist_subtract(nunew[arch], nuold[arch])
@@ -1367,9 +1372,9 @@ class Britney:
         total = 0
         totalbreak = 0
         for arch in self.options.architectures:
-            if nuninst.has_key(arch):
+            if arch in nuninst:
                 n = len(nuninst[arch])
-            elif original and original.has_key(arch):
+            elif original and arch in original:
                 n = len(original[arch])
             else: continue
             if arch in self.options.break_arches:
@@ -1382,7 +1387,7 @@ class Britney:
     def eval_uninst(self, nuninst):
         res = ""
         for arch in self.options.architectures:
-            if nuninst.has_key(arch) and nuninst[arch] != []:
+            if arch in nuninst and nuninst[arch] != []:
                 res = res + " * %s: %s\n" % (arch,
                     ", ".join(nuninst[arch]))
         return res
@@ -1429,7 +1434,7 @@ class Britney:
         # single architecture update (eg. binNMU)
         else:
-            if self.binaries['testing'][arch][0].has_key(pkg_name):
+            if pkg_name in self.binaries['testing'][arch][0]:
                 for j in self.binaries['testing'][arch][0][pkg_name]['rdepends']:
                     key = (j[0], arch)
                     if key not in affected: affected.append(key)
@@ -1498,7 +1503,7 @@ class Britney:
             while len(broken) > l and not (l == 0 and l == len(broken)):
                 l = len(broken)
                 for p in filter(lambda x: x[1] == arch, affected):
-                    if not self.binaries['testing'][arch][0].has_key(p[0]) or \
+                    if p[0] not in self.binaries['testing'][arch][0] or \
                        skip_archall and self.binaries['testing'][arch][0][p[0]]['architecture'] == 'all': continue
                     r = self.excuse_unsat_deps(p[0], None, arch, 'testing', None, excluded=broken, conflicts=True)
                     if not r and p[0] not in broken: broken.append(p[0])
@@ -1510,7 +1515,7 @@ class Britney:
                 l = len(broken)
                 for j in broken:
                     for p in self.binaries['testing'][arch][0][j]['rdepends']:
-                        if not self.binaries['testing'][arch][0].has_key(p[0]) or \
+                        if p[0] not in self.binaries['testing'][arch][0] or \
                            skip_archall and self.binaries['testing'][arch][0][p[0]]['architecture'] == 'all': continue
                         r = self.excuse_unsat_deps(p[0], None, arch, 'testing', None, excluded=broken, conflicts=True)
                         if not r and p[0] not in broken: broken.append(p[0])
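
The other change repeated through almost every hunk is the membership test itself: d.has_key(k) becomes k in d. The `in` form avoids a Python-level method call and is also the only spelling that survives beyond Python 2, where has_key() was eventually removed. A small, self-contained timing sketch follows; the dictionary contents are invented, and the has_key() branch is guarded so the snippet still runs on interpreters that no longer provide it:

import timeit

setup = "d = dict.fromkeys(range(10000)); k = 9999"

print("k in d       :", timeit.timeit("k in d", setup=setup, number=1000000))

# dict.has_key() exists only on Python 2, so probe for it instead of assuming it.
if hasattr(dict, "has_key"):
    print("d.has_key(k) :", timeit.timeit("d.has_key(k)", setup=setup, number=1000000))
else:
    print("d.has_key(k) : not available on this interpreter (removed in Python 3)")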
