import os
import re
import sys
import string
import time
import optparse

import apt_pkg

from excuse import Excuse

__author__ = 'Fabio Tranchitella'
__version__ = '2.0.alpha1'


class Britney:
    """Britney, the Debian testing updater script

    This is the script that updates the testing distribution. It is executed
    each day after the installation of the updated packages. It generates the
    `Packages' files for the testing distribution, but it does so in an
    intelligent manner; it tries to avoid any inconsistency and to use only
    non-buggy packages.

    For more documentation on this script, please read the Developers Reference.
    """

    HINTS_STANDARD = ("easy", "hint", "remove", "block", "unblock", "urgent", "approve")
    HINTS_ALL = ("force", "force-hint", "block-all") + HINTS_STANDARD

    def __init__(self):
        """Class constructor

        This method initializes and populates the data lists, which contain all
        the information needed by the other methods of the class.
        """
        self.date_now = int(((time.time() / (60*60)) - 15) / 24)

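        # parse the command line arguments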
        self.__parse_arguments()

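        # initialize the apt_pkg back-end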
        apt_pkg.init()

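        # read the source and binary packages for the involved distributions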
        self.sources = {'testing': self.read_sources(self.options.testing),
                        'unstable': self.read_sources(self.options.unstable),
                        'tpu': self.read_sources(self.options.tpu),}
        self.binaries = {'testing': {}, 'unstable': {}, 'tpu': {}}
        for arch in self.options.architectures:
            self.binaries['testing'][arch] = self.read_binaries(self.options.testing, "testing", arch)
            self.binaries['unstable'][arch] = self.read_binaries(self.options.unstable, "unstable", arch)
            self.binaries['tpu'][arch] = self.read_binaries(self.options.tpu, "tpu", arch)

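        # read the release-critical bug summaries for testing and unstable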
        self.bugs = {'unstable': self.read_bugs(self.options.unstable),
                     'testing': self.read_bugs(self.options.testing),}
        self.normalize_bugs()

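        # read the additional data: upload dates, urgencies, approvals and hints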
        self.dates = self.read_dates(self.options.testing)
        self.urgencies = self.read_urgencies(self.options.testing)
        self.approvals = self.read_approvals(self.options.tpu)
        self.hints = self.read_hints(self.options.unstable)
        self.excuses = []

    def __parse_arguments(self):
        """Parse the command line arguments

        This method parses and initializes the command line arguments.
        While doing so, it preprocesses some of the options, converting them
        into a form suitable for the other methods of the class.
        """
        self.parser = optparse.OptionParser(version="%prog")
        self.parser.add_option("-v", "", action="count", dest="verbose", help="enable verbose output")
        self.parser.add_option("-c", "--config", action="store", dest="config",
                               default="/etc/britney.conf", help="path for the configuration file")
        (self.options, self.args) = self.parser.parse_args()

        if not os.path.isfile(self.options.config):
            self.__log("Unable to read the configuration file (%s), exiting!" % self.options.config, type="E")
            sys.exit(1)

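        # minimum days for unstable-to-testing transitions and the lists of
        # hints allowed per user are handled as special cases; every other
        # key/value pair from the configuration file becomes an option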
        self.MINDAYS = {}
        self.HINTS = {}
        for k, v in [map(string.strip,r.split('=', 1)) for r in file(self.options.config) if '=' in r and not r.strip().startswith('#')]:
            if k.startswith("MINDAYS_"):
                self.MINDAYS[k.split("_")[1].lower()] = int(v)
            elif k.startswith("HINTS_"):
                self.HINTS[k.split("_")[1].lower()] = \
                    reduce(lambda x,y: x+y, [hasattr(self, "HINTS_" + i) and getattr(self, "HINTS_" + i) or (i,) for i in v.split()])
            else:
                setattr(self.options, k.lower(), v)

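        # sort the architecture list: nobreakall architectures first, then the
        # ones which are neither fucked nor break, and the remaining ones last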
        allarches = sorted(self.options.architectures.split())
        arches = [x for x in allarches if x in self.options.nobreakall_arches]
        arches += [x for x in allarches if x not in arches and x not in self.options.fucked_arches]
        arches += [x for x in allarches if x not in arches and x not in self.options.break_arches]
        arches += [x for x in allarches if x not in arches]
        self.options.architectures = arches

    def __log(self, msg, type="I"):
        """Print info messages according to verbosity level

        An easy-and-simple log method which prints messages to the standard
        output. The type parameter controls the urgency of the message, and
        can be equal to `I' for `Information', `W' for `Warning' and `E' for
        `Error'. Warnings and errors are always printed, while informational
        messages are printed only if verbose logging is enabled.
        """
        if self.options.verbose or type in ("E", "W"):
            print "%s: [%s] - %s" % (type, time.asctime(), msg)

    def read_sources(self, basedir):
        """Read the list of source packages from the specified directory

        The source packages are read from the `Sources' file within the
        directory specified as `basedir' parameter. Considering the
        large amount of memory needed, not all the fields are loaded
        in memory. The available fields are Version, Maintainer and Section.

        The method returns a dictionary where the key is the name of the
        source package and the value is a dictionary with its fields.
        """
        sources = {}
        package = None
        filename = os.path.join(basedir, "Sources")
        self.__log("Loading source packages from %s" % filename)
        packages = apt_pkg.ParseTagFile(open(filename))
        while packages.Step():
            pkg = packages.Section.get('Package')
            sources[pkg] = {'binaries': [],
                            'version': packages.Section.get('Version'),
                            'maintainer': packages.Section.get('Maintainer'),
                            'section': packages.Section.get('Section'),
                            }
        return sources

    def read_binaries(self, basedir, distribution, arch):
        """Read the list of binary packages from the specified directory

        The binary packages are read from the `Packages_${arch}' files
        within the directory specified as `basedir' parameter, replacing
        ${arch} with the value of the arch parameter. Considering the
        large amount of memory needed, not all the fields are loaded
        in memory. The available fields are Version, Source, Pre-Depends,
        Depends, Conflicts, Provides and Architecture.

        After reading the packages, reverse dependencies are computed
        and saved in the `rdepends' keys, and the `Provides' field is
        used to populate the virtual packages list.

        The dependencies are parsed with the apt_pkg.ParseDepends method,
        and they are stored both in the format of its return value and
        as plain text.

        The method returns a tuple. The first element is a dictionary
        where the key is the name of the binary package and the value is
        a dictionary with its fields; the second element is a dictionary
        which maps virtual packages to the real packages that provide them.
        """

        packages = {}
        provides = {}
        package = None
        filename = os.path.join(basedir, "Packages_%s" % arch)
        self.__log("Loading binary packages from %s" % filename)
        Packages = apt_pkg.ParseTagFile(open(filename))
        while Packages.Step():
            pkg = Packages.Section.get('Package')
            version = Packages.Section.get('Version')
            dpkg = {'rdepends': [],
                    'version': version,
                    'source': pkg,
                    'source-ver': version,
                    'pre-depends': Packages.Section.get('Pre-Depends'),
                    'depends': Packages.Section.get('Depends'),
                    'conflicts': Packages.Section.get('Conflicts'),
                    'provides': Packages.Section.get('Provides'),
                    'architecture': Packages.Section.get('Architecture'),
                    }

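            # retrieve the name and the version of the source package, if the
            # binary package was built from a different source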
            source = Packages.Section.get('Source')
            if source:
                dpkg['source'] = source.split(" ")[0]
                if "(" in source:
                    dpkg['source-ver'] = source.split("(")[1].split(")")[0]

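            # if the source package is already known, add the binary to its
            # list of binaries; otherwise, create a fake source record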
            if dpkg['source'] in self.sources[distribution]:
                self.sources[distribution][dpkg['source']]['binaries'].append(pkg + "/" + arch)
            else:
                self.sources[distribution][dpkg['source']] = {'binaries': [pkg + "/" + arch],
                    'version': dpkg['source-ver'], 'maintainer': None, 'section': None, 'fake': True}

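            # register the virtual packages and the real packages that provide them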
            if dpkg['provides']:
                parts = map(string.strip, dpkg['provides'].split(","))
                for p in parts:
                    try:
                        provides[p].append(pkg)
                    except KeyError:
                        provides[p] = [pkg]
            del dpkg['provides']

            packages[pkg] = dpkg

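        # loop again on the list of packages to parse the dependency fields
        # and to register the reverse dependencies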
        for pkg in packages:
            dependencies = []

            if packages[pkg]['depends']:
                packages[pkg]['depends-txt'] = packages[pkg]['depends']
                packages[pkg]['depends'] = apt_pkg.ParseDepends(packages[pkg]['depends'])
                dependencies.extend(packages[pkg]['depends'])

            if packages[pkg]['pre-depends']:
                packages[pkg]['pre-depends-txt'] = packages[pkg]['pre-depends']
                packages[pkg]['pre-depends'] = apt_pkg.ParseDepends(packages[pkg]['pre-depends'])
                dependencies.extend(packages[pkg]['pre-depends'])

            for p in dependencies:
                for a in p:
                    if a[0] not in packages: continue
                    packages[a[0]]['rdepends'].append((pkg, a[1], a[2]))

        return (packages, provides)

    def read_bugs(self, basedir):
        """Read the release-critical bug summaries from the specified directory

        The RC bug summaries are read from the `Bugs' file within the
        directory specified as `basedir' parameter. The file contains
        rows with the format:

        <package-name> <count-of-rc-bugs>

        The method returns a dictionary where the key is the binary package
        name and the value is the number of open RC bugs for it.
        """
        bugs = {}
        filename = os.path.join(basedir, "Bugs")
        self.__log("Loading RC bugs count from %s" % filename)
        for line in open(filename):
            l = line.strip().split()
            if len(l) != 2: continue
            try:
                bugs[l[0]] = int(l[1])
            except ValueError:
                self.__log("Bugs, unable to parse \"%s\"" % line, type="E")
        return bugs

    def __maxver(self, pkg, dist):
        """Return the maximum version for a given package name

        This method returns None if the specified source package
        is not available in the `dist' distribution. If the package
        exists, then it returns the maximum version between the
        source package and its binary packages.
        """
        maxver = None
        if self.sources[dist].has_key(pkg):
            maxver = self.sources[dist][pkg]['version']
        for arch in self.options.architectures:
            if not self.binaries[dist][arch][0].has_key(pkg): continue
            pkgv = self.binaries[dist][arch][0][pkg]['version']
            if maxver == None or apt_pkg.VersionCompare(pkgv, maxver) > 0:
                maxver = pkgv
        return maxver

    def normalize_bugs(self):
        """Normalize the release critical bug summaries for testing and unstable

        The method doesn't return any value: it directly modifies the
        object attribute `bugs'.
        """
        for pkg in set(self.bugs['testing'].keys() + self.bugs['unstable'].keys()):

            if not self.bugs['testing'].has_key(pkg):
                self.bugs['testing'][pkg] = 0
            elif not self.bugs['unstable'].has_key(pkg):
                self.bugs['unstable'][pkg] = 0

            maxvert = self.__maxver(pkg, 'testing')

            if maxvert == None or \
               self.bugs['testing'][pkg] == self.bugs['unstable'][pkg]:
                continue

            maxveru = self.__maxver(pkg, 'unstable')

            if maxveru == None:
                continue
            elif apt_pkg.VersionCompare(maxvert, maxveru) >= 0:
                self.bugs['testing'][pkg] = self.bugs['unstable'][pkg]

    def read_dates(self, basedir):
        """Read the upload date for the packages from the specified directory

        The upload dates are read from the `Dates' file within the directory
        specified as `basedir' parameter. The file contains rows with the
        format:

        <package-name> <version> <date-of-upload>

        The dates are expressed as the number of days since 1970-01-01.

        The method returns a dictionary where the key is the source package
        name and the value is a tuple with two items, the version and the date.
        """
        dates = {}
        filename = os.path.join(basedir, "Dates")
        self.__log("Loading upload data from %s" % filename)
        for line in open(filename):
            l = line.strip().split()
            if len(l) != 3: continue
            try:
                dates[l[0]] = (l[1], int(l[2]))
            except ValueError:
                self.__log("Dates, unable to parse \"%s\"" % line, type="E")
        return dates

    def read_urgencies(self, basedir):
        """Read the upload urgency of the packages from the specified directory

        The upload urgencies are read from the `Urgency' file within the
        directory specified as `basedir' parameter. The file contains rows
        with the format:

        <package-name> <version> <urgency>

        The method returns a dictionary where the key is the source package
        name and the value is the greatest urgency among the versions of the
        package that are higher than the one in testing.
        """
        urgencies = {}
        filename = os.path.join(basedir, "Urgency")
        self.__log("Loading upload urgencies from %s" % filename)
        for line in open(filename):
            l = line.strip().split()
            if len(l) != 3: continue

            urgency_old = urgencies.get(l[0], self.options.default_urgency)
            mindays_old = self.MINDAYS.get(urgency_old, self.MINDAYS[self.options.default_urgency])
            mindays_new = self.MINDAYS.get(l[2], self.MINDAYS[self.options.default_urgency])

            if mindays_old <= mindays_new:
                continue

            tsrcv = self.sources['testing'].get(l[0], None)
            if tsrcv and apt_pkg.VersionCompare(tsrcv['version'], l[1]) >= 0:
                continue

            usrcv = self.sources['unstable'].get(l[0], None)
            if not usrcv or apt_pkg.VersionCompare(usrcv['version'], l[1]) < 0:
                continue

            urgencies[l[0]] = l[2]

        return urgencies

    def read_approvals(self, basedir):
        """Read the approval commands from the specified directory

        The approval commands are read from the files contained in the
        `Approved' directory within the directory specified as `basedir'
        parameter. The names of the files have to match the names of the
        users authorized to give approvals.

        Each file contains rows with the format:

        <package-name> <version>

        The method returns a dictionary where the key is the source package
        name followed by an underscore and the version number, and the value
        is the user who submitted the command.
        """
        approvals = {}
        for approver in self.options.approvers.split():
            filename = os.path.join(basedir, "Approved", approver)
            self.__log("Loading approvals list from %s" % filename)
            for line in open(filename):
                l = line.strip().split()
                if len(l) != 2: continue
                approvals["%s_%s" % (l[0], l[1])] = approver
        return approvals

    def read_hints(self, basedir):
        """Read the hint commands from the specified directory

        The hint commands are read from the files contained in the `Hints'
        directory within the directory specified as `basedir' parameter.
        The names of the files have to match the names of the users
        authorized to provide hints.

        Each file contains rows with the format:

        <command> <package-name>[/<version>]

        The method returns a dictionary where the key is the command, and
        the value is the list of affected packages.
        """
        hints = dict([(k,[]) for k in self.HINTS_ALL])

        for who in self.HINTS.keys():
            filename = os.path.join(basedir, "Hints", who)
            self.__log("Loading hints list from %s" % filename)
            for line in open(filename):
                line = line.strip()
                if line == "": continue
                l = line.split()
                if l[0] == 'finished':
                    break
                elif l[0] not in self.HINTS[who]:
                    continue
                elif l[0] in ["easy", "hint", "force-hint"]:
                    hints[l[0]].append((who, [k.split("/") for k in l if "/" in k]))
                elif l[0] in ["block-all"]:
                    hints[l[0]].extend([(y, who) for y in l[1:]])
                elif l[0] in ["block"]:
                    hints[l[0]].extend([(y, who) for y in l[1:]])
                elif l[0] in ["remove", "approve", "unblock", "force", "urgent"]:
                    hints[l[0]].extend([(k.split("/")[0], (k.split("/")[1], who)) for k in l if "/" in k])

        for x in ["block", "block-all", "unblock", "force", "urgent", "remove"]:
            z = {}
            for a, b in hints[x]:
                if z.has_key(a):
                    self.__log("Overriding %s[%s] = %s with %s" % (x, a, z[a], b), type="W")
                z[a] = b
            hints[x] = z

        return hints

    def same_source(self, sv1, sv2):
        """Check if two version numbers are built from the same source

        This method returns a boolean value which is true if the two
        version numbers specified as parameters are built from the same
        source. The main use of this code is to detect binary-NMUs.
        """
        if sv1 == sv2:
            return 1

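        # strip a binary-NMU suffix (e.g. +b1) from both versions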
        m = re.match(r'^(.*)\+b\d+$', sv1)
        if m: sv1 = m.group(1)
        m = re.match(r'^(.*)\+b\d+$', sv2)
        if m: sv2 = m.group(1)

        if sv1 == sv2:
            return 1

        if re.search("-", sv1) or re.search("-", sv2):
            m = re.match(r'^(.*-[^.]+)\.0\.\d+$', sv1)
            if m: sv1 = m.group(1)
            m = re.match(r'^(.*-[^.]+\.[^.]+)\.\d+$', sv1)
            if m: sv1 = m.group(1)

            m = re.match(r'^(.*-[^.]+)\.0\.\d+$', sv2)
            if m: sv2 = m.group(1)
            m = re.match(r'^(.*-[^.]+\.[^.]+)\.\d+$', sv2)
            if m: sv2 = m.group(1)

            return (sv1 == sv2)
        else:
            m = re.match(r'^([^-]+)\.0\.\d+$', sv1)
            if m and sv2 == m.group(1): return 1

            m = re.match(r'^([^-]+)\.0\.\d+$', sv2)
            if m and sv1 == m.group(1): return 1

            return 0

    def get_dependency_solvers(self, block, arch, distribution):
        """Find the packages which satisfy a dependency block

        This method returns the list of packages which satisfy a dependency
        block (as returned by apt_pkg.ParseDepends) for the given architecture
        and distribution.

        It returns a tuple with two items: the first is a boolean which is
        True if the dependency is satisfied, the second is the list of the
        solving packages.
        """
        packages = []

        for name, version, op in block:
            if name in self.binaries[distribution][arch][0]:
                package = self.binaries[distribution][arch][0][name]
                if op == '' and version == '' or apt_pkg.CheckDep(package['version'], op, version):
                    packages.append(name)

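            # check whether the dependency can be satisfied by a virtual package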
            if name in self.binaries[distribution][arch][1]:
                for prov in self.binaries[distribution][arch][1][name]:
                    package = self.binaries[distribution][arch][0][prov]
                    if op == '' and version == '' or apt_pkg.CheckDep(package['version'], op, version):
                        packages.append(prov)
                        break

        return (len(packages) > 0, packages)

    def excuse_unsat_deps(self, pkg, src, arch, suite, excuse):
        """Find unsatisfied dependencies for a binary package

        This method analyzes the dependencies of the binary package specified
        by the parameter `pkg', built from the source package `src', for the
        architecture `arch' within the suite `suite'. If the dependency can't
        be satisfied in testing and/or unstable, it updates the excuse passed
        as parameter.

        The dependency fields checked are Pre-Depends and Depends.
        """
        binary_u = self.binaries[suite][arch][0][pkg]

        for type in ('Pre-Depends', 'Depends'):
            type_key = type.lower()
            if not binary_u[type_key]:
                continue

            packages = []

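            # for every block of alternatives in the dependency field, try to
            # solve it in testing first and then in the given suite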
            for block, block_txt in map(None, binary_u[type_key], binary_u[type_key + '-txt'].split(',')):
                solved, packages = self.get_dependency_solvers(block, arch, 'testing')
                if solved: continue

                solved, packages = self.get_dependency_solvers(block, arch, suite)
                packages = [self.binaries[suite][arch][0][p]['source'] for p in packages]

                if src in packages: continue

                if len(packages) == 0:
                    excuse.addhtml("%s/%s unsatisfiable %s: %s" % (pkg, arch, type, block_txt.strip()))

                for p in packages:
                    if arch not in self.options.break_arches.split():
                        excuse.add_dep(p)
                    else:
                        excuse.add_break_dep(p, arch)

    def should_remove_source(self, pkg):
        """Check if a source package should be removed from testing

        This method checks if a source package should be removed from the
        testing distribution; this happens if the source package is not
        present in the unstable distribution anymore.

        It returns True if the package can be removed, False otherwise.
        In the former case, a new excuse is appended to the object
        attribute excuses.
        """
        if self.sources['unstable'].has_key(pkg):
            return False

        src = self.sources['testing'][pkg]
        excuse = Excuse("-" + pkg)
        excuse.set_vers(src['version'], None)
        src['maintainer'] and excuse.set_maint(src['maintainer'].strip())
        src['section'] and excuse.set_section(src['section'].strip())
        excuse.addhtml("Valid candidate")
        self.excuses.append(excuse)
        return True

    def should_upgrade_srcarch(self, src, arch, suite):
        """Check if a binary package should be upgraded

        This method checks if a binary package should be upgraded; this can
        happen only if the binary package is a binary-NMU for the given arch.
        The analysis is performed for the source package specified by the
        `src' parameter, checking the architecture `arch' for the distribution
        `suite'.

        It returns False if the given package doesn't need to be upgraded,
        True otherwise. In the former case, a new excuse is appended to
        the object attribute excuses.
        """
        source_t = self.sources['testing'][src]
        source_u = self.sources[suite][src]

        ref = "%s/%s%s" % (src, arch, suite != 'unstable' and "_" + suite or "")
        excuse = Excuse(ref)
        excuse.set_vers(source_t['version'], source_t['version'])
        source_u['maintainer'] and excuse.set_maint(source_u['maintainer'].strip())
        source_u['section'] and excuse.set_section(source_u['section'].strip())

        if self.hints["remove"].has_key(src) and \
           self.same_source(source_t['version'], self.hints["remove"][src][0]):
            excuse.addhtml("Removal request by %s" % (self.hints["remove"][src][1]))
            excuse.addhtml("Trying to remove package, not update it")
            excuse.addhtml("Not considered")
            self.excuses.append(excuse)
            return False

        anywrongver = False
        anyworthdoing = False

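        # loop through the binary packages built from this source for the
        # given architecture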
        for pkg in sorted(filter(lambda x: x.endswith("/" + arch), source_u['binaries'])):
            pkg_name = pkg.split("/")[0]

            binary_t = pkg in source_t['binaries'] and self.binaries['testing'][arch][0][pkg_name] or None
            binary_u = self.binaries[suite][arch][0][pkg_name]

            pkgsv = self.binaries[suite][arch][0][pkg_name]['source-ver']

            if binary_u['architecture'] == 'all':
                excuse.addhtml("Ignoring %s %s (from %s) as it is arch: all" % (pkg_name, binary_u['version'], pkgsv))
                continue

            if not self.same_source(source_t['version'], pkgsv):
                anywrongver = True
                excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u['version'], pkgsv, source_t['version']))
                break

            self.excuse_unsat_deps(pkg_name, src, arch, suite, excuse)

            if not binary_t:
                excuse.addhtml("New binary: %s (%s)" % (pkg_name, binary_u['version']))
                anyworthdoing = True
                continue

            vcompare = apt_pkg.VersionCompare(binary_t['version'], binary_u['version'])

            if vcompare > 0:
                anywrongver = True
                excuse.addhtml("Not downgrading: %s (%s to %s)" % (pkg_name, binary_t['version'], binary_u['version']))
                break
            elif vcompare < 0:
                excuse.addhtml("Updated binary: %s (%s to %s)" % (pkg_name, binary_t['version'], binary_u['version']))
                anyworthdoing = True

        if not anywrongver and (anyworthdoing or self.sources[suite][src].has_key('fake')):
            srcv = self.sources[suite][src]['version']
            ssrc = self.same_source(source_t['version'], srcv)

            for pkg in sorted([x.split("/")[0] for x in self.sources['testing'][src]['binaries'] if x.endswith("/"+arch)]):
                if self.binaries['testing'][arch][0][pkg]['architecture'] == 'all':
                    excuse.addhtml("Ignoring removal of %s as it is arch: all" % (pkg))
                    continue

                if not self.binaries[suite][arch][0].has_key(pkg):
                    tpkgv = self.binaries['testing'][arch][0][pkg]['version']
                    excuse.addhtml("Removed binary: %s %s" % (pkg, tpkgv))
                    if ssrc: anyworthdoing = True

        if not anywrongver and anyworthdoing:
            excuse.addhtml("Valid candidate")
            self.excuses.append(excuse)
        elif anyworthdoing:
            excuse.addhtml("Not considered")
            self.excuses.append(excuse)
            return False

        return True

    def should_upgrade_src(self, src, suite):
        """Check if a source package should be upgraded

        This method checks if a source package should be upgraded. The
        analysis is performed for the source package specified by the `src'
        parameter, for the distribution `suite'.

        It returns False if the given package doesn't need to be upgraded,
        True otherwise. In the former case, a new excuse is appended to
        the object attribute excuses.
        """
        source_u = self.sources[suite][src]
        if src in self.sources['testing']:
            source_t = self.sources['testing'][src]

            if apt_pkg.VersionCompare(source_t['version'], source_u['version']) == 0:
                return False
        else:
            source_t = None

        ref = "%s%s" % (src, suite != 'unstable' and "_" + suite or "")
        excuse = Excuse(ref)
        excuse.set_vers(source_t and source_t['version'] or None, source_u['version'])
        source_u['maintainer'] and excuse.set_maint(source_u['maintainer'].strip())
        source_u['section'] and excuse.set_section(source_u['section'].strip())

        update_candidate = True

        if source_t and apt_pkg.VersionCompare(source_u['version'], source_t['version']) < 0:
            excuse.addhtml("ALERT: %s is newer in testing (%s %s)" % (src, source_t['version'], source_u['version']))
            self.excuses.append(excuse)
            return False

        if source_u.has_key('fake'):
            excuse.addhtml("%s source package doesn't exist" % (src))
            update_candidate = False

        urgency = self.urgencies.get(src, self.options.default_urgency)
        if not source_t and urgency != self.options.default_urgency:
            excuse.addhtml("Ignoring %s urgency setting for NEW package" % (urgency))
            urgency = self.options.default_urgency

        if self.hints["remove"].has_key(src):
            if source_t and self.same_source(source_t['version'], self.hints['remove'][src][0]) or \
               self.same_source(source_u['version'], self.hints['remove'][src][0]):
                excuse.addhtml("Removal request by %s" % (self.hints["remove"][src][1]))
                excuse.addhtml("Trying to remove package, not update it")
                update_candidate = False

        blocked = None
        if self.hints["block"].has_key(src):
            blocked = self.hints["block"][src]
        elif self.hints["block-all"].has_key("source"):
            blocked = self.hints["block-all"]["source"]

        if blocked:
            unblock = self.hints["unblock"].get(src,(None,None))
            if unblock[0] != None:
                if self.same_source(unblock[0], source_u['version']):
                    excuse.addhtml("Ignoring request to block package by %s, due to unblock request by %s" % (blocked, unblock[1]))
                else:
                    excuse.addhtml("Unblock request by %s ignored due to version mismatch: %s" % (unblock[1], unblock[0]))
            else:
                excuse.addhtml("Not touching package, as requested by %s (contact debian-release if update is needed)" % (blocked))
                update_candidate = False

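        # if the suite is unstable, check how long the upload has been in the
        # archive against the minimum number of days required by its urgency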
        if suite == 'unstable':
            if not self.dates.has_key(src):
                self.dates[src] = (source_u['version'], self.date_now)
            elif not self.same_source(self.dates[src][0], source_u['version']):
                self.dates[src] = (source_u['version'], self.date_now)

            days_old = self.date_now - self.dates[src][1]
            min_days = self.MINDAYS[urgency]
            excuse.setdaysold(days_old, min_days)
            if days_old < min_days:
                if self.hints["urgent"].has_key(src) and self.same_source(source_u['version'], self.hints["urgent"][src][0]):
                    excuse.addhtml("Too young, but urgency pushed by %s" % (self.hints["urgent"][src][1]))
                else:
                    update_candidate = False

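        # check the status of the binary packages built from this source on
        # all the architectures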
        pkgs = {src: ["source"]}
        for arch in self.options.architectures:
            oodbins = {}

            for pkg in sorted([x.split("/")[0] for x in self.sources[suite][src]['binaries'] if x.endswith("/"+arch)]):
                if not pkgs.has_key(pkg): pkgs[pkg] = []
                pkgs[pkg].append(arch)

                binary_u = self.binaries[suite][arch][0][pkg]
                pkgsv = binary_u['source-ver']

                if not self.same_source(source_u['version'], pkgsv):
                    if not oodbins.has_key(pkgsv):
                        oodbins[pkgsv] = []
                    oodbins[pkgsv].append(pkg)
                    continue

                if binary_u['architecture'] != 'all' or arch in self.options.nobreakall_arches:
                    self.excuse_unsat_deps(pkg, src, arch, suite, excuse)

            if oodbins:
                oodtxt = ""
                for v in oodbins.keys():
                    if oodtxt: oodtxt = oodtxt + "; "
                    oodtxt = oodtxt + "%s (from <a href=\"http://buildd.debian.org/build.php?" \
                        "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>)" % \
                        (", ".join(sorted(oodbins[v])), arch, src, v, v)
                text = "out of date on <a href=\"http://buildd.debian.org/build.php?" \
                    "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>: %s" % \
                    (arch, src, source_u['version'], arch, oodtxt)

                if arch in self.options.fucked_arches:
                    text = text + " (but %s isn't keeping up, so nevermind)" % (arch)
                else:
                    update_candidate = False

                if self.date_now != self.dates[src][1]:
                    excuse.addhtml(text)

        if len(self.sources[suite][src]['binaries']) == 0:
            excuse.addhtml("%s has no binaries on any arch" % src)
            update_candidate = False

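        # for uploads to unstable, check the release-critical bug counts of
        # every binary package built from this source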
        if suite == 'unstable':
            for pkg in pkgs.keys():
                if not self.bugs['testing'].has_key(pkg):
                    self.bugs['testing'][pkg] = 0
                if not self.bugs['unstable'].has_key(pkg):
                    self.bugs['unstable'][pkg] = 0

                if self.bugs['unstable'][pkg] > self.bugs['testing'][pkg]:
                    excuse.addhtml("%s (%s) is <a href=\"http://bugs.debian.org/cgi-bin/pkgreport.cgi?" \
                        "which=pkg&data=%s&sev-inc=critical&sev-inc=grave&sev-inc=serious\" " \
                        "target=\"_blank\">buggy</a>! (%d > %d)" % \
                        (pkg, ", ".join(pkgs[pkg]), pkg, self.bugs['unstable'][pkg], self.bugs['testing'][pkg]))
                    update_candidate = False
                elif self.bugs['unstable'][pkg] > 0:
                    excuse.addhtml("%s (%s) is (less) <a href=\"http://bugs.debian.org/cgi-bin/pkgreport.cgi?" \
                        "which=pkg&data=%s&sev-inc=critical&sev-inc=grave&sev-inc=serious\" " \
                        "target=\"_blank\">buggy</a>! (%d <= %d)" % \
                        (pkg, ", ".join(pkgs[pkg]), pkg, self.bugs['unstable'][pkg], self.bugs['testing'][pkg]))

        if not update_candidate and self.hints["force"].has_key(src) and \
           self.same_source(source_u['version'], self.hints["force"][src][0]):
            excuse.dontinvalidate = 1
            excuse.addhtml("Should ignore, but forced by %s" % (self.hints["force"][src][1]))
            update_candidate = True

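        # packages uploaded to testing-proposed-updates need an explicit
        # approval in order to migrate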
        if suite == "tpu":
            if self.approvals.has_key("%s_%s" % (src, source_u['version'])):
                excuse.addhtml("Approved by %s" % self.approvals["%s_%s" % (src, source_u['version'])])
            else:
                excuse.addhtml("NEEDS APPROVAL BY RM")
                update_candidate = False

        if update_candidate:
            excuse.addhtml("Valid candidate")
        else:
            excuse.addhtml("Not considered")

        self.excuses.append(excuse)
        return update_candidate

    def reversed_exc_deps(self):
        """Reverse the excuses dependencies

        This method returns a dictionary where the keys are the package names
        and the values are the excuse names which depend on them.
        """
        res = {}
        for exc in self.excuses:
            for d in exc.deps:
                if not res.has_key(d): res[d] = []
                res[d].append(exc.name)
        return res

    def invalidate_excuses(self, valid, invalid):
        """Invalidate impossible excuses

        This method invalidates the impossible excuses, which depend
        on invalid excuses. The two parameters contain the lists of
        `valid' and `invalid' excuses.
        """
        exclookup = {}
        for e in self.excuses:
            exclookup[e.name] = e

        revdeps = self.reversed_exc_deps()

        i = 0
        while i < len(invalid):
            if not revdeps.has_key(invalid[i]):
                i += 1
                continue

            if (invalid[i] + "_tpu") in valid:
                i += 1
                continue

            for x in revdeps[invalid[i]]:
                if x in valid and exclookup[x].dontinvalidate:
                    continue

                exclookup[x].invalidate_dep(invalid[i])
                if x in valid:
                    p = valid.index(x)
                    invalid.append(valid.pop(p))
                    exclookup[x].addhtml("Invalidated by dependency")
                    exclookup[x].addhtml("Not considered")
            i = i + 1

    def write_excuses(self):
        """Produce and write the update excuses

        This method handles the update excuses generation: the packages are
        examined to determine whether they are valid candidates. For the
        details of this procedure, please refer to the module docstring.
        """
        upgrade_me = []

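        # source packages in testing which are no longer present in unstable
        # are candidates for removal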
        for pkg in self.sources['testing']:
            if self.should_remove_source(pkg):
                upgrade_me.append("-" + pkg)

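        # for every source package in unstable, check whether single binaries
        # (binary-NMUs) or the whole source should be upgraded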
        for pkg in self.sources['unstable']:
            if self.sources['testing'].has_key(pkg):
                for arch in self.options.architectures:
                    if self.should_upgrade_srcarch(pkg, arch, 'unstable'):
                        upgrade_me.append("%s/%s" % (pkg, arch))

            if self.should_upgrade_src(pkg, 'unstable'):
                upgrade_me.append(pkg)

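        # do the same for the packages coming from testing-proposed-updates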
        for pkg in self.sources['tpu']:
            if self.sources['testing'].has_key(pkg):
                for arch in self.options.architectures:
                    if self.should_upgrade_srcarch(pkg, arch, 'tpu'):
                        upgrade_me.append("%s/%s_tpu" % (pkg, arch))

            if self.should_upgrade_src(pkg, 'tpu'):
                upgrade_me.append("%s_tpu" % pkg)

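        # process the `remove' hints for the sources still present in testing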
        for src in self.hints["remove"].keys():
            if src in upgrade_me: continue
            if ("-"+src) in upgrade_me: continue
            if not self.sources['testing'].has_key(src): continue

            tsrcv = self.sources['testing'][src]['version']
            if not self.same_source(tsrcv, self.hints["remove"][src][0]): continue

            upgrade_me.append("-%s" % (src))
            excuse = Excuse("-%s" % (src))
            excuse.set_vers(tsrcv, None)
            excuse.addhtml("Removal request by %s" % (self.hints["remove"][src][1]))
            excuse.addhtml("Package is broken, will try to remove")
            self.excuses.append(excuse)

        self.excuses.sort(lambda x, y: cmp(x.daysold, y.daysold) or cmp(x.name, y.name))

        unconsidered = [e.name for e in self.excuses if e.name not in upgrade_me]

        for e in self.excuses:
            for d in e.deps:
                if d not in upgrade_me and d not in unconsidered:
                    e.addhtml("Unpossible dep: %s -> %s" % (e.name, d))
        self.invalidate_excuses(upgrade_me, unconsidered)

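        # write the list of excuses to the output HTML file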
        f = open(self.options.excuses_output, 'w')
        f.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n")
        f.write("<html><head><title>excuses...</title>")
        f.write("<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\"></head><body>\n")
        f.write("<p>Generated: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "</p>\n")
        f.write("<ul>\n")
        for e in self.excuses:
            f.write("<li>%s" % e.html())
        f.write("</ul></body></html>\n")
        f.close()

    def main(self):
        """Main method

        This is the entry point for the class: it includes the list of calls
        for the member methods which will produce the output files.
        """
        self.write_excuses()


if __name__ == '__main__':
    Britney().main()