diff --git a/INSTALL b/INSTALL
index 1d28cee..80ab1b0 100644
--- a/INSTALL
+++ b/INSTALL
@@ -8,3 +8,10 @@ Requirements:
 * Python APT/DPKG bindings aptitude install python2.5-apt libapt-pkg-dev dpkg-dev
 * Python dev headers aptitude install python2.5-dev
+Compiling:
+----------
+
+Run "make all" in the lib directory and add a symlink called
+britneymodule.so pointing to the freshly generated britneymodule.so in
+the lib directory.
+
diff --git a/britney.py b/britney.py
index 74ba8b1..30ce9f4 100755
--- a/britney.py
+++ b/britney.py
@@ -317,31 +317,31 @@ class Britney:
 in a suitable form for the other methods of the class.
 """
 # initialize the parser
- self.parser = optparse.OptionParser(version="%prog")
- self.parser.add_option("-v", "", action="count", dest="verbose", help="enable verbose output")
- self.parser.add_option("-c", "--config", action="store", dest="config", default="/etc/britney.conf",
+ parser = optparse.OptionParser(version="%prog")
+ parser.add_option("-v", "", action="count", dest="verbose", help="enable verbose output")
+ parser.add_option("-c", "--config", action="store", dest="config", default="/etc/britney.conf",
 help="path for the configuration file")
- self.parser.add_option("", "--architectures", action="store", dest="architectures", default=None,
+ parser.add_option("", "--architectures", action="store", dest="architectures", default=None,
 help="override architectures from configuration file")
- self.parser.add_option("", "--actions", action="store", dest="actions", default=None,
+ parser.add_option("", "--actions", action="store", dest="actions", default=None,
 help="override the list of actions to be performed")
- self.parser.add_option("", "--hints", action="store", dest="hints", default=None,
+ parser.add_option("", "--hints", action="store", dest="hints", default=None,
 help="additional hints, separated by semicolons")
- self.parser.add_option("", "--hint-tester", action="store_true", dest="hint_tester", default=None,
+ parser.add_option("", "--hint-tester", action="store_true", dest="hint_tester", default=None,
 help="provide a command line interface to test hints")
- self.parser.add_option("", "--dry-run", action="store_true", dest="dry_run", default=False,
+ parser.add_option("", "--dry-run", action="store_true", dest="dry_run", default=False,
 help="disable all outputs to the testing directory")
- self.parser.add_option("", "--compatible", action="store_true", dest="compatible", default=False,
+ parser.add_option("", "--compatible", action="store_true", dest="compatible", default=False,
 help="enable full compatibility with old britney's output")
- self.parser.add_option("", "--auto-hinter", action="store_true", dest="autohinter", default=False,
+ parser.add_option("", "--auto-hinter", action="store_true", dest="autohinter", default=False,
 help="enable use of auto-hinter")
- self.parser.add_option("", "--control-files", action="store_true", dest="control_files", default=False,
+ parser.add_option("", "--control-files", action="store_true", dest="control_files", default=False,
 help="enable control files generation")
- self.parser.add_option("", "--nuninst-cache", action="store_true", dest="nuninst_cache", default=False,
+ parser.add_option("", "--nuninst-cache", action="store_true", dest="nuninst_cache", default=False,
 help="do not build the non-installability status, use the cache from file")
- self.parser.add_option("", "--print-uninst", action="store_true", dest="print_uninst", default=False,
+ parser.add_option("", "--print-uninst", action="store_true",
dest="print_uninst", default=False, help="just print a summary of uninstallable packages") - (self.options, self.args) = self.parser.parse_args() + (self.options, self.args) = parser.parse_args() # integrity checks if self.options.nuninst_cache and self.options.print_uninst: @@ -415,14 +415,13 @@ class Britney: package as a dictionary. """ sources = {} - package = None filename = os.path.join(basedir, "Sources") self.__log("Loading source packages from %s" % filename) try: Packages = apt_pkg.TagFile(open(filename)) get_field = Packages.section.get step = Packages.step - except AttributeError, e: + except AttributeError: Packages = apt_pkg.ParseTagFile(open(filename)) get_field = Packages.Section.get step = Packages.Step @@ -470,7 +469,6 @@ class Britney: packages = {} provides = {} sources = self.sources - package = None filename = os.path.join(basedir, "Packages_%s" % arch) self.__log("Loading binary packages from %s" % filename) @@ -478,7 +476,7 @@ class Britney: Packages = apt_pkg.TagFile(open(filename)) get_field = Packages.section.get step = Packages.step - except AttributeError, e: + except AttributeError: Packages = apt_pkg.ParseTagFile(open(filename)) get_field = Packages.Section.get step = Packages.Step @@ -940,7 +938,7 @@ class Britney: return nuninst - # Utility methods for package analisys + # Utility methods for package analysis # ------------------------------------ def same_source(self, sv1, sv2): @@ -1003,7 +1001,7 @@ class Britney: return (len(packages) > 0, packages) - def excuse_unsat_deps(self, pkg, src, arch, suite, excuse, excluded=[], conflicts=False): + def excuse_unsat_deps(self, pkg, src, arch, suite, excuse, excluded=[]): """Find unsatisfied dependencies for a binary package This method analyzes the dependencies of the binary package specified @@ -2419,21 +2417,76 @@ class Britney: self.output_write("\nNewly uninstallable packages in testing:\n%s" % \ (text)) + def generate_package_list(self): + # list of local methods and variables (for better performance) + sources = self.sources + architectures = self.options.architectures + should_remove_source = self.should_remove_source + should_upgrade_srcarch = self.should_upgrade_srcarch + should_upgrade_src = self.should_upgrade_src + + # this list will contain the packages which are valid candidates; + # if a package is going to be removed, it will have a "-" prefix + upgrade_me = [] + + # for every source package in testing, check if it should be removed + for pkg in sources['testing']: + if should_remove_source(pkg): + upgrade_me.append("-" + pkg) + + # for every source package in unstable check if it should be upgraded + for pkg in sources['unstable']: + if sources['unstable'][pkg][FAKESRC]: continue + # if the source package is already present in testing, + # check if it should be upgraded for every binary package + if pkg in sources['testing'] and not sources['testing'][pkg][FAKESRC]: + for arch in architectures: + if should_upgrade_srcarch(pkg, arch, 'unstable'): + upgrade_me.append("%s/%s/%s" % (pkg, arch, sources['unstable'][pkg][VERSION])) + + # check if the source package should be upgraded + if should_upgrade_src(pkg, 'unstable'): + upgrade_me.append("%s/%s" % (pkg, sources['unstable'][pkg][VERSION])) + + # for every source package in *-proposed-updates, check if it should be upgraded + for suite in ['pu', 'tpu']: + for pkg in sources[suite]: + # if the source package is already present in testing, + # check if it should be upgraded for every binary package + if pkg in sources['testing']: + for arch in 
architectures: + if should_upgrade_srcarch(pkg, arch, suite): + upgrade_me.append("%s/%s_%s" % (pkg, arch, suite)) + + # check if the source package should be upgraded + if should_upgrade_src(pkg, suite): + upgrade_me.append("%s_%s" % (pkg, suite)) + + return upgrade_me + def hint_tester(self): """Run a command line interface to test hints This method provides a command line interface for the release team to - try hints and evaulate the results. + try hints and evaluate the results. """ self.__log("> Calculating current uninstallability counters", type="I") self.nuninst_orig = self.get_nuninst() self.nuninst_orig_save = self.get_nuninst() import readline + from completer import Completer + histfile = os.path.expanduser('~/.britney2_history') if os.path.exists(histfile): readline.read_history_file(histfile) + readline.parse_and_bind('tab: complete') + readline.set_completer(Completer(self).completer) + # Package names can contain "-" and we use "/" in our presentation of them as well, + # so ensure readline does not split on these characters. + readline.set_completer_delims(readline.get_completer_delims().replace('-', '').replace('/', '')) + while True: # read the command from the command line try: @@ -2455,8 +2508,10 @@ class Britney: self.printuninstchange() except KeyboardInterrupt: continue - - readline.write_history_file(histfile) + try: + readline.write_history_file(histfile) + except IOError, e: + self.__log("Could not write %s: %s" % (histfile, e), type="W") def do_hint(self, type, who, pkgvers): """Process hints @@ -2577,7 +2632,7 @@ class Britney: for e in excuses: excuse = excuses[e] if e in self.sources['testing'] and self.sources['testing'][e][VERSION] == excuse.ver[1]: - continue + continue if len(excuse.deps) > 0: hint = find_related(e, {}, True) if isinstance(hint, dict) and e in hint and hint not in candidates: @@ -2640,9 +2695,9 @@ class Britney: pkg, arch = i.split("/") pkg = pkg[1:] if pkg in libraries: - libraries[pkg].append(arch) + libraries[pkg].append(arch) else: - libraries[pkg] = [arch] + libraries[pkg] = [arch] return "\n".join([" " + k + ": " + " ".join(libraries[k]) for k in libraries]) + "\n" def nuninst_arch_report(self, nuninst, arch): diff --git a/completer.py b/completer.py new file mode 100644 index 0000000..ea09f6a --- /dev/null +++ b/completer.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2011 Niels Thykier + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +import readline +import bisect + +class Completer: + """Completer class + + This class provides a readline completer for the britney hint-tester + command-line interface. + """ + + def __init__(self, britney): + """Constructor + + Creates a completer for a given britney. 
+ """ + self.matches = [] + self.cmds = ['easy', 'hint', 'force-hint', 'exit', 'quit'] + self.britney = britney + self.packages = sorted(britney.generate_package_list()) + + def completer(self, text, state): + """readline completer (see the readline API)""" + + origline = readline.get_line_buffer() + words = origline.split() + + if state < 1: + self.matches = [] + if len(words) < 1 or words[0] == text: + # complete a command + self.matches = [x for x in self.cmds if x.startswith(text)] + else: + # complete pkg/[arch/]version + prefix = '' + if len(text) > 0 and text[0] == '-': + text = text[1:] + prefix = '-' + start = bisect.bisect_left(self.packages, text) + while start < len(self.packages): + if not self.packages[start].startswith(text): + break + self.matches.append(prefix + self.packages[start]) + start += 1 + + if len(self.matches) > state: + return self.matches[state] + return None + diff --git a/doc/html/britney_8py-source.html b/doc/html/britney_8py-source.html index a04737e..f4bc762 100644 --- a/doc/html/britney_8py-source.html +++ b/doc/html/britney_8py-source.html @@ -867,7 +867,7 @@ 00853 return nuninst 00854 00855 -00856 # Utility methods for package analisys +00856 # Utility methods for package analysis 00857 # ------------------------------------ 00858 00859 def same_source(self, sv1, sv2): @@ -1007,7 +1007,7 @@ 00993 00994 return True 00995 -00996 # Package analisys methods +00996 # Package analysis methods 00997 # ------------------------ 00998 00999 def should_remove_source(self, pkg): @@ -1045,7 +1045,7 @@ 01031 01032 This method checks if a binary package should be upgraded; this can 01033 happen also if the binary package is a binary-NMU for the given arch. -01034 The analisys is performed for the source package specified by the +01034 The analysis is performed for the source package specified by the 01035 `src' parameter, checking the architecture `arch' for the distribution 01036 `suite'. 01037 @@ -1157,7 +1157,7 @@ 01143 def should_upgrade_src(self, src, suite): 01144 """Check if source package should be upgraded 01145 -01146 This method checks if a source package should be upgraded. The analisys +01146 This method checks if a source package should be upgraded. The analysis 01147 is performed for the source package specified by the `src' parameter, 01148 checking the architecture `arch' for the distribution `suite'. 01149 diff --git a/doc/html/classbritney_1_1Britney.html b/doc/html/classbritney_1_1Britney.html index e51245c..0375abe 100644 --- a/doc/html/classbritney_1_1Britney.html +++ b/doc/html/classbritney_1_1Britney.html @@ -1603,7 +1603,7 @@ Definition at line 990 o

Check if source package should be upgraded
 
-This method checks if a source package should be upgraded. The analisys
+This method checks if a source package should be upgraded. The analysis
 is performed for the source package specified by the `src' parameter, 
 checking the architecture `arch' for the distribution `suite'.
        
@@ -1657,7 +1657,7 @@ Definition at line 1134
 
 This method checks if a binary package should be upgraded; this can
 happen also if the binary package is a binary-NMU for the given arch.
-The analisys is performed for the source package specified by the
+The analysis is performed for the source package specified by the
 `src' parameter, checking the architecture `arch' for the distribution
 `suite'.
        
diff --git a/doc/latex/classbritney_1_1Britney.tex b/doc/latex/classbritney_1_1Britney.tex
index 8717a7a..9857bff 100644
--- a/doc/latex/classbritney_1_1Britney.tex
+++ b/doc/latex/classbritney_1_1Britney.tex
@@ -743,7 +743,7 @@ Definition at line 990 of file britney.py.\index{britney::Britney@{britney::Brit
 
 \footnotesize\begin{verbatim}Check if source package should be upgraded
 
-This method checks if a source package should be upgraded. The analisys
+This method checks if a source package should be upgraded. The analysis
 is performed for the source package specified by the `src' parameter, 
 checking the architecture `arch' for the distribution `suite'.
        
@@ -765,7 +765,7 @@ Definition at line 1134 of file britney.py.\index{britney::Britney@{britney::Bri
 
 This method checks if a binary package should be upgraded; this can
 happen also if the binary package is a binary-NMU for the given arch.
-The analisys is performed for the source package specified by the
+The analysis is performed for the source package specified by the
 `src' parameter, checking the architecture `arch' for the distribution
 `suite'.
        
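
Earlier in this patch, hint_tester() gains readline tab completion backed by the new completer.py: the Completer is registered with readline.set_completer(), tab is bound to complete, and "-" and "/" are removed from the completer delimiters so that items such as "-src" or "pkg/arch/version" complete as single words. Below is a self-contained sketch of that wiring, with a hard-coded item list standing in for Britney.generate_package_list(); the package names are made up for illustration.

# Minimal sketch of the readline wiring added to Britney.hint_tester(),
# using a static item list in place of generate_package_list().
import bisect
import readline

class DemoCompleter(object):
    """Completes hint commands and package items, like completer.Completer."""

    def __init__(self, items):
        self.cmds = ['easy', 'hint', 'force-hint', 'exit', 'quit']
        self.packages = sorted(items)
        self.matches = []

    def completer(self, text, state):
        if state == 0:
            words = readline.get_line_buffer().split()
            if len(words) < 1 or words[0] == text:
                # first word on the line: complete a command
                self.matches = [c for c in self.cmds if c.startswith(text)]
            else:
                # later words: complete package items; binary search on the
                # sorted list, exactly as completer.py does with bisect
                prefix = ''
                if text.startswith('-'):
                    prefix, text = '-', text[1:]
                self.matches = []
                start = bisect.bisect_left(self.packages, text)
                while start < len(self.packages) and \
                        self.packages[start].startswith(text):
                    self.matches.append(prefix + self.packages[start])
                    start += 1
        return self.matches[state] if state < len(self.matches) else None

items = ['bash/4.1-3', 'coreutils/amd64/8.5-1', 'dpkg_tpu']   # illustrative
readline.parse_and_bind('tab: complete')
readline.set_completer(DemoCompleter(items).completer)
# keep '-' and '/' out of the delimiters so whole items complete in one go
readline.set_completer_delims(
    readline.get_completer_delims().replace('-', '').replace('/', ''))
# raw_input('britney> ')  # (Python 2) tab now completes commands and items
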
diff --git a/lib/memory.c b/lib/memory.c
deleted file mode 100644
index d63b768..0000000
--- a/lib/memory.c
+++ /dev/null
@@ -1,389 +0,0 @@
-#include 
-#include 
-
-#include "memory.h"
-#include "templates.h"
-#include "freelist.h"
-
-/**** THEORY
- * 
-
-So, we have blocks with a freelist
-
-        XXX............XXXXXXX..XXXXX.....XXXXXX......
-        
-Within a block, we work with segments. A segment is...
-
-	   ^..........|
-
-Every now and then we make sure we've got a decent sized segment.
-
-We have multiple blocks. They're kept ordered by the size of their
-current segment.
-
- **********************************************/
-
-#define ALIGN 4
-
-#define FLBT_BITS (sizeof(flb_t)*8)
-#define MEMBLOCKSIZE (1 << 22)
-#define ALIGNEDSIZE(s) (((s) + ALIGN - 1) / ALIGN * ALIGN)
-
-struct memblock {
-    struct memblock *next;
-
-    size_t          n_bytes;          /* index of free char */
-    size_t          size;             /* size of block after char */
-
-    unsigned        n_used_chunks;    /* number of unfreed blocks */
-    size_t          n_used_bytes;     /* number of bytes actually used */
-    size_t          n_productive_bytes; /* number of bytes used usefully */
-
-    flb_t           free[MEMBLOCKSIZE/ALIGN/FLBT_BITS + 1];
-    unsigned char   mem[MEMBLOCKSIZE];
-};
-typedef struct memblock memblock;
-
-static memblock *base = NULL;
-
-#ifdef MDEBUG1
-static int valid_memblock_mdebug1(struct memblock *mb) {
-    size_t cnt, i;
-    static int rarity = 0;
-
-    assert(mb->n_bytes + mb->size <= sizeof(mb->mem));
-
-    if (mb->n_used_chunks == 0) assert(mb->n_bytes == 0);
-    assert(((unsigned long)mb->mem + mb->n_bytes) % ALIGN == 0);
-
-    assert(mb->n_productive_bytes <= mb->n_used_bytes);
-    assert(mb->n_used_bytes + mb->size <= sizeof(mb->mem));
-
-#define TWO(k)    (1ul << (k))
-#define CYCL(k)   (~0ul / (1 + TWO(TWO(k))))
-
-    rarity++; rarity %= 25000;
-    if (rarity != 0) {
-	cnt = mb->n_used_bytes;
-    } else {
-        cnt = 0;
-        for (i = 0; i < sizeof(mb->mem)/ALIGN/FLBT_BITS+1; i++) {
-    	unsigned long x = mb->free[i];
-            size_t s;
-    	    x = (x & CYCL(0)) + ((x >> TWO(0)) & CYCL(0));
-    	    x = (x & CYCL(1)) + ((x >> TWO(1)) & CYCL(1));
-            for (s = 2; (2u << s) <= FLBT_BITS; s++) {
-    		x += x >> TWO(s);
-    		x &= CYCL(s);
-    	    }
-	    cnt += x * ALIGN;
-        }
-    }
-#undef TWO
-#undef CYCL
-
-    assert(cnt == mb->n_used_bytes);
-
-    return 1;
-}
-#endif
-
-#if MDEBUG3
-static int valid_memblock_mdebug3(struct memblock *mb) {
-    size_t offset, step, used;
-    unsigned chunk = 0;
-
-    offset = 0;
-    used = 0;
-    if ((unsigned long)mb->mem % ALIGN != 0)
-        offset = ALIGN - ((unsigned long)mb->mem % ALIGN);
-
-    while(offset < mb->n_bytes) {
-        step = *(size_t*)(mb->mem + offset);
-        assert(step % ALIGN == 0 || step % ALIGN == 1);
-        if (step % ALIGN == 1) step--; /* freed */
-        else used += step;
-        assert(step > 0);
-        offset += step;
-        chunk++;
-    }
-
-    assert(used == mb->n_used_bytes);
-
-    return 1;
-}
-#endif
-
-inline static int valid_memblock(struct memblock *mb) {
-    (void)mb;
-
-    MDEBUG1_ONLY( if (!valid_memblock_mdebug1(mb)) return 0; )
-    MDEBUG3_ONLY( if (!valid_memblock_mdebug3(mb)) return 0; )
-
-    return 1;
-}
-
-void print_memblock_summary(void) {
-    struct memblock *mb;
-    unsigned long tused = 0, talloc = 0, tprod = 0, tavail = 0, nb = 0;
-
-    for (mb = base; mb != NULL; mb = mb->next) {
-        assert(valid_memblock(mb));
-
-	MDEBUG3_ONLY(
-            fprintf(stderr, "%p: [%d,%lu/%lu,%p,%p]\n", mb,
-               mb->n_used_chunks, (unsigned long)mb->n_used_bytes, 
-               (unsigned long)mb->n_bytes, mb->next, mb->mem);
-	)
-
-	if (mb != base && mb->size * 50 < sizeof(mb->mem) - mb->n_used_bytes) {
-		flb_t k; size_t s;
-		k = mb->n_bytes / ALIGN;
-		s = mb->size / ALIGN;
-		find_long_freebits(mb->free,MEMBLOCKSIZE/ALIGN/FLBT_BITS+1,&k,&s);
-		k *= ALIGN; s *= ALIGN;
-		fprintf(stderr, "%p %lu: Wasted block "
-                                "[%d chunks, %lu free bytes, %lu avail bytes, %2.2f%%], suggested [%ld,%ld] -> [%ld,%ld]\n",
-			mb->mem, nb, mb->n_used_chunks, 
-			(unsigned long) sizeof(mb->mem) - mb->n_used_bytes,
-			(unsigned long) mb->size,
-			(float) 100.0 * mb->size / (sizeof(mb->mem) - mb->n_used_bytes),
-			(unsigned long) mb->n_bytes, (unsigned long) mb->size, 
-			(unsigned long) k, (unsigned long) s);
-		if (s > mb->size * 4 || s * 25 > sizeof(mb->mem) - mb->n_used_bytes) {
-			mb->n_bytes = k;
-			mb->size = s;
-		}
-	}
-	nb++;
-	tprod += mb->n_productive_bytes; 
-	tused += mb->n_used_bytes; 
-	tavail += mb->size;
-	talloc += sizeof(memblock);
-    }
-    fprintf(stderr, "TOTAL: %lu %lu KiB alloc"
-		 "(%lu/%lu available, %2.2f%%) (%lu KiB used, %2.2f%%) (%lu KiB useful, %2.2f%%)\n", 
-	nb, talloc / 1024, 
-	(unsigned long) (base ? base->size / 1024 : 0), 
-	  tavail / 1024, (talloc > 0 ? 100.0*tavail/talloc : 0.0),
-	tused / 1024, (talloc > 0 ? 100.0*tused/talloc : 0.0), 
-	tprod / 1024, (talloc > 0 ? 100.0*tprod/talloc : 0.0));
-}
-
-MDEBUG1_ONLY(static int first_malloc = 0;)
-
-#ifdef MDEBUG3
-static void print_memblock_stats(void) {
-    struct memblock *mb;
-    size_t offset;
-   
-    for (mb = base; mb != NULL; mb = mb->next) {
-        assert(valid_memblock(mb));
-
-        printf("%p: [%d,%lu/%lu/%lu,%p,%p:\n", mb,
-               mb->n_used_chunks, (unsigned long)mb->n_productive_bytes, 
-	       (unsigned long)mb->n_used_bytes, (unsigned long)mb->n_bytes,
-               mb->next, mb->mem);
-
-        offset = 0;
-        if ((unsigned long)mb->mem % ALIGN != 0)
-             offset = ALIGN - ((unsigned long)mb->mem % ALIGN);
-        while(offset < mb->n_bytes) {
-             size_t step = *(size_t*)(mb->mem + offset);
-             if (step % ALIGN == 1) {
-                 step--;
-                 printf(" (%d)", (int) step);
-             } else {
-                 printf(" %d", (int) step);
-             }
-             offset += step;
-        }
-        printf("\n");
-    }
-    printf("\n");
-    return;
-}
-#endif
-
-void *block_malloc(size_t size) {
-    memblock *where = base;
-    void *result;
-    size_t realsize = size;
-
-    MDEBUG3_ONLY( if (first_malloc) print_memblock_stats(); )
-    MDEBUG3_ONLY( first_malloc = 0; )
-
-    (void)assert(ALIGN >= sizeof(size_t)); /* ALIGN is set too small! */
-
-    MDEBUG2_ONLY(size += ALIGN;) 
-	/* for the size, so the caller can be checked */
-
-    size = ALIGNEDSIZE(size);
- 
-    assert(size > 0 && size < sizeof(where->mem)); 
-    assert(!where || ((unsigned long)where->mem + where->n_bytes) % ALIGN == 0);
-     
-    if ( !where || where->size < size ) {
-        MDEBUG1_ONLY(print_memblock_summary();)
-        where = malloc(sizeof(memblock));
-        if (where == NULL) {
-	    int i;
-            fprintf(stderr, "block_malloc: failed trying to allocate memblock\n");
-            i = 0; where = base; while(where) {i++; where = where->next;}
-	    fprintf(stderr, "(had allocated %d blocks, each %lu bytes)\n", i, 
-		(unsigned long)sizeof(memblock));
-            return NULL;
-        }
-
-        where->n_used_chunks = 0;
-	memset(where->free, 0, sizeof(where->free));
-        where->n_bytes = 0;
-	where->size = sizeof(where->mem);
-
-	assert( (unsigned long)where->mem % ALIGN == 0);
-		/* XXX: should be able to cope with this :( */
-
-        where->n_used_bytes = where->n_bytes;
-        where->n_productive_bytes = 0;
-        (where)->next = base;
-	base = where;
-	
-        MDEBUG2_ONLY(memset(where->mem, 0xDD, sizeof(where->mem));)
-    }
-
-    result = where->mem + where->n_bytes;
-
-    assert( (unsigned long)where->mem % ALIGN == where->n_bytes % ALIGN );
-    assert( size % ALIGN == 0 );
-    mark_bits(where->free, 
-	(unsigned long)((unsigned char*)result - where->mem) / ALIGN,
-	size / ALIGN, 1);
-
-    where->n_bytes += size;
-    where->size -= size;
-    where->n_used_bytes += size;
-    where->n_productive_bytes += realsize;
-    where->n_used_chunks++;
-
-    MDEBUG2_ONLY( memset(result, 0xEE, size); )
-
-    MDEBUG2_ONLY( *(size_t *)result = realsize; )
-    MDEBUG2_ONLY( result += ALIGN; )
-
-    assert(((unsigned long)where->mem + where->n_bytes) % ALIGN == 0);
-
-    assert(valid_memblock(where));
-
-    return result;
-}
-
-static memblock **find_memblock(unsigned char *mem) {
-    memblock **where;
-
-    for (where = &base; *where != NULL; where = &(*where)->next) {
-	memblock *mb = *where;
-        assert(valid_memblock(mb));
-        if (&mb->mem[0] <= mem && (size_t)(mem - mb->mem) < sizeof(mb->mem)) {
-            return where;
-        }
-    }
-    return NULL;
-}
-
-static void free_in_memblock(memblock *mb, unsigned char *mem, size_t size) {
-    MDEBUG2_ONLY(size_t *stmem = ((size_t*)mem) - 1;)
-
-    assert(mb && mem && size > 0);
-
-    mb->n_used_chunks--;
-
-    mb->n_used_bytes -= ALIGNEDSIZE(size);
-    mark_bits(mb->free, (unsigned long)(mem - mb->mem) / ALIGN, 
-		ALIGNEDSIZE(size) / ALIGN, 0);
-
-#ifdef MDEBUG2
-    mark_bits(mb->free, (unsigned long)(mem - mb->mem) / ALIGN - 1, 1, 0);
-    mb->n_used_bytes -= ALIGN;
-#endif
-
-    if ((size_t)(mem - mb->mem) + ALIGNEDSIZE(size) == mb->n_bytes) {
-	size_t k = count_free_bits_back(mb->free, mb->n_bytes / ALIGN) * ALIGN;
-	mb->n_bytes -= k;
-	mb->size += k;
-    }
-    if ((size_t)(mem - mb->mem) == mb->n_bytes + mb->size) {
-	mb->size += count_free_bits_after(mb->free, 
-			(mb->n_bytes + mb->size) / ALIGN,
-			sizeof(mb->mem) / ALIGN) * ALIGN;
-    }
-
-    mb->n_productive_bytes -= size;
-
-    if (mb->n_used_chunks == 0) {
-        assert(mb->n_productive_bytes == 0);
-        assert(mb->n_used_bytes == 0);
-
-        mb->n_bytes = 0;
-	mb->size = sizeof(mb->mem);
-        mb->n_used_bytes = 0;
-        mb->n_productive_bytes = 0;
-    }
-
-    MDEBUG2_ONLY( memset(mem, 0xAA, size); )
-
-#ifdef MDEBUG2
-    assert((unsigned char*)stmem >= mb->mem && (unsigned char*)stmem < mb->mem + sizeof(mb->mem));
-    assert(*stmem % ALIGN == 0);
-    assert(*stmem == size);
-#endif
-
-    assert(valid_memblock(mb));
-}
-
-void block_free(void *vmem, size_t size) {
-    memblock **where;
-    MDEBUG1_ONLY(static int free_count = 0;)
-
-    if (vmem == NULL) return;
-
-    MDEBUG1_ONLY(first_malloc = 1;)
-
-    where = find_memblock(vmem);
-    assert(where);
-    free_in_memblock(*where, vmem, size);
-    if ((*where)->n_used_chunks == 0 && *where != base) {
-        memblock *mb = *where;
-        MDEBUG1_ONLY( print_memblock_summary(); )
-        *where = (*where)->next;
-        free(mb);
-        MDEBUG1_ONLY( fprintf(stderr, "Freed memblock\n"); )
-    }
-    MDEBUG1_ONLY( free_count++; free_count %= 10000; )
-    MDEBUG1_ONLY( if (!free_count) print_memblock_summary(); )
-}
-
-void *block_realloc(void *vmem, size_t oldsize, size_t newsize) {
-    void *vnewmem;
-
-    if (vmem == NULL && newsize == 0) abort();
-    if (vmem == NULL) return block_malloc(newsize);
-    if (newsize == 0) {
-        block_free(vmem, oldsize);
-        return NULL;
-    }
-
-    vnewmem = block_malloc(newsize);
-    if (vnewmem) {
-        memcpy(vnewmem, vmem, (oldsize < newsize ? oldsize : newsize));
-        block_free(vmem, oldsize);
-    }
-    return vnewmem;
-}
-
-char *block_strdup(char *from) {
-    char *result;
-    if (!from) return NULL;
-    result = block_malloc(strlen(from) + 1);
-    strcpy(result, from);
-    return result;
-}
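
The THEORY comment at the top of the deleted lib/memory.c sketches how the dropped allocator worked: fixed-size memblocks carry a bitmap with one bit per ALIGN-sized unit plus a contiguous tail segment that serves new allocations, and freeing a chunk that borders the segment lets the segment grow back. The Python sketch below is illustrative only: mark_bits() and the reclaim loop stand in for the helpers declared in freelist.h, and the multi-block handling of block_malloc() is left out.

# Rough illustration of the bookkeeping described in the deleted lib/memory.c.
ALIGN = 4
BLOCK_SIZE = 1 << 22            # MEMBLOCKSIZE in the C code

def aligned(size):
    return (size + ALIGN - 1) // ALIGN * ALIGN   # ALIGNEDSIZE()

class MemBlock(object):
    def __init__(self):
        # one bit per ALIGN-sized unit; True means the unit is in use
        # (the C struct names this bitmap "free" but sets bits on malloc)
        self.used = [False] * (BLOCK_SIZE // ALIGN)
        self.n_bytes = 0          # start of the tail segment
        self.size = BLOCK_SIZE    # bytes left in the tail segment
        self.n_used_chunks = 0

    def mark_bits(self, start_unit, n_units, value):
        for i in range(start_unit, start_unit + n_units):
            self.used[i] = value

    def malloc(self, size):
        size = aligned(size)
        if size > self.size:
            return None           # the C code would open a new memblock here
        offset = self.n_bytes
        self.mark_bits(offset // ALIGN, size // ALIGN, True)
        self.n_bytes += size
        self.size -= size
        self.n_used_chunks += 1
        return offset

    def free(self, offset, size):
        size = aligned(size)
        self.mark_bits(offset // ALIGN, size // ALIGN, False)
        self.n_used_chunks -= 1
        if offset + size == self.n_bytes:
            # freed chunk borders the tail segment: walk back over free
            # units and fold them into the segment (count_free_bits_back())
            unit = self.n_bytes // ALIGN
            while unit > 0 and not self.used[unit - 1]:
                unit -= 1
            reclaimed = self.n_bytes - unit * ALIGN
            self.n_bytes -= reclaimed
            self.size += reclaimed

block = MemBlock()
a = block.malloc(10)              # offset 0, rounded up to 12 bytes
b = block.malloc(100)             # offset 12
block.free(b, 100)                # borders the segment, so it is reclaimed
assert block.n_bytes == aligned(10)
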
diff --git a/lib/memory2.c b/lib/memory2.c
deleted file mode 100644
index 60a429f..0000000
--- a/lib/memory2.c
+++ /dev/null
@@ -1,20 +0,0 @@
-#include 
-#include 
-#include 
-
-#define MIN(x,y) ((x) < (y) ? (x) : (y))
-
-#define M 16
-
-void *block_malloc(size_t size) { return malloc(size); }
-void block_free(void *vmem) { free(vmem); }
-void *block_realloc(void *vmem, size_t newsize) { return realloc(vmem, newsize); }
-char *block_strdup(char *from) {
-	char *d; 
-	if (!from) return 0;
-	d = block_malloc(strlen(from+1));
-	if (d) strcpy(d, from);
-	return d; 
-}
-
-void print_memblock_summary(void) { }
diff --git a/lib/update_out.py b/lib/update_out.py
deleted file mode 100644
index 90ec7ba..0000000
--- a/lib/update_out.py
+++ /dev/null
@@ -1,1328 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2001-6 Anthony Towns
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-
-import sys, re, string, time, whrandom, math
-import britney
-
-if len(sys.argv) != 4:
-	print "Must specify testing, unstable, testing-updates directories."
-	sys.exit(1)
-
-testingdir = sys.argv[1]
-unstabledir = sys.argv[2]
-testingupdatesdir = sys.argv[3]
-
-# Configuration information
-
-expected_arches = 13
-allarches = [ 'i386', 'sparc', 'alpha', 'powerpc', 'armel', 'hppa', 'ia64', 'mips', 'mipsel', 's390', 'amd64' , 'kfreebsd-i386', 'kfreebsd-amd64']
-
-mindays = { "low" : 10, "medium" : 5, "high" : 2, "critical" : 0, 
-	    "emergency" : 0 }
-defaulturgency = "low"
-
-# if you're not in this list, arch: all packages are allowed to break on you
-nobreakarchallarches = ['i386']
-# if you're in this list, your packages may not stay in sync with the source
-fuckedarches = ['kfreebsd-i386','kfreebsd-amd64']
-# if you're in this list, your uninstallability count may increase
-breakarches = ['kfreebsd-i386','kfreebsd-amd64']
-# new architectures
-newarches = ['kfreebsd-i386','kfreebsd-amd64']
-
-allarches.sort()
-arches = [ x for x in allarches if x in nobreakarchallarches ]
-arches += [ x for x in allarches if x not in arches and x not in fuckedarches ]
-arches += [ x for x in allarches if x not in arches and x not in breakarches ]
-arches += [ x for x in allarches if x not in arches and x not in newarches ]
-arches += [ x for x in allarches if x not in arches ]
-
-# Subs
-
-def same_source(sv1, sv2):
-	if sv1 == sv2:
-		return 1
-
-	m = re.match(r'^(.*)\+b\d+$', sv1)
-	if m: sv1 = m.group(1)
-	m = re.match(r'^(.*)\+b\d+$', sv2)
-	if m: sv2 = m.group(1)
-
-	if sv1 == sv2:
-		return 1
-
-	if re.search("-", sv1) or re.search("-", sv2):
-		m = re.match(r'^(.*-[^.]+)\.0\.\d+$', sv1)
-		if m: sv1 = m.group(1)
-		m = re.match(r'^(.*-[^.]+\.[^.]+)\.\d+$', sv1)
-		if m: sv1 = m.group(1)
-
-		m = re.match(r'^(.*-[^.]+)\.0\.\d+$', sv2)
-		if m: sv2 = m.group(1)
-		m = re.match(r'^(.*-[^.]+\.[^.]+)\.\d+$', sv2)
-		if m: sv2 = m.group(1)
-
-		return (sv1 == sv2)
-	else:
-		m = re.match(r'^([^-]+)\.0\.\d+$', sv1)
-		if m and sv2 == m.group(1): return 1
-
-		m = re.match(r'^([^-]+)\.0\.\d+$', sv2)
-		if m and sv1 == m.group(1): return 1
-
-		return 0
-
-def read_approvals(dir, approver, approved):
-	f = open("%s/%s" % (dir, approver))
-	line = f.readline()
-	while line:
-		l = string.split(line)
-		if len(l) == 2:
-			[pkg,ver] = l
-			approved["%s_%s" % (pkg, ver)] = approver
-		line = f.readline()
-	f.close()
-
-def read_bugs(file):
-	bugsperpkg = {}
-
-	f = open(file)
-	line = f.readline()
-	while line:
-		l = string.split(line)
-		if len(l) == 2:
-			bugsperpkg[l[0]] = string.atoi(l[1])
-		line = f.readline()
-	f.close()
-	return bugsperpkg
-
-def write_bugs(file, bugs):
-	f = open(file, 'w')
-	pkgs = bugs.keys()
-	pkgs.sort()
-	for pkg in pkgs:
-		if bugs[pkg] == 0: continue
-		f.write("%s %d\n" % (pkg, bugs[pkg]))
-	f.close()
-
-def read_dates(file):
-	dates = {}
-
-	f = open(file)
-	line = f.readline()
-	while line:
-		l = string.split(line)
-		if len(l) == 3:
-			dates[l[0]] = (l[1], string.atoi(l[2]))
-		line = f.readline()
-	f.close()
-	return dates
-
-def write_dates(file, dates):
-	f = open(file, 'w')
-	pkgs = dates.keys()
-	pkgs.sort()
-	for pkg in dates.keys():
-		f.write("%s %s %d\n" % ((pkg,) + dates[pkg]))
-	f.close()
-
-def read_urgencies(file, testing, unstable):
-	urgency = {}
-
-	f = open(file)
-	line = f.readline()
-	while line:
-		l = string.split(line)
-		if len(l) == 3:
-			uo = urgency.get(l[0], defaulturgency)
-			mo = mindays.get(uo, mindays[defaulturgency])
-			mn = mindays.get(l[2], mindays[defaulturgency])
-			if mo <= mn: 
-				line = f.readline()
-				continue
-
-			tsrcv = testing.get_version(l[0])
-			if tsrcv and britney.versioncmp(tsrcv, l[1]) >= 0:
-				line = f.readline()
-				continue
-			usrcv = unstable.get_version(l[0])
-			if not usrcv or britney.versioncmp(usrcv, l[1]) < 0:
-				line = f.readline()
-				continue
-			
-			urgency[l[0]] = l[2]
-
-		line = f.readline()
-	f.close()
-	return urgency
-
-def read_hints(dir, hinter, hints, allowed):
-	res = {}
-	for k in allowed:
-		res[k] = []
-
-	try:
-		f = open("%s/%s" % (dir, hinter))
-	except IOError:
-		return res
-
-	while 1:
-		line = f.readline()
-		if not line: break
-
-		l = string.split(line)
-		if len(l) == 0 or line[0] == "#": 
-			continue
-
-		type = l[0]
-
-		if type == "finished":
-			break
-
-		if type not in allowed:
-			continue
-
-		def mysplit(str):
-			x = str.rfind("/")
-			if x == -1: return [str]
-			return [str[:x], str[x+1:]]
-
-		if type in ["easy", "hint", "force-hint"]:
-			l = [ tuple(mysplit(y)) for y in l[1:] ]
-			l = [ k for k in l if len(k) == 2 ]
-			res[type].append((hinter, l))
-
-		if type in ["block-all"]:
-			l = [ (y, hinter) for y in l[1:] ]
-			res[type].extend(l)
-
-		if type in ["block"]:
-			l = [ (y, hinter) for y in l[1:] ]
-			res[type].extend(l)
-
-		if type in ["remove", "approve", "unblock", "force", "urgent"]:
-			l = [ tuple(mysplit(y)+[hinter]) for y in l[1:] ]
-			l = [ k for k in l if len(k) == 3 ]
-			l = [ (p, (v,h)) for (p,v,h) in l ]
-			res[type].extend(l)
-
-	f.close()
-	return res
-
-class Excuse:
-	reemail = re.compile(r"<.*?>")
-
-	def __init__(self, name):
-		self.name = name
-		self.ver = ("-", "-")
-		self.maint = None
-		self.pri = None
-		self.date = None
-		self.urgency = None
-		self.daysold = None
-		self.mindays = None
-		self.section = None
-		self.dontinvalidate = 0
-
-		self.invalid_deps = []
-		self.deps = []
-		self.break_deps = []
-		self.bugs = []
-		self.htmlline = []
-
-	def set_vers(self, tver, uver):
-		if tver: self.ver = (tver, self.ver[1])
-		if uver: self.ver = (self.ver[0], uver)
-
-	def set_maint(self, maint):
-		self.maint = self.reemail.sub("",maint)
-#		self.maint = maint
-
-	def set_section(self, section):
-		self.section = section
-
-	def set_priority(self, pri):
-		self.pri = pri
-
-	def set_date(self, date):
-		self.date = date
-
-	def set_urgency(self, date):
-		self.urgency = date
-
-	def add_dep(self, name):
-		if name not in self.deps: self.deps.append(name)
-	def add_break_dep(self, name, arch):
-		if (name, arch) not in self.break_deps:
-			self.break_deps.append( (name, arch) )
-
-	def invalidate_dep(self, name):
-		if name not in self.invalid_deps: self.invalid_deps.append(name)
-
-	def setdaysold(self, daysold, mindays):
-		self.daysold = daysold
-		self.mindays = mindays
-
-	def addhtml(self, note):
-		self.htmlline.append(note)
-
-	def html(self):
-		res = "%s (%s to %s)\n
    \n" % \ - (self.name, self.name, self.name, self.ver[0], self.ver[1]) - if self.maint: - res = res + "
  • Maintainer: %s\n" % (self.maint) - if self.section and string.find(self.section, "/") > -1: - res = res + "
  • Section: %s\n" % (self.section) - if self.daysold != None: - if self.daysold < self.mindays: - res = res + ("
  • Too young, only %d of %d days old\n" % - (self.daysold, self.mindays)) - else: - res = res + ("
  • %d days old (needed %d days)\n" % - (self.daysold, self.mindays)) - for x in self.htmlline: - res = res + "
  • " + x + "\n" - for x in self.deps: - if x in self.invalid_deps: - res = res + "
  • Depends: %s %s (not considered)\n" % (self.name, x, x) - else: - res = res + "
  • Depends: %s %s\n" % (self.name, x, x) - for (n,a) in self.break_deps: - if n not in self.deps: - res += "
  • Ignoring %s depends: %s\n" % (a, n, n) - res = res + "
\n" - return res - - - -def should_remove_source(src, orig, new, excs): - if new.is_present(src): return 0 - - okay = 1 - - exc = Excuse("-" + src) - - exc.set_vers(orig.get_version(src), None) - m = orig.get_field(src, "Maintainer") - if m: exc.set_maint(string.strip(m)) - s = orig.get_field(src, "Section") - if s: exc.set_section(string.strip(s)) - - if hints["block"].has_key("-" + src): - blocked = hints["block"]["-" + src] - exc.addhtml("Not touching package, as requested by %s (contact debian-release if update is needed)" % (blocked)) - okay = 0 - - if okay: - exc.addhtml("Valid candidate") - else: - exc.addhtml("Not considered") - - excs.append(exc) - - return okay - -def should_upgrade_srcarch(src, arch, suite, tsrcv, orig, opkgsa, new, npkgsa, excs): - # binnmu this arch? - anywrongver = 0 - anyworthdoing = 0 - - ref = "%s/%s" % (src, arch) - if suite: ref = ref + "_%s" % (suite) - - e = Excuse(ref) - e.set_vers(tsrcv, tsrcv) - m = new.get_field(src, "Maintainer") - if m: e.set_maint(string.strip(m)) - s = new.get_field(src, "Section") - if s: e.set_section(string.strip(s)) - - if hints["remove"].has_key(src): - if same_source(tsrcv, hints["remove"][src][0]): - e.addhtml("Removal request by %s" % - (hints["remove"][src][1])) - e.addhtml("Trying to remove package, not update it") - e.addhtml("Not considered") - excs.append(e) - return 0 - - for pkg in new.binaries(src, arch): - pkgv = npkgsa.get_version(pkg) - pkgsv = npkgsa.get_sourcever(pkg) - - if npkgsa.is_arch_all(pkg): - e.addhtml("Ignoring %s %s (from %s) as it is arch: all" - % (pkg, pkgv, pkgsv)) - continue - - if not same_source(tsrcv, pkgsv): - anywrongver = 1 - e.addhtml("From wrong source: %s %s (%s not %s)" % ( - pkg, pkgv, pkgsv, tsrcv)) - break - - excuse_unsat_deps(pkg, arch, opkgsa, npkgsa, e, 0) - - if not opkgsa.is_present(pkg): - e.addhtml("New binary: %s (%s)" % (pkg, pkgv)) - anyworthdoing = 1 - continue - - tpkgv = opkgsa.get_version(pkg) - if britney.versioncmp(tpkgv, pkgv) > 0: - anywrongver = 1 - e.addhtml("Not downgrading: %s (%s to %s)" % ( - pkg, tpkgv, pkgv)) - break - elif britney.versioncmp(tpkgv, pkgv) < 0: - e.addhtml("Updated binary: %s (%s to %s)" % ( - pkg, tpkgv, pkgv)) - anyworthdoing = 1 - - if not anywrongver and (anyworthdoing or not new.is_fake(src)): - srcv = new.get_version(src) - ssrc = same_source(tsrcv, srcv) - for pkg in orig.binaries(src, arch): - if opkgsa.is_arch_all(pkg): - e.addhtml("Ignoring removal of %s as it is arch: all" - % (pkg)) - continue - if not npkgsa.is_present(pkg): - tpkgv = opkgsa.get_version(pkg) - e.addhtml("Removed binary: %s %s" % ( - pkg, tpkgv)) - if ssrc: anyworthdoing = 1 - - if not anywrongver and anyworthdoing: - e.addhtml("Valid candidate") - excs.append(e) - return 1 - else: - if anyworthdoing: - e.addhtml("Not considered") - excs.append(e) - return 0 - -def excuse_unsat_deps(pkg, arch, tpkgsarch, upkgsarch, exc, ignore_break=0): - for d in ['Pre-Depends', 'Depends']: - udt = tpkgsarch.unsatisfiable_deps(upkgsarch, pkg, d) - udu = upkgsarch.unsatisfiable_deps(upkgsarch, pkg, d) - - for t,u in map(None, udt, udu): - if t[1]: continue - l = [] - for e in u[1]: - s = upkgsarch.get_source(e) - if s not in l: l.append(s) - if src in l: continue - if l == []: - exc.addhtml("%s/%s unsatisfiable %s: %s" % (pkg, arch, d, t[0])) - for s in l: - if ignore_break or arch not in breakarches: - exc.add_dep(s) - else: - exc.add_break_dep(s, arch) - -def should_upgrade_src(src, suite, orig, origpkgs, new, newpkgs, approvals, - excs): - srcv = new.get_version(src) - - 
if orig.is_present(src): - tsrcv = orig.get_version(src) - if britney.versioncmp(srcv, tsrcv) == 0: - # Candidate for binnmus only - return 0 - else: - tsrcv = None - - updatecand = 1 - - ref = src - if suite: ref = ref + "_tpu" - - exc = Excuse(ref) - exc.set_vers(tsrcv, srcv) - m = new.get_field(src, "Maintainer") - if m: exc.set_maint(string.strip(m)) - s = new.get_field(src, "Section") - if s: exc.set_section(string.strip(s)) - - if tsrcv and britney.versioncmp(srcv, tsrcv) < 0: - # Version in unstable is older! - exc.addhtml("ALERT: %s is newer in testing (%s %s)" % (src, tsrcv, srcv)) - excs.append(exc) - return 0 - - if unstable.is_fake(src): - exc.addhtml("%s source package doesn't exist" % (src)) - updatecand = 0 - - urgency = unstableurg.get(src, defaulturgency) - if not tsrcv and urgency != defaulturgency: - exc.addhtml("Ignoring %s urgency setting for NEW package" % (urgency)) - urgency = defaulturgency - - if hints["remove"].has_key(src): - if (tsrcv and same_source(tsrcv, hints["remove"][src][0])) or \ - same_source(srcv, hints["remove"][src][0]): - exc.addhtml("Removal request by %s" % - (hints["remove"][src][1])) - exc.addhtml("Trying to remove package, not update it") - updatecand = 0 - - blocked = None - if hints["block"].has_key(src): - blocked = hints["block"][src] - elif hints["block-all"].has_key("source"): - blocked = hints["block-all"]["source"] - - if blocked: - ubv = hints["unblock"].get(src,(None,None))[0] - if ubv != None and same_source(ubv, srcv): - exc.addhtml("Ignoring request to block package by %s, due to unblock request by %s" % (blocked, hints["unblock"][src][1])) - else: - if ubv != None: - exc.addhtml("Unblock request by %s ignored due to version mismatch: %s" % (hints["unblock"][src][1], hints["unblock"][src][0])) - exc.addhtml("Not touching package, as requested by %s (contact debian-release if update is needed)" % (blocked)) - updatecand = 0 - - if suite == None: - if not unstabledates.has_key(src): - unstabledates[src] = (srcv, datenow) - elif not same_source(unstabledates[src][0], srcv): - unstabledates[src] = (srcv, datenow) - - daysold = datenow - unstabledates[src][1] - mymindays = mindays[urgency] - exc.setdaysold(daysold, mymindays) - if daysold < mymindays: - if hints["urgent"].has_key(src) and same_source(srcv, hints["urgent"][src][0]): - exc.addhtml("Too young, but urgency pushed by %s" % (hints["urgent"][src][1])) - else: - updatecand = 0 - - pkgs = { src: ["source"] } - anybins = 0 - for arch in arches: - oodbins = {} - for pkg in new.binaries(src,arch): - anybins = 1 - if not pkgs.has_key(pkg): pkgs[pkg] = [] - pkgs[pkg].append(arch) - - pkgsv = newpkgs[arch].get_sourcever(pkg) - if not same_source(srcv, pkgsv): - if not oodbins.has_key(pkgsv): - oodbins[pkgsv] = [] - oodbins[pkgsv].append(pkg) - continue - - if newpkgs[arch].isnt_arch_all(pkg) or \ - arch in nobreakarchallarches: - excuse_unsat_deps(pkg, arch, - origpkgs[arch], newpkgs[arch], exc) - - if oodbins: - oodtxt = "" - for v in oodbins.keys(): - if oodtxt: oodtxt = oodtxt + "; " - oodtxt = oodtxt + "%s (from %s)" % \ - (", ".join(oodbins[v]), arch, src, v, v) - text = "out of date on %s: %s" % (arch, src, srcv, arch, oodtxt) - - if arch in fuckedarches: - text = text + " (but %s isn't keeping up," % \ - (arch) + " so nevermind)" - else: - updatecand = 0 - - if datenow != unstabledates[src][1]: - exc.addhtml(text) - - if not anybins: - exc.addhtml("%s has no binaries on any arch" % src) - updatecand = 0 - - if suite == None: - for pkg in pkgs.keys(): - if not 
testingbugs.has_key(pkg): testingbugs[pkg] = 0 - if not unstablebugs.has_key(pkg): unstablebugs[pkg] = 0 - - if unstablebugs[pkg] > testingbugs[pkg]: - exc.addhtml("%s (%s) is buggy! (%d > %d)" % \ - (pkg, ", ".join(pkgs[pkg]), pkg, - unstablebugs[pkg], testingbugs[pkg])) - updatecand = 0 - elif unstablebugs[pkg] > 0: - exc.addhtml("%s (%s) is (less) buggy! (%d <= %d)" % \ - (pkg, ", ".join(pkgs[pkg]), pkg, - unstablebugs[pkg], testingbugs[pkg])) - - if not updatecand and hints["force"].has_key(src) and same_source(srcv, hints["force"][src][0]) : - exc.dontinvalidate = 1 - exc.addhtml("Should ignore, but forced by %s" % (hints["force"][src][1])) - updatecand = 1 - - if approvals: - if approvals.has_key("%s_%s" % (src, srcv)): - exc.addhtml("Approved by %s" % - approvals["%s_%s" % (src, srcv)]) - else: - exc.addhtml("NEEDS APPROVAL BY RM") - updatecand = 0 - - if updatecand: - exc.addhtml("Valid candidate") - else: - exc.addhtml("Not considered") - - excuses.append(exc) - - return updatecand - -### - -# Brute force stuff - -class UpgradeRun: - def __init__(self, sn, u, tu, ps): - self.srcsn = sn - self.unstable = u - self.testingupdates = tu - self.packages = ps - - self.sortpkgs() - - self.output = open("update.OUTPUT_py", "w"); - - self.arches = [ x for x in arches if x in srcsn.arches ] - self.srcsnpkgs = {} - for arch in arches: - self.srcsnpkgs[arch] = self.srcsn.Packages(arch) - - #def __del__(): - # self.output.close() - - def sortpkgs(self): - p = self.packages - p.sort() - self.packages = p - - def writeout(self, text): - self.output.write(text) - sys.stdout.write(text) - self.output.flush() - sys.stdout.flush() - - def doop_source(self, op): - # removals = "-", - # arch = "/", - # normal = "" - which = self.unstable - if "_" in op: - ind = string.index(op, "_") - if op[ind+1:] == "tpu": - which = self.testingupdates - op = op[:ind] - - if op[0] == "-": - self.srcsn.remove_source(op[1:]) - elif "/" in op: - ind = string.index(op, "/") - self.srcsn.upgrade_arch(which, op[:ind], op[ind+1:]) - else: - self.srcsn.upgrade_source(which, op) - - def get_nuninst(self): - nuninst = {} - for arch in self.arches: - con = self.srcsnpkgs[arch].packages - if arch not in nobreakarchallarches: - con = filter( - self.srcsnpkgs[arch].isnt_arch_all, - con) - nuninst[arch] = filter( - self.srcsnpkgs[arch].is_uninstallable, - con) - return nuninst - - def get_improved_nuninst(self, old): - new = {} - for arch in self.arches: - con = self.srcsnpkgs[arch].packages - if arch not in nobreakarchallarches: - con = filter( - self.srcsnpkgs[arch].isnt_arch_all, - con) - new[arch] = filter( - self.srcsnpkgs[arch].is_uninstallable, con) - if arch in breakarches: continue - if len(new[arch]) > len(old[arch]): - return (0, new) - return (1, new) - - def arch_improved_nuninst(self, old, arch): - new = old.copy() - if "_" in arch: arch = arch[:arch.index("_")] - con = self.srcsnpkgs[arch].packages - if arch not in nobreakarchallarches: - con = filter(self.srcsnpkgs[arch].isnt_arch_all, con) - new[arch] = filter(self.srcsnpkgs[arch].is_uninstallable, con) - if arch not in newarches and len(new[arch]) > len(old[arch]): - return (0, new) - return (1, new) - - def is_nuninst_asgood(self, old, new): - for arch in self.arches: - if arch in breakarches: continue - if len(new[arch]) > len(old[arch]): - return 0 - return 1 - - def is_nuninst_asgood_generous(self, old, new): - diff = 0 - for arch in self.arches: - if arch in breakarches: continue - diff = diff + (len(new[arch]) - len(old[arch])) - return diff <= 0 - - def 
eval_nuninst(self, nuninst): - res = [] - total = 0 - totalbreak = 0 - for arch in self.arches: - if nuninst.has_key(arch): - n = len(nuninst[arch]) - if arch in breakarches: - totalbreak = totalbreak + n - else: - total = total + n - res.append("%s-%d" % (arch[0], n)) - return "%d+%d: %s" % (total, totalbreak, ":".join(res)) - - def slist_subtract(self, base, sub): - res = [] - for x in base: - if x not in sub: res.append(x) - return res - - def newlyuninst(self, nuold, nunew): - res = {} - for arch in self.arches: - if not nuold.has_key(arch) or not nunew.has_key(arch): - continue - res[arch] = \ - self.slist_subtract(nunew[arch], nuold[arch]) - return res - - def eval_uninst(self, nuninst): - res = "" - for arch in self.arches: - if nuninst.has_key(arch) and nuninst[arch] != []: - res = res + " * %s: %s\n" % (arch, - ", ".join(nuninst[arch])) - return res - - def do_all(self, maxdepth = 0, init = []): - self.selected = [] - self.selected_committed = 0 - packages = self.packages[:] - - earlyabort = 0 - if maxdepth == "easy": - earlyabort = 1 - maxdepth = 0 - - # meaningless to try forcing something _and_ recurse - force = 0 - if maxdepth < 0: - force = 1 - maxdepth = 0 - earlyabort = 1 - - nuninst_start = self.get_nuninst() - - if init: - self.writeout("leading: %s\n" % (",".join(init))) - - for x in init: - if x not in packages: - self.writeout("failed: %s\n" % (x)) - return None - y = packages.index(x) - self.selected.append(packages.pop(y)) - - for x in init: - self.doop_source(x) - - if force: - self.nuninst_orig = self.get_nuninst() - else: - self.nuninst_orig = nuninst_start - - self.writeout("start: %s\n" % - self.eval_nuninst(nuninst_start)) - self.writeout("orig: %s\n" % - self.eval_nuninst(self.nuninst_orig)) - - if earlyabort: - nuninst_end = self.get_nuninst() - self.writeout("easy: %s\n" % - (self.eval_nuninst(nuninst_end))) - self.writeout(self.eval_uninst( - self.newlyuninst(self.nuninst_orig, nuninst_end))) - self.writeout("\n") - if not self.is_nuninst_asgood_generous( - self.nuninst_orig, - nuninst_end): - nuninst_end, respackages = None, None - else: - respackages = packages[:] - self.selected_committed = len(self.selected) - else: - nuninst_end, respackages = \ - self.iter_some(maxdepth, packages, []) - - if nuninst_end: - assert(len(self.selected) == self.selected_committed) - - self.writeout("final: %s\n" % - ",".join(self.selected)) - self.writeout("start: %s\n" % - self.eval_nuninst(nuninst_start)) - self.writeout(" orig: %s\n" % - self.eval_nuninst(self.nuninst_orig)) - self.writeout(" end: %s\n" % - self.eval_nuninst(nuninst_end)) - - if force: - self.writeout("force breaks:\n") - self.writeout(self.eval_uninst( - self.newlyuninst(nuninst_start, nuninst_end))) - self.writeout("\n") - - if not self.is_nuninst_asgood_generous( - self.nuninst_orig, - nuninst_end): - print "NON-None RETURN THAT'S NOT BETTER" - - self.srcsn.commit_changes() - - self.writeout("SUCCESS (%d/%d)\n" % - (len(self.packages), len(respackages))) - self.packages = respackages - self.sortpkgs() - - return self.selected - - else: - assert(len(self.selected) == len(init)) - assert(self.selected_committed == 0) - - for x in init: - self.srcsn.undo_change() - if self.srcsn.can_undo: - print "MORE OPS LEFT TO UNDO THAN DONE" - - self.writeout("FAILED\n") - return None - - def iter_end(self, available): - extra = [] - count = 0 - nuninst_comp = self.get_nuninst() - while available: - x = available.pop(0) - self.writeout("trying: %s\n" % (x)) - - self.doop_source(x) - - if "/" in x: - better, 
nuninst_new = self.arch_improved_nuninst( - nuninst_comp, x[x.index("/")+1:]) - else: - better, nuninst_new = self.get_improved_nuninst( - nuninst_comp) - - if better: - self.selected.append(x) - count = count + 1 - available.extend(extra) - extra = [] - - self.writeout("accepted: %s\n" % (x)) - self.writeout(" ori: %s\n" % - (self.eval_nuninst(self.nuninst_orig))) - self.writeout(" pre: %s\n" % - (self.eval_nuninst(nuninst_comp))) - self.writeout(" now: %s\n" % - (self.eval_nuninst(nuninst_new))) - if len(self.selected) <= 20: - self.writeout(" all: %s\n" % ( - " ".join(self.selected))) - else: - self.writeout(" most: (%d) .. %s\n" % - (len(self.selected), - " ".join(self.selected[-20:]))) - - nuninst_comp = nuninst_new - else: - self.writeout("skipped: %s (%d <- %d)\n" % ( - x, len(extra), len(available))) - self.writeout(" got: %s\n%s" % ( - self.eval_nuninst(nuninst_new), - self.eval_uninst(self.newlyuninst( - nuninst_comp, nuninst_new)))) - - self.srcsn.undo_change() - extra.append(x) - self.writeout(" finish: [%s]\n" % - ",".join(self.selected[self.selected_committed:])) - self.writeout("endloop: %s\n" % - (self.eval_nuninst(self.nuninst_orig))) - self.writeout(" now: %s\n" % - (self.eval_nuninst(nuninst_comp))) - self.writeout(self.eval_uninst( - self.newlyuninst(self.nuninst_orig, nuninst_comp))) - self.writeout("\n") - - if self.is_nuninst_asgood_generous(self.nuninst_orig, - nuninst_comp): - self.writeout("Apparently successful\n") - self.selected_committed = len(self.selected) - return (nuninst_comp, extra) - - while count > 0: - self.srcsn.undo_change() - self.selected.pop() - count = count - 1 - - return (None, None) - - def iter_some(self, depth, available, extra): - self.writeout("recur: [%s] %s %d/%d\n" % ( - ",".join(self.selected[:self.selected_committed]), - ",".join(self.selected[self.selected_committed:]), - len(available), len(extra))) - - if depth == 0: - extra.extend(available) - return self.iter_end(extra) - - nuninst = None - - while len(available) > depth: - x = available.pop(0) - - if not skiphint(x): - self.doop_source(x) - self.selected.append(x) - - res = self.iter_some(depth - 1, available[:], extra[:]) - if res[0]: - nuninst = res[0] - available = filter(lambda x, y=res[1]: x in y, - available + extra) - # reset nuninst_orig too - self.nuninst_orig = nuninst - extra = [] - continue - - self.srcsn.undo_change() - self.selected.pop() - - extra.append(x) - - return (nuninst, extra) - -# Package information - -testing = britney.Sources(testingdir, arches) -testingpkgs = {} -for arch in arches: - testingpkgs[arch] = testing.Packages(arch) -testingbugs = read_bugs(testingdir + "/Bugs") - -unstable = britney.Sources(unstabledir, arches) -unstablepkgs = {} -for arch in arches: - unstablepkgs[arch] = unstable.Packages(arch) -unstablebugs = read_bugs(unstabledir + "/Bugs") -unstabledates = read_dates(testingdir + "/Dates") -unstableurg = read_urgencies(testingdir + "/Urgency", testing, unstable) - -testingupdates = britney.Sources(testingupdatesdir, arches) -testingupdatespkgs = {} -for arch in arches: - testingupdatespkgs[arch] = testingupdates.Packages(arch) -testingupdatesapproved = {} # pkg_ver -> who -for approver in ["ajt", "security-team", "ftpmaster", "cjwatson", "vorlon"]: - read_approvals(testingupdatesdir + "/Approved", approver, - testingupdatesapproved) - -hlphints = ["easy", "hint", "remove", "block", "unblock", "approve"] -stdhints = ["easy", "hint", "remove", "block", "unblock", "urgent", "approve"] -allhints = ["force", "force-hint", "block-all"] + 
stdhints -hintsallowed = { - "ajt": allhints, - "rmurray": allhints, - "cjwatson": allhints, - "vorlon": allhints, - "aba": allhints, - - "joeyh": stdhints + ["force"], - "djpig": stdhints, - - "he": hlphints, - "adeodato": hlphints, - "ballombe": hlphints, - "luk": hlphints, - - "freeze": ["block", "block-all"], - "ftpteam": ["block"] -} - -hints = {"easy":[], "hint":[], "force-hint":[], "remove":[], "block":[], "block-all":[], "unblock":[], "force":[], "urgent":[], "approve":[]} -for who in hintsallowed.keys(): - h = read_hints(unstabledir + "/Hints", who, hints, hintsallowed[who]) - for k in hintsallowed[who]: - hints[k].extend(h[k]) - -for x in ["block", "block-all", "unblock", "force", "urgent", "remove"]: - z = {} - for a, b in hints[x]: - if z.has_key(a): - print "Overriding %s[%s] = %s with %s" % (x, a, z[a], b) - z[a] = b - hints[x] = z - -for p, vh in hints["approve"]: - (v,h) = vh - testingupdatesapproved["%s_%s" % (p,v)] = h - hints["unblock"]["%s" % p] = (v,h) - -def maxver(pkg, source, pkgs): - maxver = source.get_version(pkg) - for arch in arches: - pkgv = pkgs[arch].get_version(pkg) - if pkgv == None: continue - if maxver == None or britney.versioncmp(pkgv, maxver) > 0: - maxver = pkgv - return maxver - -for pkg in testingbugs.keys() + unstablebugs.keys(): - if not testingbugs.has_key(pkg): testingbugs[pkg] = 0 - if not unstablebugs.has_key(pkg): unstablebugs[pkg] = 0 - - maxvert = maxver(pkg, testing, testingpkgs) - if maxvert == None: - testingbugs[pkg] = 0 - continue - - if testingbugs[pkg] == unstablebugs[pkg]: continue - maxveru = maxver(pkg, unstable, unstablepkgs) - - if maxveru == None: - continue - if britney.versioncmp(maxvert, maxveru) >= 0: - testingbugs[pkg] = unstablebugs[pkg] - -datenow = int(((time.time() / (60*60)) - 15) / 24); - -# Next, work out which packages are candidates to be changed. 
- -upgrademe = [] -excuses = [] - -# Packages to be removed -for src in testing.sources: - if should_remove_source(src, testing, unstable, excuses): - upgrademe.append("-" + src) - -# Packages to be upgraded from unstable: -for src in unstable.sources: - if testing.is_present(src): - tsrcv = testing.get_version(src) # silly optimisation - for arch in arches: - if should_upgrade_srcarch(src, arch, None, tsrcv, - testing, testingpkgs[arch], - unstable, unstablepkgs[arch], - excuses): - upgrademe.append("%s/%s" % (src, arch)) - - if should_upgrade_src(src, None, testing, testingpkgs, - unstable, unstablepkgs, None, excuses): - upgrademe.append(src) - -for src in testingupdates.sources: - if testing.is_present(src): - tsrcv = testing.get_version(src) # silly optimisation - for arch in arches: - if should_upgrade_srcarch(src, arch, "tpu", tsrcv, - testing, testingpkgs[arch], - testingupdates, - testingupdatespkgs[arch], - excuses): - upgrademe.append("%s/%s_tpu" % (src, arch)) - - if should_upgrade_src(src, "tpu", testing, testingpkgs, - testingupdates, testingupdatespkgs, - testingupdatesapproved, excuses): - upgrademe.append("%s_tpu" % src) - -for src in hints["remove"].keys(): - if src in upgrademe: continue - if ("-"+src) in upgrademe: continue - if not testing.is_present(src): continue - - tsrcv = testing.get_version(src) - if not same_source(tsrcv, hints["remove"][src][0]): continue - - upgrademe.append("-%s" % (src)) - exc = Excuse("-%s" % (src)) - exc.set_vers(tsrcv, None) - exc.addhtml("Removal request by %s" % (hints["remove"][src][1])) - exc.addhtml("Package is broken, will try to remove") - excuses.append(exc) - -def cmpexcuses(el, er): - return cmp(el.daysold, er.daysold) or cmp(el.name, er.name) -excuses.sort(cmpexcuses) - -def reversed_exc_deps(excuses): - res = {} - for exc in excuses: - for d in exc.deps: - if not res.has_key(d): res[d] = [] - res[d].append(exc.name) - return res - -def invalidate(excuses, valid, invalid): - i = 0 - exclookup = {} - for e in excuses: - exclookup[e.name] = e - revdeps = reversed_exc_deps(excuses) - while i < len(invalid): - if not revdeps.has_key(invalid[i]): - i += 1 - continue - if (invalid[i] + "_tpu") in valid: - i += 1 - continue - for x in revdeps[invalid[i]]: - if x in valid and exclookup[x].dontinvalidate: - continue - - exclookup[x].invalidate_dep(invalid[i]) - if x in valid: - p = valid.index(x) - invalid.append(valid.pop(p)) - exclookup[x].addhtml("Invalidated by dependency") - exclookup[x].addhtml("Not considered") - i = i + 1 - -unconsidered = [] -for exc in excuses: - if exc.name not in upgrademe: unconsidered.append(exc.name) - -for exc in excuses: - for d in exc.deps: - if d not in upgrademe and d not in unconsidered: - exc.addhtml("Unpossible dep: %s -> %s" % (exc.name, d)) - -invalidate(excuses, upgrademe, unconsidered) - -f = open("update.EXCUSES_py", 'w') - -f.write("\n") -f.write("excuses...") -f.write("\n") -f.write("

Generated: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "

\n") -f.write("
    \n") - -for exc in excuses: - f.write("
  • %s" % exc.html()) -f.write("
\n") - -f.close() - -del excuses - -# Changes - -srcsn = britney.SourcesNote(arches) - -# Initialise new testing to be the old testing - -for src in testing.sources: - srcsn.upgrade_source(testing, src) - -srcsn.commit_changes() - -#print "Things to do:" -#for x in upgrademe: -# print " " + x - -run = UpgradeRun(srcsn, unstable, testingupdates, upgrademe) - -def skiphint(candidate): - if "/" in candidate and candidate[candidate.rfind("/")+1:] in breakarches: - return 1 - return 0 - -def do_hint(type, who, pkgvers): - hintinfo = {"easy": "easy", - "hint": 0, - "force-hint": -1, - } - hintdoall = hintinfo[type] - - run.writeout("Trying %s from %s: %s\n" % (type, who, - " ".join( ["%s/%s" % (p,v) for (p,v) in pkgvers] ))) - - ok = 1 - for xp,v in pkgvers: - # is this version of this package present in unstable? - # (if it's also present in testing, do_all will skip it) - if "/" in xp: - p = xp[:xp.find("/")] - else: - p = xp - - if p[0] == "-": - pass - elif p.endswith("_tpu"): - if britney.versioncmp(run.testingupdates.get_version(p[:-4]),v) != 0: - ok = 0 - run.writeout(" Version mismatch, %s %s != %s\n" % - (p, v, run.testingupdates.get_version(p[:-4]))) - elif run.unstable.get_version(p) == None: - ok = 0 - run.writeout(" Source %s has no version in unstable\n" % p) - elif britney.versioncmp(run.unstable.get_version(p), v) != 0: - ok = 0 - run.writeout(" Version mismatch, %s %s != %s\n" % - (p, v, run.unstable.get_version(p))) - if ok: - run.do_all(hintdoall, map(lambda hp: hp[0], x[1])) - else: - run.writeout("Not using hint\n") - return ok - -run.writeout("Generated on: %s\n" % (time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())))) -run.writeout("Arch order is: %s\n" % ", ".join(arches)) - -for x in hints["easy"]: - do_hint("easy", x[0], x[1]) -for x in hints["force-hint"]: - do_hint("force-hint", x[0], x[1]) - -allpackages = [] -normpackages = run.packages[:] -archpackages = {} -for a in breakarches: - la = len(a) + 1 - archpackages[a] = [ p for p in normpackages if p[-la:] == "/" + a ] - normpackages = [ p for p in normpackages if p[-la:] != "/" + a ] -run.packages = normpackages -run.writeout("info: main run\n"); -run.do_all() -allpackages += run.packages -for a in breakarches: - x = breakarches - breakarches = [ ba for ba in breakarches if ba != a ] - run.packages = archpackages[a] - run.writeout("info: broken arch run for %s\n" % (a)) - run.do_all() - #run.do_all(1) - breakarches = x - allpackages += run.packages -run.packages = allpackages - -#run.do_all(0,["caudium", "sablotron"]) - -hintcnt = 1 - -rand = whrandom.whrandom() -rand.seed(23,187,96) -for q in range(datenow): - rand.random() -q = rand.random() -q = 1.0 -run.writeout("Current value is %f\n" % (q)) -if q < 0.2: - q = 0.2 - run.writeout("Current value bumped to %f\n" % (q)) -maxloops = int(math.ceil(math.log(100/(q**0.5)) / math.log(1+len(run.packages)))) -maxloops = 1 -run.writeout("Max loops for q=%.2f is %d\n" % (q, maxloops)) - -for x in hints["hint"]: - if hintcnt > 50: - run.writeout("Skipping remaining hints...") - break - - if len(x[1]) < maxloops: - run.writeout("Skipping simple hint from %s (%d<%d): %s\n" - % (x[0], len(x[1]), maxloops, str(x[1]))) - continue - - if do_hint("hint", x[0], x[1]): - hintcnt += 1 - -for i in range(1,maxloops): - run.do_all(i) - -if maxloops <= 1 and len(run.packages) < 500: - # too many to do all of them, let's try 5 at random - num_at_random = 5 - if len(run.packages) > num_at_random: - run.writeout("Trying %d at random\n" % num_at_random) - for k in 
range(num_at_random): - special = rand.choice(run.packages) - if skiphint(special): continue - run.writeout("Randomly trying %s\n" % (special)) - run.do_all(0, [special]) - -run.srcsn.write_notes(testingdir) - -write_bugs(testingdir + "/Bugs", testingbugs) -write_dates(testingdir + "/Dates", unstabledates) - -f = open(testingdir + '/HeidiResult', 'w') - -for arch in arches: - pkgs = srcsn.Packages(arch) - for pkg in pkgs.packages: - pkgv = pkgs.get_version(pkg) - pkgarch = pkgs.get_field(pkg, 'Architecture') - pkgsec = pkgs.get_field(pkg, 'Section') - if pkgsec == None: pkgsec = 'unknown\n' - pkgarch = pkgarch[:-1] - pkgsec = pkgsec[:-1] - f.write('%s %s %s %s\n' % (pkg, pkgv, pkgarch, pkgsec)) - -for src in srcsn.sources: - srcv = srcsn.get_version(src) - srcsec = srcsn.get_field(src, 'Section') - if srcsec == None: srcsec = 'unknown\n' - if srcsn.is_fake(src): srcsec = 'faux\n' - srcsec = srcsec[:-1] - f.write('%s %s source %s\n' % (src, srcv, srcsec)) - -f.close() - -if len(arches) != expected_arches: - sys.exit(1)
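
For reference, the same_source() helper in the deleted update_out.py above (britney.py keeps an equivalent method) decides whether two version strings belong to the same source upload: binNMU "+bN" suffixes are stripped, and the extra ".0.N"/".N" components that stable and security uploads append to a version are ignored. Below is a condensed, runnable restatement of those rules with worked examples; the version strings are illustrative only.

# Condensed restatement of same_source() from the deleted update_out.py.
import re

def same_source(sv1, sv2):
    # binNMU rebuilds append "+bN" but come from the same source version
    sv1 = re.sub(r'\+b\d+$', '', sv1)
    sv2 = re.sub(r'\+b\d+$', '', sv2)
    if sv1 == sv2:
        return True
    if '-' in sv1 or '-' in sv2:
        # non-native packages: stable/security uploads add ".0.N" (or ".N"
        # after an existing dot) to the Debian revision; strip and compare
        def strip(v):
            v = re.sub(r'^(.*-[^.]+)\.0\.\d+$', r'\1', v)
            v = re.sub(r'^(.*-[^.]+\.[^.]+)\.\d+$', r'\1', v)
            return v
        return strip(sv1) == strip(sv2)
    # native packages: one version may be the other plus a ".0.N" suffix
    return (re.sub(r'^([^-]+)\.0\.\d+$', r'\1', sv1) == sv2 or
            re.sub(r'^([^-]+)\.0\.\d+$', r'\1', sv2) == sv1)

assert same_source("1.2-3", "1.2-3+b2")      # binNMU of the same upload
assert same_source("1.2-3", "1.2-3.0.1")     # stable/security style suffix
assert same_source("4.0", "4.0.0.2")         # native package, same rule
assert not same_source("1.2-3", "1.2-4")     # genuinely different upload
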