diff --git a/doc/doxygen.conf b/doc/doxygen.conf deleted file mode 100644 index 132c8aa..0000000 --- a/doc/doxygen.conf +++ /dev/null @@ -1,1237 +0,0 @@ -# Doxyfile 1.4.6 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = britney - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = 2.0.alpha1 - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = doc/ - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. 
- -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Brazilian, Catalan, Chinese, Chinese-Traditional, Croatian, Czech, Danish, -# Dutch, Finnish, French, German, Greek, Hungarian, Italian, Japanese, -# Japanese-en (Japanese with English messages), Korean, Korean-en, Norwegian, -# Polish, Portuguese, Romanian, Russian, Serbian, Slovak, Slovene, Spanish, -# Swedish, and Ukrainian. - -OUTPUT_LANGUAGE = English - -# This tag can be used to specify the encoding used in the generated output. -# The encoding is not always determined by the language that is chosen, -# but also whether or not the output is meant for Windows or non-Windows users. -# In case there is a difference, setting the USE_WINDOWS_ENCODING tag to YES -# forces the Windows encoding (this is the default for the Windows binary), -# whereas setting the tag to NO uses a Unix-style encoding (the default for -# all platforms other than Windows). - -USE_WINDOWS_ENCODING = NO - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. 
Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. 
Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like the Qt-style comments (thus requiring an -# explicit @brief command for a brief description. - -JAVADOC_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the DETAILS_AT_TOP tag is set to YES then Doxygen -# will output the detailed description near the top, like JavaDoc. -# If set to NO, the detailed description appears after the member -# documentation. - -DETAILS_AT_TOP = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. 
- -TAB_SIZE = 8 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for Java. -# For instance, namespaces will be presented as packages, qualified scopes -# will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = YES - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want to -# include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = NO - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. 
- -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = NO - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = NO - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = YES - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. 
-# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = NO - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = YES - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. 
- -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = NO - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = YES - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = YES - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. 
- -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = NO - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from the -# version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. 
- -FILE_VERSION_FILTER = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. 
Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = . - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py - -FILE_PATTERNS = *.py - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. 
- -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. 
The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = NO - -# If the REFERENCED_BY_RELATION tag is set to YES (the default) -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES (the default) -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. 
The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. 
- -HTML_OUTPUT = html - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compressed HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. 
- -CHM_FILE = - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = NO - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = NO - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be -# generated containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. 
- -TREEVIEW_WIDTH = 250 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = YES - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! 
- -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. 
The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. 
- -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. This is useful -# if you want to understand what is going on. On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. 
Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = NO - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. 
Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). 
- -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = YES - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = YES - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = NO - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. 
- -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = NO - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = NO - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT tags are set to YES then doxygen will -# generate a call dependency graph for every global function or class method. -# Note that enabling this option will significantly increase the time of a run. -# So in most cases it will be better to enable call graphs for selected -# functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. 
- -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width -# (in pixels) of the graphs generated by dot. If a graph becomes larger than -# this value, doxygen will try to truncate the graph, so that it fits within -# the specified constraint. Beware that most browsers cannot cope with very -# large images. - -MAX_DOT_GRAPH_WIDTH = 1024 - -# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allows height -# (in pixels) of the graphs generated by dot. If a graph becomes larger than -# this value, doxygen will try to truncate the graph, so that it fits within -# the specified constraint. Beware that most browsers cannot cope with very -# large images. - -MAX_DOT_GRAPH_HEIGHT = 1024 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that a graph may be further truncated if the graph's -# image dimensions are not sufficient to fit the graph (see MAX_DOT_GRAPH_WIDTH -# and MAX_DOT_GRAPH_HEIGHT). If 0 is used for the depth value (the default), -# the graph is not depth-constrained. 
- -MAX_DOT_GRAPH_DEPTH = 0 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, which results in a white background. -# Warning: Depending on the platform used, enabling this option may lead to -# badly anti-aliased labels on the edges of a graph (i.e. they become hard to -# read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = NO - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/doc/html/annotated.html b/doc/html/annotated.html deleted file mode 100644 index 57a9ccd..0000000 --- a/doc/html/annotated.html +++ /dev/null @@ -1,28 +0,0 @@ - - -briteny: Class List - - - - - - -

briteny Class List

Here are the classes, structs, unions and interfaces with brief descriptions: - - -
britney.Britney
excuse.Excuse
-
Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/britney_8py-source.html b/doc/html/britney_8py-source.html deleted file mode 100644 index f4bc762..0000000 --- a/doc/html/britney_8py-source.html +++ /dev/null @@ -1,2658 +0,0 @@ - - -briteny: britney.py Source File - - - - - -

britney.py

00001 #!/usr/bin/env python2.4 # -*- coding: utf-8 -*-
-00002 
-00003 # Copyright (C) 2001-2004 Anthony Towns <ajt@debian.org>
-00004 #                         Andreas Barth <aba@debian.org>
-00005 #                         Fabio Tranchitella <kobold@debian.org>
-00006 
-00007 # This program is free software; you can redistribute it and/or modify
-00008 # it under the terms of the GNU General Public License as published by
-00009 # the Free Software Foundation; either version 2 of the License, or
-00010 # (at your option) any later version.
-00011 
-00012 # This program is distributed in the hope that it will be useful,
-00013 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-00014 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-00015 # GNU General Public License for more details.
-00016 
-00017 """
-00018 = Introdution =
-00019 
-00020 This is the Debian testing updater script, also known as "Britney".
-00021 
-00022 Packages are usually installed into the `testing' distribution after
-00023 they have undergone some degree of testing in unstable. The goal of
-00024 this software is to do this task in a smart way, allowing testing
-00025 to be always fully installable and close to being a release candidate.
-00026 
-00027 Britney source code is splitted in two different but related tasks:
-00028 the first one is the generation of the update excuses, while the
-00029 second tries to update testing with the valid candidates; first 
-00030 each package alone, then larger and even larger sets of packages
-00031 together. Each try is accepted if testing is not more uninstallable
-00032 after the update than before.
-00033 
-00034 = Data Loading =
-00035 
-00036 In order to analyze the entire Debian distribution, Britney needs to
-00037 load in memory the whole archive: this means more than 10.000 packages
-00038 for twelve architectures, as well as the dependency interconnection
-00039 between them. For this reason, the memory requirement for running this
-00040 software are quite high and at least 1 gigabyte of RAM should be available.
-00041 
-00042 Britney loads the source packages from the `Sources' file and the binary
-00043 packages from the `Packages_${arch}' files, where ${arch} is substituted
-00044 with the supported architectures. While loading the data, the software
-00045 analyze the dependencies and build a directed weighted graph in memory
-00046 with all the interconnections between the packages (see Britney.read_sources
-00047 and Britney.read_binaries).
-00048 
-00049 Other than source and binary packages, Britney loads the following data:
-00050 
-00051   * Bugs, which contains the count of release-critical bugs for a given
-00052     version of a source package (see Britney.read_bugs).
-00053 
-00054   * Dates, which contains the date of the upload of a given version 
-00055     of a source package (see Britney.read_dates).
-00056 
-00057   * Urgencies, which contains the urgency of the upload of a given
-00058     version of a source package (see Britney.read_urgencies).
-00059 
-00060   * Approvals, which contains the list of approved testing-proposed-updates
-00061     packages (see Britney.read_approvals).
-00062 
-00063   * Hints, which contains lists of commands which modify the standard behaviour
-00064     of Britney (see Britney.read_hints).
-00065 
-00066 For a more detailed explanation about the format of these files, please read
-00067 the documentation of the related methods. The exact meaning of them will be
-00068 instead explained in the chapter "Excuses Generation".
-00069 
-00070 = Excuses =
-00071 
-00072 An excuse is a detailed explanation of why a package can or cannot
-00073 be updated in the testing distribution from a newer package in 
-00074 another distribution (like for example unstable). The main purpose
-00075 of the excuses is to be written in an HTML file which will be 
-00076 published over HTTP. The maintainers will be able to parse it manually
-00077 or automatically to find the explanation of why their packages have
-00078 been updated or not.
-00079 
-00080 == Excuses generation ==
-00081 
-00082 These are the steps (with references to method names) that Britney
-00083 does for the generation of the update excuses.
-00084 
-00085  * If a source package is available in testing but it is not
-00086    present in unstable and no binary packages in unstable are
-00087    built from it, then it is marked for removal.
-00088 
-00089  * Every source package in unstable and testing-proposed-updates,
-00090    if already present in testing, is checked for binary-NMUs, new
-00091    or dropped binary packages in all the supported architectures
-00092    (see Britney.should_upgrade_srcarch). The steps to detect if an
-00093    upgrade is needed are:
-00094 
-00095     1. If there is a `remove' hint for the source package, the package
-00096        is ignored: it will be removed and not updated.
-00097 
-00098     2. For every binary package build from the new source, it checks
-00099        for unsatisfied dependencies, new binary package and updated
-00100        binary package (binNMU) excluding the architecture-independent
-00101        ones and the packages not built from the same source.
-00102 
-00103     3. For every binary package build from the old source, it checks
-00104        if it is still built from the new source; if this is not true
-00105        and the package is not architecture-independent, the script
-00106        removes it from testing.
-00107 
-00108     4. Finally, if there is something worth doing (eg. a new or updated
-00109        binary package) and nothing wrong it marks the source package
-00110        as "Valid candidate", or "Not considered" if there is something
-00111        wrong which prevented the update.
-00112 
-00113  * Every source package in unstable and testing-proposed-updates is
-00114    checked for upgrade (see Britney.should_upgrade_src). The steps
-00115    to detect if an upgrade is needed are:
-00116 
-00117     1. If the source package in testing is more recent the new one
-00118        is ignored.
-00119 
-00120     2. If the source package doesn't exist (is fake), which means that
-00121        a binary package refers to it but it is not present in the
-00122        `Sources' file, the new one is ignored.
-00123 
-00124     3. If the package doesn't exist in testing, the urgency of the
-00125        upload is ignored and set to the default (actually `low').
-00126 
-00127     4. If there is a `remove' hint for the source package, the package
-00128        is ignored: it will be removed and not updated.
-00129 
-00130     5. If there is a `block' hint for the source package without an
-00131        `unblock` hint or a `block-all source`, the package is ignored.
-00132 
-00133     7. If the suite is unstable, the update can go ahead only if the
-00134        upload happend more then the minimum days specified by the
-00135        urgency of the upload; if this is not true, the package is
-00136        ignored as `too-young'. Note that the urgency is sticky, meaning
-00137        that the highest urgency uploaded since the previous testing
-00138        transition is taken into account.
-00139 
-00140     8. All the architecture-dependent binary packages and the
-00141        architecture-independent ones for the `nobreakall' architectures
-00142        have to be built from the source we are considering. If this is
-00143        not true, then these are called `out-of-date' architectures and
-00144        the package is ignored.
-00145 
-00146     9. The source package must have at least a binary package, otherwise
-00147        it is ignored.
-00148 
-00149    10. If the suite is unstable, the count of release critical bugs for
-00150        the new source package must be less then the count for the testing
-00151        one. If this is not true, the package is ignored as `buggy'.
-00152 
-00153    11. If there is a `force' hint for the source package, then it is
-00154        updated even if it is marked as ignored from the previous steps.
-00155 
-00156    12. If the suite is testing-proposed-updates, the source package can
-00157        be updated only if there is an explicit approval for it.
-00158 
-00159    13. If the package will be ignored, mark it as "Valid candidate",
-00160        otherwise mark it as "Not considered".
-00161 
-00162  * The list of `remove' hints is processed: if the requested source
-00163    package is not already being updated or removed and the version
-00164    actually in testing is the same specified with the `remove' hint,
-00165    it is marked for removal.
-00166 
-00167  * The excuses are sorted by the number of days from the last upload
-00168    (days-old) and by name.
-00169 
-00170  * A list of unconsidered excuses (for which the package is not upgraded)
-00171    is built. Using this list, all the excuses depending on them is marked
-00172    as invalid for "unpossible dependency".
-00173 
-00174  * The excuses are written in an HTML file.
-00175 """
-00176 
-00177 import os
-00178 import re
-00179 import sys
-00180 import string
-00181 import time
-00182 import copy
-00183 import optparse
-00184 import operator
-00185 
-00186 import apt_pkg
-00187 
-00188 from excuse import Excuse
-00189 
-00190 __author__ = 'Fabio Tranchitella'
-00191 __version__ = '2.0.alpha1'
-00192 
-00193 # source package
-00194 VERSION = 0
-00195 SECTION = 1
-00196 BINARIES = 2
-00197 MAINTAINER = 3
-00198 FAKESRC = 4
-00199 
-00200 # binary package
-00201 SOURCE = 2
-00202 SOURCEVER = 3
-00203 ARCHITECTURE = 4
-00204 PREDEPENDS = 5
-00205 DEPENDS = 6
-00206 CONFLICTS = 7
-00207 PROVIDES = 8
-00208 RDEPENDS = 9
-00209 RCONFLICTS = 10
-00210 
-00211 
-00212 class Britney:
-00213     """Britney, the debian testing updater script
-00214     
-00215     This is the script that updates the testing_ distribution. It is executed
-00216     each day after the installation of the updated packages. It generates the 
-00217     `Packages' files for the testing distribution, but it does so in an
-00218     intelligent manner; it try to avoid any inconsistency and to use only
-00219     non-buggy packages.
-00220 
-00221     For more documentation on this script, please read the Developers Reference.
-00222     """
-00223 
-00224     HINTS_STANDARD = ("easy", "hint", "remove", "block", "unblock", "urgent", "approve")
-00225     HINTS_ALL = ("force", "force-hint", "block-all") + HINTS_STANDARD
-00226 
-00227     def __init__(self):
-00228         """Class constructor
-00229 
-00230         This method initializes and populates the data lists, which contain all
-00231         the information needed by the other methods of the class.
-00232         """
-00233         self.date_now = int(((time.time() / (60*60)) - 15) / 24)
-00234 
-00235         # parse the command line arguments
-00236         self.__parse_arguments()
-00237 
-00238         # initialize the apt_pkg back-end
-00239         apt_pkg.init()
-00240 
-00241         # if requested, build the non-installable status and save it
-00242         if not self.options.nuninst_cache:
-00243             self.__log("Building the list of not installable packages for the full archive", type="I")
-00244             self.sources = {'testing': self.read_sources(self.options.testing)}
-00245             nuninst = {}
-00246             for arch in self.options.architectures:
-00247                 self.binaries = {'testing': {arch: self.read_binaries(self.options.testing, "testing", arch)}}
-00248                 self.__log("> Checking for non-installable packages for architecture %s" % arch, type="I")
-00249                 result = self.get_nuninst(arch, build=True)
-00250                 nuninst.update(result)
-00251                 self.__log("> Found %d non-installable packages" % len(nuninst[arch]), type="I")
-00252             self.write_nuninst(nuninst)
-00253         else:
-00254             self.__log("Not building the list of not installable packages, as requested", type="I")
-00255 
-00256         # read the source and binary packages for the involved distributions
-00257         self.sources = {'testing': self.read_sources(self.options.testing),
-00258                         'unstable': self.read_sources(self.options.unstable),
-00259                         'tpu': self.read_sources(self.options.tpu),}
-00260         self.binaries = {'testing': {}, 'unstable': {}, 'tpu': {}}
-00261         for arch in self.options.architectures:
-00262             self.binaries['testing'][arch] = self.read_binaries(self.options.testing, "testing", arch)
-00263             self.binaries['unstable'][arch] = self.read_binaries(self.options.unstable, "unstable", arch)
-00264             self.binaries['tpu'][arch] = self.read_binaries(self.options.tpu, "tpu", arch)
-00265 
-00266         # read the release-critical bug summaries for testing and unstable
-00267         self.bugs = {'unstable': self.read_bugs(self.options.unstable),
-00268                      'testing': self.read_bugs(self.options.testing),}
-00269         self.normalize_bugs()
-00270 
-00271         # read additional data
-00272         self.dates = self.read_dates(self.options.testing)
-00273         self.urgencies = self.read_urgencies(self.options.testing)
-00274         self.approvals = self.read_approvals(self.options.tpu)
-00275         self.hints = self.read_hints(self.options.unstable)
-00276         self.excuses = []
-00277         self.dependencies = {}
-00278 
-00279     def __parse_arguments(self):
-00280         """Parse the command line arguments
-00281 
-00282         This method parses and initializes the command line arguments.
-00283         While doing so, it preprocesses some of the options to be converted
-00284         in a suitable form for the other methods of the class.
-00285         """
-00286         # initialize the parser
-00287         self.parser = optparse.OptionParser(version="%prog")
-00288         self.parser.add_option("-v", "", action="count", dest="verbose", help="enable verbose output")
-00289         self.parser.add_option("-c", "--config", action="store", dest="config", default="/etc/britney.conf",
-00290                                help="path for the configuration file")
-00291         self.parser.add_option("", "--architectures", action="store", dest="architectures", default=None,
-00292                                help="override architectures from configuration file")
-00293         self.parser.add_option("", "--actions", action="store", dest="actions", default=None,
-00294                                help="override the list of actions to be performed")
-00295         self.parser.add_option("", "--dry-run", action="store_true", dest="dry_run", default=False,
-00296                                help="disable all outputs to the testing directory")
-00297         self.parser.add_option("", "--compatible", action="store_true", dest="compatible", default=False,
-00298                                help="enable full compatibility with old britney's output")
-00299         self.parser.add_option("", "--control-files", action="store_true", dest="control_files", default=False,
-00300                                help="enable control files generation")
-00301         self.parser.add_option("", "--nuninst-cache", action="store_true", dest="nuninst_cache", default=False,
-00302                                help="do not build the non-installability status, use the cache from file")
-00303         (self.options, self.args) = self.parser.parse_args()
-00304 
-00305         # if the configuration file exists, than read it and set the additional options
-00306         if not os.path.isfile(self.options.config):
-00307             self.__log("Unable to read the configuration file (%s), exiting!" % self.options.config, type="E")
-00308             sys.exit(1)
-00309 
-00310         # minimum days for unstable-testing transition and the list of hints
-00311         # are handled as an ad-hoc case
-00312         self.MINDAYS = {}
-00313         self.HINTS = {}
-00314         for k, v in [map(string.strip,r.split('=', 1)) for r in file(self.options.config) if '=' in r and not r.strip().startswith('#')]:
-00315             if k.startswith("MINDAYS_"):
-00316                 self.MINDAYS[k.split("_")[1].lower()] = int(v)
-00317             elif k.startswith("HINTS_"):
-00318                 self.HINTS[k.split("_")[1].lower()] = \
-00319                     reduce(lambda x,y: x+y, [hasattr(self, "HINTS_" + i) and getattr(self, "HINTS_" + i) or (i,) for i in v.split()])
-00320             elif not hasattr(self.options, k.lower()) or \
-00321                  not getattr(self.options, k.lower()):
-00322                 setattr(self.options, k.lower(), v)
-00323 
-00324         # Sort the architecture list
-00325         allarches = sorted(self.options.architectures.split())
-00326         arches = [x for x in allarches if x in self.options.nobreakall_arches]
-00327         arches += [x for x in allarches if x not in arches and x not in self.options.fucked_arches.split()]
-00328         arches += [x for x in allarches if x not in arches and x not in self.options.break_arches.split()]
-00329         arches += [x for x in allarches if x not in arches and x not in self.options.new_arches.split()]
-00330         arches += [x for x in allarches if x not in arches]
-00331         self.options.architectures = arches
-00332         self.options.smooth_updates = self.options.smooth_updates.split()
-00333 
-00334     def __log(self, msg, type="I"):
-00335         """Print info messages according to verbosity level
-00336         
-00337         An easy-and-simple log method which prints messages to the standard
-00338         output. The type parameter controls the urgency of the message, and
-00339         can be equal to `I' for `Information', `W' for `Warning' and `E' for
-00340         `Error'. Warnings and errors are always printed, and information are
-00341         printed only if the verbose logging is enabled.
-00342         """
-00343         if self.options.verbose or type in ("E", "W"):
-00344             print "%s: [%s] - %s" % (type, time.asctime(), msg)
-00345 
-00346     # Data reading/writing methods
-00347     # ----------------------------
-00348 
-00349     def read_sources(self, basedir):
-00350         """Read the list of source packages from the specified directory
-00351         
-00352         The source packages are read from the `Sources' file within the
-00353         directory specified as `basedir' parameter. Considering the
-00354         large amount of memory needed, not all the fields are loaded
-00355         in memory. The available fields are Version, Maintainer and Section.
-00356 
-00357         The method returns a list where every item represents a source
-00358         package as a dictionary.
-00359         """
-00360         sources = {}
-00361         package = None
-00362         filename = os.path.join(basedir, "Sources")
-00363         self.__log("Loading source packages from %s" % filename)
-00364         Packages = apt_pkg.ParseTagFile(open(filename))
-00365         get_field = Packages.Section.get
-00366         while Packages.Step():
-00367             pkg = get_field('Package')
-00368             sources[pkg] = [get_field('Version'),
-00369                             get_field('Section'),
-00370                             [],
-00371                             get_field('Maintainer'),
-00372                             False,
-00373                            ]
-00374         return sources
-00375 
-00376     def read_binaries(self, basedir, distribution, arch):
-00377         """Read the list of binary packages from the specified directory
-00378         
-00379         The binary packages are read from the `Packages_${arch}' files
-00380         within the directory specified as `basedir' parameter, replacing
-00381         ${arch} with the value of the arch parameter. Considering the
-00382         large amount of memory needed, not all the fields are loaded
-00383         in memory. The available fields are Version, Source, Pre-Depends,
-00384         Depends, Conflicts, Provides and Architecture.
-00385         
-00386         After reading the packages, reverse dependencies are computed
-00387         and saved in the `rdepends' keys, and the `Provides' field is
-00388         used to populate the virtual packages list.
-00389 
-00390         The dependencies are parsed with the apt.pkg.ParseDepends method,
-00391         and they are stored both as the format of its return value and
-00392         text.
-00393 
-00394         The method returns a tuple. The first element is a list where
-00395         every item represents a binary package as a dictionary; the second
-00396         element is a dictionary which maps virtual packages to real
-00397         packages that provide it.
-00398         """
-00399 
-00400         packages = {}
-00401         provides = {}
-00402         sources = self.sources
-00403         package = None
-00404 
-00405         filename = os.path.join(basedir, "Packages_%s" % arch)
-00406         self.__log("Loading binary packages from %s" % filename)
-00407         Packages = apt_pkg.ParseTagFile(open(filename))
-00408         get_field = Packages.Section.get
-00409         while Packages.Step():
-00410             pkg = get_field('Package')
-00411             version = get_field('Version')
-00412             dpkg = [version,
-00413                     get_field('Section'),
-00414                     pkg, 
-00415                     version,
-00416                     get_field('Architecture'),
-00417                     get_field('Pre-Depends'),
-00418                     get_field('Depends'),
-00419                     get_field('Conflicts'),
-00420                     get_field('Provides'),
-00421                     [],
-00422                     [],
-00423                    ]
-00424 
-00425             # retrieve the name and the version of the source package
-00426             source = get_field('Source')
-00427             if source:
-00428                 dpkg[SOURCE] = source.split(" ")[0]
-00429                 if "(" in source:
-00430                     dpkg[SOURCEVER] = source[source.find("(")+1:source.find(")")]
-00431 
-00432             # if the source package is available in the distribution, then register this binary package
-00433             if dpkg[SOURCE] in sources[distribution]:
-00434                 sources[distribution][dpkg[SOURCE]][BINARIES].append(pkg + "/" + arch)
-00435             # if the source package doesn't exist, create a fake one
-00436             else:
-00437                 sources[distribution][dpkg[SOURCE]] = [dpkg[SOURCEVER], None, [pkg + "/" + arch], None, True]
-00438 
-00439             # register virtual packages and real packages that provide them
-00440             if dpkg[PROVIDES]:
-00441                 parts = map(string.strip, dpkg[PROVIDES].split(","))
-00442                 for p in parts:
-00443                     if p not in provides:
-00444                         provides[p] = []
-00445                     provides[p].append(pkg)
-00446                 dpkg[PROVIDES] = parts
-00447             else: dpkg[PROVIDES] = []
-00448 
-00449             # add the resulting dictionary to the package list
-00450             packages[pkg] = dpkg
-00451 
-00452         # loop again on the list of packages to register reverse dependencies and conflicts
-00453         register_reverses = self.register_reverses
-00454         for pkg in packages:
-00455             register_reverses(pkg, packages, provides, check_doubles=False)
-00456 
-00457         # return a tuple with the list of real and virtual packages
-00458         return (packages, provides)
-00459 
-00460     def register_reverses(self, pkg, packages, provides, check_doubles=True, parse_depends=apt_pkg.ParseDepends):
-00461         """Register reverse dependencies and conflicts for the specified package
-00462 
-00463         This method register the reverse dependencies and conflicts for
-00464         a give package using `packages` as list of packages and `provides`
-00465         as list of virtual packages.
-00466 
-00467         The method has an optional parameter parse_depends which is there
-00468         just for performance reasons and is not meant to be overwritten.
-00469         """
-00470         # register the list of the dependencies for the depending packages
-00471         dependencies = []
-00472         if packages[pkg][DEPENDS]:
-00473             dependencies.extend(parse_depends(packages[pkg][DEPENDS]))
-00474         if packages[pkg][PREDEPENDS]:
-00475             dependencies.extend(parse_depends(packages[pkg][PREDEPENDS]))
-00476         # go through the list
-00477         for p in dependencies:
-00478             for a in p:
-00479                 # register real packages
-00480                 if a[0] in packages and (not check_doubles or pkg not in packages[a[0]][RDEPENDS]):
-00481                     packages[a[0]][RDEPENDS].append(pkg)
-00482                 # register packages which provides a virtual package
-00483                 elif a[0] in provides:
-00484                     for i in provides.get(a[0]):
-00485                         if i not in packages: continue
-00486                         if not check_doubles or pkg not in packages[i][RDEPENDS]:
-00487                             packages[i][RDEPENDS].append(pkg)
-00488         # register the list of the conflicts for the conflicting packages
-00489         if packages[pkg][CONFLICTS]:
-00490             for p in parse_depends(packages[pkg][CONFLICTS]):
-00491                 for a in p:
-00492                     # register real packages
-00493                     if a[0] in packages and (not check_doubles or pkg not in packages[a[0]][RCONFLICTS]):
-00494                         packages[a[0]][RCONFLICTS].append(pkg)
-00495                     # register packages which provides a virtual package
-00496                     elif a[0] in provides:
-00497                         for i in provides[a[0]]:
-00498                             if i not in packages: continue
-00499                             if not check_doubles or pkg not in packages[i][RCONFLICTS]:
-00500                                 packages[i][RCONFLICTS].append(pkg)
-00501      
-00502     def read_bugs(self, basedir):
-00503         """Read the release critial bug summary from the specified directory
-00504         
-00505         The RC bug summaries are read from the `Bugs' file within the
-00506         directory specified as `basedir' parameter. The file contains
-00507         rows with the format:
-00508 
-00509         <package-name> <count-of-rc-bugs>
-00510 
-00511         The method returns a dictionary where the key is the binary package
-00512         name and the value is the number of open RC bugs for it.
-00513         """
-00514         bugs = {}
-00515         filename = os.path.join(basedir, "Bugs")
-00516         self.__log("Loading RC bugs count from %s" % filename)
-00517         for line in open(filename):
-00518             l = line.split()
-00519             if len(l) != 2: continue
-00520             try:
-00521                 bugs[l[0]] = int(l[1])
-00522             except ValueError:
-00523                 self.__log("Bugs, unable to parse \"%s\"" % line, type="E")
-00524         return bugs
-00525 
-00526     def write_bugs(self, basedir, bugs):
-00527         """Write the release critical bug summary to the specified directory
-00528 
-00529         For a more detailed explanation of the format, please check the method
-00530         read_bugs.
-00531         """
-00532         filename = os.path.join(basedir, "Bugs")
-00533         self.__log("Writing RC bugs count to %s" % filename)
-00534         f = open(filename, 'w')
-00535         for pkg in sorted(bugs.keys()):
-00536             if bugs[pkg] == 0: continue
-00537             f.write("%s %d\n" % (pkg, bugs[pkg]))
-00538         f.close()
-00539 
-00540     def __maxver(self, pkg, dist):
-00541         """Return the maximum version for a given package name
-00542         
-00543         This method returns None if the specified source package
-00544         is not available in the `dist' distribution. If the package
-00545         exists, then it returns the maximum version between the
-00546         source package and its binary packages.
-00547         """
-00548         maxver = None
-00549         if pkg in self.sources[dist]:
-00550             maxver = self.sources[dist][pkg][VERSION]
-00551         for arch in self.options.architectures:
-00552             if pkg not in self.binaries[dist][arch][0]: continue
-00553             pkgv = self.binaries[dist][arch][0][pkg][VERSION]
-00554             if maxver == None or apt_pkg.VersionCompare(pkgv, maxver) > 0:
-00555                 maxver = pkgv
-00556         return maxver
-00557 
-00558     def normalize_bugs(self):
-00559         """Normalize the release critical bug summaries for testing and unstable
-00560         
-00561         The method doesn't return any value: it directly modifies the
-00562         object attribute `bugs'.
-00563         """
-00564         # loop on all the package names from testing and unstable bug summaries
-00565         for pkg in set(self.bugs['testing'].keys() + self.bugs['unstable'].keys()):
-00566 
-00567             # make sure that the key is present in both dictionaries
-00568             if pkg not in self.bugs['testing']:
-00569                 self.bugs['testing'][pkg] = 0
-00570             elif pkg not in self.bugs['unstable']:
-00571                 self.bugs['unstable'][pkg] = 0
-00572 
-00573             # retrieve the maximum version of the package in testing:
-00574             maxvert = self.__maxver(pkg, 'testing')
-00575 
-00576             # if the package is not available in testing or it has the
-00577             # same RC bug count, then do nothing
-00578             if maxvert == None or \
-00579                self.bugs['testing'][pkg] == self.bugs['unstable'][pkg]:
-00580                 continue
-00581 
-00582             # retrieve the maximum version of the package in testing:
-00583             maxveru = self.__maxver(pkg, 'unstable')
-00584 
-00585             # if the package is not available in unstable, then do nothing
-00586             if maxveru == None:
-00587                 continue
-00588             # else if the testing package is more recent, then use the
-00589             # unstable RC bug count for testing, too
-00590             elif apt_pkg.VersionCompare(maxvert, maxveru) >= 0:
-00591                 self.bugs['testing'][pkg] = self.bugs['unstable'][pkg]
-00592 
-00593     def read_dates(self, basedir):
-00594         """Read the upload date for the packages from the specified directory
-00595         
-00596         The upload dates are read from the `Date' file within the directory
-00597         specified as `basedir' parameter. The file contains rows with the
-00598         format:
-00599 
-00600         <package-name> <version> <date-of-upload>
-00601 
-00602         The dates are expressed as days starting from the 1970-01-01.
-00603 
-00604         The method returns a dictionary where the key is the binary package
-00605         name and the value is tuple with two items, the version and the date.
-00606         """
-00607         dates = {}
-00608         filename = os.path.join(basedir, "Dates")
-00609         self.__log("Loading upload data from %s" % filename)
-00610         for line in open(filename):
-00611             l = line.split()
-00612             if len(l) != 3: continue
-00613             try:
-00614                 dates[l[0]] = (l[1], int(l[2]))
-00615             except ValueError:
-00616                 self.__log("Dates, unable to parse \"%s\"" % line, type="E")
-00617         return dates
-00618 
-00619     def write_dates(self, basedir, dates):
-00620         """Write the upload date for the packages to the specified directory
-00621 
-00622         For a more detailed explanation of the format, please check the method
-00623         read_dates.
-00624         """
-00625         filename = os.path.join(basedir, "Dates")
-00626         self.__log("Writing upload data to %s" % filename)
-00627         f = open(filename, 'w')
-00628         for pkg in sorted(dates.keys()):
-00629             f.write("%s %s %d\n" % ((pkg,) + dates[pkg]))
-00630         f.close()
-00631 
-00632 
-00633     def read_urgencies(self, basedir):
-00634         """Read the upload urgency of the packages from the specified directory
-00635         
-00636         The upload urgencies are read from the `Urgency' file within the
-00637         directory specified as `basedir' parameter. The file contains rows
-00638         with the format:
-00639 
-00640         <package-name> <version> <urgency>
-00641 
-00642         The method returns a dictionary where the key is the binary package
-00643         name and the value is the greatest urgency from the versions of the
-00644         package that are higher then the testing one.
-00645         """
-00646 
-00647         urgencies = {}
-00648         filename = os.path.join(basedir, "Urgency")
-00649         self.__log("Loading upload urgencies from %s" % filename)
-00650         for line in open(filename):
-00651             l = line.split()
-00652             if len(l) != 3: continue
-00653 
-00654             # read the minimum days associated to the urgencies
-00655             urgency_old = urgencies.get(l[0], self.options.default_urgency)
-00656             mindays_old = self.MINDAYS.get(urgency_old, self.MINDAYS[self.options.default_urgency])
-00657             mindays_new = self.MINDAYS.get(l[2], self.MINDAYS[self.options.default_urgency])
-00658 
-00659             # if the new urgency is lower (so the min days are higher), do nothing
-00660             if mindays_old <= mindays_new:
-00661                 continue
-00662 
-00663             # if the package exists in testing and it is more recent, do nothing
-00664             tsrcv = self.sources['testing'].get(l[0], None)
-00665             if tsrcv and apt_pkg.VersionCompare(tsrcv[VERSION], l[1]) >= 0:
-00666                 continue
-00667 
-00668             # if the package doesn't exist in unstable or it is older, do nothing
-00669             usrcv = self.sources['unstable'].get(l[0], None)
-00670             if not usrcv or apt_pkg.VersionCompare(usrcv[VERSION], l[1]) < 0:
-00671                 continue
-00672 
-00673             # update the urgency for the package
-00674             urgencies[l[0]] = l[2]
-00675 
-00676         return urgencies
-00677 
-00678     def read_approvals(self, basedir):
-00679         """Read the approval commands from the specified directory
-00680         
-00681         The approval commands are read from the files contained by the 
-00682         `Approved' directory within the directory specified as `basedir'
-00683         parameter. The name of the files has to be the same of the
-00684         authorized users for the approvals.
-00685         
-00686         The file contains rows with the format:
-00687 
-00688         <package-name> <version>
-00689 
-00690         The method returns a dictionary where the key is the binary package
-00691         name followed by an underscore and the version number, and the value
-00692         is the user who submitted the command.
-00693         """
-00694         approvals = {}
-00695         for approver in self.options.approvers.split():
-00696             filename = os.path.join(basedir, "Approved", approver)
-00697             self.__log("Loading approvals list from %s" % filename)
-00698             for line in open(filename):
-00699                 l = line.split()
-00700                 if len(l) != 2: continue
-00701                 approvals["%s_%s" % (l[0], l[1])] = approver
-00702         return approvals
-00703 
-00704     def read_hints(self, basedir):
-00705         """Read the hint commands from the specified directory
-00706         
-00707         The hint commands are read from the files contained by the `Hints'
-00708         directory within the directory specified as `basedir' parameter. 
-00709         The name of the files has to be the same of the authorized users
-00710         for the hints.
-00711         
-00712         The file contains rows with the format:
-00713 
-00714         <command> <package-name>[/<version>]
-00715 
-00716         The method returns a dictionary where the key is the command, and
-00717         the value is the list of affected packages.
-00718         """
-00719         hints = dict([(k,[]) for k in self.HINTS_ALL])
-00720 
-00721         for who in self.HINTS.keys():
-00722             filename = os.path.join(basedir, "Hints", who)
-00723             self.__log("Loading hints list from %s" % filename)
-00724             for line in open(filename):
-00725                 line = line.strip()
-00726                 if line == "": continue
-00727                 l = line.split()
-00728                 if l[0] == 'finished':
-00729                     break
-00730                 elif l[0] not in self.HINTS[who]:
-00731                     continue
-00732                 elif l[0] in ["easy", "hint", "force-hint"]:
-00733                     hints[l[0]].append((who, [k.split("/") for k in l if "/" in k]))
-00734                 elif l[0] in ["block-all"]:
-00735                     hints[l[0]].extend([(y, who) for y in l[1:]])
-00736                 elif l[0] in ["block"]:
-00737                     hints[l[0]].extend([(y, who) for y in l[1:]])
-00738                 elif l[0] in ["remove", "approve", "unblock", "force", "urgent"]:
-00739                     hints[l[0]].extend([(k.split("/")[0], (k.split("/")[1],who) ) for k in l if "/" in k])
-00740 
-00741         for x in ["block", "block-all", "unblock", "force", "urgent", "remove"]:
-00742             z = {}
-00743             for a, b in hints[x]:
-00744                 if a in z:
-00745                     self.__log("Overriding %s[%s] = %s with %s" % (x, a, z[a], b), type="W")
-00746                 z[a] = b
-00747             hints[x] = z
-00748 
-00749         return hints
-00750 
-00751     def write_heidi(self, basedir, filename):
-00752         """Write the output HeidiResult
-00753 
-00754         This method write the output for Heidi, which contains all the
-00755         binary packages and the source packages in the form:
-00756         
-00757         <pkg-name> <pkg-version> <pkg-architecture> <pkg-section>
-00758         <src-name> <src-version> <src-section>
-00759         """
-00760         filename = os.path.join(basedir, filename)
-00761         self.__log("Writing Heidi results to %s" % filename)
-00762         f = open(filename, 'w')
-00763 
-00764         # local copies
-00765         sources = self.sources['testing']
-00766 
-00767         # write binary packages
-00768         for arch in sorted(self.options.architectures):
-00769             binaries = self.binaries['testing'][arch][0]
-00770             for pkg_name in sorted(binaries):
-00771                 pkg = binaries[pkg_name]
-00772                 pkgv = pkg[VERSION]
-00773                 pkgarch = pkg[ARCHITECTURE]
-00774                 pkgsec = pkg[SECTION] or 'unknown'
-00775                 f.write('%s %s %s %s\n' % (pkg_name, pkgv, pkgarch, pkgsec))
-00776 
-00777         # write sources
-00778         for src_name in sorted(sources):
-00779             src = sources[src_name]
-00780             srcv = src[VERSION]
-00781             srcsec = src[FAKESRC] and 'faux' or src[SECTION] or 'unknown'
-00782             f.write('%s %s source %s\n' % (src_name, srcv, srcsec))
-00783 
-00784         f.close()
-00785 
-00786     def write_controlfiles(self, basedir, suite):
-00787         """Write the control files
-00788 
-00789         This method write the control files for the binary packages of all
-00790         the architectures and for the source packages.
-00791         """
-00792         sources = self.sources[suite]
-00793 
-00794         self.__log("Writing new %s control files to %s" % (suite, basedir))
-00795         for arch in self.options.architectures:
-00796             filename = os.path.join(basedir, 'Packages_%s' % arch)
-00797             f = open(filename, 'w')
-00798             binaries = self.binaries[suite][arch][0]
-00799             for pkg in binaries:
-00800                 output = "Package: %s\n" % pkg
-00801                 for key, k in ((SECTION, 'Section'), (ARCHITECTURE, 'Architecture'), (SOURCE, 'Source'), (VERSION, 'Version'), 
-00802                           (PREDEPENDS, 'Pre-Depends'), (DEPENDS, 'Depends'), (PROVIDES, 'Provides'), (CONFLICTS, 'Conflicts')):
-00803                     if not binaries[pkg][key]: continue
-00804                     if key == SOURCE:
-00805                         if binaries[pkg][SOURCE] == pkg:
-00806                             if binaries[pkg][SOURCEVER] != binaries[pkg][VERSION]:
-00807                                 source = binaries[pkg][SOURCE] + " (" + binaries[pkg][SOURCEVER] + ")"
-00808                             else: continue
-00809                         else:
-00810                             if binaries[pkg][SOURCEVER] != binaries[pkg][VERSION]:
-00811                                 source = binaries[pkg][SOURCE] + " (" + binaries[pkg][SOURCEVER] + ")"
-00812                             else:
-00813                                 source = binaries[pkg][SOURCE]
-00814                         output += (k + ": " + source + "\n")
-00815                         if sources[binaries[pkg][SOURCE]][MAINTAINER]:
-00816                             output += (k + ": " + sources[binaries[pkg][SOURCE]][MAINTAINER] + "\n")
-00817                     elif key == PROVIDES:
-00818                         if len(binaries[pkg][key]) > 0:
-00819                             output += (k + ": " + ", ".join(binaries[pkg][key]) + "\n")
-00820                     else:
-00821                         output += (k + ": " + binaries[pkg][key] + "\n")
-00822                 f.write(output + "\n")
-00823             f.close()
-00824 
-00825         filename = os.path.join(basedir, 'Sources')
-00826         f = open(filename, 'w')
-00827         for src in sources:
-00828             output = "Package: %s\n" % src
-00829             for k in ('Version', 'Section', 'Maintainer'):
-00830                 key = k.lower()
-00831                 if key not in sources[src] or not sources[src][key]: continue
-00832                 output += (k + ": " + sources[src][key] + "\n")
-00833             f.write(output + "\n")
-00834         f.close()
-00835 
-00836     def write_nuninst(self, nuninst):
-00837         """Write the non-installable report"""
-00838         f = open(self.options.noninst_status, 'w')
-00839         f.write("Built on: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n")
-00840         f.write("Last update: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n\n")
-00841         f.write("".join([k + ": " + " ".join(nuninst[k]) + "\n" for k in nuninst]))
-00842         f.close()
-00843 
-00844     def read_nuninst(self):
-00845         """Read the non-installable report"""
-00846         f = open(self.options.noninst_status)
-00847         nuninst = {}
-00848         for r in f:
-00849             if ":" not in r: continue
-00850             arch, packages = r.strip().split(":", 1)
-00851             if arch.split("+", 1)[0] in self.options.architectures:
-00852                 nuninst[arch] = packages.split()
-00853         return nuninst
-00854 
-00855 
-00856     # Utility methods for package analysis
-00857     # ------------------------------------
-00858 
-00859     def same_source(self, sv1, sv2):
-00860         """Check if two version numbers are built from the same source
-00861 
-00862         This method returns a boolean value which is true if the two
-00863         version numbers specified as parameters are built from the same
-00864         source. The main use of this code is to detect binary-NMU.
-00865         """
-00866         if sv1 == sv2:
-00867             return 1
-00868 
-00869         m = re.match(r'^(.*)\+b\d+$', sv1)
-00870         if m: sv1 = m.group(1)
-00871         m = re.match(r'^(.*)\+b\d+$', sv2)
-00872         if m: sv2 = m.group(1)
-00873 
-00874         if sv1 == sv2:
-00875             return 1
-00876 
-00877         if re.search("-", sv1) or re.search("-", sv2):
-00878             m = re.match(r'^(.*-[^.]+)\.0\.\d+$', sv1)
-00879             if m: sv1 = m.group(1)
-00880             m = re.match(r'^(.*-[^.]+\.[^.]+)\.\d+$', sv1)
-00881             if m: sv1 = m.group(1)
-00882 
-00883             m = re.match(r'^(.*-[^.]+)\.0\.\d+$', sv2)
-00884             if m: sv2 = m.group(1)
-00885             m = re.match(r'^(.*-[^.]+\.[^.]+)\.\d+$', sv2)
-00886             if m: sv2 = m.group(1)
-00887 
-00888             return (sv1 == sv2)
-00889         else:
-00890             m = re.match(r'^([^-]+)\.0\.\d+$', sv1)
-00891             if m and sv2 == m.group(1): return 1
-00892 
-00893             m = re.match(r'^([^-]+)\.0\.\d+$', sv2)
-00894             if m and sv1 == m.group(1): return 1
-00895 
-00896             return 0
-00897 
-00898     def get_dependency_solvers(self, block, arch, distribution, excluded=[], strict=False):
-00899         """Find the packages which satisfy a dependency block
-00900 
-00901         This method returns the list of packages which satisfy a dependency
-00902         block (as returned by apt_pkg.ParseDepends) for the given architecture
-00903         and distribution.
-00904 
-00905         It returns a tuple with two items: the first is a boolean which is
-00906         True if the dependency is satisfied, the second is the list of the
-00907         solving packages.
-00908         """
-00909 
-00910         packages = []
-00911 
-00912         # local copies for better performances
-00913         binaries = self.binaries[distribution][arch]
-00914 
-00915         # for every package, version and operation in the block
-00916         for name, version, op in block:
-00917             # look for the package in unstable
-00918             if name not in excluded and name in binaries[0]:
-00919                 package = binaries[0][name]
-00920                 # check the versioned dependency (if present)
-00921                 if op == '' and version == '' or apt_pkg.CheckDep(package[VERSION], op, version):
-00922                     packages.append(name)
-00923 
-00924             # look for the package in the virtual packages list and loop on them
-00925             for prov in binaries[1].get(name, []):
-00926                 if prov in excluded or \
-00927                    prov not in binaries[0]: continue
-00928                 package = binaries[0][prov]
-00929                 # check the versioned dependency (if present)
-00930                 # TODO: this is forbidden by the debian policy, which says that versioned
-00931                 #       dependencies on virtual packages are never satisfied. The old britney
-00932                 #       does it and we have to go with it, but at least a warning should be raised.
-00933                 if op == '' and version == '' or not strict and apt_pkg.CheckDep(package[VERSION], op, version):
-00934                     packages.append(prov)
-00935                     break
-00936 
-00937         return (len(packages) > 0, packages)
-00938 
-00939     def excuse_unsat_deps(self, pkg, src, arch, suite, excuse, excluded=[], conflicts=False):
-00940         """Find unsatisfied dependencies for a binary package
-00941 
-00942         This method analyzes the dependencies of the binary package specified
-00943         by the parameter `pkg', built from the source package `src', for the
-00944         architecture `arch' within the suite `suite'. If the dependency can't
-00945         be satisfied in testing and/or unstable, it updates the excuse passed
-00946         as parameter.
-00947 
-00948         The dependency fields checked are Pre-Depends and Depends.
-00949         """
-00950         # retrieve the binary package from the specified suite and arch
-00951         binary_u = self.binaries[suite][arch][0][pkg]
-00952 
-00953         # local copies for better performances
-00954         parse_depends = apt_pkg.ParseDepends
-00955         get_dependency_solvers = self.get_dependency_solvers
-00956         strict = True # not self.options.compatible
-00957 
-00958         # analyze the dependency fields (if present)
-00959         for type_key, type in ((PREDEPENDS, 'Pre-Depends'), (DEPENDS, 'Depends')):
-00960             if not binary_u[type_key]:
-00961                 continue
-00962 
-00963             # for every block of dependency (which is formed as conjunction of disconjunction)
-00964             for block, block_txt in zip(parse_depends(binary_u[type_key]), binary_u[type_key].split(',')):
-00965                 # if the block is satisfied in testing, then skip the block
-00966                 solved, packages = get_dependency_solvers(block, arch, 'testing', excluded, strict=strict)
-00967                 if solved:
-00968                     for p in packages:
-00969                         if p not in self.binaries[suite][arch][0]: continue
-00970                         excuse.add_sane_dep(self.binaries[suite][arch][0][p][SOURCE])
-00971                     continue
-00972 
-00973                 # check if the block can be satisfied in unstable, and list the solving packages
-00974                 solved, packages = get_dependency_solvers(block, arch, suite, [], strict=strict)
-00975                 packages = [self.binaries[suite][arch][0][p][SOURCE] for p in packages]
-00976 
-00977                 # if the dependency can be satisfied by the same source package, skip the block:
-00978                 # obviously both binary packages will enter testing togheter
-00979                 if src in packages: continue
-00980 
-00981                 # if no package can satisfy the dependency, add this information to the excuse
-00982                 if len(packages) == 0:
-00983                     excuse.addhtml("%s/%s unsatisfiable %s: %s" % (pkg, arch, type, block_txt.strip()))
-00984                     if arch not in self.options.break_arches: excuse.add_unsat_dep(arch)
-00985                     continue
-00986 
-00987                 # for the solving packages, update the excuse to add the dependencies
-00988                 for p in packages:
-00989                     if arch not in self.options.break_arches.split():
-00990                         excuse.add_dep(p)
-00991                     else:
-00992                         excuse.add_break_dep(p, arch)
-00993 
-00994         return True
-00995 
-00996     # Package analysis methods
-00997     # ------------------------
-00998 
-00999     def should_remove_source(self, pkg):
-01000         """Check if a source package should be removed from testing
-01001         
-01002         This method checks if a source package should be removed from the
-01003         testing distribution; this happen if the source package is not
-01004         present in the unstable distribution anymore.
-01005 
-01006         It returns True if the package can be removed, False otherwise.
-01007         In the former case, a new excuse is appended to the the object
-01008         attribute excuses.
-01009         """
-01010         # if the soruce package is available in unstable, then do nothing
-01011         if pkg in self.sources['unstable']:
-01012             return False
-01013         # otherwise, add a new excuse for its removal and return True
-01014         src = self.sources['testing'][pkg]
-01015         excuse = Excuse("-" + pkg)
-01016         excuse.set_vers(src[VERSION], None)
-01017         src[MAINTAINER] and excuse.set_maint(src[MAINTAINER].strip())
-01018         src[SECTION] and excuse.set_section(src[SECTION].strip())
-01019 
-01020         # if the package is blocked, skip it
-01021         if self.hints['block'].has_key('-' + pkg):
-01022             exc.addhtml("Not touching package, as requested by %s (contact debian-release if update is needed)" % hints['block']['-' + pkg])
-01023             return False
-01024 
-01025         excuse.addhtml("Valid candidate")
-01026         self.excuses.append(excuse)
-01027         return True
-01028 
    def should_upgrade_srcarch(self, src, arch, suite):
        """Check if binary packages should be upgraded

        This method checks if the binary packages of the source package
        `src' on the architecture `arch' in the distribution `suite'
        should be upgraded in testing; this covers binary-NMUs, where the
        source version is unchanged but a binary was rebuilt.

        It returns False if the given package doesn't need to be upgraded,
        True otherwise. In the former case, a new excuse is appended to
        the object attribute excuses.
        """
        # retrieve the source packages for testing and suite
        source_t = self.sources['testing'][src]
        source_u = self.sources[suite][src]

        # build the common part of the excuse, which will be filled by the code below
        ref = "%s/%s%s" % (src, arch, suite != 'unstable' and "_" + suite or "")
        excuse = Excuse(ref)
        # NOTE(review): both arguments are the testing version; the second
        # was possibly meant to be the suite version -- confirm upstream
        excuse.set_vers(source_t[VERSION], source_t[VERSION])
        source_u[MAINTAINER] and excuse.set_maint(source_u[MAINTAINER].strip())
        source_u[SECTION] and excuse.set_section(source_u[SECTION].strip())
        
        # if there is a `remove' hint and the requested version is the same as the
        # version in testing, then stop here and return False
        if src in self.hints["remove"] and \
           self.same_source(source_t[VERSION], self.hints["remove"][src][0]):
            excuse.addhtml("Removal request by %s" % (self.hints["remove"][src][1]))
            excuse.addhtml("Trying to remove package, not update it")
            excuse.addhtml("Not considered")
            self.excuses.append(excuse)
            return False

        # the starting point is that there is nothing wrong and nothing worth doing
        anywrongver = False
        anyworthdoing = False

        # for every binary package produced by this source in unstable for this
        # architecture (BINARIES entries look like "name/arch"), sorted by name
        for pkg in sorted(filter(lambda x: x.endswith("/" + arch), source_u[BINARIES]), key=lambda x: x.split("/")[0]):
            pkg_name = pkg.split("/")[0]

            # retrieve the testing (if present) and unstable corresponding binary packages
            binary_t = pkg in source_t[BINARIES] and self.binaries['testing'][arch][0][pkg_name] or None
            binary_u = self.binaries[suite][arch][0][pkg_name]

            # this is the source version for the new binary package
            pkgsv = self.binaries[suite][arch][0][pkg_name][SOURCEVER]

            # if the new binary package is architecture-independent, then skip it
            if binary_u[ARCHITECTURE] == 'all':
                excuse.addhtml("Ignoring %s %s (from %s) as it is arch: all" % (pkg_name, binary_u[VERSION], pkgsv))
                continue

            # if the new binary package is not from the same source as the testing one,
            # then mark everything wrong and stop looking
            if not self.same_source(source_t[VERSION], pkgsv):
                anywrongver = True
                excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u[VERSION], pkgsv, source_t[VERSION]))
                break

            # find unsatisfied dependencies for the new binary package
            self.excuse_unsat_deps(pkg_name, src, arch, suite, excuse)

            # if the binary is not present in testing, then it is a new binary;
            # in this case, there is something worth doing
            if not binary_t:
                excuse.addhtml("New binary: %s (%s)" % (pkg_name, binary_u[VERSION]))
                anyworthdoing = True
                continue

            # at this point, the binary package is present in testing, so we can compare
            # the versions of the packages ...
            vcompare = apt_pkg.VersionCompare(binary_t[VERSION], binary_u[VERSION])

            # ... if updating would mean downgrading, then stop here: there is something wrong
            if vcompare > 0:
                anywrongver = True
                excuse.addhtml("Not downgrading: %s (%s to %s)" % (pkg_name, binary_t[VERSION], binary_u[VERSION]))
                break
            # ... if updating would mean upgrading, then there is something worth doing
            elif vcompare < 0:
                excuse.addhtml("Updated binary: %s (%s to %s)" % (pkg_name, binary_t[VERSION], binary_u[VERSION]))
                anyworthdoing = True

        # if there is nothing wrong and there is something worth doing, or the source
        # package is a fake one, then check which binary packages should be removed
        # (the original comment said "not fake", contradicting the code)
        if not anywrongver and (anyworthdoing or self.sources[suite][src][FAKESRC]):
            srcv = self.sources[suite][src][VERSION]
            ssrc = self.same_source(source_t[VERSION], srcv)
            # for every binary package produced by this source in testing for this architecture
            for pkg in sorted([x.split("/")[0] for x in self.sources['testing'][src][BINARIES] if x.endswith("/"+arch)]):
                # if the package is architecture-independent, then ignore it
                if self.binaries['testing'][arch][0][pkg][ARCHITECTURE] == 'all':
                    excuse.addhtml("Ignoring removal of %s as it is arch: all" % (pkg))
                    continue
                # if the package is not produced by the new source package, then remove it from testing
                if pkg not in self.binaries[suite][arch][0]:
                    tpkgv = self.binaries['testing'][arch][0][pkg][VERSION]
                    excuse.addhtml("Removed binary: %s %s" % (pkg, tpkgv))
                    # only a same-source upload makes this removal worth acting on
                    if ssrc: anyworthdoing = True

        # if there is nothing wrong and there is something worth doing, this is a valid candidate
        if not anywrongver and anyworthdoing:
            excuse.addhtml("Valid candidate")
            self.excuses.append(excuse)
            return True
        # else if there is something worth doing (but something wrong, too) this package won't be considered
        elif anyworthdoing:
            excuse.addhtml("Not considered")
            self.excuses.append(excuse)

        # otherwise, return False
        return False
-01142 
-01143     def should_upgrade_src(self, src, suite):
-01144         """Check if source package should be upgraded
-01145 
-01146         This method checks if a source package should be upgraded. The analysis
-01147         is performed for the source package specified by the `src' parameter, 
-01148         checking the architecture `arch' for the distribution `suite'.
-01149        
-01150         It returns False if the given package doesn't need to be upgraded,
-01151         True otherwise. In the former case, a new excuse is appended to
-01152         the the object attribute excuses.
-01153         """
-01154 
-01155         # retrieve the source packages for testing (if available) and suite
-01156         source_u = self.sources[suite][src]
-01157         if src in self.sources['testing']:
-01158             source_t = self.sources['testing'][src]
-01159             # if testing and unstable have the same version, then this is a candidate for binary-NMUs only
-01160             if apt_pkg.VersionCompare(source_t[VERSION], source_u[VERSION]) == 0:
-01161                 return False
-01162         else:
-01163             source_t = None
-01164 
-01165         # build the common part of the excuse, which will be filled by the code below
-01166         ref = "%s%s" % (src, suite != 'unstable' and "_" + suite or "")
-01167         excuse = Excuse(ref)
-01168         excuse.set_vers(source_t and source_t[VERSION] or None, source_u[VERSION])
-01169         source_u[MAINTAINER] and excuse.set_maint(source_u[MAINTAINER].strip())
-01170         source_u[SECTION] and excuse.set_section(source_u[SECTION].strip())
-01171 
-01172         # the starting point is that we will update the candidate
-01173         update_candidate = True
-01174         
-01175         # if the version in unstable is older, then stop here with a warning in the excuse and return False
-01176         if source_t and apt_pkg.VersionCompare(source_u[VERSION], source_t[VERSION]) < 0:
-01177             excuse.addhtml("ALERT: %s is newer in testing (%s %s)" % (src, source_t[VERSION], source_u[VERSION]))
-01178             self.excuses.append(excuse)
-01179             return False
-01180 
-01181         # check if the source package really exists or if it is a fake one
-01182         if source_u[FAKESRC]:
-01183             excuse.addhtml("%s source package doesn't exist" % (src))
-01184             update_candidate = False
-01185 
-01186         # retrieve the urgency for the upload, ignoring it if this is a NEW package (not present in testing)
-01187         urgency = self.urgencies.get(src, self.options.default_urgency)
-01188         if not source_t and urgency != self.options.default_urgency:
-01189             excuse.addhtml("Ignoring %s urgency setting for NEW package" % (urgency))
-01190             urgency = self.options.default_urgency
-01191 
-01192         # if there is a `remove' hint and the requested version is the same of the
-01193         # version in testing, then stop here and return False
-01194         if src in self.hints["remove"]:
-01195             if source_t and self.same_source(source_t[VERSION], self.hints['remove'][src][0]) or \
-01196                self.same_source(source_u[VERSION], self.hints['remove'][src][0]):
-01197                 excuse.addhtml("Removal request by %s" % (self.hints["remove"][src][1]))
-01198                 excuse.addhtml("Trying to remove package, not update it")
-01199                 update_candidate = False
-01200 
-01201         # check if there is a `block' hint for this package or a `block-all source' hint
-01202         blocked = None
-01203         if src in self.hints["block"]:
-01204             blocked = self.hints["block"][src]
-01205         elif 'source' in self.hints["block-all"]:
-01206             blocked = self.hints["block-all"]["source"]
-01207 
-01208         # if the source is blocked, then look for an `unblock' hint; the unblock request
-01209         # is processed only if the specified version is correct
-01210         if blocked:
-01211             unblock = self.hints["unblock"].get(src,(None,None))
-01212             if unblock[0] != None:
-01213                 if self.same_source(unblock[0], source_u[VERSION]):
-01214                     excuse.addhtml("Ignoring request to block package by %s, due to unblock request by %s" % (blocked, unblock[1]))
-01215                 else:
-01216                     excuse.addhtml("Unblock request by %s ignored due to version mismatch: %s" % (unblock[1], unblock[0]))
-01217             else:
-01218                 excuse.addhtml("Not touching package, as requested by %s (contact debian-release if update is needed)" % (blocked))
-01219                 update_candidate = False
-01220 
-01221         # if the suite is unstable, then we have to check the urgency and the minimum days of
-01222         # permanence in unstable before updating testing; if the source package is too young,
-01223         # the check fails and we set update_candidate to False to block the update
-01224         if suite == 'unstable':
-01225             if src not in self.dates:
-01226                 self.dates[src] = (source_u[VERSION], self.date_now)
-01227             elif not self.same_source(self.dates[src][0], source_u[VERSION]):
-01228                 self.dates[src] = (source_u[VERSION], self.date_now)
-01229 
-01230             days_old = self.date_now - self.dates[src][1]
-01231             min_days = self.MINDAYS[urgency]
-01232             excuse.setdaysold(days_old, min_days)
-01233             if days_old < min_days:
-01234                 if src in self.hints["urgent"] and self.same_source(source_u[VERSION], self.hints["urgent"][src][0]):
-01235                     excuse.addhtml("Too young, but urgency pushed by %s" % (self.hints["urgent"][src][1]))
-01236                 else:
-01237                     update_candidate = False
-01238 
-01239         # at this point, we check what is the status of the builds on all the supported architectures
-01240         # to catch the out-of-date ones
-01241         pkgs = {src: ["source"]}
-01242         for arch in self.options.architectures:
-01243             oodbins = {}
-01244             # for every binary package produced by this source in the suite for this architecture
-01245             for pkg in sorted([x.split("/")[0] for x in self.sources[suite][src][BINARIES] if x.endswith("/"+arch)]):
-01246                 if pkg not in pkgs: pkgs[pkg] = []
-01247                 pkgs[pkg].append(arch)
-01248 
-01249                 # retrieve the binary package and its source version
-01250                 binary_u = self.binaries[suite][arch][0][pkg]
-01251                 pkgsv = binary_u[SOURCEVER]
-01252 
-01253                 # if it wasn't builded by the same source, it is out-of-date
-01254                 if not self.same_source(source_u[VERSION], pkgsv):
-01255                     if pkgsv not in oodbins:
-01256                         oodbins[pkgsv] = []
-01257                     oodbins[pkgsv].append(pkg)
-01258                     continue
-01259 
-01260                 # if the package is architecture-dependent or the current arch is `nobreakall'
-01261                 # find unsatisfied dependencies for the binary package
-01262                 if binary_u[ARCHITECTURE] != 'all' or arch in self.options.nobreakall_arches:
-01263                     self.excuse_unsat_deps(pkg, src, arch, suite, excuse)
-01264 
-01265             # if there are out-of-date packages, warn about them in the excuse and set update_candidate
-01266             # to False to block the update; if the architecture where the package is out-of-date is
-01267             # in the `fucked_arches' list, then do not block the update
-01268             if oodbins:
-01269                 oodtxt = ""
-01270                 for v in oodbins.keys():
-01271                     if oodtxt: oodtxt = oodtxt + "; "
-01272                     oodtxt = oodtxt + "%s (from <a href=\"http://buildd.debian.org/build.php?" \
-01273                         "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>)" % \
-01274                         (", ".join(sorted(oodbins[v])), arch, src, v, v)
-01275                 text = "out of date on <a href=\"http://buildd.debian.org/build.php?" \
-01276                     "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>: %s" % \
-01277                     (arch, src, source_u[VERSION], arch, oodtxt)
-01278 
-01279                 if arch in self.options.fucked_arches:
-01280                     text = text + " (but %s isn't keeping up, so nevermind)" % (arch)
-01281                 else:
-01282                     update_candidate = False
-01283 
-01284                 if self.date_now != self.dates[src][1]:
-01285                     excuse.addhtml(text)
-01286 
-01287         # if the source package has no binaries, set update_candidate to False to block the update
-01288         if len(self.sources[suite][src][BINARIES]) == 0:
-01289             excuse.addhtml("%s has no binaries on any arch" % src)
-01290             update_candidate = False
-01291 
-01292         # if the suite is unstable, then we have to check the release-critical bug counts before
-01293         # updating testing; if the unstable package have a RC bug count greater than the testing
-01294         # one,  the check fails and we set update_candidate to False to block the update
-01295         if suite == 'unstable':
-01296             for pkg in pkgs.keys():
-01297                 if pkg not in self.bugs['testing']:
-01298                     self.bugs['testing'][pkg] = 0
-01299                 if pkg not in self.bugs['unstable']:
-01300                     self.bugs['unstable'][pkg] = 0
-01301 
-01302                 if self.bugs['unstable'][pkg] > self.bugs['testing'][pkg]:
-01303                     excuse.addhtml("%s (%s) is <a href=\"http://bugs.debian.org/cgi-bin/pkgreport.cgi?" \
-01304                                    "which=pkg&data=%s&sev-inc=critical&sev-inc=grave&sev-inc=serious\" " \
-01305                                    "target=\"_blank\">buggy</a>! (%d > %d)" % \
-01306                                    (pkg, ", ".join(pkgs[pkg]), pkg, self.bugs['unstable'][pkg], self.bugs['testing'][pkg]))
-01307                     update_candidate = False
-01308                 elif self.bugs['unstable'][pkg] > 0:
-01309                     excuse.addhtml("%s (%s) is (less) <a href=\"http://bugs.debian.org/cgi-bin/pkgreport.cgi?" \
-01310                                    "which=pkg&data=%s&sev-inc=critical&sev-inc=grave&sev-inc=serious\" " \
-01311                                    "target=\"_blank\">buggy</a>! (%d <= %d)" % \
-01312                                    (pkg, ", ".join(pkgs[pkg]), pkg, self.bugs['unstable'][pkg], self.bugs['testing'][pkg]))
-01313 
-01314         # check if there is a `force' hint for this package, which allows it to go in even if it is not updateable
-01315         if not update_candidate and src in self.hints["force"] and \
-01316            self.same_source(source_u[VERSION], self.hints["force"][src][0]):
-01317             excuse.dontinvalidate = 1
-01318             excuse.addhtml("Should ignore, but forced by %s" % (self.hints["force"][src][1]))
-01319             update_candidate = True
-01320 
-01321         # if the suite is testing-proposed-updates, the package needs an explicit approval in order to go in
-01322         if suite == "tpu":
-01323             key = "%s_%s" % (src, source_u[VERSION])
-01324             if key in self.approvals:
-01325                 excuse.addhtml("Approved by %s" % approvals[key])
-01326             else:
-01327                 excuse.addhtml("NEEDS APPROVAL BY RM")
-01328                 update_candidate = False
-01329 
-01330         # if the package can be updated, it is a valid candidate
-01331         if update_candidate:
-01332             excuse.addhtml("Valid candidate")
-01333         # else it won't be considered
-01334         else:
-01335             excuse.addhtml("Not considered")
-01336 
-01337         self.excuses.append(excuse)
-01338         return update_candidate
-01339 
-01340     def reversed_exc_deps(self):
-01341         """Reverse the excuses dependencies
-01342 
-01343         This method returns a dictionary where the keys are the package names
-01344         and the values are the excuse names which depend on it.
-01345         """
-01346         res = {}
-01347         for exc in self.excuses:
-01348             for d in exc.deps:
-01349                 if d not in res: res[d] = []
-01350                 res[d].append(exc.name)
-01351         return res
-01352 
    def invalidate_excuses(self, valid, invalid):
        """Invalidate impossible excuses

        This method invalidates the impossible excuses, which depend
        on invalid excuses. The two parameters contain the lists of
        `valid' and `invalid' excuse names; BOTH ARE MUTATED IN PLACE:
        newly invalidated names are moved from `valid' to `invalid',
        and the while-loop below keeps running over the entries that
        get appended to `invalid' during the iteration.
        """
        # build a lookup-by-name map for the excuse objects
        exclookup = {}
        for e in self.excuses:
            exclookup[e.name] = e

        # build the reverse dependencies (dep name -> dependent excuse names)
        revdeps = self.reversed_exc_deps()

        # loop on the invalid excuses (len(invalid) may grow as we go)
        i = 0
        while i < len(invalid):
            # if there is no reverse dependency, skip the item
            if invalid[i] not in revdeps:
                i += 1
                continue
            # if the dependency can be satisfied by a testing-proposed-updates excuse, skip the item
            if (invalid[i] + "_tpu") in valid:
                i += 1
                continue
            # loop on the excuses depending on this invalid one
            for x in revdeps[invalid[i]]:
                # if the item is valid and it is marked as `dontinvalidate', skip the item
                if x in valid and exclookup[x].dontinvalidate:
                    continue

                # otherwise record the broken dependency on the excuse and,
                # if it was valid, move it to the invalid list so it gets
                # processed by a later iteration of the outer loop
                exclookup[x].invalidate_dep(invalid[i])
                if x in valid:
                    p = valid.index(x)
                    invalid.append(valid.pop(p))
                    exclookup[x].addhtml("Invalidated by dependency")
                    exclookup[x].addhtml("Not considered")
            i = i + 1
-01394  
-01395     def write_excuses(self):
-01396         """Produce and write the update excuses
-01397 
-01398         This method handles the update excuses generation: the packages are
-01399         looked to determine whether they are valid candidates. For the details
-01400         of this procedure, please refer to the module docstring.
-01401         """
-01402 
-01403         self.__log("Update Excuses generation started", type="I")
-01404 
-01405         # list of local methods and variables (for better performance)
-01406         sources = self.sources
-01407         architectures = self.options.architectures
-01408         should_remove_source = self.should_remove_source
-01409         should_upgrade_srcarch = self.should_upgrade_srcarch
-01410         should_upgrade_src = self.should_upgrade_src
-01411 
-01412         # this list will contain the packages which are valid candidates;
-01413         # if a package is going to be removed, it will have a "-" prefix
-01414         upgrade_me = []
-01415 
-01416         # for every source package in testing, check if it should be removed
-01417         for pkg in sources['testing']:
-01418             if should_remove_source(pkg):
-01419                 upgrade_me.append("-" + pkg)
-01420 
-01421         # for every source package in unstable check if it should be upgraded
-01422         for pkg in sources['unstable']:
-01423             if sources['unstable'][pkg][FAKESRC]: continue
-01424             # if the source package is already present in testing,
-01425             # check if it should be upgraded for every binary package
-01426             if pkg in sources['testing'] and not sources['testing'][pkg][FAKESRC]:
-01427                 for arch in architectures:
-01428                     if should_upgrade_srcarch(pkg, arch, 'unstable'):
-01429                         upgrade_me.append("%s/%s" % (pkg, arch))
-01430 
-01431             # check if the source package should be upgraded
-01432             if should_upgrade_src(pkg, 'unstable'):
-01433                 upgrade_me.append(pkg)
-01434 
-01435         # for every source package in testing-proposed-updates, check if it should be upgraded
-01436         for pkg in sources['tpu']:
-01437             if sources['tpu'][pkg][FAKESRC]: continue
-01438             # if the source package is already present in testing,
-01439             # check if it should be upgraded for every binary package
-01440             if pkg in sources['testing']:
-01441                 for arch in architectures:
-01442                     if should_upgrade_srcarch(pkg, arch, 'tpu'):
-01443                         upgrade_me.append("%s/%s_tpu" % (pkg, arch))
-01444 
-01445             # check if the source package should be upgraded
-01446             if should_upgrade_src(pkg, 'tpu'):
-01447                 upgrade_me.append("%s_tpu" % pkg)
-01448 
-01449         # process the `remove' hints, if the given package is not yet in upgrade_me
-01450         for src in self.hints["remove"].keys():
-01451             if src in upgrade_me: continue
-01452             if ("-"+src) in upgrade_me: continue
-01453             if src not in sources['testing']: continue
-01454 
-01455             # check if the version specified in the hint is the same of the considered package
-01456             tsrcv = sources['testing'][src][VERSION]
-01457             if not self.same_source(tsrcv, self.hints["remove"][src][0]): continue
-01458 
-01459             # add the removal of the package to upgrade_me and build a new excuse
-01460             upgrade_me.append("-%s" % (src))
-01461             excuse = Excuse("-%s" % (src))
-01462             excuse.set_vers(tsrcv, None)
-01463             excuse.addhtml("Removal request by %s" % (self.hints["remove"][src][1]))
-01464             excuse.addhtml("Package is broken, will try to remove")
-01465             self.excuses.append(excuse)
-01466 
-01467         # sort the excuses by daysold and name
-01468         self.excuses.sort(lambda x, y: cmp(x.daysold, y.daysold) or cmp(x.name, y.name))
-01469 
-01470         # extract the not considered packages, which are in the excuses but not in upgrade_me
-01471         unconsidered = [e.name for e in self.excuses if e.name not in upgrade_me]
-01472 
-01473         # invalidate impossible excuses
-01474         for e in self.excuses:
-01475             for d in e.deps:
-01476                 if d not in upgrade_me and d not in unconsidered:
-01477                     e.addhtml("Unpossible dep: %s -> %s" % (e.name, d))
-01478         self.invalidate_excuses(upgrade_me, unconsidered)
-01479 
-01480         # sort the list of candidates
-01481         self.upgrade_me = sorted(upgrade_me)
-01482 
-01483         # write excuses to the output file
-01484         self.__log("> Writing Excuses to %s" % self.options.excuses_output, type="I")
-01485 
-01486         f = open(self.options.excuses_output, 'w')
-01487         f.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n")
-01488         f.write("<html><head><title>excuses...</title>")
-01489         f.write("<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\"></head><body>\n")
-01490         f.write("<p>Generated: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "</p>\n")
-01491         f.write("<ul>\n")
-01492         for e in self.excuses:
-01493             f.write("<li>%s" % e.html())
-01494         f.write("</ul></body></html>\n")
-01495         f.close()
-01496 
-01497         self.__log("Update Excuses generation completed", type="I")
-01498 
-01499     # Upgrade run
-01500     # -----------
-01501 
-01502     def newlyuninst(self, nuold, nunew):
-01503         """Return a nuninst statstic with only new uninstallable packages
-01504 
-01505         This method subtract the uninstallabla packages of the statistic
-01506         `nunew` from the statistic `nuold`.
-01507 
-01508         It returns a dictionary with the architectures as keys and the list
-01509         of uninstallable packages as values.
-01510         """
-01511         res = {}
-01512         for arch in nuold:
-01513             if arch not in nunew: continue
-01514             res[arch] = [x for x in nunew[arch] if x not in nuold[arch]]
-01515         return res
-01516 
-01517     def get_nuninst(self, requested_arch=None, build=False):
-01518         """Return the uninstallability statistic for all the architectures
-01519 
-01520         To calculate the uninstallability counters, the method checks the
-01521         installability of all the packages for all the architectures, and
-01522         tracking dependencies in a recursive way. The architecture
-01523         indipendent packages are checked only for the `nobreakall`
-01524         architectures.
-01525 
-01526         It returns a dictionary with the architectures as keys and the list
-01527         of uninstallable packages as values.
-01528         """
-01529         # if we are not asked to build the nuninst, read it from the cache
-01530         if not build:
-01531             return self.read_nuninst()
-01532 
-01533         nuninst = {}
-01534 
-01535         # local copies for better performances
-01536         binaries = self.binaries['testing']
-01537         check_installable = self.check_installable
-01538 
-01539         # when a new uninstallable package is discovered, check again all the
-01540         # reverse dependencies and if they are uninstallable, too, call itself
-01541         # recursively
-01542         def add_nuninst(pkg, arch):
-01543             if pkg not in nuninst[arch]:
-01544                 nuninst[arch].append(pkg)
-01545                 for p in binaries[arch][0][pkg][RDEPENDS]:
-01546                     r = check_installable(p, arch, 'testing', excluded=nuninst[arch], conflicts=True)
-01547                     if not r:
-01548                         add_nuninst(p, arch)
-01549 
-01550         # for all the architectures
-01551         for arch in self.options.architectures:
-01552             if requested_arch and arch != requested_arch: continue
-01553             # if it is in the nobreakall ones, check arch-indipendent packages too
-01554             if arch not in self.options.nobreakall_arches:
-01555                 skip_archall = True
-01556             else: skip_archall = False
-01557 
-01558             # check all the packages for this architecture, calling add_nuninst if a new
-01559             # uninstallable package is found
-01560             nuninst[arch] = []
-01561             for pkg_name in binaries[arch][0]:
-01562                 r = check_installable(pkg_name, arch, 'testing', excluded=nuninst[arch], conflicts=True)
-01563                 if not r:
-01564                     add_nuninst(pkg_name, arch)
-01565 
-01566             # if they are not required, removed architecture-indipendent packages
-01567             nuninst[arch + "+all"] = nuninst[arch][:]
-01568             if skip_archall:
-01569                 for pkg in nuninst[arch + "+all"]:
-01570                     bpkg = binaries[arch][0][pkg]
-01571                     if bpkg[ARCHITECTURE] == 'all':
-01572                         nuninst[arch].remove(pkg)
-01573 
-01574         # return the dictionary with the results
-01575         return nuninst
-01576 
-01577     def eval_nuninst(self, nuninst, original=None):
-01578         """Return a string which represents the uninstallability counters
-01579 
-01580         This method returns a string which represents the uninstallability
-01581         counters reading the uninstallability statistics `nuninst` and, if
-01582         present, merging the results with the `original` one.
-01583 
-01584         An example of the output string is:
-01585         1+2: i-0:a-0:a-0:h-0:i-1:m-0:m-0:p-0:a-0:m-0:s-2:s-0
-01586 
-01587         where the first part is the number of broken packages in non-break
-01588         architectures + the total number of broken packages for all the
-01589         architectures.
-01590         """
-01591         res = []
-01592         total = 0
-01593         totalbreak = 0
-01594         for arch in self.options.architectures:
-01595             if arch in nuninst:
-01596                 n = len(nuninst[arch])
-01597             elif original and arch in original:
-01598                 n = len(original[arch])
-01599             else: continue
-01600             if arch in self.options.break_arches:
-01601                 totalbreak = totalbreak + n
-01602             else:
-01603                 total = total + n
-01604             res.append("%s-%d" % (arch[0], n))
-01605         return "%d+%d: %s" % (total, totalbreak, ":".join(res))
-01606 
-01607     def eval_uninst(self, nuninst):
-01608         """Return a string which represents the uninstallable packages
-01609 
-01610         This method returns a string which represents the uninstallable
-01611         packages reading the uninstallability statistics `nuninst`.
-01612 
-01613         An example of the output string is:
-01614             * i386: broken-pkg1, broken-pkg2
-01615         """
-01616         parts = []
-01617         for arch in self.options.architectures:
-01618             if arch in nuninst and len(nuninst[arch]) > 0:
-01619                 parts.append("    * %s: %s\n" % (arch,", ".join(sorted(nuninst[arch]))))
-01620         return "".join(parts)
-01621 
-01622     def is_nuninst_asgood_generous(self, old, new):
-01623         diff = 0
-01624         for arch in self.options.architectures:
-01625             if arch in self.options.break_arches: continue
-01626             diff = diff + (len(new[arch]) - len(old[arch]))
-01627         return diff <= 0
-01628 
-01629     def check_installable(self, pkg, arch, suite, excluded=[], conflicts=False):
-01630         """Check if a package is installable
-01631 
-01632         This method analyzes the dependencies of the binary package specified
-01633         by the parameter `pkg' for the architecture `arch' within the suite
-01634         `suite'. If the dependency can be satisfied in the given `suite` and
-01635         `conflicts` parameter is True, then the co-installability with 
-01636         conflicts handling is checked.
-01637 
-01638         The dependency fields checked are Pre-Depends and Depends.
-01639 
-01640         The method returns a boolean which is True if the given package is
-01641         installable.
-01642         """
-01643         # retrieve the binary package from the specified suite and arch
-01644         binary_u = self.binaries[suite][arch][0][pkg]
-01645 
-01646         # local copies for better performances
-01647         parse_depends = apt_pkg.ParseDepends
-01648         get_dependency_solvers = self.get_dependency_solvers
-01649 
-01650         # analyze the dependency fields (if present)
-01651         for type in (PREDEPENDS, DEPENDS):
-01652             if not binary_u[type]:
-01653                 continue
-01654 
-01655             # for every block of dependency (which is formed as conjunction of disconjunction)
-01656             for block in parse_depends(binary_u[type]):
-01657                 # if the block is not satisfied, return False
-01658                 solved, packages = get_dependency_solvers(block, arch, 'testing', excluded, strict=True)
-01659                 if not solved:
-01660                     return False
-01661 
-01662         # otherwise, the package is installable (not considering conflicts)
-01663         # if the conflicts handling is enabled, then check conflicts before
-01664         # saying that the package is really installable
-01665         if conflicts:
-01666             return self.check_conflicts(pkg, arch, excluded, {}, {})
-01667 
-01668         return True
-01669 
-01670     def check_conflicts(self, pkg, arch, broken, system, conflicts):
-01671         """Check if a package can be installed satisfying the conflicts
-01672 
-01673         This method checks if the `pkg` package from the `arch` architecture
-01674         can be installed (excluding `broken` packages) within the system
-01675         `system` along with all its dependencies. This means that all the
-01676         conflicts relationships are checked in order to achieve the test
-01677         co-installability of the package.
-01678 
-01679         The method returns a boolean which is True if the given package is
-01680         co-installable in the given system.
-01681         """
-01682 
-01683         # local copies for better performances
-01684         binaries = self.binaries['testing'][arch]
-01685         parse_depends = apt_pkg.ParseDepends
-01686         check_depends = apt_pkg.CheckDep
-01687 
-01688         # unregister conflicts, local method to remove conflicts
-01689         # registered from a given package.
-01690         def unregister_conflicts(pkg, conflicts):
-01691             for c in conflicts.keys():
-01692                 i = 0
-01693                 while i < len(conflicts[c]):
-01694                     if conflicts[c][i][3] == pkg:
-01695                         del conflicts[c][i]
-01696                     else: i = i + 1
-01697                 if len(conflicts[c]) == 0:
-01698                     del conflicts[c]
-01699 
-01700         def remove_package(pkg, system, conflicts):
-01701             for k in system:
-01702                 if pkg in system[k][1]:
-01703                     system[k][1].remove(pkg)
-01704             unregister_conflicts(pkg, conflicts)
-01705 
-01706         # handle a conflict, local method to solve a conflict which happened
-01707         # in the system; the behaviour of the conflict-solver is:
-01708         #   1. If there are alternatives for the package which must be removed,
-01709         #      try them, and if one of them resolves the system return True;
-01710         #   2. If none of the alternatives can solve the conflict, then call
-01711         #      itself for the package which depends on the conflicting package.
-01712         #   3. If the top of the dependency tree is reached, then the conflict
-01713         #      can't be solved, so return False.
-01714         def handle_conflict(pkg, source, system, conflicts):
-01715             # skip packages which don't have reverse dependencies
-01716             if source not in system or system[source][1] == []:
-01717                 remove_package(source, system, conflicts)
-01718                 return (system, conflicts)
-01719             # reached the top of the tree
-01720             if not system[source][1][0]:
-01721                 return False
-01722             # remove its conflicts
-01723             unregister_conflicts(source, conflicts)
-01724             # if there are alternatives, try them
-01725             alternatives = system[source][0]
-01726             for alt in alternatives:
-01727                 if satisfy(alt, [x for x in alternatives if x != alt], pkg_from=system[source][1],
-01728                         system=system, conflicts=conflicts, excluded=[source]):
-01729                     remove_package(source, system, conflicts)
-01730                     return (system, conflicts)
-01731             # there are no good alternatives, so remove the package which depends on it
-01732             for p in system[source][1]:
-01733                 # the package does not exist, we reached the top of the tree
-01734                 if not p: return False
-01735                 # we are providing the package we conflict on (eg. exim4 and mail-transfer-agent), skip it
-01736                 if p == pkg: continue
-01737                 output = handle_conflict(pkg, p, system, conflicts)
-01738                 if output:
-01739                     system, conflicts = output
-01740                 else: return False
-01741             remove_package(source, system, conflicts)
-01742             return (system, conflicts)
-01743 
-01744         # dependency tree satisfier, local method which tries to satisfy the dependency
-01745         # tree for a given package. It calls itself recursively in order to check the
-01746         # co-installability of the full tree of dependency of the starting package.
-01747         # If a conflict is detected, it tries to handle it calling the handle_conflict
-01748         # method; if it can't be resolved, then it returns False.
-01749         def satisfy(pkg, pkg_alt=None, pkg_from=None, system=system, conflicts=conflicts, excluded=[]):
-01750             # if it is real package and it is already installed, skip it and return True
-01751             if pkg in binaries[0]:
-01752                 if pkg in system:
-01753                     if type(pkg_from) == list:
-01754                         system[pkg][1].extend(pkg_from)
-01755                     else:
-01756                         system[pkg][1].append(pkg_from)
-01757                     system[pkg] = (system[pkg][1], filter(lambda x: x in pkg_alt, system[pkg][0]))
-01758                     return True
-01759                 binary_u = binaries[0][pkg]
-01760             else: binary_u = None
-01761 
-01762             # if it is a virtual package
-01763             providers = []
-01764             if pkg_from and pkg in binaries[1]:
-01765                 providers = binaries[1][pkg]
-01766                 # it is both real and virtual, so the providers are alternatives
-01767                 if binary_u:
-01768                     providers = filter(lambda x: (not pkg_alt or x not in pkg_alt) and x != pkg, providers)
-01769                     if not pkg_alt:
-01770                         pkg_alt = []
-01771                     pkg_alt.extend(providers)
-01772                 # try all the alternatives and if none of them suits, give up and return False
-01773                 else:
-01774                     # if we already have a provider in the system, everything is ok and return True
-01775                     if len(filter(lambda x: x in providers and x not in excluded, system)) > 0:
-01776                         return True
-01777                     for p in providers:
-01778                         # try to install the providers skipping excluded packages,
-01779                         # which we already tried but do not work
-01780                         if p in excluded: continue
-01781                         elif satisfy(p, [a for a in providers if a != p], pkg_from):
-01782                             return True
-01783                     # if none of them suits, return False
-01784                     return False
-01785 
-01786             # if the package doesn't exist, return False
-01787             if not binary_u: return False
-01788 
-01789             # it is broken, but we have providers
-01790             if pkg in broken and pkg_from:
-01791                 for p in providers:
-01792                     # try to install the providers skipping excluded packages,
-01793                     # which we already tried but do not work
-01794                     if p in excluded: continue
-01795                     elif satisfy(p, [a for a in providers if a != p], pkg_from):
-01796                         return True
-01797                 return False
-01798 
-01799             # install the package into the system, recording which package required it
-01800             if type(pkg_from) != list:
-01801                 pkg_from = [pkg_from]
-01802             system[pkg] = (pkg_alt or [], pkg_from)
-01803 
-01804             # register provided packages
-01805             if binary_u[PROVIDES]:
-01806                 for p in binary_u[PROVIDES]:
-01807                     if p in system:
-01808                         # do not consider packages providing the one which we are checking
-01809                         if len(system[p][1]) == 1 and system[p][1][0] == None: continue
-01810                         system[p][1].append(pkg)
-01811                     else:
-01812                         system[p] = ([], [pkg])
-01813 
-01814             # check the conflicts
-01815             if pkg in conflicts:
-01816                 for name, version, op, conflicting in conflicts[pkg]:
-01817                     if conflicting in binary_u[PROVIDES] and system[conflicting][1] == [pkg]: continue
-01818                     if op == '' and version == '' or check_depends(binary_u[VERSION], op, version):
-01819                         # if conflict is found, check if it can be solved removing
-01820                         # already-installed packages without broking the system; if
-01821                         # this is not possible, give up and return False
-01822                         output = handle_conflict(pkg, conflicting, system.copy(), conflicts.copy())
-01823                         if output:
-01824                             system, conflicts = output
-01825                         else:
-01826                             del system[pkg]
-01827                             return False
-01828 
-01829             # register conflicts from the just-installed package
-01830             if binary_u[CONFLICTS]:
-01831                 for block in map(operator.itemgetter(0), parse_depends(binary_u[CONFLICTS] or [])):
-01832                     name, version, op = block
-01833                     # skip conflicts for packages provided by itself
-01834                     # if the conflicting package is in the system (and it is not a self-conflict)
-01835                     if not (name in binary_u[PROVIDES] and system[name][1] == [pkg]) and \
-01836                        block[0] != pkg and block[0] in system:
-01837                         if block[0] in binaries[0]:
-01838                             binary_c = binaries[0][block[0]]
-01839                         else: binary_c = None
-01840                         if op == '' and version == '' or binary_c and check_depends(binary_c[VERSION], op, version):
-01841                             # if conflict is found, check if it can be solved removing
-01842                             # already-installed packages without broking the system; if
-01843                             # this is not possible, give up and return False
-01844                             output = handle_conflict(pkg, name, system.copy(), conflicts.copy())
-01845                             if output:
-01846                                 system, conflicts = output
-01847                             else:
-01848                                 del system[pkg]
-01849                                 unregister_conflicts(pkg, conflicts)
-01850                                 return False
-01851                     # register the conflict)
-01852                     if block[0] not in conflicts:
-01853                         conflicts[block[0]] = []
-01854                     conflicts[block[0]].append((name, version, op, pkg))
-01855 
-01856             # list all its dependencies ...
-01857             dependencies = []
-01858             for key in (PREDEPENDS, DEPENDS):
-01859                 if not binary_u[key]: continue
-01860                 dependencies.extend(parse_depends(binary_u[key]))
-01861 
-01862             # ... and go through them
-01863             for block in dependencies:
-01864                 # list the possible alternatives, in case of a conflict
-01865                 alternatives = map(operator.itemgetter(0), block)
-01866                 valid = False
-01867                 for name, version, op in block:
-01868                     # otherwise, if it is already installed or it is installable, the block is satisfied
-01869                     if name in system or satisfy(name, [a for a in alternatives if a != name], pkg):
-01870                         valid = True
-01871                         break
-01872                 # if the block can't be satisfied, the package is not installable so
-01873                 # we need to remove it, its conflicts and its provided packages and
-01874                 # return False
-01875                 if not valid:
-01876                     del system[pkg]
-01877                     unregister_conflicts(pkg, conflicts)
-01878                     for p in providers:
-01879                         if satisfy(p, [a for a in providers if a != p], pkg_from):
-01880                             return True
-01881                     return False
-01882 
-01883             # if all the blocks have been satisfied, the package is installable
-01884             return True
-01885 
-01886         # check the package at the top of the tree
-01887         return satisfy(pkg)
-01888 
-01889     def doop_source(self, pkg):
-01890         """Apply a change to the testing distribution as requested by `pkg`
-01891 
-01892         This method apply the changes required by the action `pkg` tracking
-01893         them so it will be possible to revert them.
-01894 
-01895         The method returns a list of the package name, the suite where the
-01896         package comes from, the list of packages affected by the change and
-01897         the dictionary undo which can be used to rollback the changes.
-01898         """
-01899         undo = {'binaries': {}, 'sources': {}, 'virtual': {}, 'nvirtual': []}
-01900 
-01901         affected = []
-01902         arch = None
-01903 
-01904         # local copies for better performances
-01905         sources = self.sources
-01906         binaries = self.binaries['testing']
-01907 
-01908         # removal of single-arch binary package = "-<package>/<arch>"
-01909         if pkg[0] == "-" and "/" in pkg:
-01910             pkg_name, arch = pkg.split("/")
-01911             pkg_name = pkg_name[1:]
-01912             if arch.endswith("_tpu"):
-01913                 arch, suite = arch.split("_")
-01914             else: suite = "testing"
-01915         # arch = "<source>/<arch>",
-01916         elif "/" in pkg:
-01917             pkg_name, arch = pkg.split("/")
-01918             suite = "unstable"
-01919         # removal of source packages = "-<source>",
-01920         elif pkg[0] == "-":
-01921             pkg_name = pkg[1:]
-01922             suite = "testing"
-01923         # testing-proposed-updates = "<source>_tpu"
-01924         elif pkg[0].endswith("_tpu"):
-01925             pkg_name = pkg[:-4]
-01926             suite = "tpu"
-01927         # normal update of source packages = "<source>"
-01928         else:
-01929             pkg_name = pkg
-01930             suite = "unstable"
-01931 
-01932         # remove all binary packages (if the source already exists)
-01933         if not (arch and pkg[0] == '-'):
-01934             if pkg_name in sources['testing']:
-01935                 source = sources['testing'][pkg_name]
-01936                 # remove all the binaries
-01937                 for p in source[BINARIES]:
-01938                     binary, parch = p.split("/")
-01939                     if arch and parch != arch: continue
-01940                     # if a smooth update is possible for the package, skip it
-01941                     if not self.options.compatible and suite == 'unstable' and \
-01942                        binary not in self.binaries[suite][parch][0] and \
-01943                        ('ALL' in self.options.smooth_updates or \
-01944                         binaries[parch][0][binary][SECTION] in self.options.smooth_updates):
-01945                         continue
-01946                     # save the old binary for undo
-01947                     undo['binaries'][p] = binaries[parch][0][binary]
-01948                     # all the reverse dependencies are affected by the change
-01949                     for j in binaries[parch][0][binary][RDEPENDS]:
-01950                         key = (j, parch)
-01951                         if key not in affected: affected.append(key)
-01952                     # remove the provided virtual packages
-01953                     for j in binaries[parch][0][binary][PROVIDES]:
-01954                         key = j + "/" + parch
-01955                         if key not in undo['virtual']:
-01956                             undo['virtual'][key] = binaries[parch][1][j][:]
-01957                         binaries[parch][1][j].remove(binary)
-01958                         if len(binaries[parch][1][j]) == 0:
-01959                             del binaries[parch][1][j]
-01960                     # finally, remove the binary package
-01961                     del binaries[parch][0][binary]
-01962                 # remove the source package
-01963                 if not arch:
-01964                     undo['sources'][pkg_name] = source
-01965                     del sources['testing'][pkg_name]
-01966             else:
-01967                 # the package didn't exist, so we mark it as to-be-removed in case of undo
-01968                 undo['sources']['-' + pkg_name] = True
-01969 
-01970         # single binary removal
-01971         elif pkg_name in binaries[arch][0]:
-01972             undo['binaries'][pkg_name + "/" + arch] = binaries[arch][0][pkg_name]
-01973             for j in binaries[arch][0][pkg_name][RDEPENDS]:
-01974                 key = (j, arch)
-01975                 if key not in affected: affected.append(key)
-01976             del binaries[arch][0][pkg_name]
-01977 
-01978         # add the new binary packages (if we are not removing)
-01979         if pkg[0] != "-":
-01980             source = sources[suite][pkg_name]
-01981             for p in source[BINARIES]:
-01982                 binary, parch = p.split("/")
-01983                 if arch and parch != arch: continue
-01984                 key = (binary, parch)
-01985                 # obviously, added/modified packages are affected
-01986                 if key not in affected: affected.append(key)
-01987                 # if the binary already exists (built from another ource)
-01988                 if binary in binaries[parch][0]:
-01989                     # save the old binary package
-01990                     undo['binaries'][p] = binaries[parch][0][binary]
-01991                     # all the reverse dependencies are affected by the change
-01992                     for j in binaries[parch][0][binary][RDEPENDS]:
-01993                         key = (j, parch)
-01994                         if key not in affected: affected.append(key)
-01995                     # all the reverse conflicts and their dependency tree are affected by the change
-01996                     for j in binaries[parch][0][binary][RCONFLICTS]:
-01997                         key = (j, parch)
-01998                         if key not in affected: affected.append(key)
-01999                         for p in self.get_full_tree(j, parch, 'testing'):
-02000                             key = (p, parch)
-02001                             if key not in affected: affected.append(key)
-02002                 # add/update the binary package
-02003                 binaries[parch][0][binary] = self.binaries[suite][parch][0][binary]
-02004                 # register new provided packages
-02005                 for j in binaries[parch][0][binary][PROVIDES]:
-02006                     key = j + "/" + parch
-02007                     if j not in binaries[parch][1]:
-02008                         undo['nvirtual'].append(key)
-02009                         binaries[parch][1][j] = []
-02010                     elif key not in undo['virtual']:
-02011                         undo['virtual'][key] = binaries[parch][1][j][:]
-02012                     binaries[parch][1][j].append(binary)
-02013                 # all the reverse dependencies are affected by the change
-02014                 for j in binaries[parch][0][binary][RDEPENDS]:
-02015                     key = (j, parch)
-02016                     if key not in affected: affected.append(key)
-02017 
-02018             # register reverse dependencies and conflicts for the new binary packages
-02019             for p in source[BINARIES]:
-02020                 binary, parch = p.split("/")
-02021                 if arch and parch != arch: continue
-02022                 self.register_reverses(binary, binaries[parch][0] , binaries[parch][1])
-02023 
-02024             # add/update the source package
-02025             if not arch:
-02026                 sources['testing'][pkg_name] = sources[suite][pkg_name]
-02027 
-02028         # return the package name, the suite, the list of affected packages and the undo dictionary
-02029         return (pkg_name, suite, affected, undo)
-02030 
-02031     def get_full_tree(self, pkg, arch, suite):
-02032         """Calculate the full dependency tree for the given package
-02033 
-02034         This method returns the full dependency tree for the package `pkg`,
-02035         inside the `arch` architecture for the suite `suite`.
-02036         """
-02037         packages = [pkg]
-02038         binaries = self.binaries[suite][arch][0]
-02039         l = n = 0
-02040         while len(packages) > l:
-02041             l = len(packages)
-02042             for p in packages[n:]:
-02043                 packages.extend([x for x in binaries[p][RDEPENDS] if x not in packages and x in binaries])
-02044             n = l
-02045         return packages
-02046 
-02047     def iter_packages(self, packages, selected, hint=False, nuninst=None):
-02048         """Iter on the list of actions and apply them one-by-one
-02049 
-02050         This method apply the changes from `packages` to testing, checking the uninstallability
-02051         counters for every action performed. If the action do not improve the it, it is reverted.
-02052         The method returns the new uninstallability counters and the remaining actions if the
-02053         final result is successful, otherwise (None, None).
-02054         """
-02055         extra = []
-02056         deferred = []
-02057         skipped = []
-02058         mark_passed = False
-02059         position = len(packages)
-02060 
-02061         if nuninst:
-02062             nuninst_comp = nuninst.copy()
-02063         else:
-02064             nuninst_comp = self.nuninst_orig.copy()
-02065 
-02066         # local copies for better performances
-02067         check_installable = self.check_installable
-02068         binaries = self.binaries['testing']
-02069         sources = self.sources
-02070         architectures = self.options.architectures
-02071         nobreakall_arches = self.options.nobreakall_arches
-02072         new_arches = self.options.new_arches
-02073         break_arches = self.options.break_arches
-02074         dependencies = self.dependencies
-02075         compatible = self.options.compatible
-02076 
-02077         # pre-process a hint batch
-02078         pre_process = {}
-02079         if selected and hint:
-02080             for pkg in selected:
-02081                 pkg_name, suite, affected, undo = self.doop_source(pkg)
-02082                 pre_process[pkg] = (pkg_name, suite, affected, undo)
-02083 
-02084         lundo = []
-02085         if not hint:
-02086             self.output_write("recur: [%s] %s %d/%d\n" % ("", ",".join(selected), len(packages), len(extra)))
-02087 
-02088         # loop on the packages (or better, actions)
-02089         while packages:
-02090             pkg = packages.pop(0)
-02091 
-02092             # this is the marker for the first loop
-02093             if not compatible and not mark_passed and position < 0:
-02094                 mark_passed = True
-02095                 packages.extend(deferred)
-02096                 del deferred
-02097             else: position -= 1
-02098 
-02099             # defer packages if their dependency has been already skipped
-02100             if not compatible and not mark_passed:
-02101                 defer = False
-02102                 for p in dependencies.get(pkg, []):
-02103                     if p in skipped:
-02104                         deferred.append(pkg)
-02105                         skipped.append(pkg)
-02106                         defer = True
-02107                         break
-02108                 if defer: continue
-02109 
-02110             if not hint:
-02111                 self.output_write("trying: %s\n" % (pkg))
-02112 
-02113             better = True
-02114             nuninst = {}
-02115 
-02116             # apply the changes
-02117             if pkg in pre_process:
-02118                 pkg_name, suite, affected, undo = pre_process[pkg]
-02119             else:
-02120                 pkg_name, suite, affected, undo = self.doop_source(pkg)
-02121             if hint:
-02122                 lundo.append((undo, pkg, pkg_name, suite))
-02123 
-02124             # check the affected packages on all the architectures
-02125             for arch in ("/" in pkg and (pkg.split("/")[1].split("_")[0],) or architectures):
-02126                 if arch not in nobreakall_arches:
-02127                     skip_archall = True
-02128                 else: skip_archall = False
-02129 
-02130                 nuninst[arch] = [x for x in nuninst_comp[arch] if x in binaries[arch][0]]
-02131                 nuninst[arch + "+all"] = [x for x in nuninst_comp[arch + "+all"] if x in binaries[arch][0]]
-02132                 broken = nuninst[arch + "+all"]
-02133                 to_check = [x[0] for x in affected if x[1] == arch]
-02134                 # broken packages (first round)
-02135                 repaired = []
-02136                 broken_changed = True
-02137                 last_broken = None
-02138                 while broken_changed:
-02139                     broken_changed = False
-02140                     for p in to_check:
-02141                         if p == last_broken: break
-02142                         if p not in binaries[arch][0]: continue
-02143                         r = check_installable(p, arch, 'testing', excluded=broken, conflicts=True)
-02144                         if not r and p not in broken:
-02145                             last_broken = p
-02146                             broken.append(p)
-02147                             broken_changed = True
-02148                             if not (skip_archall and binaries[arch][0][p][ARCHITECTURE] == 'all'):
-02149                                 nuninst[arch].append(p)
-02150                         elif r and p in broken:
-02151                             last_broken = p
-02152                             repaired.append(p)
-02153                             broken.remove(p)
-02154                             broken_changed = True
-02155                             if not (skip_archall and binaries[arch][0][p][ARCHITECTURE] == 'all'):
-02156                                 nuninst[arch].remove(p)
-02157 
-02158                 # broken packages (second round, reverse dependencies of the first round)
-02159                 l = 0
-02160                 broken_changed = True
-02161                 last_broken = None
-02162                 while broken_changed:
-02163                     broken_changed = False
-02164                     for j in broken + repaired:
-02165                         if j not in binaries[arch][0]: continue
-02166                         for p in binaries[arch][0][j][RDEPENDS]:
-02167                             if p in broken or p not in binaries[arch][0]: continue
-02168                             r = check_installable(p, arch, 'testing', excluded=broken, conflicts=True)
-02169                             if not r and p not in broken:
-02170                                 l = -1
-02171                                 last_broken = j
-02172                                 broken.append(p)
-02173                                 broken_changed = True
-02174                                 if not (skip_archall and binaries[arch][0][p][ARCHITECTURE] == 'all'):
-02175                                     nuninst[arch].append(p)
-02176                             elif r and p in nuninst[arch + "+all"]:
-02177                                 last_broken = p
-02178                                 repaired.append(p)
-02179                                 broken.remove(p)
-02180                                 broken_changed = True
-02181                                 if not (skip_archall and binaries[arch][0][p][ARCHITECTURE] == 'all'):
-02182                                     nuninst[arch].remove(p)
-02183                     if l != -1 and last_broken == j: break
-02184 
-02185                 # if we are processing hints, go ahead
-02186                 if hint:
-02187                     nuninst_comp[arch] = nuninst[arch]
-02188                     nuninst_comp[arch + "+all"] = nuninst[arch + "+all"]
-02189                     continue
-02190 
-02191                 # if the uninstallability counter is worse than before, break the loop
-02192                 if (("/" in pkg and arch not in new_arches) or \
-02193                     (arch not in break_arches)) and len(nuninst[arch]) > len(nuninst_comp[arch]):
-02194                     better = False
-02195                     break
-02196 
-02197             # if we are processing hints or the package is already accepted, go ahead
-02198             if hint or pkg in selected: continue
-02199 
-02200             # check if the action improved the uninstallability counters
-02201             if better:
-02202                 lundo.append((undo, pkg, pkg_name, suite))
-02203                 selected.append(pkg)
-02204                 packages.extend(extra)
-02205                 extra = []
-02206                 self.output_write("accepted: %s\n" % (pkg))
-02207                 self.output_write("   ori: %s\n" % (self.eval_nuninst(self.nuninst_orig)))
-02208                 self.output_write("   pre: %s\n" % (self.eval_nuninst(nuninst_comp)))
-02209                 self.output_write("   now: %s\n" % (self.eval_nuninst(nuninst, nuninst_comp)))
-02210                 if len(selected) <= 20:
-02211                     self.output_write("   all: %s\n" % (" ".join(selected)))
-02212                 else:
-02213                     self.output_write("  most: (%d) .. %s\n" % (len(selected), " ".join(selected[-20:])))
-02214                 for k in nuninst:
-02215                     nuninst_comp[k] = nuninst[k]
-02216             else:
-02217                 self.output_write("skipped: %s (%d <- %d)\n" % (pkg, len(extra), len(packages)))
-02218                 self.output_write("    got: %s\n" % (self.eval_nuninst(nuninst, "/" in pkg and nuninst_comp or None)))
-02219                 self.output_write("    * %s: %s\n" % (arch, ", ".join(sorted([b for b in nuninst[arch] if b not in nuninst_comp[arch]]))))
-02220 
-02221                 extra.append(pkg)
-02222                 if not mark_passed:
-02223                     skipped.append(pkg)
-02224 
-02225                 # undo the changes (source)
-02226                 for k in undo['sources'].keys():
-02227                     if k[0] == '-':
-02228                         del sources['testing'][k[1:]]
-02229                     else: sources['testing'][k] = undo['sources'][k]
-02230 
-02231                 # undo the changes (new binaries)
-02232                 if pkg[0] != '-' and pkg_name in sources[suite]:
-02233                     for p in sources[suite][pkg_name][BINARIES]:
-02234                         binary, arch = p.split("/")
-02235                         if "/" in pkg and arch != pkg[pkg.find("/")+1:]: continue
-02236                         del binaries[arch][0][binary]
-02237 
-02238                 # undo the changes (binaries)
-02239                 for p in undo['binaries'].keys():
-02240                     binary, arch = p.split("/")
-02241                     if binary[0] == "-":
-02242                         del binaries[arch][0][binary[1:]]
-02243                     else: binaries[arch][0][binary] = undo['binaries'][p]
-02244 
-02245                 # undo the changes (virtual packages)
-02246                 for p in undo['nvirtual']:
-02247                     j, arch = p.split("/")
-02248                     del binaries[arch][1][j]
-02249                 for p in undo['virtual']:
-02250                     j, arch = p.split("/")
-02251                     if j[0] == '-':
-02252                         del binaries[arch][1][j[1:]]
-02253                     else: binaries[arch][1][j] = undo['virtual'][p]
-02254 
-02255         # if we are processing hints, return now
-02256         if hint:
-02257             return (nuninst_comp, [], lundo)
-02258 
-02259         self.output_write(" finish: [%s]\n" % ",".join(selected))
-02260         self.output_write("endloop: %s\n" % (self.eval_nuninst(self.nuninst_orig)))
-02261         self.output_write("    now: %s\n" % (self.eval_nuninst(nuninst_comp)))
-02262         self.output_write(self.eval_uninst(self.newlyuninst(self.nuninst_orig, nuninst_comp)))
-02263         self.output_write("\n")
-02264 
-02265         return (nuninst_comp, extra, lundo)
-02266 
-02267     def do_all(self, maxdepth=0, init=None, actions=None):
-02268         """Testing update runner
-02269 
-02270         This method tries to update testing checking the uninstallability
-02271         counters before and after the actions to decide if the update was
-02272         successful or not.
-02273         """
-02274         selected = []
-02275         if actions:
-02276             upgrade_me = actions[:]
-02277         else:
-02278             upgrade_me = self.upgrade_me[:]
-02279         nuninst_start = self.nuninst_orig
-02280 
-02281         # these are special parameters for hints processing
-02282         undo = False
-02283         force = False
-02284         earlyabort = False
-02285         if maxdepth == "easy" or maxdepth < 0:
-02286             force = maxdepth < 0
-02287             earlyabort = True
-02288             maxdepth = 0
-02289 
-02290         # if we have a list of initial packages, check them
-02291         if init:
-02292             self.output_write("leading: %s\n" % (",".join(init)))
-02293             for x in init:
-02294                 if x not in upgrade_me:
-02295                     self.output_write("failed: %s\n" % (x))
-02296                     return None
-02297                 selected.append(x)
-02298                 upgrade_me.remove(x)
-02299         
-02300         self.output_write("start: %s\n" % self.eval_nuninst(nuninst_start))
-02301         self.output_write("orig: %s\n" % self.eval_nuninst(nuninst_start))
-02302 
-02303         if earlyabort:
-02304             extra = upgrade_me[:]
-02305             (nuninst_end, extra, lundo) = self.iter_packages(init, selected, hint=True)
-02306             undo = True
-02307             self.output_write("easy: %s\n" % (self.eval_nuninst(nuninst_end)))
-02308             self.output_write(self.eval_uninst(self.newlyuninst(nuninst_start, nuninst_end)) + "\n")
-02309             if not force and not self.is_nuninst_asgood_generous(self.nuninst_orig, nuninst_end):
-02310                 nuninst_end, extra = None, None
-02311         else:
-02312             lundo = []
-02313             if init:
-02314                 (nuninst_end, extra, tundo) = self.iter_packages(init, selected, hint=True)
-02315                 lundo.extend(tundo)
-02316                 undo = True
-02317             else: nuninst_end = None
-02318             (nuninst_end, extra, tundo) = self.iter_packages(upgrade_me, selected, nuninst=nuninst_end)
-02319             lundo.extend(tundo)
-02320             if not self.is_nuninst_asgood_generous(self.nuninst_orig, nuninst_end):
-02321                 nuninst_end, extra = None, None
-02322 
-02323         if nuninst_end:
-02324             self.output_write("Apparently successful\n")
-02325             self.output_write("final: %s\n" % ",".join(sorted(selected)))
-02326             self.output_write("start: %s\n" % self.eval_nuninst(nuninst_start))
-02327             self.output_write(" orig: %s\n" % self.eval_nuninst(self.nuninst_orig))
-02328             self.output_write("  end: %s\n" % self.eval_nuninst(nuninst_end))
-02329             if force:
-02330                 self.output_write("force breaks:\n")
-02331                 self.output_write(self.eval_uninst(self.newlyuninst(nuninst_start, nuninst_end)) + "\n")
-02332             self.output_write("SUCCESS (%d/%d)\n" % (len(actions or self.upgrade_me), len(extra)))
-02333             self.nuninst_orig = nuninst_end
-02334             if not actions:
-02335                 self.upgrade_me = sorted(extra)
-02336                 if not self.options.compatible:
-02337                     self.sort_actions()
-02338         else:
-02339             self.output_write("FAILED\n")
-02340             if not undo: return
-02341 
-02342             # undo all the changes
-02343             for (undo, pkg, pkg_name, suite) in lundo:
-02344                 # undo the changes (source)
-02345                 for k in undo['sources'].keys():
-02346                     if k[0] == '-':
-02347                         del self.sources['testing'][k[1:]]
-02348                     else: self.sources['testing'][k] = undo['sources'][k]
-02349 
-02350                 # undo the changes (new binaries)
-02351                 if pkg[0] != '-' and pkg_name in self.sources[suite]:
-02352                     for p in self.sources[suite][pkg_name][BINARIES]:
-02353                         binary, arch = p.split("/")
-02354                         if "/" in pkg and arch != pkg[pkg.find("/")+1:]: continue
-02355                         del self.binaries['testing'][arch][0][binary]
-02356 
-02357                 # undo the changes (binaries)
-02358                 for p in undo['binaries'].keys():
-02359                     binary, arch = p.split("/")
-02360                     if binary[0] == "-":
-02361                         del self.binaries['testing'][arch][0][binary[1:]]
-02362                     else: self.binaries['testing'][arch][0][binary] = undo['binaries'][p]
-02363 
-02364                 # undo the changes (virtual packages)
-02365                 for p in undo['nvirtual']:
-02366                     j, arch = p.split("/")
-02367                     del self.binaries['testing'][arch][1][j]
-02368                 for p in undo['virtual']:
-02369                     j, arch = p.split("/")
-02370                     if j[0] == '-':
-02371                         del self.binaries['testing'][arch][1][j[1:]]
-02372                     else: self.binaries['testing'][arch][1][j] = undo['virtual'][p]
-02373 
-02374     def upgrade_testing(self):
-02375         """Upgrade testing using the unstable packages
-02376 
-02377         This method tries to upgrade testing using the packages from unstable.
-02378         Before running the do_all method, it tries the easy and force-hint
-02379         commands.
-02380         """
-02381 
-02382         self.__log("Starting the upgrade test", type="I")
-02383         self.__output = open(self.options.upgrade_output, 'w')
-02384         self.output_write("Generated on: %s\n" % (time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time()))))
-02385         self.output_write("Arch order is: %s\n" % ", ".join(self.options.architectures))
-02386 
-02387         self.__log("> Calculating current uninstallability counters", type="I")
-02388         self.nuninst_orig = self.get_nuninst()
-02389 
-02390         if not self.options.actions:
-02391             # process `easy' hints
-02392             for x in self.hints['easy']:
-02393                 self.do_hint("easy", x[0], x[1])
-02394 
-02395             # process `force-hint' hints
-02396             for x in self.hints["force-hint"]:
-02397                 self.do_hint("force-hint", x[0], x[1])
-02398 
-02399         # run the first round of the upgrade
-02400         self.__log("> First loop on the packages with depth = 0", type="I")
-02401 
-02402         # separate runs for break arches
-02403         allpackages = []
-02404         normpackages = self.upgrade_me[:]
-02405         archpackages = {}
-02406         for a in self.options.break_arches.split():
-02407             archpackages[a] = [p for p in normpackages if p.endswith("/" + a)]
-02408             normpackages = [p for p in normpackages if not p.endswith("/" + a)]
-02409         self.upgrade_me = normpackages
-02410         self.output_write("info: main run\n")
-02411         self.do_all()
-02412         allpackages += self.upgrade_me
-02413         for a in self.options.break_arches.split():
-02414             backup = self.options.break_arches
-02415             self.options.break_arches = " ".join([x for x in self.options.break_arches.split() if x != a])
-02416             self.upgrade_me = archpackages[a]
-02417             self.output_write("info: broken arch run for %s\n" % (a))
-02418             self.do_all()
-02419             allpackages += self.upgrade_me
-02420             self.options.break_arches = backup
-02421         self.upgrade_me = allpackages
-02422 
-02423         if self.options.actions:
-02424             return
-02425 
-02426         # process `hint' hints
-02427         hintcnt = 0
-02428         for x in self.hints["hint"][:50]:
-02429             if hintcnt > 50:
-02430                 self.output_write("Skipping remaining hints...")
-02431                 break
-02432             if self.do_hint("hint", x[0], x[1]):
-02433                 hintcnt += 1
-02434 
-02435         # run the auto hinter
-02436         if not self.options.compatible:
-02437             self.auto_hinter()
-02438 
-02439         # smooth updates
-02440         if not self.options.compatible and len(self.options.smooth_updates) > 0:
-02441             self.__log("> Removing old packages left in testing from smooth updates", type="I")
-02442             removals = self.old_libraries()
-02443             if len(removals) > 0:
-02444                 self.output_write("Removing packages left in testing for smooth updates (%d):\n%s" % \
-02445                     (len(removals), self.old_libraries_format(removals)))
-02446                 self.do_all(actions=removals)
-02447                 removals = self.old_libraries()
-02448 
-02449         if not self.options.compatible:
-02450             self.output_write("List of old libraries in testing (%d):\n%s" % \
-02451                 (len(removals), self.old_libraries_format(removals)))
-02452 
-02453         # output files
-02454         if not self.options.dry_run:
-02455             # re-write control files
-02456             if self.options.control_files:
-02457                 self.write_controlfiles(self.options.testing, 'testing')
-02458 
-02459             # write bugs and dates
-02460             self.write_bugs(self.options.testing, self.bugs['testing'])
-02461             self.write_dates(self.options.testing, self.dates)
-02462 
-02463             # write HeidiResult
-02464             self.write_heidi(self.options.testing, 'HeidiResult')
-02465 
-02466         self.__output.close()
-02467         self.__log("Test completed!", type="I")
-02468 
-02469     def do_hint(self, type, who, pkgvers):
-02470         """Process hints
-02471 
-02472         This method process `easy`, `hint` and `force-hint` hints. If the
-02473         requested version is not in unstable, than the hint is skipped.
-02474         """
-02475         hintinfo = {"easy": "easy",
-02476                     "hint": 0,
-02477                     "force-hint": -1,}
-02478 
-02479         self.__log("> Processing '%s' hint from %s" % (type, who), type="I")
-02480         self.output_write("Trying %s from %s: %s\n" % (type, who, " ".join( ["%s/%s" % (p,v) for (p,v) in pkgvers])))
-02481 
-02482         ok = True
-02483         # loop on the requested packages and versions
-02484         for pkg, v in pkgvers:
-02485             # remove architecture
-02486             if "/" in pkg:
-02487                 pkg = pkg[:pkg.find("/")]
-02488 
-02489             # skip removal requests
-02490             if pkg[0] == "-":
-02491                 continue
-02492             # handle testing-proposed-updates
-02493             elif pkg.endswith("_tpu"):
-02494                 pkg = pkg[:-4]
-02495                 if pkg not in self.sources['tpu']: continue
-02496                 if apt_pkg.VersionCompare(self.sources['tpu'][pkg][VERSION], v) != 0:
-02497                     self.output_write(" Version mismatch, %s %s != %s\n" % (pkg, v, self.sources['tpu'][pkg][VERSION]))
-02498                     ok = False
-02499             # does the package exist in unstable?
-02500             elif pkg not in self.sources['unstable']:
-02501                 self.output_write(" Source %s has no version in unstable\n" % pkg)
-02502                 ok = False
-02503             elif apt_pkg.VersionCompare(self.sources['unstable'][pkg][VERSION], v) != 0:
-02504                 self.output_write(" Version mismatch, %s %s != %s\n" % (pkg, v, self.sources['unstable'][pkg][VERSION]))
-02505                 ok = False
-02506         if not ok:
-02507             self.output_write("Not using hint\n")
-02508             return False
-02509 
-02510         self.do_all(hintinfo[type], map(operator.itemgetter(0), pkgvers))
-02511         return True
-02512 
-02513     def sort_actions(self):
-02514         """Sort actions in a smart way
-02515 
-02516         This method sorts the list of actions in a smart way. In details, it uses
-02517         as base sort the number of days the excuse is old, then reordering packages
-02518         so the ones with most reverse dependencies are at the end of the loop.
-02519         If an action depends on another one, it is put after it.
-02520         """
-02521         upgrade_me = [x.name for x in self.excuses if x.name in self.upgrade_me]
-02522         for e in self.excuses:
-02523             if e.name not in upgrade_me: continue
-02524             # try removes at the end of the loop
-02525             elif e.name[0] == '-':
-02526                 upgrade_me.remove(e.name)
-02527                 upgrade_me.append(e.name)
-02528             # otherwise, put it in a good position checking its dependencies
-02529             else:
-02530                 pos = []
-02531                 udeps = [upgrade_me.index(x) for x in e.deps if x in upgrade_me and x != e.name]
-02532                 if len(udeps) > 0:
-02533                     pos.append(max(udeps))
-02534                 sdeps = [upgrade_me.index(x) for x in e.sane_deps if x in upgrade_me and x != e.name]
-02535                 if len(sdeps) > 0:
-02536                     pos.append(min(sdeps))
-02537                 if len(pos) == 0: continue
-02538                 upgrade_me.remove(e.name)
-02539                 upgrade_me.insert(max(pos)+1, e.name)
-02540                 self.dependencies[e.name] = e.deps
-02541 
-02542         # replace the list of actions with the new one
-02543         self.upgrade_me = upgrade_me
-02544 
-02545     def auto_hinter(self):
-02546         """Auto hint circular dependencies
-02547 
-02548         This method tries to auto hint circular dependencies analyzing the update
-02549         excuses relationships. If they build a circular dependency, which we already
-02550         know as not-working with the standard do_all algorithm, try to `easy` them.
-02551         """
-02552         self.__log("> Processing hints from the auto hinter", type="I")
-02553 
-02554         # consider only excuses which are valid candidates
-02555         excuses = dict([(x.name, x) for x in self.excuses if x.name in self.upgrade_me])
-02556 
-02557         def find_related(e, hint, first=False):
-02558             if e not in excuses:
-02559                 return False
-02560             excuse = excuses[e]
-02561             if e in self.sources['testing'] and self.sources['testing'][e][VERSION] == excuse.ver[1]:
-02562                 return True
-02563             if not first:
-02564                 hint[e] = excuse.ver[1]
-02565             if len(excuse.deps) == 0:
-02566                 return hint
-02567             for p in excuse.deps:
-02568                 if p in hint: continue
-02569                 if not find_related(p, hint):
-02570                     return False
-02571             return hint
-02572 
-02573         # loop on them
-02574         cache = []
-02575         for e in excuses:
-02576             excuse = excuses[e]
-02577             if e in self.sources['testing'] and self.sources['testing'][e][VERSION] == excuse.ver[1] or \
-02578                len(excuse.deps) == 0:
-02579                 continue
-02580             hint = find_related(e, {}, True)
-02581             if hint and e in hint and hint not in cache:
-02582                 self.do_hint("easy", "autohinter", hint.items())
-02583                 cache.append(hint)
-02584 
-02585     def old_libraries(self):
-02586         """Detect old libraries left in testing for smooth transitions
-02587 
-02588         This method detect old libraries which are in testing but no longer
-02589         built from the source package: they are still there because other
-02590         packages still depend on them, but they should be removed as soon
-02591         as possible.
-02592         """
-02593         sources = self.sources['testing']
-02594         testing = self.binaries['testing']
-02595         unstable = self.binaries['unstable']
-02596         removals = []
-02597         for arch in self.options.architectures:
-02598             for pkg_name in testing[arch][0]:
-02599                 pkg = testing[arch][0][pkg_name]
-02600                 if pkg_name not in unstable[arch][0] and \
-02601                    not self.same_source(sources[pkg[SOURCE]][VERSION], pkg[SOURCEVER]):
-02602                     removals.append("-" + pkg_name + "/" + arch)
-02603         return removals
-02604 
-02605     def old_libraries_format(self, libs):
-02606         """Format old libraries in a smart table"""
-02607         libraries = {}
-02608         for i in libs:
-02609             pkg, arch = i.split("/")
-02610             pkg = pkg[1:]
-02611             if pkg in libraries:
-02612                     libraries[pkg].append(arch)
-02613             else:
-02614                     libraries[pkg] = [arch]
-02615         return "\n".join(["  " + k + ": " + " ".join(libraries[k]) for k in libraries]) + "\n"
-02616 
-02617     def output_write(self, msg):
-02618         """Simple wrapper for output writing"""
-02619         self.__output.write(msg)
-02620 
-02621     def main(self):
-02622         """Main method
-02623         
-02624         This is the entry point for the class: it includes the list of calls
-02625         for the member methods which will produce the output files.
-02626         """
-02627         # if no actions are provided, build the excuses and sort them
-02628         if not self.options.actions:
-02629             self.write_excuses()
-02630             if not self.options.compatible:
-02631                 self.sort_actions()
-02632         # otherwise, use the actions provided by the command line
-02633         else: self.upgrade_me = self.options.actions.split()
-02634 
-02635         # run the upgrade test
-02636         self.upgrade_testing()
-02637 
-02638 if __name__ == '__main__':
-02639     Britney().main()
-

Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/classbritney_1_1Britney-members.html b/doc/html/classbritney_1_1Britney-members.html deleted file mode 100644 index 2b988f1..0000000 --- a/doc/html/classbritney_1_1Britney-members.html +++ /dev/null @@ -1,71 +0,0 @@ - - -briteny: Member List - - - - - - -

britney.Britney Member List

This is the complete list of members for britney.Britney, including all inherited members.

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
__init__britney.Britney
auto_hinterbritney.Britney
check_conflictsbritney.Britney
check_installablebritney.Britney
do_allbritney.Britney
do_hintbritney.Britney
doop_sourcebritney.Britney
eval_nuninstbritney.Britney
eval_uninstbritney.Britney
excuse_unsat_depsbritney.Britney
get_dependency_solversbritney.Britney
get_full_treebritney.Britney
get_nuninstbritney.Britney
HINTS_ALLbritney.Britney [static]
HINTS_STANDARDbritney.Britney [static]
invalidate_excusesbritney.Britney
is_nuninst_asgood_generousbritney.Britney
iter_packagesbritney.Britney
mainbritney.Britney
newlyuninstbritney.Britney
normalize_bugsbritney.Britney
old_librariesbritney.Britney
old_libraries_formatbritney.Britney
output_writebritney.Britney
read_approvalsbritney.Britney
read_binariesbritney.Britney
read_bugsbritney.Britney
read_datesbritney.Britney
read_hintsbritney.Britney
read_nuninstbritney.Britney
read_sourcesbritney.Britney
read_urgenciesbritney.Britney
register_reversesbritney.Britney
reversed_exc_depsbritney.Britney
same_sourcebritney.Britney
should_remove_sourcebritney.Britney
should_upgrade_srcbritney.Britney
should_upgrade_srcarchbritney.Britney
sort_actionsbritney.Britney
upgrade_testingbritney.Britney
write_bugsbritney.Britney
write_controlfilesbritney.Britney
write_datesbritney.Britney
write_excusesbritney.Britney
write_heidibritney.Britney
write_nuninstbritney.Britney


Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/classbritney_1_1Britney.html b/doc/html/classbritney_1_1Britney.html deleted file mode 100644 index 0375abe..0000000 --- a/doc/html/classbritney_1_1Britney.html +++ /dev/null @@ -1,1958 +0,0 @@ - - -briteny: britney.Britney Class Reference - - - - - - - -

britney.Britney Class Reference

List of all members. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Public Member Functions

def __init__
def read_sources
def read_binaries
def register_reverses
def read_bugs
def write_bugs
def normalize_bugs
def read_dates
def write_dates
def read_urgencies
def read_approvals
def read_hints
def write_heidi
def write_controlfiles
def write_nuninst
def read_nuninst
def same_source
def get_dependency_solvers
def excuse_unsat_deps
def should_remove_source
def should_upgrade_srcarch
def should_upgrade_src
def reversed_exc_deps
def invalidate_excuses
def write_excuses
def newlyuninst
def get_nuninst
def eval_nuninst
def eval_uninst
-def is_nuninst_asgood_generous
def check_installable
def check_conflicts
def doop_source
def get_full_tree
def iter_packages
def do_all
def upgrade_testing
def do_hint
def sort_actions
def auto_hinter
def old_libraries
def old_libraries_format
def output_write
def main

Static Public Attributes

-tuple HINTS_STANDARD = ("easy", "hint", "remove", "block", "unblock", "urgent", "approve")
-tuple HINTS_ALL = ("force", "force-hint", "block-all")
-

Detailed Description

-
Britney, the debian testing updater script
-
-This is the script that updates the testing_ distribution. It is executed
-each day after the installation of the updated packages. It generates the 
-`Packages' files for the testing distribution, but it does so in an
-intelligent manner; it try to avoid any inconsistency and to use only
-non-buggy packages.
-
-For more documentation on this script, please read the Developers Reference.
-
-

- -

-Definition at line 212 of file britney.py.


Member Function Documentation

- -
-
- - - - - - - - - -
def britney.Britney.__init__ (  self  ) 
-
-
- -

-

Class constructor
-
-This method initializes and populates the data lists, which contain all
-the information needed by the other methods of the class.
-
-

-Definition at line 218 of file britney.py. -

-

- -

-
- - - - - - - - - -
def britney.Britney.auto_hinter (  self  ) 
-
-
- -

-

Auto hint circular dependencies
-
-This method tries to auto hint circular dependencies analyzing the update
-excuses relationships. If they build a circular dependency, which we already
-know as not-working with the standard do_all algorithm, try to `easy` them.
-
-

-Definition at line 2536 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.check_conflicts (  self,
  pkg,
  arch,
  broken,
  system,
  conflicts 
)
-
-
- -

-

Check if a package can be installed satisfying the conflicts
-
-This method checks if the `pkg` package from the `arch` architecture
-can be installed (excluding `broken` packages) within the system
-`system` along with all its dependencies. This means that all the
-conflicts relationships are checked in order to achieve the test
-co-installability of the package.
-
-The method returns a boolean which is True if the given package is
-co-installable in the given system.
-
-

-Definition at line 1661 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.check_installable (  self,
  pkg,
  arch,
  suite,
  excluded = [],
  conflicts = False 
)
-
-
- -

-

Check if a package is installable
-
-This method analyzes the dependencies of the binary package specified
-by the parameter `pkg' for the architecture `arch' within the suite
-`suite'. If the dependency can be satisfied in the given `suite` and
-`conflicts` parameter is True, then the co-installability with 
-conflicts handling is checked.
-
-The dependency fields checked are Pre-Depends and Depends.
-
-The method returns a boolean which is True if the given package is
-installable.
-
-

-Definition at line 1620 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.do_all (  self,
  maxdepth = 0,
  init = None,
  actions = None 
)
-
-
- -

-

Testing update runner
-
-This method tries to update testing checking the uninstallability
-counters before and after the actions to decide if the update was
-successful or not.
-
-

-Definition at line 2258 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.do_hint (  self,
  type,
  who,
  pkgvers 
)
-
-
- -

-

Process hints
-
-This method process `easy`, `hint` and `force-hint` hints. If the
-requested version is not in unstable, than the hint is skipped.
-
-

-Definition at line 2460 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.doop_source (  self,
  pkg 
)
-
-
- -

-

Apply a change to the testing distribution as requested by `pkg`
-
-This method apply the changes required by the action `pkg` tracking
-them so it will be possible to revert them.
-
-The method returns a list of the package name, the suite where the
-package comes from, the list of packages affected by the change and
-the dictionary undo which can be used to rollback the changes.
-
-

-Definition at line 1880 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.eval_nuninst (  self,
  nuninst,
  original = None 
)
-
-
- -

-

Return a string which represents the uninstallability counters
-
-This method returns a string which represents the uninstallability
-counters reading the uninstallability statistics `nuninst` and, if
-present, merging the results with the `original` one.
-
-An example of the output string is:
-1+2: i-0:a-0:a-0:h-0:i-1:m-0:m-0:p-0:a-0:m-0:s-2:s-0
-
-where the first part is the number of broken packages in non-break
-architectures + the total number of broken packages for all the
-architectures.
-
-

-Definition at line 1568 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.eval_uninst (  self,
  nuninst 
)
-
-
- -

-

Return a string which represents the uninstallable packages
-
-This method returns a string which represents the uninstallable
-packages reading the uninstallability statistics `nuninst`.
-
-An example of the output string is:
-    * i386: broken-pkg1, broken-pkg2
-
-

-Definition at line 1598 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.excuse_unsat_deps (  self,
  pkg,
  src,
  arch,
  suite,
  excuse,
  excluded = [],
  conflicts = False 
)
-
-
- -

-

Find unsatisfied dependencies for a binary package
-
-This method analyzes the dependencies of the binary package specified
-by the parameter `pkg', built from the source package `src', for the
-architecture `arch' within the suite `suite'. If the dependency can't
-be satisfied in testing and/or unstable, it updates the excuse passed
-as parameter.
-
-The dependency fields checked are Pre-Depends and Depends.
-
-

-Definition at line 930 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.get_dependency_solvers (  self,
  block,
  arch,
  distribution,
  excluded = [],
  strict = False 
)
-
-
- -

-

Find the packages which satisfy a dependency block
-
-This method returns the list of packages which satisfy a dependency
-block (as returned by apt_pkg.ParseDepends) for the given architecture
-and distribution.
-
-It returns a tuple with two items: the first is a boolean which is
-True if the dependency is satisfied, the second is the list of the
-solving packages.
-
-

-Definition at line 889 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.get_full_tree (  self,
  pkg,
  arch,
  suite 
)
-
-
- -

-

Calculate the full dependency tree for the given package
-
-This method returns the full dependency tree for the package `pkg`,
-inside the `arch` architecture for the suite `suite`.
-
-

-Definition at line 2022 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.get_nuninst (  self,
  requested_arch = None,
  build = False 
)
-
-
- -

-

Return the uninstallability statistic for all the architectures
-
-To calculate the uninstallability counters, the method checks the
-installability of all the packages for all the architectures, and
-tracking dependencies in a recursive way. The architecture
-indipendent packages are checked only for the `nobreakall`
-architectures.
-
-It returns a dictionary with the architectures as keys and the list
-of uninstallable packages as values.
-
-

-Definition at line 1508 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.invalidate_excuses (  self,
  valid,
  invalid 
)
-
-
- -

-

Invalidate impossible excuses
-
-This method invalidates the impossible excuses, which depend
-on invalid excuses. The two parameters contains the list of
-`valid' and `invalid' excuses.
-
-

-Definition at line 1344 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.iter_packages (  self,
  packages,
  selected,
  hint = False,
  nuninst = None 
)
-
-
- -

-

Iter on the list of actions and apply them one-by-one
-
-This method apply the changes from `packages` to testing, checking the uninstallability
-counters for every action performed. If the action do not improve the it, it is reverted.
-The method returns the new uninstallability counters and the remaining actions if the
-final result is successful, otherwise (None, None).
-
-

-Definition at line 2038 of file britney.py. -

-

- -

-
- - - - - - - - - -
def britney.Britney.main (  self  ) 
-
-
- -

-

Main method
-
-This is the entry point for the class: it includes the list of calls
-for the member methods which will produce the output files.
-
-

-Definition at line 2612 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.newlyuninst (  self,
  nuold,
  nunew 
)
-
-
- -

-

Return a nuninst statstic with only new uninstallable packages
-
-This method subtract the uninstallabla packages of the statistic
-`nunew` from the statistic `nuold`.
-
-It returns a dictionary with the architectures as keys and the list
-of uninstallable packages as values.
-
-

-Definition at line 1493 of file britney.py. -

-

- -

-
- - - - - - - - - -
def britney.Britney.normalize_bugs (  self  ) 
-
-
- -

-

Normalize the release critical bug summaries for testing and unstable
-
-The method doesn't return any value: it directly modifies the
-object attribute `bugs'.
-
-

-Definition at line 549 of file britney.py. -

-

- -

-
- - - - - - - - - -
def britney.Britney.old_libraries (  self  ) 
-
-
- -

-

Detect old libraries left in testing for smooth transitions
-
-This method detect old libraries which are in testing but no longer
-built from the source package: they are still there because other
-packages still depend on them, but they should be removed as soon
-as possible.
-
-

-Definition at line 2576 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.old_libraries_format (  self,
  libs 
)
-
-
- -

-

Format old libraries in a smart table
-

-Definition at line 2596 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.output_write (  self,
  msg 
)
-
-
- -

-

Simple wrapper for output writing
-

-Definition at line 2608 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.read_approvals (  self,
  basedir 
)
-
-
- -

-

Read the approval commands from the specified directory
-
-The approval commands are read from the files contained by the 
-`Approved' directory within the directory specified as `basedir'
-parameter. The name of the files has to be the same of the
-authorized users for the approvals.
-
-The file contains rows with the format:
-
-<package-name> <version>
-
-The method returns a dictionary where the key is the binary package
-name followed by an underscore and the version number, and the value
-is the user who submitted the command.
-
-

-Definition at line 669 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.read_binaries (  self,
  basedir,
  distribution,
  arch 
)
-
-
- -

-

Read the list of binary packages from the specified directory
-
-The binary packages are read from the `Packages_${arch}' files
-within the directory specified as `basedir' parameter, replacing
-${arch} with the value of the arch parameter. Considering the
-large amount of memory needed, not all the fields are loaded
-in memory. The available fields are Version, Source, Pre-Depends,
-Depends, Conflicts, Provides and Architecture.
-
-After reading the packages, reverse dependencies are computed
-and saved in the `rdepends' keys, and the `Provides' field is
-used to populate the virtual packages list.
-
-The dependencies are parsed with the apt.pkg.ParseDepends method,
-and they are stored both as the format of its return value and
-text.
-
-The method returns a tuple. The first element is a list where
-every item represents a binary package as a dictionary; the second
-element is a dictionary which maps virtual packages to real
-packages that provide it.
-
-

-Definition at line 367 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.read_bugs (  self,
  basedir 
)
-
-
- -

-

Read the release critial bug summary from the specified directory
-
-The RC bug summaries are read from the `Bugs' file within the
-directory specified as `basedir' parameter. The file contains
-rows with the format:
-
-<package-name> <count-of-rc-bugs>
-
-The method returns a dictionary where the key is the binary package
-name and the value is the number of open RC bugs for it.
-
-

-Definition at line 493 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.read_dates (  self,
  basedir 
)
-
-
- -

-

Read the upload date for the packages from the specified directory
-
-The upload dates are read from the `Date' file within the directory
-specified as `basedir' parameter. The file contains rows with the
-format:
-
-<package-name> <version> <date-of-upload>
-
-The dates are expressed as days starting from the 1970-01-01.
-
-The method returns a dictionary where the key is the binary package
-name and the value is tuple with two items, the version and the date.
-
-

-Definition at line 584 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.read_hints (  self,
  basedir 
)
-
-
- -

-

Read the hint commands from the specified directory
-
-The hint commands are read from the files contained by the `Hints'
-directory within the directory specified as `basedir' parameter. 
-The name of the files has to be the same of the authorized users
-for the hints.
-
-The file contains rows with the format:
-
-<command> <package-name>[/<version>]
-
-The method returns a dictionary where the key is the command, and
-the value is the list of affected packages.
-
-

-Definition at line 695 of file britney.py. -

-

- -

-
- - - - - - - - - -
def britney.Britney.read_nuninst (  self  ) 
-
-
- -

-

Read the non-installable report
-

-Definition at line 835 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.read_sources (  self,
  basedir 
)
-
-
- -

-

Read the list of source packages from the specified directory
-
-The source packages are read from the `Sources' file within the
-directory specified as `basedir' parameter. Considering the
-large amount of memory needed, not all the fields are loaded
-in memory. The available fields are Version, Maintainer and Section.
-
-The method returns a list where every item represents a source
-package as a dictionary.
-
-

-Definition at line 340 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.read_urgencies (  self,
  basedir 
)
-
-
- -

-

Read the upload urgency of the packages from the specified directory
-
-The upload urgencies are read from the `Urgency' file within the
-directory specified as `basedir' parameter. The file contains rows
-with the format:
-
-<package-name> <version> <urgency>
-
-The method returns a dictionary where the key is the binary package
-name and the value is the greatest urgency from the versions of the
-package that are higher then the testing one.
-
-

-Definition at line 624 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.register_reverses (  self,
  pkg,
  packages,
  provides,
  check_doubles = True,
  parse_depends = apt_pkg.ParseDepends 
)
-
-
- -

-

Register reverse dependencies and conflicts for the specified package
-
-This method register the reverse dependencies and conflicts for
-a give package using `packages` as list of packages and `provides`
-as list of virtual packages.
-
-The method has an optional parameter parse_depends which is there
-just for performance reasons and is not meant to be overwritten.
-
-

-Definition at line 451 of file britney.py. -

-

- -

-
- - - - - - - - - -
def britney.Britney.reversed_exc_deps (  self  ) 
-
-
- -

-

Reverse the excuses dependencies
-
-This method returns a dictionary where the keys are the package names
-and the values are the excuse names which depend on it.
-
-

-Definition at line 1331 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.same_source (  self,
  sv1,
  sv2 
)
-
-
- -

-

Check if two version numbers are built from the same source
-
-This method returns a boolean value which is true if the two
-version numbers specified as parameters are built from the same
-source. The main use of this code is to detect binary-NMU.
-
-

-Definition at line 850 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.should_remove_source (  self,
  pkg 
)
-
-
- -

-

Check if a source package should be removed from testing
-
-This method checks if a source package should be removed from the
-testing distribution; this happen if the source package is not
-present in the unstable distribution anymore.
-
-It returns True if the package can be removed, False otherwise.
-In the former case, a new excuse is appended to the the object
-attribute excuses.
-
-

-Definition at line 990 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.should_upgrade_src (  self,
  src,
  suite 
)
-
-
- -

-

Check if source package should be upgraded
-
-This method checks if a source package should be upgraded. The analysis
-is performed for the source package specified by the `src' parameter, 
-checking the architecture `arch' for the distribution `suite'.
-       
-It returns False if the given package doesn't need to be upgraded,
-True otherwise. In the former case, a new excuse is appended to
-the the object attribute excuses.
-
-

-Definition at line 1134 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.should_upgrade_srcarch (  self,
  src,
  arch,
  suite 
)
-
-
- -

-

Check if binary package should be upgraded
-
-This method checks if a binary package should be upgraded; this can
-happen also if the binary package is a binary-NMU for the given arch.
-The analysis is performed for the source package specified by the
-`src' parameter, checking the architecture `arch' for the distribution
-`suite'.
-       
-It returns False if the given package doesn't need to be upgraded,
-True otherwise. In the former case, a new excuse is appended to
-the the object attribute excuses.
-
-

-Definition at line 1020 of file britney.py. -

-

- -

-
- - - - - - - - - -
def britney.Britney.sort_actions (  self  ) 
-
-
- -

-

Sort actions in a smart way
-
-This method sorts the list of actions in a smart way. In details, it uses
-as base sort the number of days the excuse is old, then reordering packages
-so the ones with most reverse dependencies are at the end of the loop.
-If an action depends on another one, it is put after it.
-
-

-Definition at line 2504 of file britney.py. -

-

- -

-
- - - - - - - - - -
def britney.Britney.upgrade_testing (  self  ) 
-
-
- -

-

Upgrade testing using the unstable packages
-
-This method tries to upgrade testing using the packages from unstable.
-Before running the do_all method, it tries the easy and force-hint
-commands.
-
-

-Definition at line 2365 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.write_bugs (  self,
  basedir,
  bugs 
)
-
-
- -

-

Write the release critical bug summary to the specified directory
-
-For a more detailed explanation of the format, please check the method
-read_bugs.
-
-

-Definition at line 517 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.write_controlfiles (  self,
  basedir,
  suite 
)
-
-
- -

-

Write the control files
-
-This method write the control files for the binary packages of all
-the architectures and for the source packages.
-
-

-Definition at line 777 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.write_dates (  self,
  basedir,
  dates 
)
-
-
- -

-

Write the upload date for the packages to the specified directory
-
-For a more detailed explanation of the format, please check the method
-read_dates.
-
-

-Definition at line 610 of file britney.py. -

-

- -

-
- - - - - - - - - -
def britney.Britney.write_excuses (  self  ) 
-
-
- -

-

Produce and write the update excuses
-
-This method handles the update excuses generation: the packages are
-looked to determine whether they are valid candidates. For the details
-of this procedure, please refer to the module docstring.
-
-

-Definition at line 1386 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def britney.Britney.write_heidi (  self,
  basedir,
  filename 
)
-
-
- -

-

Write the output HeidiResult
-
-This method write the output for Heidi, which contains all the
-binary packages and the source packages in the form:
-
-<pkg-name> <pkg-version> <pkg-architecture> <pkg-section>
-<src-name> <src-version> <src-section>
-
-

-Definition at line 742 of file britney.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def britney.Britney.write_nuninst (  self,
  nuninst 
)
-
-
- -

-

Write the non-installable report
-

-Definition at line 827 of file britney.py. -

-

-


The documentation for this class was generated from the following file: -
Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/classexcuse_1_1Excuse-members.html b/doc/html/classexcuse_1_1Excuse-members.html deleted file mode 100644 index e7cd138..0000000 --- a/doc/html/classexcuse_1_1Excuse-members.html +++ /dev/null @@ -1,41 +0,0 @@ - - -briteny: Member List - - - - - - -

excuse.Excuse Member List

This is the complete list of members for excuse.Excuse, including all inherited members.

- - - - - - - - - - - - - - - - -
__init__excuse.Excuse
add_break_depexcuse.Excuse
add_depexcuse.Excuse
add_sane_depexcuse.Excuse
add_unsat_depexcuse.Excuse
addhtmlexcuse.Excuse
htmlexcuse.Excuse
invalidate_depexcuse.Excuse
reemailexcuse.Excuse [static]
set_dateexcuse.Excuse
set_maintexcuse.Excuse
set_priorityexcuse.Excuse
set_sectionexcuse.Excuse
set_urgencyexcuse.Excuse
set_versexcuse.Excuse
setdaysoldexcuse.Excuse


Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/classexcuse_1_1Excuse.html b/doc/html/classexcuse_1_1Excuse.html deleted file mode 100644 index 1baa28a..0000000 --- a/doc/html/classexcuse_1_1Excuse.html +++ /dev/null @@ -1,561 +0,0 @@ - - -briteny: excuse.Excuse Class Reference - - - - - - - -

excuse.Excuse Class Reference

List of all members. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Public Member Functions

def __init__
def set_vers
def set_maint
def set_section
def set_priority
def set_date
def set_urgency
def add_dep
def add_sane_dep
def add_break_dep
def add_unsat_dep
def invalidate_dep
def setdaysold
def addhtml
def html

Static Public Attributes

-tuple reemail = re.compile(r"<.*?>")
 Regular expression for removing the email address.
-

Detailed Description

-
Excuse class
-
-This class represents an update excuse, which is a detailed explanation
-of why a package can or cannot be updated in the testing distribution from
-a newer package in another distribution (like for example unstable).
-
-The main purpose of the excuses is to be written in an HTML file which
-will be published over HTTP. The maintainers will be able to parse it
-manually or automatically to find the explanation of why their packages
-have been updated or not.
-
-

- -

-Definition at line 21 of file excuse.py.


Member Function Documentation

- -
-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.__init__ (  self,
  name 
)
-
-
- -

-

Class constructor
-
-This method initializes the excuse with the specified name and
-the default values.
-
-

-Definition at line 28 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def excuse.Excuse.add_break_dep (  self,
  name,
  arch 
)
-
-
- -

-

Add a break dependency
-

-Definition at line 86 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.add_dep (  self,
  name 
)
-
-
- -

-

Add a dependency
-

-Definition at line 78 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.add_sane_dep (  self,
  name 
)
-
-
- -

-

Add a sane dependency
-

-Definition at line 82 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.add_unsat_dep (  self,
  arch 
)
-
-
- -

-

Add a flag for unsatisfied dependencies
-

-Definition at line 91 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.addhtml (  self,
  note 
)
-
-
- -

-

Add a note in HTML
-

-Definition at line 104 of file excuse.py. -

-

- -

-
- - - - - - - - - -
def excuse.Excuse.html (  self  ) 
-
-
- -

-

Render the excuse in HTML
-

-Definition at line 108 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.invalidate_dep (  self,
  name 
)
-
-
- -

-

Invalidate dependency
-

-Definition at line 95 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.set_date (  self,
  date 
)
-
-
- -

-

Set the date of upload of the package
-

-Definition at line 70 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.set_maint (  self,
  maint 
)
-
-
- -

-

Set the package maintainer's name
-

-Definition at line 58 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.set_priority (  self,
  pri 
)
-
-
- -

-

Set the priority of the package
-

-Definition at line 66 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.set_section (  self,
  section 
)
-
-
- -

-

Set the section of the package
-

-Definition at line 62 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - -
def excuse.Excuse.set_urgency (  self,
  date 
)
-
-
- -

-

Set the urgency of upload of the package
-

-Definition at line 74 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def excuse.Excuse.set_vers (  self,
  tver,
  uver 
)
-
-
- -

-

Set the testing and unstable versions
-

-Definition at line 53 of file excuse.py. -

-

- -

-
- - - - - - - - - - - - - - - - - - - - - - - - -
def excuse.Excuse.setdaysold (  self,
  daysold,
  mindays 
)
-
-
- -

-

Set the number of days from the upload and the minimum number of days for the update
-

-Definition at line 99 of file excuse.py. -

-

-


The documentation for this class was generated from the following file: -
Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/doxygen.css b/doc/html/doxygen.css deleted file mode 100644 index 5d58369..0000000 --- a/doc/html/doxygen.css +++ /dev/null @@ -1,358 +0,0 @@ -BODY,H1,H2,H3,H4,H5,H6,P,CENTER,TD,TH,UL,DL,DIV { - font-family: Geneva, Arial, Helvetica, sans-serif; -} -BODY,TD { - font-size: 90%; -} -H1 { - text-align: center; - font-size: 160%; -} -H2 { - font-size: 120%; -} -H3 { - font-size: 100%; -} -CAPTION { font-weight: bold } -DIV.qindex { - width: 100%; - background-color: #e8eef2; - border: 1px solid #84b0c7; - text-align: center; - margin: 2px; - padding: 2px; - line-height: 140%; -} -DIV.nav { - width: 100%; - background-color: #e8eef2; - border: 1px solid #84b0c7; - text-align: center; - margin: 2px; - padding: 2px; - line-height: 140%; -} -DIV.navtab { - background-color: #e8eef2; - border: 1px solid #84b0c7; - text-align: center; - margin: 2px; - margin-right: 15px; - padding: 2px; -} -TD.navtab { - font-size: 70%; -} -A.qindex { - text-decoration: none; - font-weight: bold; - color: #1A419D; -} -A.qindex:visited { - text-decoration: none; - font-weight: bold; - color: #1A419D -} -A.qindex:hover { - text-decoration: none; - background-color: #ddddff; -} -A.qindexHL { - text-decoration: none; - font-weight: bold; - background-color: #6666cc; - color: #ffffff; - border: 1px double #9295C2; -} -A.qindexHL:hover { - text-decoration: none; - background-color: #6666cc; - color: #ffffff; -} -A.qindexHL:visited { text-decoration: none; background-color: #6666cc; color: #ffffff } -A.el { text-decoration: none; font-weight: bold } -A.elRef { font-weight: bold } -A.code:link { text-decoration: none; font-weight: normal; color: #0000FF} -A.code:visited { text-decoration: none; font-weight: normal; color: #0000FF} -A.codeRef:link { font-weight: normal; color: #0000FF} -A.codeRef:visited { font-weight: normal; color: #0000FF} -A:hover { text-decoration: none; background-color: #f2f2ff } -DL.el { margin-left: -1cm } -.fragment { - font-family: monospace, 
fixed; - font-size: 95%; -} -PRE.fragment { - border: 1px solid #CCCCCC; - background-color: #f5f5f5; - margin-top: 4px; - margin-bottom: 4px; - margin-left: 2px; - margin-right: 8px; - padding-left: 6px; - padding-right: 6px; - padding-top: 4px; - padding-bottom: 4px; -} -DIV.ah { background-color: black; font-weight: bold; color: #ffffff; margin-bottom: 3px; margin-top: 3px } - -DIV.groupHeader { - margin-left: 16px; - margin-top: 12px; - margin-bottom: 6px; - font-weight: bold; -} -DIV.groupText { margin-left: 16px; font-style: italic; font-size: 90% } -BODY { - background: white; - color: black; - margin-right: 20px; - margin-left: 20px; -} -TD.indexkey { - background-color: #e8eef2; - font-weight: bold; - padding-right : 10px; - padding-top : 2px; - padding-left : 10px; - padding-bottom : 2px; - margin-left : 0px; - margin-right : 0px; - margin-top : 2px; - margin-bottom : 2px; - border: 1px solid #CCCCCC; -} -TD.indexvalue { - background-color: #e8eef2; - font-style: italic; - padding-right : 10px; - padding-top : 2px; - padding-left : 10px; - padding-bottom : 2px; - margin-left : 0px; - margin-right : 0px; - margin-top : 2px; - margin-bottom : 2px; - border: 1px solid #CCCCCC; -} -TR.memlist { - background-color: #f0f0f0; -} -P.formulaDsp { text-align: center; } -IMG.formulaDsp { } -IMG.formulaInl { vertical-align: middle; } -SPAN.keyword { color: #008000 } -SPAN.keywordtype { color: #604020 } -SPAN.keywordflow { color: #e08000 } -SPAN.comment { color: #800000 } -SPAN.preprocessor { color: #806020 } -SPAN.stringliteral { color: #002080 } -SPAN.charliteral { color: #008080 } -.mdescLeft { - padding: 0px 8px 4px 8px; - font-size: 80%; - font-style: italic; - background-color: #FAFAFA; - border-top: 1px none #E0E0E0; - border-right: 1px none #E0E0E0; - border-bottom: 1px none #E0E0E0; - border-left: 1px none #E0E0E0; - margin: 0px; -} -.mdescRight { - padding: 0px 8px 4px 8px; - font-size: 80%; - font-style: italic; - background-color: #FAFAFA; - border-top: 
1px none #E0E0E0; - border-right: 1px none #E0E0E0; - border-bottom: 1px none #E0E0E0; - border-left: 1px none #E0E0E0; - margin: 0px; -} -.memItemLeft { - padding: 1px 0px 0px 8px; - margin: 4px; - border-top-width: 1px; - border-right-width: 1px; - border-bottom-width: 1px; - border-left-width: 1px; - border-top-color: #E0E0E0; - border-right-color: #E0E0E0; - border-bottom-color: #E0E0E0; - border-left-color: #E0E0E0; - border-top-style: solid; - border-right-style: none; - border-bottom-style: none; - border-left-style: none; - background-color: #FAFAFA; - font-size: 80%; -} -.memItemRight { - padding: 1px 8px 0px 8px; - margin: 4px; - border-top-width: 1px; - border-right-width: 1px; - border-bottom-width: 1px; - border-left-width: 1px; - border-top-color: #E0E0E0; - border-right-color: #E0E0E0; - border-bottom-color: #E0E0E0; - border-left-color: #E0E0E0; - border-top-style: solid; - border-right-style: none; - border-bottom-style: none; - border-left-style: none; - background-color: #FAFAFA; - font-size: 80%; -} -.memTemplItemLeft { - padding: 1px 0px 0px 8px; - margin: 4px; - border-top-width: 1px; - border-right-width: 1px; - border-bottom-width: 1px; - border-left-width: 1px; - border-top-color: #E0E0E0; - border-right-color: #E0E0E0; - border-bottom-color: #E0E0E0; - border-left-color: #E0E0E0; - border-top-style: none; - border-right-style: none; - border-bottom-style: none; - border-left-style: none; - background-color: #FAFAFA; - font-size: 80%; -} -.memTemplItemRight { - padding: 1px 8px 0px 8px; - margin: 4px; - border-top-width: 1px; - border-right-width: 1px; - border-bottom-width: 1px; - border-left-width: 1px; - border-top-color: #E0E0E0; - border-right-color: #E0E0E0; - border-bottom-color: #E0E0E0; - border-left-color: #E0E0E0; - border-top-style: none; - border-right-style: none; - border-bottom-style: none; - border-left-style: none; - background-color: #FAFAFA; - font-size: 80%; -} -.memTemplParams { - padding: 1px 0px 0px 8px; - margin: 
4px; - border-top-width: 1px; - border-right-width: 1px; - border-bottom-width: 1px; - border-left-width: 1px; - border-top-color: #E0E0E0; - border-right-color: #E0E0E0; - border-bottom-color: #E0E0E0; - border-left-color: #E0E0E0; - border-top-style: solid; - border-right-style: none; - border-bottom-style: none; - border-left-style: none; - color: #606060; - background-color: #FAFAFA; - font-size: 80%; -} -.search { color: #003399; - font-weight: bold; -} -FORM.search { - margin-bottom: 0px; - margin-top: 0px; -} -INPUT.search { font-size: 75%; - color: #000080; - font-weight: normal; - background-color: #e8eef2; -} -TD.tiny { font-size: 75%; -} -a { - color: #1A41A8; -} -a:visited { - color: #2A3798; -} -.dirtab { padding: 4px; - border-collapse: collapse; - border: 1px solid #84b0c7; -} -TH.dirtab { background: #e8eef2; - font-weight: bold; -} -HR { height: 1px; - border: none; - border-top: 1px solid black; -} - -/* Style for detailed member documentation */ -.memtemplate { - font-size: 80%; - color: #606060; - font-weight: normal; -} -.memnav { - background-color: #e8eef2; - border: 1px solid #84b0c7; - text-align: center; - margin: 2px; - margin-right: 15px; - padding: 2px; -} -.memitem { - padding: 4px; - background-color: #eef3f5; - border-width: 1px; - border-style: solid; - border-color: #dedeee; - -moz-border-radius: 8px 8px 8px 8px; -} -.memname { - white-space: nowrap; - font-weight: bold; -} -.memdoc{ - padding-left: 10px; -} -.memproto { - background-color: #d5e1e8; - width: 100%; - border-width: 1px; - border-style: solid; - border-color: #84b0c7; - font-weight: bold; - -moz-border-radius: 8px 8px 8px 8px; -} -.paramkey { - text-align: right; -} -.paramtype { - white-space: nowrap; -} -.paramname { - color: #602020; - font-style: italic; -} -/* End Styling for detailed member documentation */ - -/* for the tree view */ -.ftvtree { - font-family: sans-serif; - margin:0.5em; -} -.directory { font-size: 9pt; font-weight: bold; } -.directory h3 { 
margin: 0px; margin-top: 1em; font-size: 11pt; } -.directory > h3 { margin-top: 0; } -.directory p { margin: 0px; white-space: nowrap; } -.directory div { display: none; margin: 0px; } -.directory img { vertical-align: -30%; } - diff --git a/doc/html/doxygen.png b/doc/html/doxygen.png deleted file mode 100644 index f0a274b..0000000 Binary files a/doc/html/doxygen.png and /dev/null differ diff --git a/doc/html/excuse_8py-source.html b/doc/html/excuse_8py-source.html deleted file mode 100644 index 9858d7e..0000000 --- a/doc/html/excuse_8py-source.html +++ /dev/null @@ -1,163 +0,0 @@ - - -briteny: excuse.py Source File - - - - - -

excuse.py

00001 # -*- coding: utf-8 -*-
-00002 
-00003 # Copyright (C) 2001-2004 Anthony Towns <ajt@debian.org>
-00004 #                         Andreas Barth <aba@debian.org>
-00005 #                         Fabio Tranchitella <kobold@debian.org>
-00006 
-00007 # This program is free software; you can redistribute it and/or modify
-00008 # it under the terms of the GNU General Public License as published by
-00009 # the Free Software Foundation; either version 2 of the License, or
-00010 # (at your option) any later version.
-00011 
-00012 # This program is distributed in the hope that it will be useful,
-00013 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-00014 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-00015 # GNU General Public License for more details.
-00016 
-00017 import re
-00018 import string
-00019 
-00020 
-00021 class Excuse:
-00022     """Excuse class
-00023     
-00024     This class represents an update excuse, which is a detailed explanation
-00025     of why a package can or cannot be updated in the testing distribution from
-00026     a newer package in another distribution (like for example unstable).
-00027 
-00028     The main purpose of the excuses is to be written in an HTML file which
-00029     will be published over HTTP. The maintainers will be able to parse it
-00030     manually or automatically to find the explanation of why their packages
-00031     have been updated or not.
-00032     """
-00033 
-00034     ## @var reemail
-00035     # Regular expression for removing the email address
-00036     reemail = re.compile(r"<.*?>")
-00037 
-00038     def __init__(self, name):
-00039         """Class constructor
-00040         
-00041         This method initializes the excuse with the specified name and
-00042         the default values.
-00043         """
-00044         self.name = name
-00045         self.ver = ("-", "-")
-00046         self.maint = None
-00047         self.pri = None
-00048         self.date = None
-00049         self.urgency = None
-00050         self.daysold = None
-00051         self.mindays = None
-00052         self.section = None
-00053         self.dontinvalidate = 0
-00054 
-00055         self.invalid_deps = []
-00056         self.deps = []
-00057         self.sane_deps = []
-00058         self.break_deps = []
-00059         self.unsat_deps = {}
-00060         self.bugs = []
-00061         self.htmlline = []
-00062 
-00063     def set_vers(self, tver, uver):
-00064         """Set the testing and unstable versions"""
-00065         if tver: self.ver = (tver, self.ver[1])
-00066         if uver: self.ver = (self.ver[0], uver)
-00067 
-00068     def set_maint(self, maint):
-00069         """Set the package maintainer's name"""
-00070         self.maint = self.reemail.sub("", maint)
-00071 
-00072     def set_section(self, section):
-00073         """Set the section of the package"""
-00074         self.section = section
-00075 
-00076     def set_priority(self, pri):
-00077         """Set the priority of the package"""
-00078         self.pri = pri
-00079 
-00080     def set_date(self, date):
-00081         """Set the date of upload of the package"""
-00082         self.date = date
-00083 
-00084     def set_urgency(self, date):
-00085         """Set the urgency of upload of the package"""
-00086         self.urgency = date
-00087 
-00088     def add_dep(self, name):
-00089         """Add a dependency"""
-00090         if name not in self.deps: self.deps.append(name)
-00091 
-00092     def add_sane_dep(self, name):
-00093         """Add a sane dependency"""
-00094         if name not in self.sane_deps: self.sane_deps.append(name)
-00095 
-00096     def add_break_dep(self, name, arch):
-00097         """Add a break dependency"""
-00098         if (name, arch) not in self.break_deps:
-00099             self.break_deps.append( (name, arch) )
-00100 
-00101     def add_unsat_dep(self, arch):
-00102         """Add a flag for unsatisfied dependencies"""
-00103         self.unsat_deps[arch] = True
-00104 
-00105     def invalidate_dep(self, name):
-00106         """Invalidate dependency"""
-00107         if name not in self.invalid_deps: self.invalid_deps.append(name)
-00108 
-00109     def setdaysold(self, daysold, mindays):
-00110         """Set the number of days from the upload and the minimum number of days for the update"""
-00111         self.daysold = daysold
-00112         self.mindays = mindays
-00113 
-00114     def addhtml(self, note):
-00115         """Add a note in HTML"""
-00116         self.htmlline.append(note)
-00117 
-00118     def html(self):
-00119         """Render the excuse in HTML"""
-00120         res = "<a id=\"%s\" name=\"%s\">%s</a> (%s to %s)\n<ul>\n" % \
-00121             (self.name, self.name, self.name, self.ver[0], self.ver[1])
-00122         if self.maint:
-00123             res = res + "<li>Maintainer: %s\n" % (self.maint)
-00124         if self.section and string.find(self.section, "/") > -1:
-00125             res = res + "<li>Section: %s\n" % (self.section)
-00126         if self.daysold != None:
-00127             if self.daysold < self.mindays:
-00128                 res = res + ("<li>Too young, only %d of %d days old\n" %
-00129                 (self.daysold, self.mindays))
-00130             else:
-00131                 res = res + ("<li>%d days old (needed %d days)\n" %
-00132                 (self.daysold, self.mindays))
-00133         for x in self.htmlline:
-00134             res = res + "<li>" + x + "\n"
-00135         for x in self.deps:
-00136             if x in self.invalid_deps:
-00137                 res = res + "<li>Depends: %s <a href=\"#%s\">%s</a> (not considered)\n" % (self.name, x, x)
-00138             else:
-00139                 res = res + "<li>Depends: %s <a href=\"#%s\">%s</a>\n" % (self.name, x, x)
-00140         for (n,a) in self.break_deps:
-00141             if n not in self.deps:
-00142                 res += "<li>Ignoring %s depends: <a href=\"#%s\">%s</a>\n" % (a, n, n)
-00143         res = res + "</ul>\n"
-00144         return res
-

Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/files.html b/doc/html/files.html deleted file mode 100644 index f21e492..0000000 --- a/doc/html/files.html +++ /dev/null @@ -1,23 +0,0 @@ - - -briteny: File Index - - - - - -

briteny File List

Here is a list of all documented files with brief descriptions: - - -
britney.py [code]
excuse.py [code]
-
Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/functions.html b/doc/html/functions.html deleted file mode 100644 index 630da2d..0000000 --- a/doc/html/functions.html +++ /dev/null @@ -1,145 +0,0 @@ - - -briteny: Class Members - - - - - - -
- -
-
- -
- -

-Here is a list of all documented class members with links to the class documentation for each member: -

-

- _ -

-

- a -

-

- c -

-

- d -

-

- e -

-

- g -

-

- h -

-

- i -

-

- m -

-

- n -

-

- o -

-

- r -

-

- s -

-

- u -

-

- w -

-
Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/functions_func.html b/doc/html/functions_func.html deleted file mode 100644 index 95facf9..0000000 --- a/doc/html/functions_func.html +++ /dev/null @@ -1,142 +0,0 @@ - - -briteny: Class Members - Functions - - - - - - -
- -
-
- -
- -

-  -

-

- _ -

-

- a -

-

- c -

-

- d -

-

- e -

-

- g -

-

- h -

-

- i -

-

- m -

-

- n -

-

- o -

-

- r -

-

- s -

-

- u -

-

- w -

-
Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/functions_vars.html b/doc/html/functions_vars.html deleted file mode 100644 index 2a11ba9..0000000 --- a/doc/html/functions_vars.html +++ /dev/null @@ -1,38 +0,0 @@ - - -briteny: Class Members - Variables - - - - - - -
- -
-  -

-

-
Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/hierarchy.html b/doc/html/hierarchy.html deleted file mode 100644 index e55f482..0000000 --- a/doc/html/hierarchy.html +++ /dev/null @@ -1,29 +0,0 @@ - - -briteny: Hierarchical Index - - - - - - -

briteny Class Hierarchy

This inheritance list is sorted roughly, but not completely, alphabetically: -
Generated on Sun Jun 25 12:04:03 2006 for briteny by  - -doxygen 1.4.6
- - diff --git a/doc/html/index.html b/doc/html/index.html deleted file mode 100644 index b2e3692..0000000 --- a/doc/html/index.html +++ /dev/null @@ -1,21 +0,0 @@ - - -briteny: Main Page - - - - - -

briteny Documentation

-

-

2.0.alpha1


Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/namespacebritney.html b/doc/html/namespacebritney.html deleted file mode 100644 index 82c9a06..0000000 --- a/doc/html/namespacebritney.html +++ /dev/null @@ -1,27 +0,0 @@ - - -briteny: Package britney - - - - - -

Package britney

-

- - - - - -

Classes

class  Britney
-


Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/namespaceexcuse.html b/doc/html/namespaceexcuse.html deleted file mode 100644 index f4c09ec..0000000 --- a/doc/html/namespaceexcuse.html +++ /dev/null @@ -1,27 +0,0 @@ - - -briteny: Package excuse - - - - - -

Package excuse

-

- - - - - -

Classes

class  Excuse
-


Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/namespaces.html b/doc/html/namespaces.html deleted file mode 100644 index 7b294e2..0000000 --- a/doc/html/namespaces.html +++ /dev/null @@ -1,23 +0,0 @@ - - -briteny: Package List - - - - - -

briteny Package List

Here are the packages with brief descriptions (if available): - - -
britney
excuse
-
Generated on Fri Aug 18 23:23:25 2006 for briteny by  - -doxygen 1.4.7
- - diff --git a/doc/html/tab_b.gif b/doc/html/tab_b.gif deleted file mode 100644 index 0d62348..0000000 Binary files a/doc/html/tab_b.gif and /dev/null differ diff --git a/doc/html/tab_l.gif b/doc/html/tab_l.gif deleted file mode 100644 index 9b1e633..0000000 Binary files a/doc/html/tab_l.gif and /dev/null differ diff --git a/doc/html/tab_r.gif b/doc/html/tab_r.gif deleted file mode 100644 index ce9dd9f..0000000 Binary files a/doc/html/tab_r.gif and /dev/null differ diff --git a/doc/html/tabs.css b/doc/html/tabs.css deleted file mode 100644 index a61552a..0000000 --- a/doc/html/tabs.css +++ /dev/null @@ -1,102 +0,0 @@ -/* tabs styles, based on http://www.alistapart.com/articles/slidingdoors */ - -DIV.tabs -{ - float : left; - width : 100%; - background : url("tab_b.gif") repeat-x bottom; - margin-bottom : 4px; -} - -DIV.tabs UL -{ - margin : 0px; - padding-left : 10px; - list-style : none; -} - -DIV.tabs LI, DIV.tabs FORM -{ - display : inline; - margin : 0px; - padding : 0px; -} - -DIV.tabs FORM -{ - float : right; -} - -DIV.tabs A -{ - float : left; - background : url("tab_r.gif") no-repeat right top; - border-bottom : 1px solid #84B0C7; - font-size : x-small; - font-weight : bold; - text-decoration : none; -} - -DIV.tabs A:hover -{ - background-position: 100% -150px; -} - -DIV.tabs A:link, DIV.tabs A:visited, -DIV.tabs A:active, DIV.tabs A:hover -{ - color: #1A419D; -} - -DIV.tabs SPAN -{ - float : left; - display : block; - background : url("tab_l.gif") no-repeat left top; - padding : 5px 9px; - white-space : nowrap; -} - -DIV.tabs INPUT -{ - float : right; - display : inline; - font-size : 1em; -} - -DIV.tabs TD -{ - font-size : x-small; - font-weight : bold; - text-decoration : none; -} - - - -/* Commented Backslash Hack hides rule from IE5-Mac \*/ -DIV.tabs SPAN {float : none;} -/* End IE5-Mac hack */ - -DIV.tabs A:hover SPAN -{ - background-position: 0% -150px; -} - -DIV.tabs LI#current A -{ - background-position: 100% -150px; - border-width : 0px; -} - 
-DIV.tabs LI#current SPAN -{ - background-position: 0% -150px; - padding-bottom : 6px; -} - -DIV.nav -{ - background : none; - border : none; - border-bottom : 1px solid #84B0C7; -} diff --git a/doc/latex/FreeSans.ttf b/doc/latex/FreeSans.ttf deleted file mode 100644 index b550b90..0000000 Binary files a/doc/latex/FreeSans.ttf and /dev/null differ diff --git a/doc/latex/Helvetica.ttf b/doc/latex/Helvetica.ttf deleted file mode 100644 index 8051f8a..0000000 Binary files a/doc/latex/Helvetica.ttf and /dev/null differ diff --git a/doc/latex/Makefile b/doc/latex/Makefile deleted file mode 100644 index 776fcf9..0000000 --- a/doc/latex/Makefile +++ /dev/null @@ -1,39 +0,0 @@ -all: clean refman.dvi - -ps: refman.ps - -pdf: refman.pdf - -ps_2on1: refman_2on1.ps - -pdf_2on1: refman_2on1.pdf - -refman.ps: refman.dvi - dvips -o refman.ps refman.dvi - -refman.pdf: refman.ps - ps2pdf refman.ps refman.pdf - -refman.dvi: refman.tex doxygen.sty - echo "Running latex..." - latex refman.tex - echo "Running makeindex..." - makeindex refman.idx - echo "Rerunning latex...." - latex refman.tex - latex_count=5 ; \ - while egrep -s 'Rerun (LaTeX|to get cross-references right)' refman.log && [ $$latex_count -gt 0 ] ;\ - do \ - echo "Rerunning latex...." 
;\ - latex refman.tex ;\ - latex_count=`expr $$latex_count - 1` ;\ - done - -refman_2on1.ps: refman.ps - psnup -2 refman.ps >refman_2on1.ps - -refman_2on1.pdf: refman_2on1.ps - ps2pdf refman_2on1.ps refman_2on1.pdf - -clean: - rm -f *.ps *.dvi *.aux *.toc *.idx *.ind *.ilg *.log *.out refman.pdf diff --git a/doc/latex/annotated.tex b/doc/latex/annotated.tex deleted file mode 100644 index 9203e2a..0000000 --- a/doc/latex/annotated.tex +++ /dev/null @@ -1,5 +0,0 @@ -\section{briteny Class List} -Here are the classes, structs, unions and interfaces with brief descriptions:\begin{CompactList} -\item\contentsline{section}{\bf{britney.Britney} }{\pageref{classbritney_1_1Britney}}{} -\item\contentsline{section}{\bf{excuse.Excuse} }{\pageref{classexcuse_1_1Excuse}}{} -\end{CompactList} diff --git a/doc/latex/classbritney_1_1Britney.tex b/doc/latex/classbritney_1_1Britney.tex deleted file mode 100644 index 9857bff..0000000 --- a/doc/latex/classbritney_1_1Britney.tex +++ /dev/null @@ -1,906 +0,0 @@ -\section{britney.Britney Class Reference} -\label{classbritney_1_1Britney}\index{britney::Britney@{britney::Britney}} -\subsection*{Public Member Functions} -\begin{CompactItemize} -\item -def \bf{\_\-\_\-init\_\-\_\-} -\item -def \bf{read\_\-sources} -\item -def \bf{read\_\-binaries} -\item -def \bf{register\_\-reverses} -\item -def \bf{read\_\-bugs} -\item -def \bf{write\_\-bugs} -\item -def \bf{normalize\_\-bugs} -\item -def \bf{read\_\-dates} -\item -def \bf{write\_\-dates} -\item -def \bf{read\_\-urgencies} -\item -def \bf{read\_\-approvals} -\item -def \bf{read\_\-hints} -\item -def \bf{write\_\-heidi} -\item -def \bf{write\_\-controlfiles} -\item -def \bf{write\_\-nuninst} -\item -def \bf{read\_\-nuninst} -\item -def \bf{same\_\-source} -\item -def \bf{get\_\-dependency\_\-solvers} -\item -def \bf{excuse\_\-unsat\_\-deps} -\item -def \bf{should\_\-remove\_\-source} -\item -def \bf{should\_\-upgrade\_\-srcarch} -\item -def \bf{should\_\-upgrade\_\-src} -\item -def 
\bf{reversed\_\-exc\_\-deps} -\item -def \bf{invalidate\_\-excuses} -\item -def \bf{write\_\-excuses} -\item -def \bf{newlyuninst} -\item -def \bf{get\_\-nuninst} -\item -def \bf{eval\_\-nuninst} -\item -def \bf{eval\_\-uninst} -\item -def \bf{is\_\-nuninst\_\-asgood\_\-generous}\label{classbritney_1_1Britney_2f7401c8e41b8095bd99bd656bb41546} - -\item -def \bf{check\_\-installable} -\item -def \bf{check\_\-conflicts} -\item -def \bf{doop\_\-source} -\item -def \bf{get\_\-full\_\-tree} -\item -def \bf{iter\_\-packages} -\item -def \bf{do\_\-all} -\item -def \bf{upgrade\_\-testing} -\item -def \bf{do\_\-hint} -\item -def \bf{sort\_\-actions} -\item -def \bf{auto\_\-hinter} -\item -def \bf{old\_\-libraries} -\item -def \bf{old\_\-libraries\_\-format} -\item -def \bf{output\_\-write} -\item -def \bf{main} -\end{CompactItemize} -\subsection*{Static Public Attributes} -\begin{CompactItemize} -\item -tuple \bf{HINTS\_\-STANDARD} = (\char`\"{}easy\char`\"{}, \char`\"{}hint\char`\"{}, \char`\"{}remove\char`\"{}, \char`\"{}block\char`\"{}, \char`\"{}unblock\char`\"{}, \char`\"{}urgent\char`\"{}, \char`\"{}approve\char`\"{})\label{classbritney_1_1Britney_ebbe3f40cca59e2de275b0558556ee63} - -\item -tuple \bf{HINTS\_\-ALL} = (\char`\"{}force\char`\"{}, \char`\"{}force-hint\char`\"{}, \char`\"{}block-all\char`\"{})\label{classbritney_1_1Britney_a088d6fd96963f87f88c9c40cda10bfa} - -\end{CompactItemize} - - -\subsection{Detailed Description} - - -\footnotesize\begin{verbatim}Britney, the debian testing updater script - -This is the script that updates the testing_ distribution. It is executed -each day after the installation of the updated packages. It generates the -`Packages' files for the testing distribution, but it does so in an -intelligent manner; it try to avoid any inconsistency and to use only -non-buggy packages. - -For more documentation on this script, please read the Developers Reference. 
-\end{verbatim} -\normalsize - - - - -Definition at line 212 of file britney.py. - -\subsection{Member Function Documentation} -\index{britney::Britney@{britney::Britney}!__init__@{\_\-\_\-init\_\-\_\-}} -\index{__init__@{\_\-\_\-init\_\-\_\-}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.\_\-\_\-init\_\-\_\- ( {\em self})}\label{classbritney_1_1Britney_5846d81eace24f479292c47e30fd1851} - - - - -\footnotesize\begin{verbatim}Class constructor - -This method initializes and populates the data lists, which contain all -the information needed by the other methods of the class. -\end{verbatim} -\normalsize - - -Definition at line 218 of file britney.py.\index{britney::Britney@{britney::Britney}!auto_hinter@{auto\_\-hinter}} -\index{auto_hinter@{auto\_\-hinter}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.auto\_\-hinter ( {\em self})}\label{classbritney_1_1Britney_c9c109f54ddce53b17513405e07dba1a} - - - - -\footnotesize\begin{verbatim}Auto hint circular dependencies - -This method tries to auto hint circular dependencies analyzing the update -excuses relationships. If they build a circular dependency, which we already -know as not-working with the standard do_all algorithm, try to `easy` them. 
-\end{verbatim} -\normalsize - - -Definition at line 2536 of file britney.py.\index{britney::Britney@{britney::Britney}!check_conflicts@{check\_\-conflicts}} -\index{check_conflicts@{check\_\-conflicts}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.check\_\-conflicts ( {\em self}, {\em pkg}, {\em arch}, {\em broken}, {\em system}, {\em conflicts})}\label{classbritney_1_1Britney_998b3ddbaedbb6cdb732b2307477e96f} - - - - -\footnotesize\begin{verbatim}Check if a package can be installed satisfying the conflicts - -This method checks if the `pkg` package from the `arch` architecture -can be installed (excluding `broken` packages) within the system -`system` along with all its dependencies. This means that all the -conflicts relationships are checked in order to achieve the test -co-installability of the package. - -The method returns a boolean which is True if the given package is -co-installable in the given system. -\end{verbatim} -\normalsize - - -Definition at line 1661 of file britney.py.\index{britney::Britney@{britney::Britney}!check_installable@{check\_\-installable}} -\index{check_installable@{check\_\-installable}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.check\_\-installable ( {\em self}, {\em pkg}, {\em arch}, {\em suite}, {\em excluded} = {\tt []}, {\em conflicts} = {\tt False})}\label{classbritney_1_1Britney_4c06ddd116a40752c62d4395e92a97df} - - - - -\footnotesize\begin{verbatim}Check if a package is installable - -This method analyzes the dependencies of the binary package specified -by the parameter `pkg' for the architecture `arch' within the suite -`suite'. If the dependency can be satisfied in the given `suite` and -`conflicts` parameter is True, then the co-installability with -conflicts handling is checked. - -The dependency fields checked are Pre-Depends and Depends. 
- -The method returns a boolean which is True if the given package is -installable. -\end{verbatim} -\normalsize - - -Definition at line 1620 of file britney.py.\index{britney::Britney@{britney::Britney}!do_all@{do\_\-all}} -\index{do_all@{do\_\-all}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.do\_\-all ( {\em self}, {\em maxdepth} = {\tt 0}, {\em init} = {\tt None}, {\em actions} = {\tt None})}\label{classbritney_1_1Britney_3ef9b6f600eac492fc5aa4b31638198f} - - - - -\footnotesize\begin{verbatim}Testing update runner - -This method tries to update testing checking the uninstallability -counters before and after the actions to decide if the update was -successful or not. -\end{verbatim} -\normalsize - - -Definition at line 2258 of file britney.py.\index{britney::Britney@{britney::Britney}!do_hint@{do\_\-hint}} -\index{do_hint@{do\_\-hint}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.do\_\-hint ( {\em self}, {\em type}, {\em who}, {\em pkgvers})}\label{classbritney_1_1Britney_7bf70a5ad5754eb949205db3dc4b9c8a} - - - - -\footnotesize\begin{verbatim}Process hints - -This method process `easy`, `hint` and `force-hint` hints. If the -requested version is not in unstable, than the hint is skipped. -\end{verbatim} -\normalsize - - -Definition at line 2460 of file britney.py.\index{britney::Britney@{britney::Britney}!doop_source@{doop\_\-source}} -\index{doop_source@{doop\_\-source}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.doop\_\-source ( {\em self}, {\em pkg})}\label{classbritney_1_1Britney_22af61b8a7f6fe71a79d28b5016cae1d} - - - - -\footnotesize\begin{verbatim}Apply a change to the testing distribution as requested by `pkg` - -This method apply the changes required by the action `pkg` tracking -them so it will be possible to revert them. 
- -The method returns a list of the package name, the suite where the -package comes from, the list of packages affected by the change and -the dictionary undo which can be used to rollback the changes. -\end{verbatim} -\normalsize - - -Definition at line 1880 of file britney.py.\index{britney::Britney@{britney::Britney}!eval_nuninst@{eval\_\-nuninst}} -\index{eval_nuninst@{eval\_\-nuninst}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.eval\_\-nuninst ( {\em self}, {\em nuninst}, {\em original} = {\tt None})}\label{classbritney_1_1Britney_e5c18b706e6598474435b1996313cf27} - - - - -\footnotesize\begin{verbatim}Return a string which represents the uninstallability counters - -This method returns a string which represents the uninstallability -counters reading the uninstallability statistics `nuninst` and, if -present, merging the results with the `original` one. - -An example of the output string is: -1+2: i-0:a-0:a-0:h-0:i-1:m-0:m-0:p-0:a-0:m-0:s-2:s-0 - -where the first part is the number of broken packages in non-break -architectures + the total number of broken packages for all the -architectures. -\end{verbatim} -\normalsize - - -Definition at line 1568 of file britney.py.\index{britney::Britney@{britney::Britney}!eval_uninst@{eval\_\-uninst}} -\index{eval_uninst@{eval\_\-uninst}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.eval\_\-uninst ( {\em self}, {\em nuninst})}\label{classbritney_1_1Britney_b8dd18ead23b6e1126bb4c2a5c3cd8ba} - - - - -\footnotesize\begin{verbatim}Return a string which represents the uninstallable packages - -This method returns a string which represents the uninstallable -packages reading the uninstallability statistics `nuninst`. 
- -An example of the output string is: - * i386: broken-pkg1, broken-pkg2 -\end{verbatim} -\normalsize - - -Definition at line 1598 of file britney.py.\index{britney::Britney@{britney::Britney}!excuse_unsat_deps@{excuse\_\-unsat\_\-deps}} -\index{excuse_unsat_deps@{excuse\_\-unsat\_\-deps}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.excuse\_\-unsat\_\-deps ( {\em self}, {\em pkg}, {\em src}, {\em arch}, {\em suite}, {\em excuse}, {\em excluded} = {\tt []}, {\em conflicts} = {\tt False})}\label{classbritney_1_1Britney_f51c60a69f3a9dc2bc5afdb2ffaf3990} - - - - -\footnotesize\begin{verbatim}Find unsatisfied dependencies for a binary package - -This method analyzes the dependencies of the binary package specified -by the parameter `pkg', built from the source package `src', for the -architecture `arch' within the suite `suite'. If the dependency can't -be satisfied in testing and/or unstable, it updates the excuse passed -as parameter. - -The dependency fields checked are Pre-Depends and Depends. -\end{verbatim} -\normalsize - - -Definition at line 930 of file britney.py.\index{britney::Britney@{britney::Britney}!get_dependency_solvers@{get\_\-dependency\_\-solvers}} -\index{get_dependency_solvers@{get\_\-dependency\_\-solvers}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.get\_\-dependency\_\-solvers ( {\em self}, {\em block}, {\em arch}, {\em distribution}, {\em excluded} = {\tt []}, {\em strict} = {\tt False})}\label{classbritney_1_1Britney_5461f49e3e75a251ebedfd37d2a5ff0c} - - - - -\footnotesize\begin{verbatim}Find the packages which satisfy a dependency block - -This method returns the list of packages which satisfy a dependency -block (as returned by apt_pkg.ParseDepends) for the given architecture -and distribution. 
- -It returns a tuple with two items: the first is a boolean which is -True if the dependency is satisfied, the second is the list of the -solving packages. -\end{verbatim} -\normalsize - - -Definition at line 889 of file britney.py.\index{britney::Britney@{britney::Britney}!get_full_tree@{get\_\-full\_\-tree}} -\index{get_full_tree@{get\_\-full\_\-tree}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.get\_\-full\_\-tree ( {\em self}, {\em pkg}, {\em arch}, {\em suite})}\label{classbritney_1_1Britney_a923f6b90e2574e3bb2bb099b05925dd} - - - - -\footnotesize\begin{verbatim}Calculate the full dependency tree for the given package - -This method returns the full dependency tree for the package `pkg`, -inside the `arch` architecture for the suite `suite`. -\end{verbatim} -\normalsize - - -Definition at line 2022 of file britney.py.\index{britney::Britney@{britney::Britney}!get_nuninst@{get\_\-nuninst}} -\index{get_nuninst@{get\_\-nuninst}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.get\_\-nuninst ( {\em self}, {\em requested\_\-arch} = {\tt None}, {\em build} = {\tt False})}\label{classbritney_1_1Britney_41c5ee0b9d64b2e76a0c1a1c2b28c73e} - - - - -\footnotesize\begin{verbatim}Return the uninstallability statistic for all the architectures - -To calculate the uninstallability counters, the method checks the -installability of all the packages for all the architectures, and -tracking dependencies in a recursive way. The architecture -indipendent packages are checked only for the `nobreakall` -architectures. - -It returns a dictionary with the architectures as keys and the list -of uninstallable packages as values. 
-\end{verbatim} -\normalsize - - -Definition at line 1508 of file britney.py.\index{britney::Britney@{britney::Britney}!invalidate_excuses@{invalidate\_\-excuses}} -\index{invalidate_excuses@{invalidate\_\-excuses}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.invalidate\_\-excuses ( {\em self}, {\em valid}, {\em invalid})}\label{classbritney_1_1Britney_171969785db449d7a06c3f762774e0cd} - - - - -\footnotesize\begin{verbatim}Invalidate impossible excuses - -This method invalidates the impossible excuses, which depend -on invalid excuses. The two parameters contains the list of -`valid' and `invalid' excuses. -\end{verbatim} -\normalsize - - -Definition at line 1344 of file britney.py.\index{britney::Britney@{britney::Britney}!iter_packages@{iter\_\-packages}} -\index{iter_packages@{iter\_\-packages}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.iter\_\-packages ( {\em self}, {\em packages}, {\em selected}, {\em hint} = {\tt False}, {\em nuninst} = {\tt None})}\label{classbritney_1_1Britney_d453398832baaa7f477f720cfb643029} - - - - -\footnotesize\begin{verbatim}Iter on the list of actions and apply them one-by-one - -This method apply the changes from `packages` to testing, checking the uninstallability -counters for every action performed. If the action do not improve the it, it is reverted. -The method returns the new uninstallability counters and the remaining actions if the -final result is successful, otherwise (None, None). 
-\end{verbatim} -\normalsize - - -Definition at line 2038 of file britney.py.\index{britney::Britney@{britney::Britney}!main@{main}} -\index{main@{main}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.main ( {\em self})}\label{classbritney_1_1Britney_0e9551bdf927388f55be5ce15a48c94f} - - - - -\footnotesize\begin{verbatim}Main method - -This is the entry point for the class: it includes the list of calls -for the member methods which will produce the output files. -\end{verbatim} -\normalsize - - -Definition at line 2612 of file britney.py.\index{britney::Britney@{britney::Britney}!newlyuninst@{newlyuninst}} -\index{newlyuninst@{newlyuninst}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.newlyuninst ( {\em self}, {\em nuold}, {\em nunew})}\label{classbritney_1_1Britney_60ccd1851bcea5659fed15010541e244} - - - - -\footnotesize\begin{verbatim}Return a nuninst statstic with only new uninstallable packages - -This method subtract the uninstallabla packages of the statistic -`nunew` from the statistic `nuold`. - -It returns a dictionary with the architectures as keys and the list -of uninstallable packages as values. -\end{verbatim} -\normalsize - - -Definition at line 1493 of file britney.py.\index{britney::Britney@{britney::Britney}!normalize_bugs@{normalize\_\-bugs}} -\index{normalize_bugs@{normalize\_\-bugs}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.normalize\_\-bugs ( {\em self})}\label{classbritney_1_1Britney_5a6af4a100cfd54e872a27fa7f48ac3c} - - - - -\footnotesize\begin{verbatim}Normalize the release critical bug summaries for testing and unstable - -The method doesn't return any value: it directly modifies the -object attribute `bugs'. 
-\end{verbatim} -\normalsize - - -Definition at line 549 of file britney.py.\index{britney::Britney@{britney::Britney}!old_libraries@{old\_\-libraries}} -\index{old_libraries@{old\_\-libraries}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.old\_\-libraries ( {\em self})}\label{classbritney_1_1Britney_39c07e55b274ff98cb9cecb5f051bc5d} - - - - -\footnotesize\begin{verbatim}Detect old libraries left in testing for smooth transitions - -This method detect old libraries which are in testing but no longer -built from the source package: they are still there because other -packages still depend on them, but they should be removed as soon -as possible. -\end{verbatim} -\normalsize - - -Definition at line 2576 of file britney.py.\index{britney::Britney@{britney::Britney}!old_libraries_format@{old\_\-libraries\_\-format}} -\index{old_libraries_format@{old\_\-libraries\_\-format}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.old\_\-libraries\_\-format ( {\em self}, {\em libs})}\label{classbritney_1_1Britney_7c45f1c5b1dbd442fb9b85fef3fe1823} - - - - -\footnotesize\begin{verbatim}Format old libraries in a smart table\end{verbatim} -\normalsize - - -Definition at line 2596 of file britney.py.\index{britney::Britney@{britney::Britney}!output_write@{output\_\-write}} -\index{output_write@{output\_\-write}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.output\_\-write ( {\em self}, {\em msg})}\label{classbritney_1_1Britney_b0406f20fbbbb08cd3c6e5e6b35f77d5} - - - - -\footnotesize\begin{verbatim}Simple wrapper for output writing\end{verbatim} -\normalsize - - -Definition at line 2608 of file britney.py.\index{britney::Britney@{britney::Britney}!read_approvals@{read\_\-approvals}} -\index{read_approvals@{read\_\-approvals}!britney::Britney@{britney::Britney}} 
-\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.read\_\-approvals ( {\em self}, {\em basedir})}\label{classbritney_1_1Britney_39248f0cfea1c8798b2ca5a97d37eaf8} - - - - -\footnotesize\begin{verbatim}Read the approval commands from the specified directory - -The approval commands are read from the files contained by the -`Approved' directory within the directory specified as `basedir' -parameter. The name of the files has to be the same of the -authorized users for the approvals. - -The file contains rows with the format: - - - -The method returns a dictionary where the key is the binary package -name followed by an underscore and the version number, and the value -is the user who submitted the command. -\end{verbatim} -\normalsize - - -Definition at line 669 of file britney.py.\index{britney::Britney@{britney::Britney}!read_binaries@{read\_\-binaries}} -\index{read_binaries@{read\_\-binaries}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.read\_\-binaries ( {\em self}, {\em basedir}, {\em distribution}, {\em arch})}\label{classbritney_1_1Britney_1b2b0f42e4af1cee472f93e955b30421} - - - - -\footnotesize\begin{verbatim}Read the list of binary packages from the specified directory - -The binary packages are read from the `Packages_${arch}' files -within the directory specified as `basedir' parameter, replacing -${arch} with the value of the arch parameter. Considering the -large amount of memory needed, not all the fields are loaded -in memory. The available fields are Version, Source, Pre-Depends, -Depends, Conflicts, Provides and Architecture. - -After reading the packages, reverse dependencies are computed -and saved in the `rdepends' keys, and the `Provides' field is -used to populate the virtual packages list. - -The dependencies are parsed with the apt.pkg.ParseDepends method, -and they are stored both as the format of its return value and -text. - -The method returns a tuple. 
The first element is a list where -every item represents a binary package as a dictionary; the second -element is a dictionary which maps virtual packages to real -packages that provide it. -\end{verbatim} -\normalsize - - -Definition at line 367 of file britney.py.\index{britney::Britney@{britney::Britney}!read_bugs@{read\_\-bugs}} -\index{read_bugs@{read\_\-bugs}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.read\_\-bugs ( {\em self}, {\em basedir})}\label{classbritney_1_1Britney_6c777aae69e7bec2efebaf23ddd4a86c} - - - - -\footnotesize\begin{verbatim}Read the release critial bug summary from the specified directory - -The RC bug summaries are read from the `Bugs' file within the -directory specified as `basedir' parameter. The file contains -rows with the format: - - - -The method returns a dictionary where the key is the binary package -name and the value is the number of open RC bugs for it. -\end{verbatim} -\normalsize - - -Definition at line 493 of file britney.py.\index{britney::Britney@{britney::Britney}!read_dates@{read\_\-dates}} -\index{read_dates@{read\_\-dates}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.read\_\-dates ( {\em self}, {\em basedir})}\label{classbritney_1_1Britney_085af5ac906813ea40fc2e623748f517} - - - - -\footnotesize\begin{verbatim}Read the upload date for the packages from the specified directory - -The upload dates are read from the `Date' file within the directory -specified as `basedir' parameter. The file contains rows with the -format: - - - -The dates are expressed as days starting from the 1970-01-01. - -The method returns a dictionary where the key is the binary package -name and the value is tuple with two items, the version and the date. 
-\end{verbatim} -\normalsize - - -Definition at line 584 of file britney.py.\index{britney::Britney@{britney::Britney}!read_hints@{read\_\-hints}} -\index{read_hints@{read\_\-hints}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.read\_\-hints ( {\em self}, {\em basedir})}\label{classbritney_1_1Britney_46d535f617fcf1faaaf5d841ea23c184} - - - - -\footnotesize\begin{verbatim}Read the hint commands from the specified directory - -The hint commands are read from the files contained by the `Hints' -directory within the directory specified as `basedir' parameter. -The name of the files has to be the same of the authorized users -for the hints. - -The file contains rows with the format: - - [/] - -The method returns a dictionary where the key is the command, and -the value is the list of affected packages. -\end{verbatim} -\normalsize - - -Definition at line 695 of file britney.py.\index{britney::Britney@{britney::Britney}!read_nuninst@{read\_\-nuninst}} -\index{read_nuninst@{read\_\-nuninst}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.read\_\-nuninst ( {\em self})}\label{classbritney_1_1Britney_24fe2c117eadac8eb783e3cb86300265} - - - - -\footnotesize\begin{verbatim}Read the non-installable report\end{verbatim} -\normalsize - - -Definition at line 835 of file britney.py.\index{britney::Britney@{britney::Britney}!read_sources@{read\_\-sources}} -\index{read_sources@{read\_\-sources}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.read\_\-sources ( {\em self}, {\em basedir})}\label{classbritney_1_1Britney_054f44c47f17c0c4f5a069e821b7f868} - - - - -\footnotesize\begin{verbatim}Read the list of source packages from the specified directory - -The source packages are read from the `Sources' file within the -directory specified as `basedir' parameter. 
Considering the -large amount of memory needed, not all the fields are loaded -in memory. The available fields are Version, Maintainer and Section. - -The method returns a list where every item represents a source -package as a dictionary. -\end{verbatim} -\normalsize - - -Definition at line 340 of file britney.py.\index{britney::Britney@{britney::Britney}!read_urgencies@{read\_\-urgencies}} -\index{read_urgencies@{read\_\-urgencies}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.read\_\-urgencies ( {\em self}, {\em basedir})}\label{classbritney_1_1Britney_09fc27899506b4830b1961f125a7b6a4} - - - - -\footnotesize\begin{verbatim}Read the upload urgency of the packages from the specified directory - -The upload urgencies are read from the `Urgency' file within the -directory specified as `basedir' parameter. The file contains rows -with the format: - - - -The method returns a dictionary where the key is the binary package -name and the value is the greatest urgency from the versions of the -package that are higher then the testing one. -\end{verbatim} -\normalsize - - -Definition at line 624 of file britney.py.\index{britney::Britney@{britney::Britney}!register_reverses@{register\_\-reverses}} -\index{register_reverses@{register\_\-reverses}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.register\_\-reverses ( {\em self}, {\em pkg}, {\em packages}, {\em provides}, {\em check\_\-doubles} = {\tt True}, {\em parse\_\-depends} = {\tt apt\_\-pkg.ParseDepends})}\label{classbritney_1_1Britney_f86b93b64ac43c1d04fe88a6a8b1777e} - - - - -\footnotesize\begin{verbatim}Register reverse dependencies and conflicts for the specified package - -This method register the reverse dependencies and conflicts for -a give package using `packages` as list of packages and `provides` -as list of virtual packages. 
- -The method has an optional parameter parse_depends which is there -just for performance reasons and is not meant to be overwritten. -\end{verbatim} -\normalsize - - -Definition at line 451 of file britney.py.\index{britney::Britney@{britney::Britney}!reversed_exc_deps@{reversed\_\-exc\_\-deps}} -\index{reversed_exc_deps@{reversed\_\-exc\_\-deps}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.reversed\_\-exc\_\-deps ( {\em self})}\label{classbritney_1_1Britney_be1b4af9d6c6650c70b24267412bc1a8} - - - - -\footnotesize\begin{verbatim}Reverse the excuses dependencies - -This method returns a dictionary where the keys are the package names -and the values are the excuse names which depend on it. -\end{verbatim} -\normalsize - - -Definition at line 1331 of file britney.py.\index{britney::Britney@{britney::Britney}!same_source@{same\_\-source}} -\index{same_source@{same\_\-source}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.same\_\-source ( {\em self}, {\em sv1}, {\em sv2})}\label{classbritney_1_1Britney_85d2e45e8431779b62f398c34972ddf1} - - - - -\footnotesize\begin{verbatim}Check if two version numbers are built from the same source - -This method returns a boolean value which is true if the two -version numbers specified as parameters are built from the same -source. The main use of this code is to detect binary-NMU. 
-\end{verbatim} -\normalsize - - -Definition at line 850 of file britney.py.\index{britney::Britney@{britney::Britney}!should_remove_source@{should\_\-remove\_\-source}} -\index{should_remove_source@{should\_\-remove\_\-source}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.should\_\-remove\_\-source ( {\em self}, {\em pkg})}\label{classbritney_1_1Britney_f8a6c9adbdec7a5a982dd2b74febcc08} - - - - -\footnotesize\begin{verbatim}Check if a source package should be removed from testing - -This method checks if a source package should be removed from the -testing distribution; this happen if the source package is not -present in the unstable distribution anymore. - -It returns True if the package can be removed, False otherwise. -In the former case, a new excuse is appended to the the object -attribute excuses. -\end{verbatim} -\normalsize - - -Definition at line 990 of file britney.py.\index{britney::Britney@{britney::Britney}!should_upgrade_src@{should\_\-upgrade\_\-src}} -\index{should_upgrade_src@{should\_\-upgrade\_\-src}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.should\_\-upgrade\_\-src ( {\em self}, {\em src}, {\em suite})}\label{classbritney_1_1Britney_94785175a85f44b1afaf3add167a211f} - - - - -\footnotesize\begin{verbatim}Check if source package should be upgraded - -This method checks if a source package should be upgraded. The analysis -is performed for the source package specified by the `src' parameter, -checking the architecture `arch' for the distribution `suite'. - -It returns False if the given package doesn't need to be upgraded, -True otherwise. In the former case, a new excuse is appended to -the the object attribute excuses. 
-\end{verbatim} -\normalsize - - -Definition at line 1134 of file britney.py.\index{britney::Britney@{britney::Britney}!should_upgrade_srcarch@{should\_\-upgrade\_\-srcarch}} -\index{should_upgrade_srcarch@{should\_\-upgrade\_\-srcarch}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.should\_\-upgrade\_\-srcarch ( {\em self}, {\em src}, {\em arch}, {\em suite})}\label{classbritney_1_1Britney_bd18d7acde434387e94344a39db5b0e5} - - - - -\footnotesize\begin{verbatim}Check if binary package should be upgraded - -This method checks if a binary package should be upgraded; this can -happen also if the binary package is a binary-NMU for the given arch. -The analysis is performed for the source package specified by the -`src' parameter, checking the architecture `arch' for the distribution -`suite'. - -It returns False if the given package doesn't need to be upgraded, -True otherwise. In the former case, a new excuse is appended to -the the object attribute excuses. -\end{verbatim} -\normalsize - - -Definition at line 1020 of file britney.py.\index{britney::Britney@{britney::Britney}!sort_actions@{sort\_\-actions}} -\index{sort_actions@{sort\_\-actions}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.sort\_\-actions ( {\em self})}\label{classbritney_1_1Britney_70115e28e2ee6aa3a09d1efbd81b4a87} - - - - -\footnotesize\begin{verbatim}Sort actions in a smart way - -This method sorts the list of actions in a smart way. In details, it uses -as base sort the number of days the excuse is old, then reordering packages -so the ones with most reverse dependencies are at the end of the loop. -If an action depends on another one, it is put after it. 
-\end{verbatim} -\normalsize - - -Definition at line 2504 of file britney.py.\index{britney::Britney@{britney::Britney}!upgrade_testing@{upgrade\_\-testing}} -\index{upgrade_testing@{upgrade\_\-testing}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.upgrade\_\-testing ( {\em self})}\label{classbritney_1_1Britney_921effe0d64ed713c38888684a0a26d4} - - - - -\footnotesize\begin{verbatim}Upgrade testing using the unstable packages - -This method tries to upgrade testing using the packages from unstable. -Before running the do_all method, it tries the easy and force-hint -commands. -\end{verbatim} -\normalsize - - -Definition at line 2365 of file britney.py.\index{britney::Britney@{britney::Britney}!write_bugs@{write\_\-bugs}} -\index{write_bugs@{write\_\-bugs}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.write\_\-bugs ( {\em self}, {\em basedir}, {\em bugs})}\label{classbritney_1_1Britney_0a5591fc94b89d53d12cbb8f27d100a7} - - - - -\footnotesize\begin{verbatim}Write the release critical bug summary to the specified directory - -For a more detailed explanation of the format, please check the method -read_bugs. -\end{verbatim} -\normalsize - - -Definition at line 517 of file britney.py.\index{britney::Britney@{britney::Britney}!write_controlfiles@{write\_\-controlfiles}} -\index{write_controlfiles@{write\_\-controlfiles}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.write\_\-controlfiles ( {\em self}, {\em basedir}, {\em suite})}\label{classbritney_1_1Britney_1a3695a37b15ebd7e81cc33683fde461} - - - - -\footnotesize\begin{verbatim}Write the control files - -This method write the control files for the binary packages of all -the architectures and for the source packages. 
-\end{verbatim} -\normalsize - - -Definition at line 777 of file britney.py.\index{britney::Britney@{britney::Britney}!write_dates@{write\_\-dates}} -\index{write_dates@{write\_\-dates}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.write\_\-dates ( {\em self}, {\em basedir}, {\em dates})}\label{classbritney_1_1Britney_83e0bd993e7a099e6bf89d760183a76b} - - - - -\footnotesize\begin{verbatim}Write the upload date for the packages to the specified directory - -For a more detailed explanation of the format, please check the method -read_dates. -\end{verbatim} -\normalsize - - -Definition at line 610 of file britney.py.\index{britney::Britney@{britney::Britney}!write_excuses@{write\_\-excuses}} -\index{write_excuses@{write\_\-excuses}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.write\_\-excuses ( {\em self})}\label{classbritney_1_1Britney_010f6deffca32f7f71ecf1f5c1bb4985} - - - - -\footnotesize\begin{verbatim}Produce and write the update excuses - -This method handles the update excuses generation: the packages are -looked to determine whether they are valid candidates. For the details -of this procedure, please refer to the module docstring. 
-\end{verbatim} -\normalsize - - -Definition at line 1386 of file britney.py.\index{britney::Britney@{britney::Britney}!write_heidi@{write\_\-heidi}} -\index{write_heidi@{write\_\-heidi}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.write\_\-heidi ( {\em self}, {\em basedir}, {\em filename})}\label{classbritney_1_1Britney_20f846da79d595d36628c8da3195ac7a} - - - - -\footnotesize\begin{verbatim}Write the output HeidiResult - -This method write the output for Heidi, which contains all the -binary packages and the source packages in the form: - - - -\end{verbatim} -\normalsize - - -Definition at line 742 of file britney.py.\index{britney::Britney@{britney::Britney}!write_nuninst@{write\_\-nuninst}} -\index{write_nuninst@{write\_\-nuninst}!britney::Britney@{britney::Britney}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def britney.Britney.write\_\-nuninst ( {\em self}, {\em nuninst})}\label{classbritney_1_1Britney_1dd8c704e1dab571e4bb62ae15f6ee52} - - - - -\footnotesize\begin{verbatim}Write the non-installable report\end{verbatim} -\normalsize - - -Definition at line 827 of file britney.py. 
- -The documentation for this class was generated from the following file:\begin{CompactItemize} -\item -britney.py\end{CompactItemize} diff --git a/doc/latex/classexcuse_1_1Excuse.tex b/doc/latex/classexcuse_1_1Excuse.tex deleted file mode 100644 index 6723888..0000000 --- a/doc/latex/classexcuse_1_1Excuse.tex +++ /dev/null @@ -1,239 +0,0 @@ -\section{excuse.Excuse Class Reference} -\label{classexcuse_1_1Excuse}\index{excuse::Excuse@{excuse::Excuse}} -\subsection*{Public Member Functions} -\begin{CompactItemize} -\item -def \bf{\_\-\_\-init\_\-\_\-} -\item -def \bf{set\_\-vers} -\item -def \bf{set\_\-maint} -\item -def \bf{set\_\-section} -\item -def \bf{set\_\-priority} -\item -def \bf{set\_\-date} -\item -def \bf{set\_\-urgency} -\item -def \bf{add\_\-dep} -\item -def \bf{add\_\-sane\_\-dep} -\item -def \bf{add\_\-break\_\-dep} -\item -def \bf{add\_\-unsat\_\-dep} -\item -def \bf{invalidate\_\-dep} -\item -def \bf{setdaysold} -\item -def \bf{addhtml} -\item -def \bf{html} -\end{CompactItemize} -\subsection*{Static Public Attributes} -\begin{CompactItemize} -\item -tuple \bf{reemail} = re.compile(r\char`\"{}$<$.$\ast$?$>$\char`\"{})\label{classexcuse_1_1Excuse_bb15f55eed8f034db8a64b4ddc46460d} - -\begin{CompactList}\small\item\em Regular expression for removing the email address. \item\end{CompactList}\end{CompactItemize} - - -\subsection{Detailed Description} - - -\footnotesize\begin{verbatim}Excuse class - -This class represents an update excuse, which is a detailed explanation -of why a package can or cannot be updated in the testing distribution from -a newer package in another distribution (like for example unstable). - -The main purpose of the excuses is to be written in an HTML file which -will be published over HTTP. The maintainers will be able to parse it -manually or automatically to find the explanation of why their packages -have been updated or not. -\end{verbatim} -\normalsize - - - - -Definition at line 21 of file excuse.py. 
- -\subsection{Member Function Documentation} -\index{excuse::Excuse@{excuse::Excuse}!__init__@{\_\-\_\-init\_\-\_\-}} -\index{__init__@{\_\-\_\-init\_\-\_\-}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.\_\-\_\-init\_\-\_\- ( {\em self}, {\em name})}\label{classexcuse_1_1Excuse_4bdb0917f763d74951c621e466e98bdb} - - - - -\footnotesize\begin{verbatim}Class constructor - -This method initializes the excuse with the specified name and -the default values. -\end{verbatim} -\normalsize - - -Definition at line 28 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!add_break_dep@{add\_\-break\_\-dep}} -\index{add_break_dep@{add\_\-break\_\-dep}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.add\_\-break\_\-dep ( {\em self}, {\em name}, {\em arch})}\label{classexcuse_1_1Excuse_60e00fe0515f2dab003bd29baceedd34} - - - - -\footnotesize\begin{verbatim}Add a break dependency\end{verbatim} -\normalsize - - -Definition at line 86 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!add_dep@{add\_\-dep}} -\index{add_dep@{add\_\-dep}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.add\_\-dep ( {\em self}, {\em name})}\label{classexcuse_1_1Excuse_fa97c9f61fef17d6028491362153a766} - - - - -\footnotesize\begin{verbatim}Add a dependency\end{verbatim} -\normalsize - - -Definition at line 78 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!add_sane_dep@{add\_\-sane\_\-dep}} -\index{add_sane_dep@{add\_\-sane\_\-dep}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.add\_\-sane\_\-dep ( {\em self}, {\em name})}\label{classexcuse_1_1Excuse_8a89f54df8036e6f5c7c6bfa3f0cc6fc} - - - - -\footnotesize\begin{verbatim}Add a sane dependency\end{verbatim} -\normalsize - - -Definition at line 82 of file 
excuse.py.\index{excuse::Excuse@{excuse::Excuse}!add_unsat_dep@{add\_\-unsat\_\-dep}} -\index{add_unsat_dep@{add\_\-unsat\_\-dep}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.add\_\-unsat\_\-dep ( {\em self}, {\em arch})}\label{classexcuse_1_1Excuse_7c76b47749dc3da2b1988a6991664a40} - - - - -\footnotesize\begin{verbatim}Add a flag for unsatisfied dependencies\end{verbatim} -\normalsize - - -Definition at line 91 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!addhtml@{addhtml}} -\index{addhtml@{addhtml}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.addhtml ( {\em self}, {\em note})}\label{classexcuse_1_1Excuse_eb0a1ea0fae66a571e5efa703e53ba3a} - - - - -\footnotesize\begin{verbatim}Add a note in HTML\end{verbatim} -\normalsize - - -Definition at line 104 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!html@{html}} -\index{html@{html}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.html ( {\em self})}\label{classexcuse_1_1Excuse_84049740652a58b248fabdb3fa9d4b2c} - - - - -\footnotesize\begin{verbatim}Render the excuse in HTML\end{verbatim} -\normalsize - - -Definition at line 108 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!invalidate_dep@{invalidate\_\-dep}} -\index{invalidate_dep@{invalidate\_\-dep}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.invalidate\_\-dep ( {\em self}, {\em name})}\label{classexcuse_1_1Excuse_8594c46ccf4182fa8b37fe487bf53850} - - - - -\footnotesize\begin{verbatim}Invalidate dependency\end{verbatim} -\normalsize - - -Definition at line 95 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!set_date@{set\_\-date}} -\index{set_date@{set\_\-date}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.set\_\-date ( {\em self}, {\em 
date})}\label{classexcuse_1_1Excuse_ac01c3b9802ad26571f01b55ffc1098c} - - - - -\footnotesize\begin{verbatim}Set the date of upload of the package\end{verbatim} -\normalsize - - -Definition at line 70 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!set_maint@{set\_\-maint}} -\index{set_maint@{set\_\-maint}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.set\_\-maint ( {\em self}, {\em maint})}\label{classexcuse_1_1Excuse_189ec1709eef0bd8acb9cd093b8350b5} - - - - -\footnotesize\begin{verbatim}Set the package maintainer's name\end{verbatim} -\normalsize - - -Definition at line 58 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!set_priority@{set\_\-priority}} -\index{set_priority@{set\_\-priority}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.set\_\-priority ( {\em self}, {\em pri})}\label{classexcuse_1_1Excuse_3a0ebe3eb87c1af8f093e80a874ea0fa} - - - - -\footnotesize\begin{verbatim}Set the priority of the package\end{verbatim} -\normalsize - - -Definition at line 66 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!set_section@{set\_\-section}} -\index{set_section@{set\_\-section}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.set\_\-section ( {\em self}, {\em section})}\label{classexcuse_1_1Excuse_6b435fa4d19b929d9fb70c8d28688387} - - - - -\footnotesize\begin{verbatim}Set the section of the package\end{verbatim} -\normalsize - - -Definition at line 62 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!set_urgency@{set\_\-urgency}} -\index{set_urgency@{set\_\-urgency}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.set\_\-urgency ( {\em self}, {\em date})}\label{classexcuse_1_1Excuse_c504d40ac6d07ffdb08b7ff8ed555d10} - - - - -\footnotesize\begin{verbatim}Set the urgency of upload of the package\end{verbatim} 
-\normalsize - - -Definition at line 74 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!set_vers@{set\_\-vers}} -\index{set_vers@{set\_\-vers}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.set\_\-vers ( {\em self}, {\em tver}, {\em uver})}\label{classexcuse_1_1Excuse_b8751fc5d0033b4c734c476d92841d99} - - - - -\footnotesize\begin{verbatim}Set the testing and unstable versions\end{verbatim} -\normalsize - - -Definition at line 53 of file excuse.py.\index{excuse::Excuse@{excuse::Excuse}!setdaysold@{setdaysold}} -\index{setdaysold@{setdaysold}!excuse::Excuse@{excuse::Excuse}} -\subsubsection{\setlength{\rightskip}{0pt plus 5cm}def excuse.Excuse.setdaysold ( {\em self}, {\em daysold}, {\em mindays})}\label{classexcuse_1_1Excuse_cf1fa7c6fb741bbe7e3120113748f3a5} - - - - -\footnotesize\begin{verbatim}Set the number of days from the upload and the minimum number of days for the update\end{verbatim} -\normalsize - - -Definition at line 99 of file excuse.py. 
- -The documentation for this class was generated from the following file:\begin{CompactItemize} -\item -excuse.py\end{CompactItemize} diff --git a/doc/latex/doxygen.sty b/doc/latex/doxygen.sty deleted file mode 100644 index 8d3c774..0000000 --- a/doc/latex/doxygen.sty +++ /dev/null @@ -1,78 +0,0 @@ -\NeedsTeXFormat{LaTeX2e} -\ProvidesPackage{doxygen} -\RequirePackage{calc} -\RequirePackage{array} -\pagestyle{fancyplain} -\newcommand{\clearemptydoublepage}{\newpage{\pagestyle{empty}\cleardoublepage}} -\renewcommand{\chaptermark}[1]{\markboth{#1}{}} -\renewcommand{\sectionmark}[1]{\markright{\thesection\ #1}} -\lhead[\fancyplain{}{\bfseries\thepage}] - {\fancyplain{}{\bfseries\rightmark}} -\rhead[\fancyplain{}{\bfseries\leftmark}] - {\fancyplain{}{\bfseries\thepage}} -\rfoot[\fancyplain{}{\bfseries\scriptsize Generated on Fri Aug 18 23:23:25 2006 for briteny by Doxygen }]{} -\lfoot[]{\fancyplain{}{\bfseries\scriptsize Generated on Fri Aug 18 23:23:25 2006 for briteny by Doxygen }} -\cfoot{} -\newenvironment{Code} -{\footnotesize} -{\normalsize} -\newcommand{\doxyref}[3]{\textbf{#1} (\textnormal{#2}\,\pageref{#3})} -\newenvironment{DocInclude} -{\footnotesize} -{\normalsize} -\newenvironment{VerbInclude} -{\footnotesize} -{\normalsize} -\newenvironment{Image} -{\begin{figure}[H]} -{\end{figure}} -\newenvironment{ImageNoCaption}{}{} -\newenvironment{CompactList} -{\begin{list}{}{ - \setlength{\leftmargin}{0.5cm} - \setlength{\itemsep}{0pt} - \setlength{\parsep}{0pt} - \setlength{\topsep}{0pt} - \renewcommand{\makelabel}{\hfill}}} -{\end{list}} -\newenvironment{CompactItemize} -{ - \begin{itemize} - \setlength{\itemsep}{-3pt} - \setlength{\parsep}{0pt} - \setlength{\topsep}{0pt} - \setlength{\partopsep}{0pt} -} -{\end{itemize}} -\newcommand{\PBS}[1]{\let\temp=\\#1\let\\=\temp} -\newlength{\tmplength} -\newenvironment{TabularC}[1] -{ -\setlength{\tmplength} - {\linewidth/(#1)-\tabcolsep*2-\arrayrulewidth*(#1+1)/(#1)} - \par\begin{tabular*}{\linewidth} - 
{*{#1}{|>{\PBS\raggedright\hspace{0pt}}p{\the\tmplength}}|} -} -{\end{tabular*}\par} -\newcommand{\entrylabel}[1]{ - {\parbox[b]{\labelwidth-4pt}{\makebox[0pt][l]{\textbf{#1}}\vspace{1.5\baselineskip}}}} -\newenvironment{Desc} -{\begin{list}{} - { - \settowidth{\labelwidth}{40pt} - \setlength{\leftmargin}{\labelwidth} - \setlength{\parsep}{0pt} - \setlength{\itemsep}{-4pt} - \renewcommand{\makelabel}{\entrylabel} - } -} -{\end{list}} -\newenvironment{Indent} - {\begin{list}{}{\setlength{\leftmargin}{0.5cm}} - \item[]\ignorespaces} - {\unskip\end{list}} -\setlength{\parindent}{0cm} -\setlength{\parskip}{0.2cm} -\addtocounter{secnumdepth}{1} -\sloppy -\usepackage[T1]{fontenc} diff --git a/doc/latex/hierarchy.tex b/doc/latex/hierarchy.tex deleted file mode 100644 index d73be1d..0000000 --- a/doc/latex/hierarchy.tex +++ /dev/null @@ -1,5 +0,0 @@ -\section{briteny Class Hierarchy} -This inheritance list is sorted roughly, but not completely, alphabetically:\begin{CompactList} -\item \contentsline{section}{britney.Britney}{\pageref{classbritney_1_1Britney}}{} -\item \contentsline{section}{excuse.Excuse}{\pageref{classexcuse_1_1Excuse}}{} -\end{CompactList} diff --git a/doc/latex/namespacebritney.tex b/doc/latex/namespacebritney.tex deleted file mode 100644 index 1af6167..0000000 --- a/doc/latex/namespacebritney.tex +++ /dev/null @@ -1,9 +0,0 @@ -\section{Package britney} -\label{namespacebritney}\index{britney@{britney}} - - -\subsection*{Classes} -\begin{CompactItemize} -\item -class \bf{Britney} -\end{CompactItemize} diff --git a/doc/latex/namespaceexcuse.tex b/doc/latex/namespaceexcuse.tex deleted file mode 100644 index 4d878c0..0000000 --- a/doc/latex/namespaceexcuse.tex +++ /dev/null @@ -1,9 +0,0 @@ -\section{Package excuse} -\label{namespaceexcuse}\index{excuse@{excuse}} - - -\subsection*{Classes} -\begin{CompactItemize} -\item -class \bf{Excuse} -\end{CompactItemize} diff --git a/doc/latex/namespaces.tex b/doc/latex/namespaces.tex deleted file mode 100644 index 
1bf85c5..0000000 --- a/doc/latex/namespaces.tex +++ /dev/null @@ -1,5 +0,0 @@ -\section{briteny Package List} -Here are the packages with brief descriptions (if available):\begin{CompactList} -\item\contentsline{section}{\bf{britney} }{\pageref{namespacebritney}}{} -\item\contentsline{section}{\bf{excuse} }{\pageref{namespaceexcuse}}{} -\end{CompactList} diff --git a/doc/latex/refman.tex b/doc/latex/refman.tex deleted file mode 100644 index eacbe7f..0000000 --- a/doc/latex/refman.tex +++ /dev/null @@ -1,41 +0,0 @@ -\documentclass[a4paper]{book} -\usepackage{a4wide} -\usepackage{makeidx} -\usepackage{fancyhdr} -\usepackage{graphicx} -\usepackage{multicol} -\usepackage{float} -\usepackage{textcomp} -\usepackage{alltt} -\usepackage{doxygen} -\makeindex -\setcounter{tocdepth}{1} -\renewcommand{\footrulewidth}{0.4pt} -\begin{document} -\begin{titlepage} -\vspace*{7cm} -\begin{center} -{\Large briteny Reference Manual\\[1ex]\large 2.0.alpha1 }\\ -\vspace*{1cm} -{\large Generated by Doxygen 1.4.7}\\ -\vspace*{0.5cm} -{\small Fri Aug 18 23:23:25 2006}\\ -\end{center} -\end{titlepage} -\clearemptydoublepage -\pagenumbering{roman} -\tableofcontents -\clearemptydoublepage -\pagenumbering{arabic} -\chapter{briteny Namespace Index} -\input{namespaces} -\chapter{briteny Class Index} -\input{annotated} -\chapter{briteny Namespace Documentation} -\input{namespacebritney} -\include{namespaceexcuse} -\chapter{briteny Class Documentation} -\input{classbritney_1_1Britney} -\include{classexcuse_1_1Excuse} -\printindex -\end{document}