|
|
|
@ -312,93 +312,6 @@ def write_excuses(excuselist, dest_file, output_format="yaml"):
|
|
|
|
|
raise ValueError('Output format must be either "yaml or "legacy-html"')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def write_sources(sources_s, filename):
    """Write a sources file from Britney's state for a given suite

    Britney discards fields she does not care about, so the resulting
    file omits a lot of regular fields.
    """

    # Fields are emitted in this fixed order; a field whose value is
    # falsy is skipped entirely.
    field_order = ((VERSION, 'Version'), (SECTION, 'Section'),
                   (MAINTAINER, 'Maintainer'))

    with open(filename, 'w', encoding='utf-8') as out:
        for src_name, src_data in sources_s.items():
            body = "\n".join("%s: %s" % (field, src_data[key])
                             for key, field in field_order
                             if src_data[key])
            # Keep the trailing newline after "Package:" even when body
            # is empty, plus the paragraph-separating blank line.
            out.write("Package: %s\n%s\n\n" % (src_name, body))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def relation_atom_to_string(atom):
    """Take a parsed dependency and turn it into a string

    Renders an (package, version, operator) atom as "pkg" when the
    relation is unversioned, or "pkg (op version)" otherwise.
    """
    pkg, version, rel_op = atom
    if rel_op == '':
        # No version constraint: just the bare package name.
        return pkg
    # APT collapses "<<" and ">>" into "<" and ">"; restore the
    # doubled form when deparsing back to Debian relation syntax.
    if rel_op in ('<', '>'):
        rel_op = rel_op * 2
    return "%s (%s %s)" % (pkg, rel_op, version)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def write_controlfiles(target_suite):
    """Write the control files

    This method writes the control files for the binary packages of all
    the architectures and for the source packages. Note that Britney
    discards a lot of fields that she does not care about. Therefore,
    these files may omit a lot of regular fields.

    One "Packages_<arch>" file is written per architecture under the
    suite's base directory, followed by a single "Sources" file.
    """

    basedir = target_suite.path
    sources_s = target_suite.sources
    packages_s = target_suite.binaries

    # (internal-key, deb822-field-name) pairs, in the order the fields
    # are emitted for each binary package paragraph.
    key_pairs = ((SECTION, 'Section'), (ARCHITECTURE, 'Architecture'),
                 (MULTIARCH, 'Multi-Arch'), (SOURCE, 'Source'),
                 (VERSION, 'Version'), (DEPENDS, 'Depends'),
                 (PROVIDES, 'Provides'), (CONFLICTS, 'Conflicts'),
                 (ESSENTIAL, 'Essential'))

    for arch in packages_s:
        filename = os.path.join(basedir, 'Packages_%s' % arch)
        binaries = packages_s[arch]
        with open(filename, 'w', encoding='utf-8') as f:
            for pkg in binaries:
                output = "Package: %s\n" % pkg
                bin_data = binaries[pkg]
                for key, k in key_pairs:
                    # Falsy fields are omitted from the paragraph.
                    if not bin_data[key]:
                        continue
                    if key == SOURCE:
                        src = bin_data.source
                        # Maintainer is not stored per-binary; copy it
                        # from the corresponding source package record.
                        if sources_s[src].maintainer:
                            output += ("Maintainer: " + sources_s[src].maintainer + "\n")

                        # Source field, deb822-style: omitted entirely
                        # when it would equal "Package (Version)";
                        # the version suffix is only included when the
                        # binary and source versions differ.
                        if src == pkg:
                            if bin_data.source_version != bin_data.version:
                                source = src + " (" + bin_data.source_version + ")"
                            else: continue
                        else:
                            if bin_data.source_version != bin_data.version:
                                source = src + " (" + bin_data.source_version + ")"
                            else:
                                source = src
                        output += (k + ": " + source + "\n")
                    elif key == PROVIDES:
                        # Provides is stored as parsed relation atoms;
                        # deparse each one back to Debian syntax.
                        output += (k + ": " + ", ".join(relation_atom_to_string(p) for p in bin_data[key]) + "\n")
                    elif key == ESSENTIAL:
                        # NOTE(review): this emits "Essential:  yes" with a
                        # doubled space; deb822 parsers strip leading
                        # whitespace in values, so it is harmless — confirm
                        # before "fixing", as consumers may diff these files.
                        output += (k + ": " + " yes\n")
                    else:
                        output += (k + ": " + bin_data[key] + "\n")
                # Blank line terminates the package paragraph.
                f.write(output + "\n")

    write_sources(sources_s, os.path.join(basedir, 'Sources'))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def old_libraries(mi_factory, suite_info, outofsync_arches=frozenset()):
|
|
|
|
|
"""Detect old libraries left in the target suite for smooth transitions
|
|
|
|
|
|
|
|
|
|