Mirror of https://git.launchpad.net/~ubuntu-release/britney/+git/britney2-ubuntu (synced 2025-06-08 08:11:33 +00:00)
Remove --control-files; it is not used anywhere
Signed-off-by: Niels Thykier <niels@thykier.net>
parent 603aa9fdca · commit 2fea931da3

Changed files:
    britney.py        | 15
    britney2/utils.py | 87
britney.py
@@ -207,7 +207,7 @@ from britney2.policies.autopkgtest import AutopkgtestPolicy
 from britney2.utils import (log_and_format_old_libraries, get_dependency_solvers,
                             read_nuninst, write_nuninst, write_heidi,
                             format_and_log_uninst, newly_uninst,
-                            write_excuses, write_heidi_delta, write_controlfiles,
+                            write_excuses, write_heidi_delta,
                             old_libraries, is_nuninst_asgood_generous,
                             clone_nuninst,
                             invalidate_excuses, compile_nuninst,
@@ -396,8 +396,6 @@ class Britney(object):
                           help="provide a command line interface to test hints")
         parser.add_option("", "--dry-run", action="store_true", dest="dry_run", default=False,
                           help="disable all outputs to the testing directory")
-        parser.add_option("", "--control-files", action="store_true", dest="control_files", default=False,
-                          help="enable control files generation")
         parser.add_option("", "--nuninst-cache", action="store_true", dest="nuninst_cache", default=False,
                           help="do not build the non-installability status, use the cache from file")
         parser.add_option("", "--print-uninst", action="store_true", dest="print_uninst", default=False,
@@ -470,12 +468,6 @@ class Britney(object):
         self.options.break_arches = suite_loader.break_arches
         self.options.new_arches = suite_loader.new_arches
 
-        if self.options.control_files and self.options.components:  # pragma: no cover
-            # We cannot regenerate the control files correctly when reading from an
-            # actual mirror (we don't which package goes in what component etc.).
-            self.logger.error("Cannot use --control-files with mirror-layout (components)!")
-            sys.exit(1)
-
         if not hasattr(self.options, "heidi_delta_output"):
             self.options.heidi_delta_output = self.options.heidi_output + "Delta"
 
@@ -1817,11 +1809,6 @@ class Britney(object):
         # output files
         if not self.options.dry_run:
             target_suite = self.suite_info.target_suite
-            # re-write control files
-            if self.options.control_files:
-                self.logger.info("Writing new control files for the target suite to %s",
-                                 target_suite.path)
-                write_controlfiles(target_suite)
 
             self._policy_engine.save_state(self)
 
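One practical consequence of dropping the add_option call above: britney.py builds its command line with optparse, which rejects options it does not recognise, so an invocation that still passes --control-files will now abort with a usage error rather than enable control-file generation. A minimal standalone sketch of that optparse behaviour (a hypothetical parser, not Britney's real option setup):

from optparse import OptionParser

# Stand-in parser: once an option's add_option(...) line is removed,
# optparse no longer recognises the flag at all.
parser = OptionParser()
parser.add_option("", "--dry-run", action="store_true", dest="dry_run", default=False,
                  help="disable all outputs to the testing directory")

opts, args = parser.parse_args(["--dry-run"])
print(opts.dry_run)                        # True

# parser.parse_args(["--control-files"])   # prints a usage error
#                                          # ("no such option: --control-files") and exits with status 2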
britney2/utils.py
@@ -312,93 +312,6 @@ def write_excuses(excuselist, dest_file, output_format="yaml"):
         raise ValueError('Output format must be either "yaml or "legacy-html"')
 
 
-def write_sources(sources_s, filename):
-    """Write a sources file from Britney's state for a given suite
-
-    Britney discards fields she does not care about, so the resulting
-    file omits a lot of regular fields.
-    """
-
-    key_pairs = ((VERSION, 'Version'), (SECTION, 'Section'),
-                 (MAINTAINER, 'Maintainer'))
-
-    with open(filename, 'w', encoding='utf-8') as f:
-        for src in sources_s:
-            src_data = sources_s[src]
-            output = "Package: %s\n" % src
-            output += "\n".join(k + ": "+ src_data[key]
-                                for key, k in key_pairs if src_data[key])
-            f.write(output + "\n\n")
-
-
-def relation_atom_to_string(atom):
-    """Take a parsed dependency and turn it into a string
-    """
-    pkg, version, rel_op = atom
-    if rel_op != '':
-        if rel_op in ('<', '>'):
-            # APT translate "<<" and ">>" into "<" and ">". We have
-            # deparse those into the original form.
-            rel_op += rel_op
-        return "%s (%s %s)" % (pkg, rel_op, version)
-    return pkg
-
-
-def write_controlfiles(target_suite):
-    """Write the control files
-
-    This method writes the control files for the binary packages of all
-    the architectures and for the source packages. Note that Britney
-    discards a lot of fields that she does not care about. Therefore,
-    these files may omit a lot of regular fields.
-    """
-
-    basedir = target_suite.path
-    sources_s = target_suite.sources
-    packages_s = target_suite.binaries
-
-    key_pairs = ((SECTION, 'Section'), (ARCHITECTURE, 'Architecture'),
-                 (MULTIARCH, 'Multi-Arch'), (SOURCE, 'Source'),
-                 (VERSION, 'Version'), (DEPENDS, 'Depends'),
-                 (PROVIDES, 'Provides'), (CONFLICTS, 'Conflicts'),
-                 (ESSENTIAL, 'Essential'))
-
-    for arch in packages_s:
-        filename = os.path.join(basedir, 'Packages_%s' % arch)
-        binaries = packages_s[arch]
-        with open(filename, 'w', encoding='utf-8') as f:
-            for pkg in binaries:
-                output = "Package: %s\n" % pkg
-                bin_data = binaries[pkg]
-                for key, k in key_pairs:
-                    if not bin_data[key]:
-                        continue
-                    if key == SOURCE:
-                        src = bin_data.source
-                        if sources_s[src].maintainer:
-                            output += ("Maintainer: " + sources_s[src].maintainer + "\n")
-
-                        if src == pkg:
-                            if bin_data.source_version != bin_data.version:
-                                source = src + " (" + bin_data.source_version + ")"
-                            else: continue
-                        else:
-                            if bin_data.source_version != bin_data.version:
-                                source = src + " (" + bin_data.source_version + ")"
-                            else:
-                                source = src
-                        output += (k + ": " + source + "\n")
-                    elif key == PROVIDES:
-                        output += (k + ": " + ", ".join(relation_atom_to_string(p) for p in bin_data[key]) + "\n")
-                    elif key == ESSENTIAL:
-                        output += (k + ": " + " yes\n")
-                    else:
-                        output += (k + ": " + bin_data[key] + "\n")
-                f.write(output + "\n")
-
-    write_sources(sources_s, os.path.join(basedir, 'Sources'))
-
-
 def old_libraries(mi_factory, suite_info, outofsync_arches=frozenset()):
     """Detect old libraries left in the target suite for smooth transitions
 
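One detail in the deleted helpers that is easy to miss: APT normalises the strict relational operators "<<" and ">>" to "<" and ">", and relation_atom_to_string doubled them back when rendering Provides entries. Below is a runnable restatement of just that helper with example inputs; it is lifted from the removed code above and no longer exists in the tree after this commit.

def relation_atom_to_string(atom):
    """Render a parsed (package, version, operator) atom as a dependency string."""
    pkg, version, rel_op = atom
    if rel_op != '':
        if rel_op in ('<', '>'):
            # APT stores "<<" and ">>" as "<" and ">"; restore the original spelling.
            rel_op += rel_op
        return "%s (%s %s)" % (pkg, rel_op, version)
    return pkg

print(relation_atom_to_string(('libfoo1', '1.2-3', '>')))   # libfoo1 (>> 1.2-3)
print(relation_atom_to_string(('libfoo1', '1.2-3', '=')))   # libfoo1 (= 1.2-3)
print(relation_atom_to_string(('libfoo1', '', '')))         # libfoo1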