mirror of https://git.launchpad.net/~ubuntu-release/britney/+git/britney2-ubuntu
synced 2025-02-13 23:38:20 +00:00

Add explicit encoding to all files

Signed-off-by: Julien Cristau <jcristau@debian.org>

parent f64f7072c0
commit 62c1c9ec3c

britney.py | 35
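
The change is mechanical but worth spelling out: in Python 3, open() in text mode without an encoding argument falls back to locale.getpreferredencoding(False), so the same britney run can read or write different bytes depending on the machine's locale. Passing encoding= explicitly pins the behaviour down. A minimal standalone sketch of the difference (not code from this repository):

    import locale

    # Without encoding=, text-mode open() uses the locale's preferred encoding,
    # which differs between systems (UTF-8 on most Linux setups, but not all).
    print(locale.getpreferredencoding(False))

    # With an explicit encoding, reads and writes are reproducible everywhere.
    with open("example.txt", "w", encoding="utf-8") as f:
        f.write("Maintainer: Jörg Müller\n")

    with open("example.txt", encoding="utf-8") as f:
        print(f.read())

Broadly, the hunks below pick utf-8 for files that may carry arbitrary text (configuration, hints, excuses, control files) and ascii for britney's own machine-readable state files.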
@@ -363,7 +363,7 @@ class Britney(object):
         # are handled as an ad-hoc case
         self.MINDAYS = {}
         self.HINTS = {'command-line': self.HINTS_ALL}
-        with open(self.options.config) as config:
+        with open(self.options.config, encoding='utf-8') as config:
             for line in config:
                 if '=' in line and not line.strip().startswith('#'):
                     k, v = line.split('=', 1)
@@ -675,7 +675,7 @@ class Britney(object):
         bugs = defaultdict(list)
         filename = os.path.join(basedir, "BugsV")
         self.__log("Loading RC bugs data from %s" % filename)
-        for line in open(filename):
+        for line in open(filename, encoding='ascii'):
             l = line.split()
             if len(l) != 2:
                 self.__log("Malformed line found in line %s" % (line), type='W')
@@ -745,7 +745,7 @@ class Britney(object):
         dates = {}
         filename = os.path.join(basedir, "Dates")
         self.__log("Loading upload data from %s" % filename)
-        for line in open(filename):
+        for line in open(filename, encoding='ascii'):
             l = line.split()
             if len(l) != 3: continue
             try:
@@ -762,10 +762,9 @@ class Britney(object):
         """
         filename = os.path.join(basedir, "Dates")
         self.__log("Writing upload data to %s" % filename)
-        f = open(filename, 'w')
-        for pkg in sorted(dates):
-            f.write("%s %s %d\n" % ((pkg,) + dates[pkg]))
-        f.close()
+        with open(filename, 'w', encoding='utf-8') as f:
+            for pkg in sorted(dates):
+                f.write("%s %s %d\n" % ((pkg,) + dates[pkg]))


     def read_urgencies(self, basedir):
@@ -785,7 +784,7 @@ class Britney(object):
         urgencies = {}
         filename = os.path.join(basedir, "Urgency")
         self.__log("Loading upload urgencies from %s" % filename)
-        for line in open(filename, errors='surrogateescape'):
+        for line in open(filename, errors='surrogateescape', encoding='ascii'):
             l = line.split()
             if len(l) != 3: continue

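
read_urgencies already opened the Urgency file with errors='surrogateescape'; adding encoding='ascii' only makes the base codec explicit. With that error handler, undecodable bytes become lone surrogate code points instead of raising, and they re-encode back to the original bytes. A small self-contained sketch (the sample bytes are invented, not britney data):

    # A line containing a byte that is not valid ASCII.
    raw = b"linux 4.4.0-1 medium \xe9\n"

    # surrogateescape maps each undecodable byte to a lone surrogate
    # (U+DC80..U+DCFF) instead of raising UnicodeDecodeError.
    text = raw.decode("ascii", errors="surrogateescape")

    # Re-encoding with the same handler round-trips the original bytes.
    assert text.encode("ascii", errors="surrogateescape") == raw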
@@ -839,7 +838,8 @@ class Britney(object):
                 self.__log("Cannot read hints list from %s, no such file!" % filename, type="E")
                 continue
             self.__log("Loading hints list from %s" % filename)
-            lines = open(filename)
+            with open(filename, encoding='utf-8') as f:
+                lines = f.readlines()
             for line in lines:
                 line = line.strip()
                 if line == "": continue
@@ -2858,16 +2858,15 @@ class Britney(object):
         else:
             self.upgrade_me = self.options.actions.split()

-        self.__output = open(self.options.upgrade_output, 'w')
-
-        # run the hint tester
-        if self.options.hint_tester:
-            self.hint_tester()
-        # run the upgrade test
-        else:
-            self.upgrade_testing()
-
-        self.__output.close()
+        with open(self.options.upgrade_output, 'w', encoding='utf-8') as f:
+            self.__output = f
+
+            # run the hint tester
+            if self.options.hint_tester:
+                self.hint_tester()
+            # run the upgrade test
+            else:
+                self.upgrade_testing()

     def _installability_test(self, pkg_name, pkg_version, pkg_arch, broken, to_check, nuninst_arch):
         """Test for installability of a package on an architecture
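
The upgrade_output rewrite is the one hunk that changes control flow rather than just adding an argument: the file object is bound inside the with block and the hint tester or upgrade test runs before the block exits, so self.__output is only used while the file is open and the trailing close() call goes away. A reduced sketch of that pattern with hypothetical names (Runner, _do_work), not the actual Britney class:

    class Runner:
        def run(self, output_path):
            # Keep all work that writes through self.__output inside the
            # with block: the file is guaranteed to be open while used and
            # is closed automatically afterwards, even on exceptions.
            with open(output_path, "w", encoding="utf-8") as f:
                self.__output = f
                self._do_work()

        def _do_work(self):
            self.__output.write("upgrade log line\n")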
@@ -306,7 +306,7 @@ def write_nuninst(filename, nuninst):
     Write the non-installable report derived from "nuninst" to the
     file denoted by "filename".
     """
-    with open(filename, 'w') as f:
+    with open(filename, 'w', encoding='utf-8') as f:
         # Having two fields with (almost) identical dates seems a bit
         # redundant.
         f.write("Built on: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n")
@@ -323,7 +323,7 @@ def read_nuninst(filename, architectures):
     will be included in the report.
     """
     nuninst = {}
-    with open(filename) as f:
+    with open(filename, encoding='ascii') as f:
         for r in f:
             if ":" not in r: continue
             arch, packages = r.strip().split(":", 1)
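
For britney's own machine-generated state files, encoding='ascii' is a deliberately strict choice: a stray non-ASCII byte raises immediately rather than being decoded with whatever the locale happens to provide. An illustrative snippet (the file name and contents are made up):

    # Create a state-style file that accidentally contains non-ASCII bytes.
    with open("nuninst.txt", "wb") as f:
        f.write(b"amd64: pkg-a pkg-b\npowerpc: caf\xc3\xa9\n")

    try:
        with open("nuninst.txt", encoding="ascii") as f:
            f.read()
    except UnicodeDecodeError as exc:
        # Fails fast on the bad byte instead of silently depending on the locale.
        print("rejected:", exc)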
@@ -381,7 +381,7 @@ def write_heidi(filename, sources_t, packages_t,
     The "X=X" parameters are optimizations to avoid "load global" in
     the loops.
     """
-    with open(filename, 'w') as f:
+    with open(filename, 'w', encoding='ascii') as f:

         # write binary packages
         for arch in sorted(packages_t):
@@ -420,7 +420,7 @@ def write_heidi_delta(filename, all_selected):

     The order corresponds to that shown in update_output.
     """
-    with open(filename, "w") as fd:
+    with open(filename, "w", encoding='ascii') as fd:

         fd.write("#HeidiDelta\n")

@@ -456,7 +456,7 @@ def write_excuses(excuses, dest_file, output_format="yaml"):
     or "legacy-html".
     """
     if output_format == "yaml":
-        with open(dest_file, 'w') as f:
+        with open(dest_file, 'w', encoding='utf-8') as f:
             excuselist = []
             for e in excuses:
                 excuselist.append(e.excusedata())
@@ -465,7 +465,7 @@ def write_excuses(excuses, dest_file, output_format="yaml"):
         excusesdata["generated-date"] = datetime.utcnow()
         f.write(yaml.dump(excusesdata, default_flow_style=False, allow_unicode=True))
     elif output_format == "legacy-html":
-        with open(dest_file, 'w') as f:
+        with open(dest_file, 'w', encoding='utf-8') as f:
             f.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n")
             f.write("<html><head><title>excuses...</title>")
             f.write("<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\"></head><body>\n")
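
For the YAML excuses output, yaml.dump is already called with allow_unicode=True, so non-ASCII characters are emitted literally rather than escaped; opening the destination with encoding='utf-8' keeps that output identical regardless of locale. A small sketch assuming PyYAML and an invented excuses entry:

    import yaml

    excusesdata = {"sources": [{"item-name": "café", "reason": ["autopkgtest"]}]}

    with open("excuses.yaml", "w", encoding="utf-8") as f:
        # allow_unicode=True writes 'café' literally; the explicit utf-8
        # encoding makes that safe whatever the locale says.
        f.write(yaml.dump(excusesdata, default_flow_style=False, allow_unicode=True))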
@@ -488,7 +488,7 @@ def write_sources(sources_s, filename):
     key_pairs = ((VERSION, 'Version'), (SECTION, 'Section'),
                  (MAINTAINER, 'Maintainer'))

-    with open(filename, 'w') as f:
+    with open(filename, 'w', encoding='utf-8') as f:
         for src in sources_s:
             src_data = sources_s[src]
             output = "Package: %s\n" % src
@@ -518,7 +518,7 @@ def write_controlfiles(sources, packages, suite, basedir):
     for arch in packages_s:
         filename = os.path.join(basedir, 'Packages_%s' % arch)
         binaries = packages_s[arch][0]
-        with open(filename, 'w') as f:
+        with open(filename, 'w', encoding='utf-8') as f:
             for pkg in binaries:
                 output = "Package: %s\n" % pkg
                 bin_data = binaries[pkg]
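
Packages and Sources control data can legitimately contain non-ASCII text, most obviously in Maintainer names, so utf-8 is the natural choice for write_sources and write_controlfiles. An illustrative sketch with a made-up binaries mapping (not britney's internal data structure):

    # Hypothetical per-package fields; britney stores these differently.
    binaries = {
        "hello": {"Version": "2.10-3", "Maintainer": "Jörg Müller <jm@example.org>"},
    }

    with open("Packages_amd64", "w", encoding="utf-8") as f:
        for pkg, fields in binaries.items():
            f.write("Package: %s\n" % pkg)
            for key, value in fields.items():
                f.write("%s: %s\n" % (key, value))
            f.write("\n")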