Mirror of https://git.launchpad.net/ubuntu-dev-tools (synced 2025-03-12 15:41:09 +00:00)
Make pylint a little bit happier.
This commit is contained in:
parent f8d3f9f2f5, commit 0c8520ee8c
ack-sync (15 changed lines)
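The changes follow the same few pylint-driven patterns in every script: filter(lambda ...) calls become list comprehensions, terse names (f, v, ms, lp) become descriptive ones, and module-level script bodies move into a main() function. A minimal, hypothetical sketch of that style of cleanup (the names and surrounding code are illustrative, not taken from this tree):

# Before: changelog = filter(lambda f: f.endswith("debian/changelog"), files)
# After: a list comprehension with a descriptive loop variable, which avoids
# the builtin-filter and short-name complaints pylint raises on the old form.
def changelog_entries(changed_files):
    return [path for path in changed_files
            if path.endswith("debian/changelog")]

def main():
    # Wrapping the script body in main() keeps helper variables out of the
    # module namespace; the scripts in this commit gain exactly this wrapper.
    files = ["debian/changelog", "debian/control", "README"]
    print(changelog_entries(files))

if __name__ == "__main__":
    main()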
@@ -153,9 +153,9 @@ def unsubscribe_sponsors(launchpad, bug):
     print "ubuntu-sponsors unsubscribed"


-def main(bug_numbers, all_package, all_version, all_section, update,
-         all_uploader_email, key, upload, lpinstance, verbose=False,
-         silent=False):
+def ack_sync(bug_numbers, all_package, all_version, all_section, update,
+             all_uploader_email, key, upload, lpinstance, verbose=False,
+             silent=False):
     launchpad = get_launchpad("ubuntu-dev-tools", server=lpinstance)
     # TODO: use release-info (once available)
     series = launchpad.distributions["ubuntu"].current_series
@@ -333,7 +333,7 @@ def usage():
    -v, --verbose be more verbosive
    -V, --version=<version> set the version"""

-if __name__ == '__main__':
+def main():
     try:
         long_opts = ["help", "key=", "lvm=", "package=", "section=", "silent",
                      "update", "upload", "verbose", "version=", "with-sbuild",
@@ -429,5 +429,8 @@ if __name__ == '__main__':
         update = config.get_value('UPDATE_BUILDER', boolean=True)
     #TODO: Support WORKDIR

-    main(bug_numbers, package, version, section, update, uploader_email, key,
-         upload, lpinstance, verbose, silent)
+    ack_sync(bug_numbers, package, version, section, update, uploader_email,
+             key, upload, lpinstance, verbose, silent)
+
+if __name__ == '__main__':
+    main()
@@ -27,7 +27,7 @@ import tempfile
 import urllib

 from debian.deb822 import Dsc
-import launchpadlib.launchpad
+from launchpadlib.launchpad import Launchpad
 import lsb_release

 from ubuntutools.config import UDTConfig, ubu_email
@@ -134,8 +134,8 @@ def parse(args):

     return opts, args

-def find_release_package(lp, package, version, source_release):
-    ubuntu = lp.distributions['ubuntu']
+def find_release_package(launchpad, package, version, source_release):
+    ubuntu = launchpad.distributions['ubuntu']
     archive = ubuntu.main_archive
     series = ubuntu.getSeries(name_or_version=source_release)
     status = 'Published'
@@ -159,8 +159,8 @@ def find_release_package(lp, package, version, source_release):

     return srcpkg

-def find_version_package(lp, package, version):
-    ubuntu = lp.distributions['ubuntu']
+def find_version_package(launchpad, package, version):
+    ubuntu = launchpad.distributions['ubuntu']
     archive = ubuntu.main_archive
     try:
         # Might get more than one (i.e. same version in multiple
@@ -171,24 +171,25 @@ def find_version_package(lp, package, version):
         error('Version %s of package %s was never published in Ubuntu.' %
               (version, package))

-def dscurls_from_package(lp, mirror, workdir, package, version, source_release):
+def dscurls_from_package(launchpad, mirror, package, version, source_release):
     if not source_release and not version:
-        source_release = lp.distributions['ubuntu'].current_series.name
+        source_release = launchpad.distributions['ubuntu'].current_series.name

     # If source_release is specified, then version is just for verification
     if source_release:
-        srcpkg = find_release_package(lp, package, version, source_release)
+        srcpkg = find_release_package(launchpad, package, version,
+                                      source_release)
     else:
-        srcpkg = find_version_package(lp, package, version)
+        srcpkg = find_version_package(launchpad, package, version)

     urls = []
     if mirror:
         urls.append(dsc_url(mirror, srcpkg.component_name, package,
                             srcpkg.source_package_version))

-    for f in srcpkg.sourceFileUrls():
-        if f.endswith('.dsc'):
-            urls.append(urllib.unquote(f))
+    for source_file in srcpkg.sourceFileUrls():
+        if source_file.endswith('.dsc'):
+            urls.append(urllib.unquote(source_file))
             return urls
     else:
         error('Package %s contains no .dsc file.' % package)
@@ -201,13 +202,13 @@ def dscurl_from_dsc(package):
     # Can't resolve it as a local path? Let's just hope it's good as-is
     return package

-def fetch_package(lp, mirror, workdir, package, version, source_release):
+def fetch_package(launchpad, mirror, workdir, package, version, source_release):
     # Returns the path to the .dsc file that was fetched
     if package.endswith('.dsc'):
         dscs = [dscurl_from_dsc(package)]
     else:
-        dscs = dscurls_from_package(lp, mirror, workdir, package, version,
-                                    source_release)
+        dscs = dscurls_from_package(launchpad, mirror, package, version,
+                                    source_release)

     for dsc in dscs:
         cmd = ('dget', '--download-only', '--allow-unauthenticated', dsc)
@@ -217,12 +218,12 @@ def fetch_package(lp, mirror, workdir, package, version, source_release):
         return os.path.join(workdir, os.path.basename(dsc))

 def get_backport_version(version, suffix, upload, release):
-    v = version + ('~%s1' % release)
+    backport_version = version + ('~%s1' % release)
     if suffix is not None:
-        v += suffix
+        backport_version += suffix
     elif upload and upload.startswith('ppa:'):
-        v += '~ppa1'
-    return v
+        backport_version += '~ppa1'
+    return backport_version

 def get_backport_dist(upload, release):
     if not upload or upload == 'ubuntu':
@@ -293,14 +294,13 @@ def main(args):
     opts, (package_or_dsc,) = parse(args[1:])

     script_name = os.path.basename(sys.argv[0])
-    lp = launchpadlib.launchpad.Launchpad.login_anonymously(script_name,
-                                                            opts.lpinstance)
+    launchpad = Launchpad.login_anonymously(script_name, opts.lpinstance)

     if not opts.dest_releases:
-        distinfo = lsb_release.get_distro_information()
         try:
+            distinfo = lsb_release.get_distro_information()
             opts.dest_releases = [distinfo['CODENAME']]
-        except:
+        except KeyError:
             error('No destination release specified and unable to guess yours.')

     if opts.workdir:
@@ -312,7 +312,7 @@ def main(args):
             os.makedirs(workdir)

     try:
-        dscfile = fetch_package(lp,
+        dscfile = fetch_package(launchpad,
                                 opts.ubuntu_mirror,
                                 workdir,
                                 package_or_dsc,
sponsor-patch (171 changed lines)
@@ -23,93 +23,98 @@ import tempfile
 from ubuntutools.config import UDTConfig
 from ubuntutools.builder import get_builder
 from ubuntutools.logger import Logger
-from ubuntutools.sponsor_patch.main import main
+from ubuntutools.sponsor_patch.sponsor_patch import sponsor_patch

-script_name = os.path.basename(sys.argv[0])
-usage = "%s [options] <bug number>" % (script_name)
-epilog = "See %s(1) for more info." % (script_name)
-parser = optparse.OptionParser(usage=usage, epilog=epilog)
+def main():
+    script_name = os.path.basename(sys.argv[0])
+    usage = "%s [options] <bug number>" % (script_name)
+    epilog = "See %s(1) for more info." % (script_name)
+    parser = optparse.OptionParser(usage=usage, epilog=epilog)

-parser.add_option("-b", "--build", dest="build",
-                  help="Build the package with the specified builder.",
-                  action="store_true", default=False)
-parser.add_option("-B", "--builder", dest="builder", default=None,
-                  help="Specify the package builder (default pbuilder)")
-parser.add_option("-e", "--edit",
-                  help="launch sub-shell to allow editing of the patch",
-                  dest="edit", action="store_true", default=False)
-parser.add_option("-k", "--key", dest="keyid", default=None,
-                  help="Specify the key ID to be used for signing.")
-parser.add_option("-l", "--lpinstance", dest="lpinstance", default=None,
-                  help="Launchpad instance to connect to (default: production)",
-                  metavar="INSTANCE")
-parser.add_option("--no-conf", dest="no_conf", default=False,
-                  help="Don't read config files or environment variables.",
-                  action="store_true")
-parser.add_option("-s", "--sponsor", help="sponsoring; equals -b -u ubuntu",
-                  dest="sponsoring", action="store_true", default=False)
-parser.add_option("-u", "--upload", dest="upload", default=None,
-                  help="Specify an upload destination (default none).")
-parser.add_option("-U", "--update", dest="update", default=False,
-                  action="store_true",
-                  help="Update the build environment before building.")
-parser.add_option("-v", "--verbose", help="print more information",
-                  dest="verbose", action="store_true", default=False)
-parser.add_option("-w", "--workdir", dest="workdir", default=None,
-                  help="Specify a working directory.")
+    parser.add_option("-b", "--build", dest="build",
+                      help="Build the package with the specified builder.",
+                      action="store_true", default=False)
+    parser.add_option("-B", "--builder", dest="builder", default=None,
+                      help="Specify the package builder (default pbuilder)")
+    parser.add_option("-e", "--edit",
+                      help="launch sub-shell to allow editing of the patch",
+                      dest="edit", action="store_true", default=False)
+    parser.add_option("-k", "--key", dest="keyid", default=None,
+                      help="Specify the key ID to be used for signing.")
+    parser.add_option("-l", "--lpinstance", dest="lpinstance", default=None,
+                      help="Launchpad instance to connect to "
+                           "(default: production)",
+                      metavar="INSTANCE")
+    parser.add_option("--no-conf", dest="no_conf", default=False,
+                      help="Don't read config files or environment variables.",
+                      action="store_true")
+    parser.add_option("-s", "--sponsor", help="sponsoring; equals -b -u ubuntu",
+                      dest="sponsoring", action="store_true", default=False)
+    parser.add_option("-u", "--upload", dest="upload", default=None,
+                      help="Specify an upload destination (default none).")
+    parser.add_option("-U", "--update", dest="update", default=False,
+                      action="store_true",
+                      help="Update the build environment before building.")
+    parser.add_option("-v", "--verbose", help="print more information",
+                      dest="verbose", action="store_true", default=False)
+    parser.add_option("-w", "--workdir", dest="workdir", default=None,
+                      help="Specify a working directory.")

-(options, args) = parser.parse_args()
-Logger.set_verbosity(options.verbose)
+    (options, args) = parser.parse_args()
+    Logger.set_verbosity(options.verbose)

-if len(args) == 0:
-    Logger.error("No bug number specified.")
-    sys.exit(1)
-elif len(args) > 1:
-    Logger.error("Multiple bug numbers specified: %s" % (", ".join(args)))
-    sys.exit(1)
+    if len(args) == 0:
+        Logger.error("No bug number specified.")
+        sys.exit(1)
+    elif len(args) > 1:
+        Logger.error("Multiple bug numbers specified: %s" % (", ".join(args)))
+        sys.exit(1)

-bug_number = args[0]
-if bug_number.isdigit():
-    bug_number = int(bug_number)
-else:
-    Logger.error("Invalid bug number specified: %s" % (bug_number))
-    sys.exit(1)
+    bug_number = args[0]
+    if bug_number.isdigit():
+        bug_number = int(bug_number)
+    else:
+        Logger.error("Invalid bug number specified: %s" % (bug_number))
+        sys.exit(1)

-config = UDTConfig(options.no_conf)
-if options.builder is None:
-    options.builder = config.get_value("BUILDER")
-if options.lpinstance is None:
-    options.lpinstance = config.get_value("LPINSTANCE")
-if not options.update:
-    options.update = config.get_value("UPDATE_BUILDER", boolean=True)
-if options.workdir is None:
-    options.workdir = config.get_value("WORKDIR")
-
-builder = get_builder(options.builder)
-if not builder:
-    sys.exit(1)
-
-if options.sponsoring:
-    options.build = True
-    options.upload = "ubuntu"
-
-if not options.upload and not options.workdir:
-    Logger.error("Please specify either a working directory or an upload "
-                 "target!")
-    sys.exit(1)
-
-if options.workdir is None:
-    workdir = tempfile.mkdtemp(prefix=script_name+"-")
-else:
-    workdir = options.workdir
-
-try:
-    main(bug_number, options.build, builder, options.edit, options.keyid,
-         options.lpinstance, options.update, options.upload, workdir,
-         options.verbose)
-except KeyboardInterrupt:
-    print "\nUser abort."
-    sys.exit(2)
-finally:
+    config = UDTConfig(options.no_conf)
+    if options.builder is None:
+        options.builder = config.get_value("BUILDER")
+    if options.lpinstance is None:
+        options.lpinstance = config.get_value("LPINSTANCE")
+    if not options.update:
+        options.update = config.get_value("UPDATE_BUILDER", boolean=True)
     if options.workdir is None:
-        shutil.rmtree(workdir)
+        options.workdir = config.get_value("WORKDIR")
+
+    builder = get_builder(options.builder)
+    if not builder:
+        sys.exit(1)
+
+    if options.sponsoring:
+        options.build = True
+        options.upload = "ubuntu"
+
+    if not options.upload and not options.workdir:
+        Logger.error("Please specify either a working directory or an upload "
+                     "target!")
+        sys.exit(1)
+
+    if options.workdir is None:
+        workdir = tempfile.mkdtemp(prefix=script_name+"-")
+    else:
+        workdir = options.workdir
+
+    try:
+        sponsor_patch(bug_number, options.build, builder, options.edit,
+                      options.keyid, options.lpinstance, options.update,
+                      options.upload, workdir, options.verbose)
+    except KeyboardInterrupt:
+        print "\nUser abort."
+        sys.exit(2)
+    finally:
+        if options.workdir is None:
+            shutil.rmtree(workdir)
+
+if __name__ == "__main__":
+    main()
@@ -84,17 +84,17 @@ DEFAULT_WHITELISTED_EXTENSIONS = [
     ".xgf", # font source format: Xgridfit
 ]

-def main(whitelisted_mimetypes, whitelisted_extensions, directory,
-         verbose=False):
-    ms = magic.open(magic.MAGIC_MIME_TYPE)
-    ms.load()
+def suspicious_source(whitelisted_mimetypes, whitelisted_extensions, directory,
+                      verbose=False):
+    magic_cookie = magic.open(magic.MAGIC_MIME_TYPE)
+    magic_cookie.load()

     for root, dirs, files in os.walk(directory):
         for f in files:
-            mimetype = ms.file(os.path.join(root, f))
+            mimetype = magic_cookie.file(os.path.join(root, f))
             if mimetype not in whitelisted_mimetypes:
-                if not filter(lambda x: f.lower().endswith(x),
-                              whitelisted_extensions):
+                if not [x for x in whitelisted_extensions
+                        if f.lower().endswith(x)]:
                     if verbose:
                         print "%s (%s)" % (os.path.join(root, f), mimetype)
                     else:
@@ -103,7 +103,7 @@ def main(whitelisted_mimetypes, whitelisted_extensions, directory,
         if vcs_dir in dirs:
             dirs.remove(vcs_dir)

-if __name__ == "__main__":
+def main():
     script_name = os.path.basename(sys.argv[0])
     usage = "%s [options]" % (script_name)
     epilog = "See %s(1) for more info." % (script_name)
@@ -130,5 +130,8 @@ if __name__ == "__main__":
         sys.exit(1)

     whitelisted_extensions = [x.lower() for x in options.whitelisted_extensions]
-    main(options.whitelisted_mimetypes, whitelisted_extensions,
-         options.directory, options.verbose)
+    suspicious_source(options.whitelisted_mimetypes, whitelisted_extensions,
+                      options.directory, options.verbose)
+
+if __name__ == "__main__":
+    main()
syncpackage (69 changed lines)
@@ -117,31 +117,32 @@ def quote_parameter(parameter):
     return parameter

 def print_command(script_name, cmd):
-    print "%s: I: %s" % (script_name, " ".join(map(quote_parameter, cmd)))
+    cmd = [quote_parameter(x) for x in cmd]
+    print "%s: I: %s" % (script_name, " ".join(cmd))

 def remove_signature(dscname):
     '''Removes the signature from a .dsc file if the .dsc file is signed.'''

-    f = open(dscname)
-    if f.readline().strip() == "-----BEGIN PGP SIGNED MESSAGE-----":
+    dsc_file = open(dscname)
+    if dsc_file.readline().strip() == "-----BEGIN PGP SIGNED MESSAGE-----":
         unsigned_file = []
         # search until begin of body found
-        for line in f:
+        for line in dsc_file:
             if line.strip() == "":
                 break

         # search for end of body
-        for line in f:
+        for line in dsc_file:
             if line.strip() == "":
                 break
             unsigned_file.append(line)

-        f.close()
-        f = open(dscname, "w")
-        f.writelines(unsigned_file)
-        f.close()
+        dsc_file.close()
+        dsc_file = open(dscname, "w")
+        dsc_file.writelines(unsigned_file)
+        dsc_file.close()

-def dsc_getfiles(dscurl):
+def dsc_getfiles(dscurl, script_name):
     '''Return list of files in a .dsc file (excluding the .dsc file itself).'''

     basepath = os.path.dirname(dscurl)
@@ -154,17 +155,17 @@ def dsc_getfiles(dscurl):
         sys.exit(1)

     files = []
-    for f in dsc['Files']:
-        url = os.path.join(basepath, f['name'])
-        if not f['name'].endswith('.dsc'):
-            files.append(File(url, f['md5sum'], f['size']))
+    for source_file in dsc['Files']:
+        url = os.path.join(basepath, source_file['name'])
+        if not source_file['name'].endswith('.dsc'):
+            files.append(File(url, source_file['md5sum'], source_file['size']))
     return files

-def add_fixed_bugs(changes, bugs, script_name=None, verbose=False):
+def add_fixed_bugs(changes, bugs):
     '''Add additional Launchpad bugs to the list of fixed bugs in changes
     file.'''

-    changes = filter(lambda l: l.strip() != "", changes.split("\n"))
+    changes = [l for l in changes.split("\n") if l.strip() != ""]
     # Remove duplicates
     bugs = set(bugs)

@@ -208,8 +209,8 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
     try:
         ubuntu_source = getUbuntuSrcPkg(srcpkg, release)
         ubuntu_ver = Version(ubuntu_source.getVersion())
-        ubuntu_dsc = filter(lambda f: f.endswith(".dsc"),
-                            ubuntu_source.sourceFileUrls())
+        ubuntu_dsc = [f for f in ubuntu_source.sourceFileUrls()
+                      if f.endswith(".dsc")]
         assert len(ubuntu_dsc) == 1
         ubuntu_dsc = ubuntu_dsc[0]
     except udtexceptions.PackageNotFoundException:
@@ -226,7 +227,7 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
         print '%s: D: Source %s: current version %s, new version %s' % \
               (script_name, srcpkg, ubuntu_ver, new_ver)

-    files = dsc_getfiles(dscurl)
+    files = dsc_getfiles(dscurl, script_name)
     source_files = [f for f in files if f.is_source_file()]
     if verbose:
         print '%s: D: Files: %s' % (script_name,
@@ -238,7 +239,7 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
     if ubuntu_dsc is None:
         ubuntu_files = None
     else:
-        ubuntu_files = dsc_getfiles(ubuntu_dsc)
+        ubuntu_files = dsc_getfiles(ubuntu_dsc, script_name)

     # do we need the orig.tar.gz?
     need_orig = True
@@ -247,9 +248,8 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
         # We need to check if all .orig*.tar.* tarballs exist in Ubuntu
         need_orig = False
         for source_file in source_files:
-            ubuntu_file = filter(lambda f: f.get_name() ==
-                                 source_file.get_name(),
-                                 ubuntu_files)
+            ubuntu_file = [f for f in ubuntu_files
+                           if f.get_name() == source_file.get_name()]
             if len(ubuntu_file) == 0:
                 # The source file does not exist in Ubuntu
                 if verbose:
@@ -304,7 +304,8 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,

     if len(fakesync_files) == 0:
         # create the changes file
-        changes_file = "%s_%s_source.changes" % (srcpkg, new_ver.strip_epoch())
+        changes_filename = "%s_%s_source.changes" % \
+                           (srcpkg, new_ver.strip_epoch())
         cmd = ["dpkg-genchanges", "-S", "-v" + cur_ver.full_version,
                "-DDistribution=" + release,
                "-DOrigin=debian/" + debian_dist,
@@ -316,13 +317,13 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
         if not verbose:
             cmd += ["-q"]
         if verbose:
-            print_command(script_name, cmd + [">", "../" + changes_file])
+            print_command(script_name, cmd + [">", "../" + changes_filename])
         changes = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    env={"DEB_VENDOR": "Ubuntu"}).communicate()[0]

         # Add additional bug numbers
         if len(bugs) > 0:
-            changes = add_fixed_bugs(changes, bugs, verbose)
+            changes = add_fixed_bugs(changes, bugs)

         # remove extracted (temporary) files
         if verbose:
@@ -331,14 +332,14 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
         shutil.rmtree(directory, True)

         # write changes file
-        f = open(changes_file, "w")
-        f.writelines(changes)
-        f.close()
+        changes_file = open(changes_filename, "w")
+        changes_file.writelines(changes)
+        changes_file.close()

         # remove signature and sign package
         remove_signature(dscname)
         if keyid is not False:
-            cmd = ["debsign", changes_file]
+            cmd = ["debsign", changes_filename]
             if not keyid is None:
                 cmd.insert(1, "-k" + keyid)
             if verbose:
@@ -347,7 +348,8 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
     else:
         # Create fakesync changelog entry
         new_ver = Version(new_ver.full_version + "fakesync1")
-        changes_file = "%s_%s_source.changes" % (srcpkg, new_ver.strip_epoch())
+        changes_filename = "%s_%s_source.changes" % \
+                           (srcpkg, new_ver.strip_epoch())
         if len(bugs) > 0:
             message = "Fake sync due to mismatching orig tarball (LP: %s)." % \
                       (", ".join(["#" + str(b) for b in bugs]))
@@ -418,7 +420,7 @@ def get_debian_dscurl(package, dist, release, version=None, component=None):
                                package, dsc_file)
     return dscurl

-if __name__ == "__main__":
+def main():
     script_name = os.path.basename(sys.argv[0])
     usage = "%s [options] <.dsc URL/path or package name>" % (script_name)
     epilog = "See %s(1) for more info." % (script_name)
@@ -510,3 +512,6 @@ if __name__ == "__main__":
     sync_dsc(script_name, dscurl, options.dist, options.release,
              options.uploader_name, options.uploader_email, options.bugs,
              options.keyid, options.verbose)
+
+if __name__ == "__main__":
+    main()
@@ -136,8 +136,10 @@ class Sbuild(Builder):
                     ["sbuild-distupgrade"],
                     ["sbuild-clean", "-a", "-c"]]
         for cmd in commands:
+            #pylint: disable=W0631
             Logger.command(cmd + [chroot])
             ret = subprocess.call(cmd + [chroot])
+            #pylint: enable=W0631
             if ret != 0:
                 return self._update_failure(ret, dist)
         return 0
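The hunk above wraps two lines in a matching #pylint: disable/enable pair so the W0631 suppression (pylint's undefined-loop-variable check) stays local rather than file-wide. A small, hypothetical illustration of the same mechanism, not code from this tree:

# W0631 fires when a loop variable is read after the loop, since the loop
# may never have run; the disable/enable pair limits the suppression to the
# single line that relies on the variable.
def last_command(commands):
    for cmd in commands:
        print("running: %s" % cmd)
    #pylint: disable=W0631
    last = cmd  # only bound if 'commands' was non-empty
    #pylint: enable=W0631
    return last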
@@ -35,11 +35,11 @@ class Patch(object):
     def get_strip_level(self):
         strip_level = None
         if self.is_debdiff():
-            changelog = filter(lambda f: f.endswith("debian/changelog"),
-                               self.changed_files)[0]
+            changelog = [f for f in self.changed_files
+                         if f.endswith("debian/changelog")][0]
             strip_level = len(changelog.split(os.sep)) - 2
         return strip_level

     def is_debdiff(self):
-        return len(filter(lambda f: f.endswith("debian/changelog"),
-                          self.changed_files)) > 0
+        return len([f for f in self.changed_files
+                    if f.endswith("debian/changelog")]) > 0
@@ -97,7 +97,7 @@ def ask_for_manual_fixing():
 def get_patch_or_branch(bug):
     patch = None
     branch = None
-    attached_patches = filter(lambda a: a.type == "Patch", bug.attachments)
+    attached_patches = [a for a in bug.attachments if a.type == "Patch"]
     linked_branches = [b.branch for b in bug.linked_branches]
     if len(attached_patches) == 0 and len(linked_branches) == 0:
         if len(bug.attachments) == 0:
@@ -114,15 +114,16 @@ def get_patch_or_branch(bug):
             branch = linked_branches[0].bzr_identity
     else:
         if len(attached_patches) == 0:
-            Logger.normal("https://launchpad.net/bugs/%i has %i branches " \
-                          "linked:" % (bug.id, len(linked_branches)))
+            msg = "https://launchpad.net/bugs/%i has %i branches linked:" % \
+                  (bug.id, len(linked_branches))
         elif len(linked_branches) == 0:
-            Logger.normal("https://launchpad.net/bugs/%i has %i patches" \
-                          " attached:" % (bug.id, len(attached_patches)))
+            msg = "https://launchpad.net/bugs/%i has %i patches attached:" % \
+                  (bug.id, len(attached_patches))
         else:
-            Logger.normal("https://launchpad.net/bugs/%i has %i branch(es)" \
-                          " linked and %i patch(es) attached:" % \
-                          (bug.id, len(linked_branches), len(attached_patches)))
+            msg = ("https://launchpad.net/bugs/%i has %i branch(es) linked and "
+                   "%i patch(es) attached:") % (bug.id, len(linked_branches),
+                                                len(attached_patches))
+        Logger.normal(msg)
         i = 0
         for linked_branch in linked_branches:
             i += 1
@@ -205,8 +206,8 @@ def apply_patch(task, patch):
         edit = True
     return edit

-def main(bug_number, build, builder, edit, keyid, lpinstance, update, upload,
-         workdir, verbose=False):
+def sponsor_patch(bug_number, build, builder, edit, keyid, lpinstance, update,
+                  upload, workdir, verbose=False):
     workdir = os.path.expanduser(workdir)
     if not os.path.isdir(workdir):
         try:
@@ -228,7 +229,7 @@ def main(bug_number, build, builder, edit, keyid, lpinstance, update, upload,
     (patch, branch) = get_patch_or_branch(bug)

     bug_tasks = [BugTask(x, launchpad) for x in bug.bug_tasks]
-    ubuntu_tasks = filter(lambda x: x.is_ubuntu_task(), bug_tasks)
+    ubuntu_tasks = [x for x in bug_tasks if x.is_ubuntu_task()]
     if len(ubuntu_tasks) == 0:
         Logger.error("No Ubuntu bug task found on bug #%i." % (bug_number))
         sys.exit(1)
@@ -240,7 +241,7 @@ def main(bug_number, build, builder, edit, keyid, lpinstance, update, upload,
                      (len(ubuntu_tasks), bug_number))
         for task in ubuntu_tasks:
             print task.get_short_info()
-    open_ubuntu_tasks = filter(lambda x: not x.is_complete(), ubuntu_tasks)
+    open_ubuntu_tasks = [x for x in ubuntu_tasks if x.is_complete()]
     if len(open_ubuntu_tasks) == 1:
         task = open_ubuntu_tasks[0]
     else:
@@ -87,6 +87,8 @@ class WrapAndSortControl(Control):

 class Install(object):
     def __init__(self, filename):
+        self.content = None
+        self.filename = None
         self.open(filename)

     def open(self, filename):
@@ -97,9 +99,9 @@ class Install(object):
     def save(self, filename=None):
         if filename:
             self.filename = filename
-        f = open(self.filename, "w")
-        f.write("".join(self.content))
-        f.close()
+        install_file = open(self.filename, "w")
+        install_file.write("".join(self.content))
+        install_file.close()

     def sort(self):
         self.content = sorted(self.content)
@@ -120,11 +122,11 @@ def sort_list(unsorted_list):
     param = [x for x in unsorted_list if x.startswith("${")]
     return sorted(normal) + sorted(param)

-def main(options):
+def wrap_and_sort(options):
     debdir = lambda x: os.path.join(options.debian_directory, x)

-    control_files = filter(os.path.isfile,
-                           [debdir("control"), debdir("control.in")])
+    possible_control_files = [debdir("control"), debdir("control.in")]
+    control_files = [f for f in possible_control_files if os.path.isfile(f)]
     for control_file in control_files:
         if options.verbose:
             print control_file
@@ -135,8 +137,8 @@ def main(options):
                       options.sort_binary_packages, options.keep_first)
         control.save()

-    copyright_files = filter(os.path.isfile,
-                             [debdir("copyright"), debdir("copyright.in")])
+    possible_copyright_files = [debdir("copyright"), debdir("copyright.in")]
+    copyright_files = [f for f in possible_copyright_files if os.path.isfile(f)]
     for copyright_file in copyright_files:
         if options.verbose:
             print copyright_file
@@ -152,7 +154,7 @@ def main(options):
         install.sort()
         install.save()

-if __name__ == "__main__":
+def main():
     script_name = os.path.basename(sys.argv[0])
     usage = "%s [options]" % (script_name)
     epilog = "See %s(1) for more info." % (script_name)
@@ -196,4 +198,7 @@ if __name__ == "__main__":
                      options.debian_directory)
         sys.exit(1)

-    main(options)
+    wrap_and_sort(options)
+
+if __name__ == "__main__":
+    main()