mirror of https://git.launchpad.net/ubuntu-dev-tools
synced 2025-03-16 01:21:07 +00:00

commit 0c8520ee8c (parent f8d3f9f2f5)
Make pylint a little bit happier.
ack-sync (15 lines changed)
@@ -153,9 +153,9 @@ def unsubscribe_sponsors(launchpad, bug):
     print "ubuntu-sponsors unsubscribed"


-def main(bug_numbers, all_package, all_version, all_section, update,
-         all_uploader_email, key, upload, lpinstance, verbose=False,
-         silent=False):
+def ack_sync(bug_numbers, all_package, all_version, all_section, update,
+             all_uploader_email, key, upload, lpinstance, verbose=False,
+             silent=False):
     launchpad = get_launchpad("ubuntu-dev-tools", server=lpinstance)
     # TODO: use release-info (once available)
     series = launchpad.distributions["ubuntu"].current_series
@@ -333,7 +333,7 @@ def usage():
 -v, --verbose                  be more verbosive
 -V, --version=<version>        set the version"""

-if __name__ == '__main__':
+def main():
     try:
         long_opts = ["help", "key=", "lvm=", "package=", "section=", "silent",
                      "update", "upload", "verbose", "version=", "with-sbuild",
@@ -429,5 +429,8 @@ if __name__ == '__main__':
         update = config.get_value('UPDATE_BUILDER', boolean=True)
     #TODO: Support WORKDIR

-    main(bug_numbers, package, version, section, update, uploader_email, key,
-         upload, lpinstance, verbose, silent)
+    ack_sync(bug_numbers, package, version, section, update, uploader_email,
+             key, upload, lpinstance, verbose, silent)
+
+if __name__ == '__main__':
+    main()
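All three hunks above follow the same refactoring: the script's work moves from module level (or from a generic main) into a descriptively named function, and a thin main() plus the __main__ guard becomes the only module-level code. That silences pylint's invalid-name complaints about script-level variables, which it otherwise expects to be UPPER_CASE constants, and makes the script importable for testing. A minimal sketch of the pattern, with made-up names (ack_sync_example is illustrative, not code from the commit), assuming optparse as these scripts use:

import optparse


def ack_sync_example(bug_numbers, verbose=False):
    # The real work lives in a function named after what it does, so the
    # name "main" stays free for the entry point below.
    for number in bug_numbers:
        if verbose:
            print("processing bug %i" % number)


def main():
    # main() only parses options and delegates; nothing runs at import time.
    parser = optparse.OptionParser(usage="%prog [options] <bug number>...")
    parser.add_option("-v", "--verbose", action="store_true", default=False)
    options, args = parser.parse_args()
    if not args:
        parser.error("No bug numbers specified.")
    ack_sync_example([int(arg) for arg in args], options.verbose)


if __name__ == '__main__':
    main()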
@@ -27,7 +27,7 @@ import tempfile
 import urllib

 from debian.deb822 import Dsc
-import launchpadlib.launchpad
+from launchpadlib.launchpad import Launchpad
 import lsb_release

 from ubuntutools.config import UDTConfig, ubu_email
@@ -134,8 +134,8 @@ def parse(args):

     return opts, args

-def find_release_package(lp, package, version, source_release):
-    ubuntu = lp.distributions['ubuntu']
+def find_release_package(launchpad, package, version, source_release):
+    ubuntu = launchpad.distributions['ubuntu']
     archive = ubuntu.main_archive
     series = ubuntu.getSeries(name_or_version=source_release)
     status = 'Published'
@@ -159,8 +159,8 @@ def find_release_package(lp, package, version, source_release):

     return srcpkg

-def find_version_package(lp, package, version):
-    ubuntu = lp.distributions['ubuntu']
+def find_version_package(launchpad, package, version):
+    ubuntu = launchpad.distributions['ubuntu']
     archive = ubuntu.main_archive
     try:
         # Might get more than one (i.e. same version in multiple
@@ -171,24 +171,25 @@ def find_version_package(lp, package, version):
         error('Version %s of package %s was never published in Ubuntu.' %
               (version, package))

-def dscurls_from_package(lp, mirror, workdir, package, version, source_release):
+def dscurls_from_package(launchpad, mirror, package, version, source_release):
     if not source_release and not version:
-        source_release = lp.distributions['ubuntu'].current_series.name
+        source_release = launchpad.distributions['ubuntu'].current_series.name

     # If source_release is specified, then version is just for verification
     if source_release:
-        srcpkg = find_release_package(lp, package, version, source_release)
+        srcpkg = find_release_package(launchpad, package, version,
+                                      source_release)
     else:
-        srcpkg = find_version_package(lp, package, version)
+        srcpkg = find_version_package(launchpad, package, version)

     urls = []
     if mirror:
         urls.append(dsc_url(mirror, srcpkg.component_name, package,
                             srcpkg.source_package_version))

-    for f in srcpkg.sourceFileUrls():
-        if f.endswith('.dsc'):
-            urls.append(urllib.unquote(f))
+    for source_file in srcpkg.sourceFileUrls():
+        if source_file.endswith('.dsc'):
+            urls.append(urllib.unquote(source_file))
             return urls
     else:
         error('Package %s contains no .dsc file.' % package)
@@ -201,13 +202,13 @@ def dscurl_from_dsc(package):
     # Can't resolve it as a local path? Let's just hope it's good as-is
     return package

-def fetch_package(lp, mirror, workdir, package, version, source_release):
+def fetch_package(launchpad, mirror, workdir, package, version, source_release):
     # Returns the path to the .dsc file that was fetched
     if package.endswith('.dsc'):
         dscs = [dscurl_from_dsc(package)]
     else:
-        dscs = dscurls_from_package(lp, mirror, workdir, package, version,
+        dscs = dscurls_from_package(launchpad, mirror, package, version,
                                     source_release)

     for dsc in dscs:
         cmd = ('dget', '--download-only', '--allow-unauthenticated', dsc)
@@ -217,12 +218,12 @@ def fetch_package(lp, mirror, workdir, package, version, source_release):
         return os.path.join(workdir, os.path.basename(dsc))

 def get_backport_version(version, suffix, upload, release):
-    v = version + ('~%s1' % release)
+    backport_version = version + ('~%s1' % release)
     if suffix is not None:
-        v += suffix
+        backport_version += suffix
     elif upload and upload.startswith('ppa:'):
-        v += '~ppa1'
-    return v
+        backport_version += '~ppa1'
+    return backport_version

 def get_backport_dist(upload, release):
     if not upload or upload == 'ubuntu':
@@ -293,14 +294,13 @@ def main(args):
     opts, (package_or_dsc,) = parse(args[1:])

     script_name = os.path.basename(sys.argv[0])
-    lp = launchpadlib.launchpad.Launchpad.login_anonymously(script_name,
-                                                            opts.lpinstance)
+    launchpad = Launchpad.login_anonymously(script_name, opts.lpinstance)

     if not opts.dest_releases:
+        distinfo = lsb_release.get_distro_information()
         try:
-            distinfo = lsb_release.get_distro_information()
             opts.dest_releases = [distinfo['CODENAME']]
-        except:
+        except KeyError:
            error('No destination release specified and unable to guess yours.')

     if opts.workdir:
@@ -312,7 +312,7 @@ def main(args):
         os.makedirs(workdir)

     try:
-        dscfile = fetch_package(lp,
+        dscfile = fetch_package(launchpad,
                                 opts.ubuntu_mirror,
                                 workdir,
                                 package_or_dsc,
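One change in the hunk above is behavioural rather than cosmetic: the bare except around the release guess becomes except KeyError, and the lsb_release call moves out of the try block. Pylint flags bare excepts (W0702) because they also swallow unrelated failures, including KeyboardInterrupt and plain programming errors inside the try block. A hedged sketch of the narrowed form; guess_destination_release is an illustrative name, only lsb_release.get_distro_information() is taken from the script itself:

import lsb_release


def guess_destination_release():
    # Only the lookup that can legitimately fail stays under try, and only
    # the expected error is caught; anything unexpected still propagates.
    distinfo = lsb_release.get_distro_information()
    try:
        return distinfo['CODENAME']
    except KeyError:
        # A bare "except:" here would also hide Ctrl-C and real bugs.
        raise SystemExit('No destination release specified and unable '
                         'to guess yours.')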
sponsor-patch (171 lines changed)
@@ -23,93 +23,98 @@ import tempfile
 from ubuntutools.config import UDTConfig
 from ubuntutools.builder import get_builder
 from ubuntutools.logger import Logger
-from ubuntutools.sponsor_patch.main import main
+from ubuntutools.sponsor_patch.sponsor_patch import sponsor_patch

-script_name = os.path.basename(sys.argv[0])
-usage = "%s [options] <bug number>" % (script_name)
-epilog = "See %s(1) for more info." % (script_name)
-parser = optparse.OptionParser(usage=usage, epilog=epilog)
+def main():
+    script_name = os.path.basename(sys.argv[0])
+    usage = "%s [options] <bug number>" % (script_name)
+    epilog = "See %s(1) for more info." % (script_name)
+    parser = optparse.OptionParser(usage=usage, epilog=epilog)

 parser.add_option("-b", "--build", dest="build",
                   help="Build the package with the specified builder.",
                   action="store_true", default=False)
 parser.add_option("-B", "--builder", dest="builder", default=None,
                   help="Specify the package builder (default pbuilder)")
 parser.add_option("-e", "--edit",
                   help="launch sub-shell to allow editing of the patch",
                   dest="edit", action="store_true", default=False)
 parser.add_option("-k", "--key", dest="keyid", default=None,
                   help="Specify the key ID to be used for signing.")
 parser.add_option("-l", "--lpinstance", dest="lpinstance", default=None,
-                  help="Launchpad instance to connect to (default: production)",
-                  metavar="INSTANCE")
-parser.add_option("--no-conf", dest="no_conf", default=False,
-                  help="Don't read config files or environment variables.",
-                  action="store_true")
-parser.add_option("-s", "--sponsor", help="sponsoring; equals -b -u ubuntu",
-                  dest="sponsoring", action="store_true", default=False)
-parser.add_option("-u", "--upload", dest="upload", default=None,
-                  help="Specify an upload destination (default none).")
-parser.add_option("-U", "--update", dest="update", default=False,
-                  action="store_true",
-                  help="Update the build environment before building.")
-parser.add_option("-v", "--verbose", help="print more information",
-                  dest="verbose", action="store_true", default=False)
-parser.add_option("-w", "--workdir", dest="workdir", default=None,
-                  help="Specify a working directory.")
+                      help="Launchpad instance to connect to "
+                           "(default: production)",
+                      metavar="INSTANCE")
+    parser.add_option("--no-conf", dest="no_conf", default=False,
+                      help="Don't read config files or environment variables.",
+                      action="store_true")
+    parser.add_option("-s", "--sponsor", help="sponsoring; equals -b -u ubuntu",
+                      dest="sponsoring", action="store_true", default=False)
+    parser.add_option("-u", "--upload", dest="upload", default=None,
+                      help="Specify an upload destination (default none).")
+    parser.add_option("-U", "--update", dest="update", default=False,
+                      action="store_true",
+                      help="Update the build environment before building.")
+    parser.add_option("-v", "--verbose", help="print more information",
+                      dest="verbose", action="store_true", default=False)
+    parser.add_option("-w", "--workdir", dest="workdir", default=None,
+                      help="Specify a working directory.")

 (options, args) = parser.parse_args()
 Logger.set_verbosity(options.verbose)

 if len(args) == 0:
     Logger.error("No bug number specified.")
     sys.exit(1)
 elif len(args) > 1:
     Logger.error("Multiple bug numbers specified: %s" % (", ".join(args)))
     sys.exit(1)

 bug_number = args[0]
 if bug_number.isdigit():
     bug_number = int(bug_number)
 else:
     Logger.error("Invalid bug number specified: %s" % (bug_number))
     sys.exit(1)

 config = UDTConfig(options.no_conf)
 if options.builder is None:
     options.builder = config.get_value("BUILDER")
 if options.lpinstance is None:
     options.lpinstance = config.get_value("LPINSTANCE")
 if not options.update:
     options.update = config.get_value("UPDATE_BUILDER", boolean=True)
-if options.workdir is None:
-    options.workdir = config.get_value("WORKDIR")
-
-builder = get_builder(options.builder)
-if not builder:
-    sys.exit(1)
-
-if options.sponsoring:
-    options.build = True
-    options.upload = "ubuntu"
-
-if not options.upload and not options.workdir:
-    Logger.error("Please specify either a working directory or an upload "
-                 "target!")
-    sys.exit(1)
-
-if options.workdir is None:
-    workdir = tempfile.mkdtemp(prefix=script_name+"-")
-else:
-    workdir = options.workdir
-
-try:
-    main(bug_number, options.build, builder, options.edit, options.keyid,
-         options.lpinstance, options.update, options.upload, workdir,
-         options.verbose)
-except KeyboardInterrupt:
-    print "\nUser abort."
-    sys.exit(2)
-finally:
     if options.workdir is None:
-        shutil.rmtree(workdir)
+        options.workdir = config.get_value("WORKDIR")

+    builder = get_builder(options.builder)
+    if not builder:
+        sys.exit(1)
+
+    if options.sponsoring:
+        options.build = True
+        options.upload = "ubuntu"
+
+    if not options.upload and not options.workdir:
+        Logger.error("Please specify either a working directory or an upload "
+                     "target!")
+        sys.exit(1)
+
+    if options.workdir is None:
+        workdir = tempfile.mkdtemp(prefix=script_name+"-")
+    else:
+        workdir = options.workdir
+
+    try:
+        sponsor_patch(bug_number, options.build, builder, options.edit,
+                      options.keyid, options.lpinstance, options.update,
+                      options.upload, workdir, options.verbose)
+    except KeyboardInterrupt:
+        print "\nUser abort."
+        sys.exit(2)
+    finally:
+        if options.workdir is None:
+            shutil.rmtree(workdir)
+
+if __name__ == "__main__":
+    main()
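Apart from moving the option setup into main(), the hunk above re-wraps the -l/--lpinstance help text: one long literal becomes two adjacent literals, which Python concatenates at compile time, so the string is unchanged while the line fits pylint's default 80-character limit (C0301). A small sketch of the equivalence (the constant names are illustrative):

# Adjacent string literals are joined at compile time, so both spellings
# produce exactly the same help text; the wrapped one just has shorter lines.
HELP_ONE_LINE = "Launchpad instance to connect to (default: production)"
HELP_WRAPPED = ("Launchpad instance to connect to "
                "(default: production)")
assert HELP_ONE_LINE == HELP_WRAPPED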
@@ -84,17 +84,17 @@ DEFAULT_WHITELISTED_EXTENSIONS = [
     ".xgf", # font source format: Xgridfit
 ]

-def main(whitelisted_mimetypes, whitelisted_extensions, directory,
-         verbose=False):
-    ms = magic.open(magic.MAGIC_MIME_TYPE)
-    ms.load()
+def suspicious_source(whitelisted_mimetypes, whitelisted_extensions, directory,
+                      verbose=False):
+    magic_cookie = magic.open(magic.MAGIC_MIME_TYPE)
+    magic_cookie.load()

     for root, dirs, files in os.walk(directory):
         for f in files:
-            mimetype = ms.file(os.path.join(root, f))
+            mimetype = magic_cookie.file(os.path.join(root, f))
             if mimetype not in whitelisted_mimetypes:
-                if not filter(lambda x: f.lower().endswith(x),
-                              whitelisted_extensions):
+                if not [x for x in whitelisted_extensions
+                        if f.lower().endswith(x)]:
                     if verbose:
                         print "%s (%s)" % (os.path.join(root, f), mimetype)
                     else:
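This is the first of several hunks that replace filter() with a lambda by an equivalent list comprehension, the form pylint and PEP 8 recommend, and one whose truthiness keeps working under Python 3, where filter() returns a lazy iterator. A hedged sketch of the transformation applied to the extension test above; the file name and list are illustrative, and the any() variant at the end is a further simplification that this commit does not make:

filename = "logo.png"
whitelisted_extensions = [".c", ".py", ".png"]

# Original style: filter() plus a lambda (returns a list on Python 2).
matches = filter(lambda x: filename.lower().endswith(x),
                 whitelisted_extensions)

# The commit's replacement: a list comprehension with the same condition.
matches = [x for x in whitelisted_extensions
           if filename.lower().endswith(x)]

# Not used by the commit, but the usual spelling when only a boolean is
# needed: any() short-circuits and never builds the intermediate list.
is_whitelisted = any(filename.lower().endswith(x)
                     for x in whitelisted_extensions)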
@@ -103,7 +103,7 @@ def main(whitelisted_mimetypes, whitelisted_extensions, directory,
         if vcs_dir in dirs:
             dirs.remove(vcs_dir)

-if __name__ == "__main__":
+def main():
     script_name = os.path.basename(sys.argv[0])
     usage = "%s [options]" % (script_name)
     epilog = "See %s(1) for more info." % (script_name)
@@ -130,5 +130,8 @@ if __name__ == "__main__":
         sys.exit(1)

     whitelisted_extensions = [x.lower() for x in options.whitelisted_extensions]
-    main(options.whitelisted_mimetypes, whitelisted_extensions,
-         options.directory, options.verbose)
+    suspicious_source(options.whitelisted_mimetypes, whitelisted_extensions,
+                      options.directory, options.verbose)
+
+if __name__ == "__main__":
+    main()
syncpackage (69 lines changed)
@@ -117,31 +117,32 @@ def quote_parameter(parameter):
         return parameter

 def print_command(script_name, cmd):
-    print "%s: I: %s" % (script_name, " ".join(map(quote_parameter, cmd)))
+    cmd = [quote_parameter(x) for x in cmd]
+    print "%s: I: %s" % (script_name, " ".join(cmd))

 def remove_signature(dscname):
     '''Removes the signature from a .dsc file if the .dsc file is signed.'''

-    f = open(dscname)
-    if f.readline().strip() == "-----BEGIN PGP SIGNED MESSAGE-----":
+    dsc_file = open(dscname)
+    if dsc_file.readline().strip() == "-----BEGIN PGP SIGNED MESSAGE-----":
         unsigned_file = []
         # search until begin of body found
-        for line in f:
+        for line in dsc_file:
             if line.strip() == "":
                 break

         # search for end of body
-        for line in f:
+        for line in dsc_file:
             if line.strip() == "":
                 break
             unsigned_file.append(line)

-        f.close()
-        f = open(dscname, "w")
-        f.writelines(unsigned_file)
-        f.close()
+        dsc_file.close()
+        dsc_file = open(dscname, "w")
+        dsc_file.writelines(unsigned_file)
+        dsc_file.close()

-def dsc_getfiles(dscurl):
+def dsc_getfiles(dscurl, script_name):
     '''Return list of files in a .dsc file (excluding the .dsc file itself).'''

     basepath = os.path.dirname(dscurl)
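remove_signature keeps its logic but gains a descriptive handle name, dsc_file instead of f, which is the kind of rename pylint's naming checks push for. Going beyond what this commit does, the same read-then-rewrite sequence is usually written with context managers today so the file is closed even when an exception escapes; a hedged sketch under that assumption, restating the hunk above rather than quoting it:

def remove_signature_sketch(dscname):
    # Illustrative restatement of the logic shown above, not the commit's code.
    with open(dscname) as dsc_file:
        if dsc_file.readline().strip() != "-----BEGIN PGP SIGNED MESSAGE-----":
            return                      # not signed: leave the file alone
        for line in dsc_file:           # skip the hash header up to the blank
            if line.strip() == "":
                break
        unsigned_lines = []
        for line in dsc_file:           # keep lines until the next blank line
            if line.strip() == "":
                break
            unsigned_lines.append(line)
    with open(dscname, "w") as dsc_file:
        dsc_file.writelines(unsigned_lines)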
@@ -154,17 +155,17 @@ def dsc_getfiles(dscurl):
         sys.exit(1)

     files = []
-    for f in dsc['Files']:
-        url = os.path.join(basepath, f['name'])
-        if not f['name'].endswith('.dsc'):
-            files.append(File(url, f['md5sum'], f['size']))
+    for source_file in dsc['Files']:
+        url = os.path.join(basepath, source_file['name'])
+        if not source_file['name'].endswith('.dsc'):
+            files.append(File(url, source_file['md5sum'], source_file['size']))
     return files

-def add_fixed_bugs(changes, bugs, script_name=None, verbose=False):
+def add_fixed_bugs(changes, bugs):
     '''Add additional Launchpad bugs to the list of fixed bugs in changes
     file.'''

-    changes = filter(lambda l: l.strip() != "", changes.split("\n"))
+    changes = [l for l in changes.split("\n") if l.strip() != ""]
     # Remove duplicates
     bugs = set(bugs)

@@ -208,8 +209,8 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
     try:
         ubuntu_source = getUbuntuSrcPkg(srcpkg, release)
         ubuntu_ver = Version(ubuntu_source.getVersion())
-        ubuntu_dsc = filter(lambda f: f.endswith(".dsc"),
-                            ubuntu_source.sourceFileUrls())
+        ubuntu_dsc = [f for f in ubuntu_source.sourceFileUrls()
+                      if f.endswith(".dsc")]
         assert len(ubuntu_dsc) == 1
         ubuntu_dsc = ubuntu_dsc[0]
     except udtexceptions.PackageNotFoundException:
@@ -226,7 +227,7 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
         print '%s: D: Source %s: current version %s, new version %s' % \
               (script_name, srcpkg, ubuntu_ver, new_ver)

-    files = dsc_getfiles(dscurl)
+    files = dsc_getfiles(dscurl, script_name)
     source_files = [f for f in files if f.is_source_file()]
     if verbose:
         print '%s: D: Files: %s' % (script_name,
@@ -238,7 +239,7 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
     if ubuntu_dsc is None:
         ubuntu_files = None
     else:
-        ubuntu_files = dsc_getfiles(ubuntu_dsc)
+        ubuntu_files = dsc_getfiles(ubuntu_dsc, script_name)

     # do we need the orig.tar.gz?
     need_orig = True
@@ -247,9 +248,8 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
         # We need to check if all .orig*.tar.* tarballs exist in Ubuntu
         need_orig = False
         for source_file in source_files:
-            ubuntu_file = filter(lambda f: f.get_name() ==
-                                 source_file.get_name(),
-                                 ubuntu_files)
+            ubuntu_file = [f for f in ubuntu_files
+                           if f.get_name() == source_file.get_name()]
             if len(ubuntu_file) == 0:
                 # The source file does not exist in Ubuntu
                 if verbose:
@@ -304,7 +304,8 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,

     if len(fakesync_files) == 0:
         # create the changes file
-        changes_file = "%s_%s_source.changes" % (srcpkg, new_ver.strip_epoch())
+        changes_filename = "%s_%s_source.changes" % \
+                           (srcpkg, new_ver.strip_epoch())
         cmd = ["dpkg-genchanges", "-S", "-v" + cur_ver.full_version,
                "-DDistribution=" + release,
                "-DOrigin=debian/" + debian_dist,
@@ -316,13 +317,13 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
         if not verbose:
             cmd += ["-q"]
         if verbose:
-            print_command(script_name, cmd + [">", "../" + changes_file])
+            print_command(script_name, cmd + [">", "../" + changes_filename])
         changes = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    env={"DEB_VENDOR": "Ubuntu"}).communicate()[0]

         # Add additional bug numbers
         if len(bugs) > 0:
-            changes = add_fixed_bugs(changes, bugs, verbose)
+            changes = add_fixed_bugs(changes, bugs)

         # remove extracted (temporary) files
         if verbose:
@@ -331,14 +332,14 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
             shutil.rmtree(directory, True)

         # write changes file
-        f = open(changes_file, "w")
-        f.writelines(changes)
-        f.close()
+        changes_file = open(changes_filename, "w")
+        changes_file.writelines(changes)
+        changes_file.close()

         # remove signature and sign package
         remove_signature(dscname)
         if keyid is not False:
-            cmd = ["debsign", changes_file]
+            cmd = ["debsign", changes_filename]
             if not keyid is None:
                 cmd.insert(1, "-k" + keyid)
             if verbose:
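The rename from changes_file to changes_filename in these hunks is what lets the code reuse the name changes_file for the open file object: before the change one identifier held a path in one place and a file handle in another, a redefinition pattern pylint warns about. A small illustrative sketch of keeping the two roles under separate names (the values are placeholders, not output of dpkg-genchanges):

changes_filename = "%s_%s_source.changes" % ("hello", "2.8-1ubuntu1")
changes_text = "Format: 1.8\n"   # placeholder for the generated changes data

# The path stays a string, the handle gets its own name and type.
changes_file = open(changes_filename, "w")
changes_file.write(changes_text)
changes_file.close()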
@@ -347,7 +348,8 @@ def sync_dsc(script_name, dscurl, debian_dist, release, name, email, bugs,
     else:
         # Create fakesync changelog entry
         new_ver = Version(new_ver.full_version + "fakesync1")
-        changes_file = "%s_%s_source.changes" % (srcpkg, new_ver.strip_epoch())
+        changes_filename = "%s_%s_source.changes" % \
+                           (srcpkg, new_ver.strip_epoch())
         if len(bugs) > 0:
             message = "Fake sync due to mismatching orig tarball (LP: %s)." % \
                       (", ".join(["#" + str(b) for b in bugs]))
@@ -418,7 +420,7 @@ def get_debian_dscurl(package, dist, release, version=None, component=None):
                           package, dsc_file)
     return dscurl

-if __name__ == "__main__":
+def main():
     script_name = os.path.basename(sys.argv[0])
     usage = "%s [options] <.dsc URL/path or package name>" % (script_name)
     epilog = "See %s(1) for more info." % (script_name)
@@ -510,3 +512,6 @@ if __name__ == "__main__":
     sync_dsc(script_name, dscurl, options.dist, options.release,
              options.uploader_name, options.uploader_email, options.bugs,
              options.keyid, options.verbose)
+
+if __name__ == "__main__":
+    main()
@@ -136,8 +136,10 @@ class Sbuild(Builder):
                     ["sbuild-distupgrade"],
                     ["sbuild-clean", "-a", "-c"]]
         for cmd in commands:
+            #pylint: disable=W0631
            Logger.command(cmd + [chroot])
            ret = subprocess.call(cmd + [chroot])
+            #pylint: enable=W0631
            if ret != 0:
                return self._update_failure(ret, dist)
         return 0
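The two added comments scope a pylint suppression as tightly as possible: W0631 (undefined-loop-variable) is disabled right before the lines that trip it and re-enabled immediately afterwards, so the rest of the module keeps full checking. A hedged sketch of the same idiom on an illustrative function, not the repository's Sbuild code:

def describe_last(sequence):
    for item in sequence:
        print(item)
    # pylint: disable=W0631
    last = item     # W0631 fires here: 'item' is undefined for an empty input
    # pylint: enable=W0631
    return "last item was %r" % (last,)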
@@ -35,11 +35,11 @@ class Patch(object):
     def get_strip_level(self):
         strip_level = None
         if self.is_debdiff():
-            changelog = filter(lambda f: f.endswith("debian/changelog"),
-                               self.changed_files)[0]
+            changelog = [f for f in self.changed_files
+                         if f.endswith("debian/changelog")][0]
             strip_level = len(changelog.split(os.sep)) - 2
         return strip_level

     def is_debdiff(self):
-        return len(filter(lambda f: f.endswith("debian/changelog"),
-                          self.changed_files)) > 0
+        return len([f for f in self.changed_files
+                    if f.endswith("debian/changelog")]) > 0
@@ -97,7 +97,7 @@ def ask_for_manual_fixing():
 def get_patch_or_branch(bug):
     patch = None
     branch = None
-    attached_patches = filter(lambda a: a.type == "Patch", bug.attachments)
+    attached_patches = [a for a in bug.attachments if a.type == "Patch"]
     linked_branches = [b.branch for b in bug.linked_branches]
     if len(attached_patches) == 0 and len(linked_branches) == 0:
         if len(bug.attachments) == 0:
@@ -114,15 +114,16 @@ def get_patch_or_branch(bug):
             branch = linked_branches[0].bzr_identity
     else:
         if len(attached_patches) == 0:
-            Logger.normal("https://launchpad.net/bugs/%i has %i branches " \
-                          "linked:" % (bug.id, len(linked_branches)))
+            msg = "https://launchpad.net/bugs/%i has %i branches linked:" % \
+                  (bug.id, len(linked_branches))
         elif len(linked_branches) == 0:
-            Logger.normal("https://launchpad.net/bugs/%i has %i patches" \
-                          " attached:" % (bug.id, len(attached_patches)))
+            msg = "https://launchpad.net/bugs/%i has %i patches attached:" % \
+                  (bug.id, len(attached_patches))
         else:
-            Logger.normal("https://launchpad.net/bugs/%i has %i branch(es)" \
-                          " linked and %i patch(es) attached:" % \
-                          (bug.id, len(linked_branches), len(attached_patches)))
+            msg = ("https://launchpad.net/bugs/%i has %i branch(es) linked and "
+                   "%i patch(es) attached:") % (bug.id, len(linked_branches),
+                                                len(attached_patches))
+        Logger.normal(msg)
         i = 0
         for linked_branch in linked_branches:
             i += 1
@@ -205,8 +206,8 @@ def apply_patch(task, patch):
             edit = True
     return edit

-def main(bug_number, build, builder, edit, keyid, lpinstance, update, upload,
-         workdir, verbose=False):
+def sponsor_patch(bug_number, build, builder, edit, keyid, lpinstance, update,
+                  upload, workdir, verbose=False):
     workdir = os.path.expanduser(workdir)
     if not os.path.isdir(workdir):
         try:
@@ -228,7 +229,7 @@ def main(bug_number, build, builder, edit, keyid, lpinstance, update, upload,
     (patch, branch) = get_patch_or_branch(bug)

     bug_tasks = [BugTask(x, launchpad) for x in bug.bug_tasks]
-    ubuntu_tasks = filter(lambda x: x.is_ubuntu_task(), bug_tasks)
+    ubuntu_tasks = [x for x in bug_tasks if x.is_ubuntu_task()]
     if len(ubuntu_tasks) == 0:
         Logger.error("No Ubuntu bug task found on bug #%i." % (bug_number))
         sys.exit(1)
@@ -240,7 +241,7 @@ def main(bug_number, build, builder, edit, keyid, lpinstance, update, upload,
                      (len(ubuntu_tasks), bug_number))
        for task in ubuntu_tasks:
            print task.get_short_info()
-    open_ubuntu_tasks = filter(lambda x: not x.is_complete(), ubuntu_tasks)
+    open_ubuntu_tasks = [x for x in ubuntu_tasks if x.is_complete()]
     if len(open_ubuntu_tasks) == 1:
         task = open_ubuntu_tasks[0]
     else:
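When converting filter() calls mechanically, the predicate has to move into the comprehension unchanged; a faithful rewrite of filter(lambda x: not x.is_complete(), ubuntu_tasks) keeps the not inside the condition. A hedged, self-contained sketch of that equivalence; the Task class and release names are illustrative, not from this repository:

class Task(object):
    def __init__(self, name, complete):
        self.name = name
        self.complete = complete

    def is_complete(self):
        return self.complete


tasks = [Task("precise", True), Task("oneiric", False)]

# filter(lambda x: not x.is_complete(), tasks) translated faithfully:
open_tasks = [x for x in tasks if not x.is_complete()]
assert [task.name for task in open_tasks] == ["oneiric"]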
@@ -87,6 +87,8 @@ class WrapAndSortControl(Control):

 class Install(object):
     def __init__(self, filename):
+        self.content = None
+        self.filename = None
         self.open(filename)

     def open(self, filename):
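Declaring every instance attribute in __init__, even when it is only set to None before open() fills it in, is how the commit addresses pylint's attribute-defined-outside-init warning (W0201), and it documents the object's full data layout in one place. A hedged sketch of the pattern on an illustrative class, not the repository's Install implementation:

class InstallFileSketch(object):
    def __init__(self, filename):
        self.content = None      # lines of the file once it has been read
        self.filename = None     # path the content was read from
        self.open(filename)

    def open(self, filename):
        self.filename = filename
        with open(filename) as install_file:   # builtin open, not this method
            self.content = install_file.readlines()

    def save(self, filename=None):
        if filename:
            self.filename = filename
        with open(self.filename, "w") as install_file:
            install_file.write("".join(self.content))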
@@ -97,9 +99,9 @@ class Install(object):
     def save(self, filename=None):
         if filename:
             self.filename = filename
-        f = open(self.filename, "w")
-        f.write("".join(self.content))
-        f.close()
+        install_file = open(self.filename, "w")
+        install_file.write("".join(self.content))
+        install_file.close()

     def sort(self):
         self.content = sorted(self.content)
@@ -120,11 +122,11 @@ def sort_list(unsorted_list):
     param = [x for x in unsorted_list if x.startswith("${")]
     return sorted(normal) + sorted(param)

-def main(options):
+def wrap_and_sort(options):
     debdir = lambda x: os.path.join(options.debian_directory, x)

-    control_files = filter(os.path.isfile,
-                           [debdir("control"), debdir("control.in")])
+    possible_control_files = [debdir("control"), debdir("control.in")]
+    control_files = [f for f in possible_control_files if os.path.isfile(f)]
     for control_file in control_files:
         if options.verbose:
             print control_file
@@ -135,8 +137,8 @@ def main(options):
                       options.sort_binary_packages, options.keep_first)
         control.save()

-    copyright_files = filter(os.path.isfile,
-                             [debdir("copyright"), debdir("copyright.in")])
+    possible_copyright_files = [debdir("copyright"), debdir("copyright.in")]
+    copyright_files = [f for f in possible_copyright_files if os.path.isfile(f)]
     for copyright_file in copyright_files:
         if options.verbose:
             print copyright_file
@@ -152,7 +154,7 @@ def main(options):
         install.sort()
         install.save()

-if __name__ == "__main__":
+def main():
     script_name = os.path.basename(sys.argv[0])
     usage = "%s [options]" % (script_name)
     epilog = "See %s(1) for more info." % (script_name)
@@ -196,4 +198,7 @@ if __name__ == "__main__":
                      options.debian_directory)
         sys.exit(1)

-    main(options)
+    wrap_and_sort(options)
+
+if __name__ == "__main__":
+    main()