Actually merge trunk

Evan Broder 2010-12-17 01:47:15 -08:00
commit ab0dc4244b
10 changed files with 440 additions and 369 deletions


@@ -138,8 +138,7 @@ def dscurl_from_package(lp, workdir, package, version, source_release):
     if not source_release and not version:
         source_release = lp.distributions['ubuntu'].current_series.name
-    # If source_release is specified, then version is just for
-    # verification
+    # If source_release is specified, then version is just for verification
     if source_release:
         srcpkg = find_release_package(lp, package, version, source_release)
     else:
@@ -156,8 +155,7 @@ def dscurl_from_dsc(package):
     if os.path.exists(path):
         return 'file://%s' % path
     else:
-        # Can't resolve it as a local path? Let's just hope it's good
-        # as-is
+        # Can't resolve it as a local path? Let's just hope it's good as-is
         return package

 def fetch_package(lp, workdir, package, version, source_release):
@@ -168,10 +166,7 @@ def fetch_package(lp, workdir, package, version, source_release):
     else:
         dsc = dscurl_from_package(lp, workdir, package, version, source_release)
-    check_call(['dget',
-                '--download-only',
-                '--allow-unauthenticated',
-                dsc],
+    check_call(['dget', '--download-only', '--allow-unauthenticated', dsc],
                cwd=workdir)
     return os.path.join(workdir, os.path.basename(dsc))
@@ -211,19 +206,12 @@ def do_upload(workdir, package, bp_version, upload):
     elif answer in ('n', 'no'):
         return
-    check_call(['dput',
-                upload,
-                '%s_%s_source.changes' %
-                (package, bp_version)],
+    check_call(['dput', upload, '%s_%s_source.changes' % (package, bp_version)],
                cwd=workdir)

 def do_backport(workdir, package, dscfile, version, release, build, builder, upload):
-    check_call(['dpkg-source',
-                '-x',
-                dscfile,
-                package],
-               cwd=workdir)
+    check_call(['dpkg-source', '-x', dscfile, package], cwd=workdir)
     srcdir = os.path.join(workdir, package)
     bp_version = get_backport_version(version, upload, release)
@@ -236,8 +224,7 @@ def do_backport(workdir, package, dscfile, version, release, build, builder, upl
                 '--distribution', bp_dist,
                 'No-change backport to %s' % release],
                cwd=srcdir)
-    check_call(['debuild', '-S', '-sa'],
-               cwd=srcdir)
+    check_call(['debuild', '-S', '-sa'], cwd=srcdir)
     if ':' in bp_version:
         bp_version = bp_version[bp_version.find(':')+1:]

9
debian/changelog vendored

@@ -5,12 +5,19 @@ ubuntu-dev-tools (0.108) UNRELEASED; urgency=low
     Use the 'production' LP instance instead of 'edge' (which is going away).
   * pbuilder-dist: Fix typo in local archive support, introduced in 0.107.

+  [ Benjamin Drung ]
+  * pull-lp-source: Unquote URI to get "+" instead of "%2B" in the file name
+    (LP: #681114).
+
+  [ Colin Watson ]
+  * grep-merges: New tool.
+
   [ Evan Broder ]
   * backportpackage: new script for testing backport requests in a PPA.
   * sponsor-patch: Add --update option to make sure build environment is
     up to date (LP: #689605)

- -- Evan Broder <evan@ebroder.net>  Mon, 13 Dec 2010 03:57:20 -0800
+ -- Evan Broder <evan@ebroder.net>  Fri, 17 Dec 2010 01:46:50 -0800

 ubuntu-dev-tools (0.107) experimental; urgency=low

1
debian/control vendored

@@ -62,6 +62,7 @@ Description: useful tools for Ubuntu developers
    - grab-attachments - download all bug attachments from a Launchpad bug
      report.
    - grab-merge - grabs a merge from merges.ubuntu.com easily.
+   - grep-merges - search for pending merges from Debian.
    - hugdaylist - compile HugDay lists from bug list URLs.
    - import-bug-from-debian - copy a bug from the Debian BTS to Launchpad
    - lp-list-bugs - briefly list status of Launchpad bugs.

20
debian/copyright vendored

@@ -81,11 +81,11 @@ On Debian and Ubuntu systems, the complete text of the GNU General Public
 License v2 can be found in `/usr/share/common-licenses/GPL-2'.

 dch-repeat, errno, get-branches, get-build-deps, grab-attachments, grab-merge,
-hugdaylist, lp-list-bugs, manage-credentials, massfile, merge-changelog,
-mk-sbuild, pbuilder-dist-simple, pull-debian-debdiff, pull-debian-source,
-pull-lp-source, pull-revu-source, setup-packaging-environment,
-suspicious-source, ubuntu-build and what-patch are licensed under the GNU
-General Public License, version 3:
+grep-merges, hugdaylist, lp-list-bugs, manage-credentials, massfile,
+merge-changelog, mk-sbuild, pbuilder-dist-simple, pull-debian-debdiff,
+pull-debian-source, pull-lp-source, pull-revu-source,
+setup-packaging-environment, suspicious-source, ubuntu-build and what-patch
+are licensed under the GNU General Public License, version 3:

  This program is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
@@ -101,8 +101,8 @@ License v3 can be found in `/usr/share/common-licenses/GPL-3'.
 The following scripts can be used, at your option, regarding any later
 version of the previously specified license: 404main, dch-repeat, dgetlp,
-get-build-deps, import-bug-from-debian, lp-list-bugs, lp-project-upload,
-lp-set-dup, manage-credentials, mk-sbuild-lv, pbuilder-dist,
-pull-debian-debdiff, pull-debian-source, pull-lp-source, pull-revu-source,
-reverse-build-depends, setup-packaging-environment, submittodebian,
-suspicious-source, syncpackage, ubuntu-build, what-patch.
+get-build-deps, grep-merges, import-bug-from-debian, lp-list-bugs,
+lp-project-upload, lp-set-dup, manage-credentials, mk-sbuild-lv,
+pbuilder-dist, pull-debian-debdiff, pull-debian-source, pull-lp-source,
+pull-revu-source, reverse-build-depends, setup-packaging-environment,
+submittodebian, suspicious-source, syncpackage, ubuntu-build, what-patch.

26
doc/grep-merges.1 Normal file

@@ -0,0 +1,26 @@
.TH grep\-merges 1 "December 15, 2010" "ubuntu-dev-tools"
.SH NAME
grep\-merges \- search for outstanding merges from Debian
.SH SYNOPSIS
.B grep\-merges
.RI [ string ]
.SH DESCRIPTION
.B grep\-merges
searches merges.ubuntu.com for pending merges from Debian.
If a
.I string
is given, it will list all merges whose source package name, last changelog
author, or last uploader contain that string.
Otherwise, it will list all merges.
.SH EXAMPLES
.nf
$ grep\-merges cjwatson
tzsetup Colin Watson <cjwatson@ubuntu.com>
console-setup Colin Watson <cjwatson@ubuntu.com>
.fi
.SH AUTHOR
.B grep\-merges
and this manual page were written by Colin Watson <cjwatson@ubuntu.com>.
.PP
Both are released under the terms of the GNU General Public License, version
3 or (at your option) any later version.


@@ -94,7 +94,8 @@ Display a help message and exit.
 .TP
 .B UBUNTUTOOLS_BUILDER
-The default builder for Ubuntu development tools that support it (including \fBsponsor\-patch\fR.
+The default builder for Ubuntu development tools that support it (including
+\fBsponsor\-patch\fR).
 Supported are \fBpbuilder\fR(8) and \fBsbuild\fR(1).
 If unset and not provided on the command line, \fBpbuilder\fR(8) is used.
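A minimal Python 2 sketch of the fallback behaviour this manpage hunk describes. It is illustrative only, not the actual ubuntutools code, and it assumes a command-line value (the hypothetical cli_builder argument) takes precedence over the environment:

    import os

    def choose_builder(cli_builder=None):
        # A builder given on the command line wins; otherwise fall back to
        # UBUNTUTOOLS_BUILDER, and finally to pbuilder as the default.
        if cli_builder:
            return cli_builder
        return os.environ.get('UBUNTUTOOLS_BUILDER', 'pbuilder')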

46
grep-merges Executable file

@@ -0,0 +1,46 @@
#! /usr/bin/python
#
# grep-merges - search for pending merges from Debian
#
# Copyright (C) 2010 Canonical Ltd.
# Authors:
# - Colin Watson <cjwatson@ubuntu.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import urllib2
import json
if len(sys.argv) > 1:
    match = sys.argv[1]
else:
    match = None

for component in ('main', 'main-manual',
                  'restricted', 'restricted-manual',
                  'universe', 'universe-manual',
                  'multiverse', 'multiverse-manual'):
    page = urllib2.urlopen('http://merges.ubuntu.com/%s.json' % component)
    for merge in json.load(page):
        package = merge['source_package']
        author, uploader = '', ''
        if 'user' in merge:
            author = merge['user']
        if 'uploader' in merge:
            uploader = '(%s)' % merge['uploader']
        pretty_uploader = ' '.join((author, uploader)).strip()
        if (match is None or
            match in package or match in author or match in uploader):
            print '%s\t%s' % (package, pretty_uploader)
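For reference, a rough sketch of the record shape the script above expects from each merges.ubuntu.com JSON feed. The field names ('source_package', 'user', 'uploader') come from the code; the values are taken from the manpage example and are only illustrative:

    # One illustrative entry from http://merges.ubuntu.com/main.json
    merge = {
        'source_package': 'tzsetup',
        'user': 'Colin Watson <cjwatson@ubuntu.com>',      # last changelog author
        'uploader': 'Colin Watson <cjwatson@ubuntu.com>',  # last uploader (optional)
    }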


@@ -28,6 +28,7 @@
 import os
 import sys
 import subprocess
+import urllib
 from optparse import OptionParser

 # ubuntu-dev-tools modules.
@@ -78,7 +79,7 @@ if __name__ == '__main__':
     # All good - start downloading...
     print 'Fetching the source for %s from %s (%s)...' % (
         package, release.capitalize(), pocket)
-    if subprocess.call(['/usr/bin/dget', '-xu', dsc_url[0]]) == 0:
+    if subprocess.call(['/usr/bin/dget', '-xu', urllib.unquote(dsc_url[0])]) == 0:
         print 'Success!'
     else:
         print 'Failed to fetch and extrace the source.', \
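A small Python 2 illustration of what the added urllib.unquote call fixes (LP: #681114): the .dsc URI from Launchpad is URL-encoded, so without unquoting, dget saves files with "%2B" in the name instead of "+". The URL below is made up; only the decoding matters:

    import urllib

    dsc_url = 'https://launchpad.net/.../foo_1.0%2Bdfsg.dsc'  # hypothetical URL
    print urllib.unquote(dsc_url)  # '+' is restored in the file name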


@@ -26,6 +26,7 @@ setup(name='ubuntu-dev-tools',
                'get-build-deps',
                'grab-attachments',
                'grab-merge',
+               'grep-merges',
                'hugdaylist',
                'import-bug-from-debian',
                'lp-list-bugs',


@@ -83,304 +83,305 @@ Launchpad = Launchpad()

class MetaWrapper(type):
    '''
    A meta class used for wrapping LP API objects.
    '''
    def __init__(cls, name, bases, attrd):
        super(MetaWrapper, cls).__init__(name, bases, attrd)
        if 'resource_type' not in attrd:
            raise TypeError('Class "%s" needs an associated resource type' % name)
        cls._cache = dict()


class BaseWrapper(object):
    '''
    A base class from which other wrapper classes are derived.
    '''
    __metaclass__ = MetaWrapper
    resource_type = None # it's a base class after all

    def __new__(cls, data):
        if isinstance(data, basestring) and data.startswith('%s%s/' % (lookup_service_root(service), api_version)):
            # looks like a LP API URL
            # check if it's already cached
            cached = cls._cache.get(data)
            if cached:
                return cached

            # not cached, so try to get it
            try:
                data = Launchpad.load(data)
            except HTTPError:
                # didn't work
                pass

        if isinstance(data, Entry):
            if data.resource_type_link in cls.resource_type:
                # check if it's already cached
                cached = cls._cache.get(data.self_link)
                if not cached:
                    # create a new instance
                    cached = object.__new__(cls)
                    cached._lpobject = data
                    # and add it to our cache
                    cls._cache[data.self_link] = cached
                    # add additional class specific caching (if available)
                    cache = getattr(cls, 'cache', None)
                    if callable(cache):
                        cache(cached)
                return cached
            else:
                raise TypeError("'%s' is not a '%s' object" % (str(data), str(cls.resource_type)))
        else:
            # not a LP API representation, let the specific class handle it
            fetch = getattr(cls, 'fetch', None)
            if callable(fetch):
                return fetch(data)
            else:
                raise NotImplementedError("Don't know how to fetch '%s' from LP" % str(data))

    def __call__(self):
        return self._lpobject

    def __getattr__(self, attr):
        return getattr(self._lpobject, attr)

    def __repr__(self):
        if hasattr(str, 'format'):
            return '<{0}: {1!r}>'.format(self.__class__.__name__, self._lpobject)
        else:
            return '<%s: %r>' % (self.__class__.__name__, self._lpobject)


class Distribution(BaseWrapper):
    '''
    Wrapper class around a LP distribution object.
    '''
    resource_type = lookup_service_root(service) + api_version + '/#distribution'

    def __init__(self, *args):
        # Don't share _series and _archives between different Distributions
        if '_series' not in self.__dict__:
            self._series = dict()
        if '_archives' not in self.__dict__:
            self._archives = dict()

    def cache(self):
        self._cache[self.name] = self

    @classmethod
    def fetch(cls, dist):
        '''
        Fetch the distribution object identified by 'dist' from LP.
        '''
        if not isinstance(dist, basestring):
            raise TypeError("Don't know what do with '%r'" % dist)
        cached = cls._cache.get(dist)
        if not cached:
            cached = Distribution(Launchpad.distributions[dist])
        return cached

    def getArchive(self, archive = None):
        '''
        Returns an Archive object for the requested archive.
        Raises a ArchiveNotFoundException if the archive doesn't exist.

        If 'archive' is None, return the main archive.
        '''
        if archive:
            res = self._archives.get(archive)

            if not res:
                for a in self.archives:
                    if a.name == archive:
                        res = Archive(a)
                        self._archives[res.name] = res
                        break

            if res:
                return res
            else:
                raise ArchiveNotFoundException("The Archive '%s' doesn't exist in %s" % (archive, self.display_name))
        else:
            if not '_main_archive' in self.__dict__:
                self._main_archive = Archive(self.main_archive_link)
            return self._main_archive

    def getSeries(self, name_or_version):
        '''
        Returns a DistroSeries object for a series passed by name
        (e.g. 'karmic') or version (e.g. '9.10').
        If the series is not found: raise SeriesNotFoundException
        '''
        if name_or_version not in self._series:
            try:
                series = DistroSeries(self().getSeries(name_or_version = name_or_version))
                # Cache with name and version
                self._series[series.name] = series
                self._series[series.version] = series
            except HTTPError:
                raise SeriesNotFoundException("Release '%s' is unknown in '%s'." % (name_or_version, self.display_name))
        return self._series[name_or_version]

    def getDevelopmentSeries(self):
        '''
        Returns a DistroSeries object of the current development series.
        '''
        dev = DistroSeries(self.current_series_link)
        # Cache it in _series if not already done
        if dev.name not in self._series:
            self._series[dev.name] = dev
            self._series[dev.version] = dev
        return dev


class DistroSeries(BaseWrapper):
    '''
    Wrapper class around a LP distro series object.
    '''
    resource_type = lookup_service_root(service) + api_version + '/#distro_series'


class Archive(BaseWrapper):
    '''
    Wrapper class around a LP archive object.
    '''
    resource_type = lookup_service_root(service) + api_version + '/#archive'

    def __init__(self, *args):
        # Don't share _srcpkgs between different Archives
        if '_srcpkgs' not in self.__dict__:
            self._srcpkgs = dict()

    def getSourcePackage(self, name, series = None, pocket = 'Release'):
        '''
        Returns a SourcePackagePublishingHistory object for the most
        recent source package in the distribution 'dist', series and
        pocket.

        series defaults to the current development series if not specified.

        If the requested source package doesn't exist a
        PackageNotFoundException is raised.
        '''
        # Check if pocket has a valid value
        if pocket not in ('Release', 'Security', 'Updates', 'Proposed', 'Backports'):
            raise PocketDoesNotExistError("Pocket '%s' does not exist." % pocket)

        dist = Distribution(self.distribution_link)
        # Check if series is already a DistoSeries object or not
        if not isinstance(series, DistroSeries):
            if series:
                series = dist.getSeries(series)
            else:
                series = dist.getDevelopmentSeries()

        # NOTE:
        # For Debian all source publication are in the state 'Pending' so filter on this
        # instead of 'Published'. As the result is sorted also by date the first result
        # will be the most recent one (i.e. the one we are interested in).
        if dist.name in ('debian',):
            state = 'Pending'
        else:
            state = 'Published'

        if (name, series.name, pocket) not in self._srcpkgs:
            try:
                srcpkg = self.getPublishedSources(
                    source_name = name, distro_series = series(), pocket = pocket,
                    status = state, exact_match = True)[0]
                self._srcpkgs[(name, series.name, pocket)] = SourcePackagePublishingHistory(srcpkg)
            except IndexError:
                if pocket == 'Release':
                    msg = "The package '%s' does not exist in the %s %s archive in '%s'" % \
                        (name, dist.display_name, self.name, series.name)
                else:
                    msg = "The package '%s' does not exist in the %s %s archive in '%s-%s'" % \
                        (name, dist.display_name, self.name, series.name, pocket.lower())
                raise PackageNotFoundException(msg)

        return self._srcpkgs[(name, series.name, pocket)]


class SourcePackagePublishingHistory(BaseWrapper):
    '''
    Wrapper class around a LP source package object.
    '''
    resource_type = lookup_service_root(service) + api_version + '/#source_package_publishing_history'

    def __init__(self, *args):
        # Don't share _builds between different SourcePackagePublishingHistory objects
        if '_builds' not in self.__dict__:
            self._builds = dict()

    def getPackageName(self):
        '''
        Returns the source package name.
        '''
        return self._lpobject.source_package_name

    def getVersion(self):
        '''
        Returns the version of the source package.
        '''
        return self._lpobject.source_package_version

    def getComponent(self):
        '''
        Returns the component of the source package.
        '''
        return self._lpobject.component_name

    def _fetch_builds(self):
        '''Populate self._builds with the build records.'''
        builds = self.getBuilds()
        for build in builds:
            self._builds[build.arch_tag] = Build(build)

    def getBuildStates(self, archs):
        res = list()

        if not self._builds:
            self._fetch_builds()

        for arch in archs:
            build = self._builds.get(arch)
            if build:
                res.append(' %s' % build)
        return "Build state(s) for '%s':\n%s" % (
            self.getPackageName(), '\n'.join(res))

    def rescoreBuilds(self, archs, score):
        res = list()

        if not self._builds:
            self._fetch_builds()

        for arch in archs:
            build = self._builds.get(arch)
            if build:
                if build.rescore(score):
                    res.append(' %s: done' % arch)
                else:
                    res.append(' %s: failed' % arch)
        return "Rescoring builds of '%s' to %i:\n%s" % (
            self.getPackageName(), score, '\n'.join(res))

    def retryBuilds(self, archs):
        res = list()

        if not self._builds:
            self._fetch_builds()

        for arch in archs:
            build = self._builds.get(arch)
            if build:
                if build.retry():
                    res.append(' %s: done' % arch)
                else:
                    res.append(' %s: failed' % arch)
        return "Retrying builds of '%s':\n%s" % (
            self.getPackageName(), '\n'.join(res))


class MetaPersonTeam(MetaWrapper):
@@ -401,115 +402,115 @@ class MetaPersonTeam(MetaWrapper):
        return cls._me


class PersonTeam(BaseWrapper):
    '''
    Wrapper class around a LP person or team object.
    '''
    __metaclass__ = MetaPersonTeam

    resource_type = (
        lookup_service_root(service) + api_version + '/#person',
        lookup_service_root(service) + api_version + '/#team',
        )

    def __init__(self, *args):
        # Don't share _upload between different PersonTeams
        if '_upload' not in self.__dict__:
            self._upload = dict()

    def __str__(self):
        return u'%s (%s)' % (self.display_name, self.name)

    def cache(self):
        self._cache[self.name] = self

    @classmethod
    def fetch(cls, person_or_team):
        '''
        Fetch the person or team object identified by 'url' from LP.
        '''
        if not isinstance(person_or_team, basestring):
            raise TypeError("Don't know what do with '%r'" % person_or_team)
        cached = cls._cache.get(person_or_team)
        if not cached:
            cached = PersonTeam(Launchpad.people[person_or_team])
        return cached

    def isLpTeamMember(self, team):
        '''
        Checks if the user is a member of a certain team on Launchpad.

        Returns True if the user is a member of the team otherwise False.
        '''
        return any(t.name == team for t in self.super_teams)

    def canUploadPackage(self, archive, distroseries, package, component, pocket='Release'):
        '''Check if the person or team has upload rights for the source
        package to the specified 'archive' and 'distrorelease'.

        A source package name and a component have to be specified.
        'archive' has to be a Archive object.
        'distroseries' has to be an DistroSeries object.
        '''
        if not isinstance(archive, Archive):
            raise TypeError("'%r' is not an Archive object." % archive)
        if not isinstance(distroseries, DistroSeries):
            raise TypeError("'%r' is not a DistroSeries object." % distroseries)
        if package is not None and not isinstance(package, basestring):
            raise TypeError('A source package name expected.')
        if component is not None and not isinstance(component, basestring):
            raise TypeError('A component name expected.')
        if package is None and component is None:
            raise ValueError('Either a source package name or a component has to be specified.')
        if pocket not in ('Release', 'Security', 'Updates', 'Proposed', 'Backports'):
            raise PocketDoesNotExistError("Pocket '%s' does not exist." % pocket)

        canUpload = self._upload.get((archive, distroseries, pocket, package, component))

        if canUpload is None:
            # checkUpload() throws an exception if the person can't upload
            try:
                archive.checkUpload(
                    component=component,
                    distroseries=distroseries(),
                    person=self(),
                    pocket=pocket,
                    sourcepackagename=package,
                    )
                canUpload = True
            except HTTPError, e:
                if e.response.status == 403:
                    canUpload = False
                else:
                    raise e
            self._upload[(archive, distroseries, pocket, package, component)] = canUpload

        return canUpload


class Build(BaseWrapper):
    '''
    Wrapper class around a build object.
    '''
    resource_type = lookup_service_root(service) + api_version + '/#build'

    def __str__(self):
        return u'%s: %s' % (self.arch_tag, self.buildstate)

    def rescore(self, score):
        if self.can_be_rescored:
            self().rescore(score = score)
            return True
        return False

    def retry(self):
        if self.can_be_retried:
            self().retry()
            return True
        return False


class DistributionSourcePackage(BaseWrapper):
    '''
    Caching class for distribution_source_package objects.
    '''
    resource_type = lookup_service_root(service) + api_version + '/#distribution_source_package'