# -*- coding: utf-8 -*-
#
# lpapicache.py - wrapper classes around the LP API implementing caching
#
# for usage in the ubuntu-dev-tools package
#
# Copyright © 2009-2010 Michael Bienia <geser@ubuntu.com>
#               2011 Stefano Rivera <stefanor@ubuntu.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# Please see the /usr/share/common-licenses/GPL file for the full text
# of the GNU General Public License.
#
# Based on code written by Jonathan Davies <jpds@ubuntu.com>

import collections
import logging
import os
import re
from copy import copy
from urllib.error import URLError
from urllib.parse import urlparse

from debian.changelog import Changelog
from launchpadlib.errors import HTTPError
from launchpadlib.launchpad import Launchpad as LP
from lazr.restfulclient.resource import Entry

from ubuntutools.lp import API_VERSION, SERVICE
from ubuntutools.lp.udtexceptions import (
    AlreadyLoggedInError,
    ArchiveNotFoundException,
    ArchSeriesNotFoundException,
    PackageNotFoundException,
    PocketDoesNotExistError,
    SeriesNotFoundException,
)
from ubuntutools.misc import (
    DEFAULT_POCKETS,
    DEFAULT_STATUSES,
    POCKETS,
    STATUSES,
    download_text,
    host_architecture,
)
from ubuntutools.version import Version

Logger = logging.getLogger(__name__)


__all__ = [
    "Archive",
    "BinaryPackagePublishingHistory",
    "Build",
    "Distribution",
    "DistributionSourcePackage",
    "DistroSeries",
    "DistroArchSeries",
    "Launchpad",
    "PackageUpload",
    "PersonTeam",
    "Project",
    "ProjectSeries",
    "SourcePackagePublishingHistory",
]


class _Launchpad:
    """Singleton for LP API access."""

    __lp = None

    def login(self, service=SERVICE, api_version=API_VERSION):
        """Enforce a non-anonymous login."""
        if not self.logged_in:
            self.__lp = LP.login_with("ubuntu-dev-tools", service, version=api_version)
            # Unfortunately launchpadlib may 'login' using cached
            # credentials, without actually verifying if the credentials
            # are valid; which can lead to this 'login' not actually
            # logging in.
            # So, this forces actual LP access here, to force actual login.
            self.__lp.me  # pylint: disable=pointless-statement
        else:
            raise AlreadyLoggedInError("Already logged in to Launchpad.")

    def login_anonymously(self, service=SERVICE, api_version=API_VERSION):
        """Enforce an anonymous login."""
        if not self.logged_in:
            self.__lp = LP.login_anonymously("ubuntu-dev-tools", service, version=api_version)
        else:
            raise AlreadyLoggedInError("Already logged in to Launchpad.")

    def login_existing(self, lp):
        """Use an already logged in Launchpad object"""
        if not self.logged_in:
            self.__lp = lp
        else:
            raise AlreadyLoggedInError("Already logged in to Launchpad.")

    @property
    def logged_in(self):
        """Are we logged in?"""
        return "_Launchpad__lp" in self.__dict__

    def __getattr__(self, attr):
        if not self.logged_in:
            self.login_anonymously()
        return getattr(self.__lp, attr)

    def __call__(self):
        return self


Launchpad = _Launchpad()
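
# Illustrative usage of the module-level singleton (a sketch, not part of the
# module itself): attribute access on `Launchpad` triggers an anonymous login
# on demand, so read-only scripts need no explicit login step.
#
#     from ubuntutools.lpapicache import Launchpad
#
#     # Read-only access; logs in anonymously on first attribute access:
#     print(Launchpad.distributions["ubuntu"].display_name)
#
#     # Scripts needing write access must log in explicitly first:
#     # Launchpad.login()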


class MetaWrapper(type):
    """
    A meta class used for wrapping LP API objects.
    """

    def __init__(cls, name, bases, attrd):
        super(MetaWrapper, cls).__init__(name, bases, attrd)
        if "resource_type" not in attrd:
            raise TypeError(f'Class "{name}" needs an associated resource type')
        cls._cache = {}


class BaseWrapper(metaclass=MetaWrapper):
    """
    A base class from which other wrapper classes are derived.
    """

    resource_type: str = None  # it's a base class after all

    def __new__(cls, data):
        if isinstance(data, str) and data.startswith(str(Launchpad._root_uri)):
            # looks like a LP API URL
            # check if it's already cached
            cached = cls._cache.get(data)
            if cached:
                return cached

            # not cached, so try to get it
            try:
                data = Launchpad.load(data)
            except HTTPError:
                # didn't work
                pass

        if isinstance(data, Entry):
            (service_root, resource_type) = data.resource_type_link.split("#")
            if service_root == str(Launchpad._root_uri) and resource_type in cls.resource_type:
                # check if it's already cached
                cached = cls._cache.get(data.self_link)
                if not cached:
                    # create a new instance
                    cached = object.__new__(cls)
                    cached._lpobject = data
                    # and add it to our cache
                    cls._cache[data.self_link] = cached
                    Logger.debug("%s: %s", cls.__name__, data.self_link)
                    # add additional class specific caching (if available)
                    cache = getattr(cls, "cache", None)
                    if isinstance(cache, collections.abc.Callable):
                        cache(cached)
                return cached
            raise TypeError(f"'{data}' is not a '{cls.resource_type}' object")

        # not a LP API representation, let the specific class handle it
        fetch = getattr(cls, "fetch", None)
        if isinstance(fetch, collections.abc.Callable):
            return fetch(data)
        raise NotImplementedError(f"Don't know how to fetch '{data}' from LP")

    def __call__(self):
        return self._lpobject

    def __getattr__(self, attr):
        return getattr(self._lpobject, attr)

    def __repr__(self):
        return f"<{self.__class__.__name__}: {self._lpobject!r}>"


class Distribution(BaseWrapper):
    """
    Wrapper class around a LP distribution object.
    """

    resource_type = "distribution"

    def __init__(self, *args):  # pylint: disable=unused-argument
        self._archives = {}
        self._series_by_name = {}
        self._series = {}
        self._dev_series = None
        self._have_all_series = False
        self._main_archive = None

    def cache(self):
        self._cache[self.name] = self

    def _cache_series(self, series):
        """
        Add the DistroSeries to the cache if needed.
        """
        if series.version not in self._series:
            self._series_by_name[series.name] = series
            self._series[series.version] = series

    @classmethod
    def fetch(cls, dist):
        """
        Fetch the distribution object identified by 'dist' from LP.
        """
        if not isinstance(dist, str):
            raise TypeError(f"Don't know what to do with '{dist!r}'")
        cached = cls._cache.get(dist)
        if not cached:
            cached = Distribution(Launchpad.distributions[dist])
        return cached

    def getArchive(self, archive=None):
        """
        Returns an Archive object for the requested archive.
        Raises an ArchiveNotFoundException if the archive doesn't exist.

        If 'archive' is None, return the main archive.
        """
        if archive:
            res = self._archives.get(archive)

            if not res:
                for archive_ in self.archives:
                    if archive_.name == archive:
                        res = Archive(archive_)
                        self._archives[res.name] = res
                        break

            if res:
                return res
            message = f"The Archive '{archive}' doesn't exist in {self.display_name}"
            raise ArchiveNotFoundException(message)

        if self._main_archive is None:
            self._main_archive = Archive(self.main_archive_link)
        return self._main_archive

    def getSeries(self, name_or_version):
        """
        Returns a DistroSeries object for a series passed by name
        (e.g. 'karmic') or version (e.g. '9.10').
        If the series is not found: raise SeriesNotFoundException
        """
        if name_or_version in self._series:
            return self._series[name_or_version]
        if name_or_version in self._series_by_name:
            return self._series_by_name[name_or_version]

        try:
            series = DistroSeries(self().getSeries(name_or_version=name_or_version))
        except HTTPError as error:
            message = f"Release '{name_or_version}' is unknown in '{self.display_name}'."
            raise SeriesNotFoundException(message) from error

        self._cache_series(series)
        return series

    def getDevelopmentSeries(self):
        """
        Returns a DistroSeries object of the current development series.
        """
        if not self._dev_series:
            series = DistroSeries(self.current_series_link)
            self._cache_series(series)
            self._dev_series = series
        return self._dev_series

    def getAllSeries(self, active=True):  # pylint: disable=unused-argument
        """
        Returns a list of all DistroSeries objects.
        """
        if not self._have_all_series:
            for series in Launchpad.load(self.series_collection_link).entries:
                series_link = DistroSeries(series["self_link"])
                self._cache_series(series_link)
            self._have_all_series = True

        allseries = filter(lambda s: s.active, self._series.values())
        allseries = sorted(allseries, key=lambda s: float(s.version), reverse=True)
        Logger.debug("Found series: %s", ", ".join([f"{s.name} ({s.version})" for s in allseries]))
        return collections.OrderedDict((s.name, s) for s in allseries)
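
# A sketch of typical Distribution usage (values are illustrative):
#
#     ubuntu = Distribution("ubuntu")        # cached via Distribution.fetch()
#     devel = ubuntu.getDevelopmentSeries()  # current development DistroSeries
#     focal = ubuntu.getSeries("20.04")      # by version, or by name ("focal")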


class DistroArchSeries(BaseWrapper):  # pylint: disable=too-few-public-methods
    """
    Wrapper class around a LP distro arch series object.
    """

    resource_type = "distro_arch_series"

    def getSeries(self):
        """
        Get DistroSeries for this.
        """
        return DistroSeries(self._lpobject.distroseries_link)


class DistroSeries(BaseWrapper):
    """
    Wrapper class around a LP distro series object.
    """

    resource_type = "distro_series"

    def __init__(self, *args):  # pylint: disable=unused-argument
        if "_architectures" not in self.__dict__:
            self._architectures = {}

    def getArchSeries(self, archtag=None):
        """
        Returns a DistroArchSeries object for an architecture passed by name
        (e.g. 'amd64').
        If archtag is not specified, get the DistroArchSeries for the system arch.
        The special archtag 'all' will get the system arch.
        If the architecture is not found: raise ArchSeriesNotFoundException.
        """
        if not archtag or archtag == "all":
            archtag = host_architecture()
        if archtag not in self._architectures:
            try:
                architecture = DistroArchSeries(self().getDistroArchSeries(archtag=archtag))
                self._architectures[architecture.architecture_tag] = architecture
            except HTTPError as error:
                message = f"Architecture {archtag} is unknown."
                raise ArchSeriesNotFoundException(message) from error
        return self._architectures[archtag]

    def getPackageUploads(self, name=None, pocket=None, version=None, status="Unapproved"):
        """Returns a list of PackageUploads for this series."""
        params = {"exact_match": True}
        if name:
            params["name"] = name
        if pocket:
            params["pocket"] = pocket
        if version:
            params["version"] = version
        if status:
            params["status"] = status
        return [PackageUpload(p) for p in self._lpobject.getPackageUploads(**params)]
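
# A sketch of querying a series' upload queue (names are illustrative, and
# `display_name` is assumed to be exposed by the underlying LP object):
#
#     series = Distribution("ubuntu").getSeries("focal")
#     for upload in series.getPackageUploads(name="hello", status="Unapproved"):
#         print(upload.display_name)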


class PackageUpload(BaseWrapper):
    """
    Wrapper class around a LP package_upload object.
    """

    resource_type = "package_upload"

    def __init__(self, *args):  # pylint: disable=unused-argument
        self._custom_urls = None
        self._source_urls = None
        self._binary_urls = None
        self._binary_properties = None
        self._binary_prop_dict = None

    def getArchive(self):
        return Archive(self._lpobject.archive_link)

    def getSourceArchive(self):
        if self._lpobject.copy_source_archive_link:
            return Archive(self._lpobject.copy_source_archive_link)
        return None

    def getDistroSeries(self):
        return DistroSeries(self._lpobject.distroseries_link)

    def changesFileUrl(self):
        return self._lpobject.changes_file_url

    def customFileUrls(self):
        if not self._custom_urls:
            self._custom_urls = self._lpobject.customFileUrls()
        return copy(self._custom_urls)

    def sourceFileUrls(self):
        if not self._source_urls:
            self._source_urls = self._lpobject.sourceFileUrls()
        return copy(self._source_urls)

    def binaryFileUrls(self):
        if not self._binary_urls:
            self._binary_urls = self._lpobject.binaryFileUrls()
        return copy(self._binary_urls)

    def getBinaryProperties(self):
        if not self._binary_properties:
            self._binary_properties = self._lpobject.getBinaryProperties()
        return copy(self._binary_properties)

    def binaryFileProperties(self, filename_or_url):
        if not self._binary_prop_dict:
            urls = self.binaryFileUrls()
            props = self.getBinaryProperties()
            self._binary_prop_dict = dict(zip(urls, props))
            for key, value in copy(self._binary_prop_dict).items():
                filename = os.path.basename(urlparse(key).path)
                self._binary_prop_dict[filename] = value
        return self._binary_prop_dict.get(filename_or_url, {})
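
# Sketch: inspecting an upload's binaries via the cached accessors above
# (no particular property keys are assumed; the dicts come straight from
# getBinaryProperties()):
#
#     for url in upload.binaryFileUrls():
#         print(url, upload.binaryFileProperties(url))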


class Archive(BaseWrapper):
    """
    Wrapper class around a LP archive object.
    """

    resource_type = "archive"

    def __init__(self, *args):  # pylint: disable=unused-argument
        self._binpkgs = {}
        self._srcpkgs = {}
        self._pkg_uploaders = {}
        self._pkgset_uploaders = {}
        self._component_uploaders = {}

    def getSourcePackage(
        self,
        name,
        series=None,
        pocket=None,
        version=None,
        status=None,
        wrapper=None,
        search_all_series=False,
    ):
        """
        Returns a SourcePackagePublishingHistory object for the most
        recent source package in the distribution 'dist', series and
        pocket.

        series defaults to the current development series if not specified.
        series must be either a series name string, or DistroSeries object.

        version may be specified to get only the exact version requested.

        pocket may be a string or a list. If no version is provided, it
        defaults to all pockets except 'Backports'; if searching for a
        specific version, it defaults to all pockets. Pocket strings must
        be capitalized.

        status may be a string or a list. If no version is provided, it
        defaults to only 'Pending' and 'Published'; if searching for a
        specific version, it defaults to all statuses. Status strings must
        be capitalized.

        wrapper is the class to return an instance of; defaults to
        SourcePackagePublishingHistory.

        search_all_series is used if series is None. If False, this will
        search only the latest devel series, and if True all series
        will be searched, in reverse order, starting with the latest
        devel series. Defaults to False.

        If the requested source package doesn't exist a
        PackageNotFoundException is raised.
        """
        return self._getPublishedItem(
            name,
            series,
            pocket,
            cache=self._srcpkgs,
            function="getPublishedSources",
            name_key="source_name",
            wrapper=wrapper or SourcePackagePublishingHistory,
            version=version,
            status=status,
            search_all_series=search_all_series,
            binary=False,
        )
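
    # A sketch of typical use (names are illustrative):
    #
    #     archive = Distribution("ubuntu").getArchive()
    #     spph = archive.getSourcePackage("hello", series="focal")
    #     print(spph.getVersion(), spph.getComponent())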

    def getBinaryPackage(
        self,
        name,
        archtag=None,
        series=None,
        pocket=None,
        version=None,
        status=None,
        wrapper=None,
        search_all_series=False,
    ):
        """
        Returns a BinaryPackagePublishingHistory object for the most
        recent binary package in the distribution 'dist', architecture
        'archtag', series and pocket.

        series defaults to the current development series if not specified.
        series must be either a series name string, or DistroArchSeries object.
        series may be omitted if version is specified.

        version may be specified to get only the exact version requested.

        pocket may be a string or a list. If no version is provided, it
        defaults to all pockets except 'Backports'; if searching for a
        specific version, it defaults to all pockets. Pocket strings must
        be capitalized.

        status may be a string or a list. If no version is provided, it
        defaults to only 'Pending' and 'Published'; if searching for a
        specific version, it defaults to all statuses. Status strings must
        be capitalized.

        wrapper is the class to return an instance of; defaults to
        BinaryPackagePublishingHistory.

        search_all_series is used if series is None. If False, this will
        search only the latest devel series, and if True all series
        will be searched, in reverse order, starting with the latest
        devel series. Defaults to False.

        If the requested binary package doesn't exist a
        PackageNotFoundException is raised.
        """
        return self._getPublishedItem(
            name,
            series,
            pocket,
            archtag=archtag,
            cache=self._binpkgs,
            function="getPublishedBinaries",
            name_key="binary_name",
            wrapper=wrapper or BinaryPackagePublishingHistory,
            version=version,
            status=status,
            search_all_series=search_all_series,
            binary=True,
        )

    def _getPublishedItem(
        self,
        name,
        series,
        pocket,
        cache,
        function,
        name_key,
        wrapper,
        archtag=None,
        version=None,
        status=None,
        search_all_series=False,
        binary=False,
    ):
        """
        Common code between getSourcePackage and getBinaryPackage.

        Don't use this directly.
        """
        if not pocket:
            if version and not series:
                # check ALL pockets if specific version in any series
                pockets = POCKETS
            else:
                # otherwise, check all pockets EXCEPT 'Backports'
                pockets = DEFAULT_POCKETS
        elif isinstance(pocket, str):
            pockets = (pocket,)
        else:
            pockets = tuple(pocket)

        for pocket_ in pockets:
            if pocket_ not in POCKETS:
                raise PocketDoesNotExistError(f"Pocket '{pocket_}' does not exist.")

        if not status:
            if version:
                # check ALL statuses if specific version
                statuses = STATUSES
            else:
                # otherwise, only check 'Pending' and 'Published'
                statuses = DEFAULT_STATUSES
        elif isinstance(status, str):
            statuses = (status,)
        else:
            statuses = tuple(status)

        for status_ in statuses:
            if status_ not in STATUSES:
                raise ValueError(f"Status '{status_}' is not valid.")

        dist = Distribution(self.distribution_link)

        # please don't pass DistroArchSeries as archtag!
        # but, the code was like that before so keep
        # backwards compatibility.
        if isinstance(archtag, DistroArchSeries):
            series = archtag
            archtag = None

        series_to_check = [series]
        if not version and not series:
            # if neither version nor series are specified, use either the
            # devel series or search all series
            if search_all_series:
                series_to_check = dist.getAllSeries().values()
            else:
                series_to_check = [dist.getDevelopmentSeries()]

        # check each series - if only version was provided, series will be None
        for series in series_to_check:  # pylint: disable=redefined-argument-from-local
            arch_series = None

            if isinstance(series, DistroArchSeries):
                arch_series = series
                series = series.getSeries()
            elif isinstance(series, DistroSeries):
                pass
            elif series:
                series = dist.getSeries(series)

            if binary:
                if arch_series is None and series:
                    arch_series = series.getArchSeries(archtag=archtag)
                if archtag is None and arch_series:
                    archtag = arch_series.architecture_tag
                if archtag is None:
                    archtag = host_architecture()

            index = (name, getattr(series, "name", None), archtag, pockets, statuses, version)

            if index in cache:
                return cache[index]

            params = {name_key: name, "exact_match": True}

            if arch_series:
                params["distro_arch_series"] = arch_series()
            elif series:
                params["distro_series"] = series()

            if len(pockets) == 1:
                params["pocket"] = pockets[0]

            if len(statuses) == 1:
                params["status"] = statuses[0]

            if version:
                params["version"] = version

            Logger.debug(
                "Calling %s(%s)", function, ", ".join([f"{k}={v}" for (k, v) in params.items()])
            )
            records = getattr(self, function)(**params)

            err_msg = f"does not exist in the {dist.display_name} {self.name} archive"

            for record in records:
                if binary:
                    rversion = getattr(record, "binary_package_version", None)
                else:
                    rversion = getattr(record, "source_package_version", None)
                skipmsg = f"Skipping version {rversion}: "

                if record.pocket not in pockets:
                    err_msg = f"pocket {record.pocket} not in ({','.join(pockets)})"
                    Logger.debug(skipmsg + err_msg)
                    continue
                if record.status not in statuses:
                    err_msg = f"status {record.status} not in ({','.join(statuses)})"
                    Logger.debug(skipmsg + err_msg)
                    continue
                release = wrapper(record)
                if binary and archtag and archtag != release.arch:
                    err_msg = f"arch {release.arch} does not match requested arch {archtag}"
                    Logger.debug(skipmsg + err_msg)
                    continue
                # results are ordered so first is latest
                cache[index] = release
                return release

        version_with_epoch = None
        if version and version == Version(version).strip_epoch() and len(records) == 0:
            # a specific version was asked for, but we found none;
            # check if one exists with an epoch to give a hint in error msg
            for epoch in range(1, 9):
                version_ = Version(version)
                version_.epoch = epoch
                params["version"] = version_.full_version
                if len(getattr(self, function)(**params)) > 0:
                    version_with_epoch = version_.full_version
                    Logger.debug("Found version with epoch %s", version_with_epoch)
                    break

        if name_key == "binary_name":
            package_type = "binary package"
        elif name_key == "source_name":
            package_type = "source package"
        else:
            package_type = "package"
        msg = f"The {package_type} '{name}' "
        if version:
            msg += f"version {version} "
        msg += err_msg
        if binary and archtag:
            msg += f" for architecture {archtag}"
        if len(series_to_check) > 1:
            msg += " in any release"
            if len(pockets) == 1:
                msg += f" for pocket {pockets[0]}"
            elif len(pockets) != len(POCKETS):
                msg += f" for pockets {', '.join(pockets)}"
        elif series:
            msg += f" in {series.name}"
            if len(pockets) == 1:
                msg += f"-{pockets[0]}"
            elif len(pockets) != len(POCKETS):
                msg += f" for pockets {', '.join(pockets)}"
        if len(statuses) == 1:
            msg += f" with status {statuses[0]}"
        elif len(statuses) != len(STATUSES):
            msg += f" with status in {', '.join(statuses)}"
        if version_with_epoch:
            msg += f" (did you forget the epoch? try {version_with_epoch})"
        raise PackageNotFoundException(msg)

    def copyPackage(
        self,
        source_name,
        version,
        from_archive,
        to_pocket,
        to_series=None,
        sponsored=None,
        include_binaries=False,
    ):
        """Copy a single named source into this archive.

        Asynchronously copy a specific version of a named source to the
        destination archive if necessary. Calls to this method will return
        immediately if the copy passes basic security checks and the copy
        will happen sometime later with full checking.
        """
        # pylint: disable=protected-access
        if isinstance(sponsored, PersonTeam):
            sponsored = sponsored._lpobject

        self._lpobject.copyPackage(
            source_name=source_name,
            version=version,
            from_archive=from_archive._lpobject,
            to_pocket=to_pocket,
            to_series=to_series,
            sponsored=sponsored,
            include_binaries=include_binaries,
        )
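
    # An illustrative copy between archives (requires an authenticated login;
    # archive, package, and version names are made up):
    #
    #     Launchpad.login()
    #     target.copyPackage("hello", "2.10-1", from_archive=source_archive,
    #                        to_pocket="Release", include_binaries=True)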

    def getUploadersForComponent(self, component_name):
        """Get the list of PersonTeams who can upload packages in the
        specified component.
        [Note: the permission records, themselves, aren't exposed]
        """
        if component_name not in self._component_uploaders:
            self._component_uploaders[component_name] = sorted(
                set(
                    PersonTeam(permission.person_link)
                    for permission in self._lpobject.getUploadersForComponent(
                        component_name=component_name
                    )
                )
            )
        return self._component_uploaders[component_name]

    def getUploadersForPackage(self, source_package_name):
        """Get the list of PersonTeams who can upload source_package_name
        [Note: the permission records, themselves, aren't exposed]
        """
        if source_package_name not in self._pkg_uploaders:
            self._pkg_uploaders[source_package_name] = sorted(
                set(
                    PersonTeam(permission.person_link)
                    for permission in self._lpobject.getUploadersForPackage(
                        source_package_name=source_package_name
                    )
                ),
                key=lambda s: s.name,
            )
        return self._pkg_uploaders[source_package_name]

    def getUploadersForPackageset(self, packageset, direct_permissions=False):
        """Get the list of PersonTeams who can upload packages in packageset
        [Note: the permission records, themselves, aren't exposed]
        """
        key = (packageset, direct_permissions)
        if key not in self._pkgset_uploaders:
            self._pkgset_uploaders[key] = sorted(
                set(
                    PersonTeam(permission.person_link)
                    # pylint: disable=protected-access
                    for permission in self._lpobject.getUploadersForPackageset(
                        packageset=packageset._lpobject, direct_permissions=direct_permissions
                    )
                )
            )
        return self._pkgset_uploaders[key]

    def getMySubscriptionURL(self):
        """Get the "subscription URL" for the logged in user

        If this is a private archive (i.e. private PPA), this returns
        the "subscription URL" including authentication; otherwise
        this returns None.
        """
        if self.private:
            return PersonTeam.me.getArchiveSubscriptionURL(archive=self._lpobject)
        return None
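
# Sketch: checking who may upload a given source package (names illustrative):
#
#     archive = Distribution("ubuntu").getArchive()
#     for team in archive.getUploadersForPackage("hello"):
#         print(team.name)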


class SourcePackagePublishingHistory(BaseWrapper):
    """
    Wrapper class around a LP source package object.
    """

    resource_type = "source_package_publishing_history"

    def __init__(self, *args):  # pylint: disable=unused-argument
        self._archive = None
        self._changelog = None
        self._binaries = {}
        self._distro_series = None
        self._source_urls = None
        # Don't share _builds between different
        # SourcePackagePublishingHistory objects
        if "_builds" not in self.__dict__:
            self._builds = {}

    def getDistroSeries(self):
        """
        Return the DistroSeries.
        """
        if not self._distro_series:
            self._distro_series = DistroSeries(self._lpobject.distro_series_link)
        return self._distro_series

    def getPackageName(self):
        """
        Returns the source package name.
        """
        return self._lpobject.source_package_name

    def getVersion(self):
        """
        Returns the version of the source package.
        """
        return self._lpobject.source_package_version

    def getComponent(self):
        """
        Returns the component of the source package.
        """
        return self._lpobject.component_name

    def getSeriesName(self):
        """
        Returns the series name.

        Named getSeriesName() to avoid confusion with
        getDistroSeries()
        """
        return self.getDistroSeries().name

    def getSeriesAndPocket(self):
        """
        Returns a human-readable release-pocket
        """
        release = self.getSeriesName()
        if self.pocket != "Release":
            release += "-" + self.pocket.lower()
        return release

    def getArchive(self):
        """
        Get this SPPH's archive.
        """
        if not self._archive:
            self._archive = Archive(self._lpobject.archive_link)

        return self._archive

    def getChangelog(self, since_version=None):
        """
        Return the changelog, optionally since a particular version
        May return None if the changelog isn't available
        Only available in the devel API, not 1.0
        """
        if self._changelog is None:
            url = self._lpobject.changelogUrl()
            if url is None:
                Logger.error(
                    "No changelog available for %s %s", self.getPackageName(), self.getVersion()
                )
                return None

            try:
                self._changelog = download_text(url)
            except URLError as e:
                Logger.error("Exception while downloading '%s': %s", url, e)
                return None

        if since_version is None:
            return self._changelog

        if isinstance(since_version, str):
            since_version = Version(since_version)

        new_entries = []
        for block in Changelog(self._changelog):
            if block.version <= since_version:
                break
            new_entries.append(str(block))
        return "".join(new_entries)

    def sourceFileUrls(self, include_meta=False):
        """
        Return the URLs for this source publication's files.

        The include_meta param changes the return value;
        when it is False (the default), an array of url strings is
        returned. When include_meta is True, an array is returned
        with dicts, containing the entries:
            url: the url string
            sha1: the SHA1 checksum of the source file (if provided)
            sha256: the SHA256 checksum of the source file
            size: the size of the source file
        Also, this function adds a 'filename' field:
            filename: the filename parsed from the url path
        Note that while all the keys will be in the dict, their values
        may be None.
        """
        if not self._source_urls:
            urls = self._lpobject.sourceFileUrls(include_meta=True)
            if not urls:
                Logger.warning(
                    "SPPH %s_%s has no sourceFileUrls", self.getPackageName(), self.getVersion()
                )
            for url in urls:
                # make sure mandatory fields are present
                for field in ["url", "sha1", "sha256", "size"]:
                    if field not in url:
                        url[field] = None
                url["filename"] = os.path.basename(urlparse(url["url"]).path)
            self._source_urls = urls

        if include_meta:
            return list(self._source_urls)
        return [f["url"] for f in self._source_urls]

    def sourceFileUrl(self, filename):
        """
        Returns the URL for the specified source filename.

        If the filename is not found in the sourceFileUrls(), this returns None.
        """
        for f in self.sourceFileUrls(include_meta=True):
            if filename == f["filename"]:
                return f["url"]
        return None

    def sourceFileSha1(self, url_or_filename):
        """
        Returns the SHA1 checksum for the specified source file url.

        If the url is not found in the sourceFileUrls(), this returns None.

        The url may be specified as a filename.
        """
        for f in self.sourceFileUrls(include_meta=True):
            if url_or_filename in [f["url"], f["filename"]]:
                return f["sha1"]
        return None

    def sourceFileSha256(self, url_or_filename):
        """
        Returns the SHA256 checksum for the specified source file url.

        If the url is not found in the sourceFileUrls(), this returns None.

        The url may be specified as a filename.
        """
        for f in self.sourceFileUrls(include_meta=True):
            if url_or_filename in [f["url"], f["filename"]]:
                return f["sha256"]
        return None

    def sourceFileSize(self, url_or_filename):
        """
        Returns the size for the specified source file url.

        If the url is not found in the sourceFileUrls(), this returns 0.

        The url may be specified as a filename.
        """
        for f in self.sourceFileUrls(include_meta=True):
            if url_or_filename in [f["url"], f["filename"]]:
                return int(f["size"])
        return 0

    def getBinaries(self, arch=None, name=None, ext=None):
        """
        Returns the resulting BinaryPackagePublishingHistorys.
        If arch is specified, it returns binaries for only that arch,
        plus any binaries with arch 'all'. If arch is not specified, or
        if arch is specified as 'all', all archs are returned.

        If name is specified, only returns BPPH matching that (regex) name.

        If ext is specified, only returns BPPH matching that (regex) ext.
        """
        if arch == "all":
            arch = None

        if self.status in ["Pending", "Published"]:
            # Published, great! Directly query the list of binaries
            binaries = map(BinaryPackagePublishingHistory, self._lpobject.getPublishedBinaries())
            for binary in binaries:
                arch_ = binary.arch
                if arch_ not in self._binaries:
                    self._binaries[arch_] = {}
                self._binaries[arch_][binary.binary_package_name] = binary
        else:
            # we have to go the long way :(
            Logger.info("Please wait, this may take some time...")
            archive = self.getArchive()
            urls = self.binaryFileUrls()
            for url in urls:
                # strip out the URL leading text.
                filename = os.path.basename(urlparse(url).path)
                # strip the file suffix
                (pkgname, _, extension) = filename.rpartition(".")
                # split into name, version, arch
                (name_, _, arch_) = pkgname.rsplit("_", 2)
                # arch 'all' has separate bpph for each real arch,
                # but all point to the same binary url
                if arch_ == "all":
                    arch_ = arch or host_architecture()
                # Only check the arch requested - saves time
                if arch and arch != arch_:
                    continue
                # Only check the name requested - saves time
                if name and not re.match(name, name_):
                    continue
                # Only check the ext requested - saves time
                if ext and not re.match(ext, extension):
                    continue
                # If we already have this BPPH, keep going
                if arch_ in self._binaries and name_ in self._binaries[arch_]:
                    continue
                # we ignore the version, as it may be missing epoch
                # also we can't use series, as some package versions
                # span multiple series! (e.g. for different archs)
                params = {"name": name_, "archtag": arch_, "version": self.getVersion()}
                try:
                    bpph = archive.getBinaryPackage(**params)
                except PackageNotFoundException:
                    Logger.debug("Could not find pkg in archive: %s", filename)
                    continue
                if arch_ not in self._binaries:
                    self._binaries[arch_] = {}
                self._binaries[arch_][name_] = bpph

        if not arch:
            bpphs = [b for a in self._binaries.values() for b in a.values()]
        elif arch in self._binaries:
            bpphs = list(self._binaries[arch].values())
        else:
            return []

        if name:
            bpphs = [b for b in bpphs if re.match(name, b.binary_package_name)]

        if ext:
            bpphs = [b for b in bpphs if re.match(ext, b.getFileExt())]

        return bpphs
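
    # Sketch: collecting the .debs produced by a source publication
    # (assumes `spph` as above; name/ext are optional regex filters):
    #
    #     for bpph in spph.getBinaries(arch="amd64", name="^hello"):
    #         print(bpph.getPackageName(), bpph.binaryFileUrls())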
|
2011-11-15 01:50:04 +02:00
|
|
|
|
2010-12-12 11:46:07 +01:00
|
|
|
def _fetch_builds(self):
|
2023-01-30 19:45:36 +01:00
|
|
|
"""Populate self._builds with the build records."""
|
2010-12-12 11:46:07 +01:00
|
|
|
builds = self.getBuilds()
|
|
|
|
for build in builds:
|
|
|
|
self._builds[build.arch_tag] = Build(build)
|
|
|
|
|


class BinaryPackagePublishingHistory(BaseWrapper):
    """
    Wrapper class around an LP binary package object.
    """

    resource_type = "binary_package_publishing_history"

    def __init__(self, *args):  # pylint: disable=unused-argument
        self._arch = None
        self._ext = None
        self._binary_urls = None

    @property
    def arch(self):
        if not self._arch:
            das = DistroArchSeries(self._lpobject.distro_arch_series_link)
            self._arch = das.architecture_tag
        return self._arch

    def getSourcePackageName(self):
        """
        Returns the source package name.
        """
        return self.getBuild().source_package_name

    def getPackageName(self):
        """
        Returns the binary package name.
        """
        return self._lpobject.binary_package_name

    def getVersion(self):
        """
        Returns the version of the binary package.
        """
        return self._lpobject.binary_package_version

    def getComponent(self):
        """
        Returns the component of the binary package.
        """
        return self._lpobject.component_name

    def binaryFileUrls(self, include_meta=False):
        """
        Return the URLs for this binary publication's files.

        Only available in the devel API, not 1.0.

        The include_meta param changes the return value:
        when it is False (the default), a list of url strings is
        returned (but typically there is only a single url in the list).
        When include_meta is True, a list (again, with typically only one
        entry) of dicts is returned, containing the entries:
            url: the url string
            sha1: the SHA1 checksum of the binary file
            sha256: the SHA256 checksum of the binary file
            size: the size of the binary file
        Also, this function adds a 'filename' field:
            filename: the filename parsed from the url path
        Note that while all the keys will be in the dict, their values
        may be None.
        """
        if not self._binary_urls:
            try:
                urls = self._lpobject.binaryFileUrls(include_meta=True)
            except AttributeError as error:
                raise AttributeError(
                    "binaryFileUrls can only be found in lpapi "
                    "devel, not 1.0. Login using devel to have it."
                ) from error
            if not urls:
                Logger.warning(
                    "BPPH %s_%s has no binaryFileUrls", self.getPackageName(), self.getVersion()
                )
            for url in urls:
                # make sure mandatory fields are present
                for field in ["url", "sha1", "sha256", "size"]:
                    if field not in url:
                        url[field] = None
                url["filename"] = os.path.basename(urlparse(url["url"]).path)
            self._binary_urls = urls

        if include_meta:
            return list(self._binary_urls)
        return [f["url"] for f in self._binary_urls]
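
    # A minimal usage sketch for the two return shapes of binaryFileUrls()
    # (illustrative only; `bpph` is a hypothetical BinaryPackagePublishingHistory
    # obtained through a 'devel' API login, and the url/package names are made up):
    #
    #   urls = bpph.binaryFileUrls()
    #   # -> ["https://launchpad.net/.../+files/hello_2.10-3_amd64.deb"]
    #   meta = bpph.binaryFileUrls(include_meta=True)
    #   # -> [{"url": ..., "sha1": ..., "sha256": ..., "size": ..., "filename": ...}]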

    def binaryFileUrl(self, filename):
        """
        Returns the URL for the specified binary filename.

        If the filename is not found in binaryFileUrls(), this returns None.
        """
        for f in self.binaryFileUrls(include_meta=True):
            if filename == f["filename"]:
                return f["url"]
        return None

    def binaryFileSha1(self, url_or_filename):
        """
        Returns the SHA1 checksum for the specified binary file url.

        If the url is not found in binaryFileUrls(), this returns None.

        The url may be specified as a filename.
        """
        for f in self.binaryFileUrls(include_meta=True):
            if url_or_filename in [f["url"], f["filename"]]:
                return f["sha1"]
        return None

    def binaryFileSha256(self, url_or_filename):
        """
        Returns the SHA256 checksum for the specified binary file url.

        If the url is not found in binaryFileUrls(), this returns None.

        The url may be specified as a filename.
        """
        for f in self.binaryFileUrls(include_meta=True):
            if url_or_filename in [f["url"], f["filename"]]:
                return f["sha256"]
        return None

    def binaryFileSize(self, url_or_filename):
        """
        Returns the size for the specified binary file url.

        If the url is not found in binaryFileUrls(), this returns 0.

        The url may be specified as a filename.
        """
        for f in self.binaryFileUrls(include_meta=True):
            if url_or_filename in [f["url"], f["filename"]]:
                # size may be None (see binaryFileUrls); report that as 0
                return int(f["size"] or 0)
        return 0
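
    # Sketch of verifying a downloaded file against the published checksum
    # (illustrative only; `bpph`, `local_path`, and the filename are hypothetical):
    #
    #   import hashlib
    #   expected = bpph.binaryFileSha256("hello_2.10-3_amd64.deb")
    #   with open(local_path, "rb") as f:
    #       actual = hashlib.sha256(f.read()).hexdigest()
    #   assert expected is None or actual == expected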

    def getBuild(self):
        """
        Returns the original build of the binary package.
        """
        return Build(self._lpobject.build_link)

    def getUrl(self):
        """
        Returns the original build URL of the binary package.
        """
        return f"{self.getBuild().getUrl()}/+files/{self.getFileName()}"

    def getFileVersion(self):
        """
        Returns the file version, which is the package version without the epoch.
        """
        return Version(self.getVersion()).strip_epoch()

    def getFileArch(self):
        """
        Returns the file arch, which is 'all' if not arch-specific.
        """
        if self._lpobject.architecture_specific:
            return self.arch
        return "all"

    def getFileExt(self):
        """
        Returns the file extension; "deb", "ddeb", or "udeb".
        """
        if not self._ext:
            self._ext = self._getFileExt()

        return self._ext

    def _getFileExt(self):
        try:
            # this is the best way, from the actual URL filename
            return self.binaryFileUrls()[0].rpartition(".")[2]
        except (AttributeError, IndexError):
            Logger.debug("Could not get file ext from url, trying to guess...")

        # is_debug should be a reliable way of detecting a ddeb...?
        try:
            if self.is_debug:
                return "ddeb"
        except AttributeError:
            # is_debug is only available with api version 'devel'
            if self.getPackageName().endswith("-dbgsym"):
                return "ddeb"

        # is this reliable?
        if self.getPackageName().endswith(("-di", "-udeb")):
            return "udeb"

        # everything else - assume regular deb
        return "deb"

    def getFileName(self):
        """
        Returns the filename for this binary package.
        """
        return (
            f"{self.getPackageName()}_{self.getFileVersion()}"
            f"_{self.getFileArch()}.{self.getFileExt()}"
        )
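
    # Sketch of how the filename pieces fit together (illustrative only;
    # the package name, version, and arch below are made up):
    #
    #   bpph.getFileName()  # e.g. "hello_2.10-3_amd64.deb", i.e.
    #   # <binary name>_<version without epoch>_<arch or "all">.<deb|ddeb|udeb>
    #   bpph.getUrl()       # the build URL with "/+files/<filename>" appended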


class MetaPersonTeam(MetaWrapper):
    @property
    def me(cls):
        """The PersonTeam object of the currently authenticated LP user or
        None when anonymously logged in.
        """
        if "_me" not in cls.__dict__:
            try:
                # We have to use me.self_link due to LP: #504297
                cls._me = PersonTeam(Launchpad.me.self_link)
            except HTTPError as error:
                if error.response.status == 401:
                    # Anonymous login
                    cls._me = None
                else:
                    raise
        return cls._me
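
    # Usage sketch (illustrative only; requires a prior Launchpad login):
    #
    #   me = PersonTeam.me  # PersonTeam of the authenticated user,
    #                       # or None after an anonymous login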


class PersonTeam(BaseWrapper, metaclass=MetaPersonTeam):
    """
    Wrapper class around an LP person or team object.
    """

    resource_type = ("person", "team")

    def __init__(self, *args):  # pylint: disable=unused-argument
        # Don't share _upload between different PersonTeams
        self._ppas = None
        if "_upload" not in self.__dict__:
            self._upload = {}

    def __str__(self):
        return f"{self.display_name} ({self.name})"

    def cache(self):
        self._cache[self.name] = self

    @classmethod
    def fetch(cls, person_or_team):
        """
        Fetch the person or team object identified by 'person_or_team' from LP.
        """
        if not isinstance(person_or_team, str):
            raise TypeError(f"Don't know what to do with {person_or_team!r}")
        cached = cls._cache.get(person_or_team)
        if not cached:
            cached = PersonTeam(Launchpad.people[person_or_team])
        return cached
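
    # Usage sketch (illustrative only; the Launchpad name is made up):
    #
    #   person = PersonTeam.fetch("someuser")
    #   print(person)  # "Some User (someuser)", via __str__ above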

    def isLpTeamMember(self, team):
        """
        Checks if the user is a member of a certain team on Launchpad.

        Returns True if the user is a member of the team, otherwise False.
        """
        return any(t.name == team for t in self.super_teams)
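
    # Usage sketch (illustrative only; note this matches against the team
    # names in self.super_teams):
    #
    #   if person.isLpTeamMember("ubuntu-dev"):
    #       ...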

    def canUploadPackage(self, archive, distroseries, package, component, pocket="Release"):
        """Check if the person or team has upload rights for the source
        package to the specified 'archive' and 'distroseries'.

        A source package name or a component (or both) has to be specified.
        'archive' has to be an Archive object.
        'distroseries' has to be a DistroSeries object.
        """
        if not isinstance(archive, Archive):
            raise TypeError(f"'{archive!r}' is not an Archive object.")
        if not isinstance(distroseries, DistroSeries):
            raise TypeError(f"'{distroseries!r}' is not a DistroSeries object.")
        if package is not None and not isinstance(package, str):
            raise TypeError("A source package name expected.")
        if component is not None and not isinstance(component, str):
            raise TypeError("A component name expected.")
        if package is None and component is None:
            raise ValueError("Either a source package name or a component has to be specified.")
        if pocket not in POCKETS:
            raise PocketDoesNotExistError(f"Pocket '{pocket}' does not exist.")

        can_upload = self._upload.get((archive, distroseries, pocket, package, component))

        if can_upload is None:
            # checkUpload() throws an exception if the person can't upload
            try:
                archive.checkUpload(
                    component=component,
                    distroseries=distroseries(),
                    person=self(),
                    pocket=pocket,
                    sourcepackagename=package,
                )
                can_upload = True
            except HTTPError as e:
                if e.response.status == 403:
                    can_upload = False
                else:
                    raise
            index = (archive, distroseries, pocket, package, component)
            self._upload[index] = can_upload

        return can_upload
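
    # Usage sketch (illustrative only; `archive` and `series` are hypothetical
    # Archive and DistroSeries wrappers obtained elsewhere):
    #
    #   if person.canUploadPackage(archive, series, "hello", None):
    #       ...  # result is cached per (archive, series, pocket, package, component)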

    def getPPAs(self):
        if self._ppas is None:
            ppas = [
                Archive(ppa["self_link"])
                for ppa in Launchpad.load(self._lpobject.ppas_collection_link).entries
            ]
            self._ppas = {ppa.name: ppa for ppa in ppas}
        return self._ppas

    def getPPAByName(self, name):
        return Archive(self._lpobject.getPPAByName(name=name))
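
    # Usage sketch (illustrative only; the PPA name is made up):
    #
    #   person.getPPAs()            # dict mapping PPA name -> Archive, cached
    #   person.getPPAByName("ppa")  # single Archive, fetched directly from LP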


class Project(BaseWrapper):
    """
    Wrapper class around an LP project object.
    """

    resource_type = "project"

    def __init__(self, *args):  # pylint: disable=unused-argument
        self._series = None

    @property
    def series(self):
        """Get a list of all ProjectSeries.

        The list will be sorted by date_created, in descending order.
        """
        if not self._series:
            series = [
                ProjectSeries(s["self_link"])
                for s in Launchpad.load(self._lpobject.series_collection_link).entries
            ]
            self._series = sorted(series, key=lambda s: s.date_created, reverse=True)
        return self._series.copy()

    @classmethod
    def fetch(cls, project):
        """
        Fetch the project object identified by 'project' from LP.
        """
        if not isinstance(project, str):
            raise TypeError(f"Don't know what to do with {project!r}")
        return Project(Launchpad.projects(project))
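
    # Usage sketch (illustrative only; the project name is made up):
    #
    #   project = Project.fetch("someproject")
    #   latest = project.series[0]  # newest ProjectSeries first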


class ProjectSeries(BaseWrapper):  # pylint: disable=too-few-public-methods
    """
    Wrapper class around an LP project_series object.
    """

    resource_type = "project_series"


class Build(BaseWrapper):
    """
    Wrapper class around a build object.
    """

    resource_type = "build"

    def __str__(self):
        return f"{self.arch_tag}: {self.buildstate}"

    def getSourcePackagePublishingHistory(self):
        link = self._lpobject.current_source_publication_link
        if link:
            if re.search("redacted", link):
                # Too old - the link has been 'redacted'
                return None
            return SourcePackagePublishingHistory(link)
        return None

    def getUrl(self):
        return self()

    def rescore(self, score):
        if self.can_be_rescored:
            self().rescore(score=score)
            return True
        return False

    def retry(self):
        if self.can_be_retried:
            self().retry()
            return True
        return False
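
    # Usage sketch (illustrative only; `build` is a hypothetical Build wrapper):
    #
    #   build.retry()        # returns False unless can_be_retried
    #   build.rescore(5000)  # returns False unless can_be_rescored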


class DistributionSourcePackage(BaseWrapper):  # pylint: disable=too-few-public-methods
    """
    Caching class for distribution_source_package objects.
    """

    resource_type = "distribution_source_package"


class Packageset(BaseWrapper):  # pylint: disable=too-few-public-methods
    """
    Caching class for packageset objects.
    """

    resource_type = "packageset"

    _lp_packagesets = None
    _source_sets = {}

    @classmethod
    def setsIncludingSource(cls, sourcepackagename, distroseries=None, direct_inclusion=False):
        """Get the package sets including sourcepackagename."""

        if cls._lp_packagesets is None:
            cls._lp_packagesets = Launchpad.packagesets

        key = (sourcepackagename, distroseries, direct_inclusion)
        if key not in cls._source_sets:
            params = {"sourcepackagename": sourcepackagename, "direct_inclusion": direct_inclusion}
            if distroseries is not None:
                params["distroseries"] = distroseries._lpobject  # pylint: disable=protected-access
            cls._source_sets[key] = [
                Packageset(packageset)
                for packageset in cls._lp_packagesets.setsIncludingSource(**params)
            ]

        return cls._source_sets[key]
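
    # Usage sketch (illustrative only; results are cached per
    # (sourcepackagename, distroseries, direct_inclusion) key):
    #
    #   for ps in Packageset.setsIncludingSource("bash"):
    #       print(ps.name)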