mirror of https://git.launchpad.net/ubuntu-dev-tools (synced 2025-03-12 07:31:08 +00:00)

commit b1bc7e1cdc (parent 8692bc2b1c)

Address pylint complaints

Signed-off-by: Benjamin Drung <benjamin.drung@canonical.com>
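
Most of the changes below follow two recurring pylint checks: invalid-name (single-letter locals, lowercase module-level names, and UPPER_CASE names used for plain variables) and raise-missing-from (re-raising a new exception inside an except block without chaining the original). A minimal sketch of the renaming pattern — the function and names here are illustrative, not from the tree:

    from pathlib import Path

    def is_readable_file(pathname):
        # before: p = Path(pathname) -- pylint C0103 invalid-name
        path = Path(pathname)
        return path.is_file()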
@@ -324,7 +324,7 @@ def orig_needed(upload, workdir, pkg):
 
     version = pkg.version.upstream_version
 
-    h = Http()
+    http = Http()
     for filename in glob.glob(os.path.join(workdir, "%s_%s.orig*" % (pkg.source, version))):
         url = "https://launchpad.net/~%s/+archive/%s/+sourcefiles/%s/%s/%s" % (
             quote(user),
@@ -334,7 +334,7 @@ def orig_needed(upload, workdir, pkg):
             quote(os.path.basename(filename)),
         )
         try:
-            headers, body = h.request(url, "HEAD")
+            headers, body = http.request(url, "HEAD")
             if headers.status != 200 or not headers["content-location"].startswith(
                 "https://launchpadlibrarian.net"
             ):
@@ -21,6 +21,9 @@
 # this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import optparse
 import os.path
 import sys
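
The disable/enable pair added at the top of each script scopes the suppression: pylint control comments take effect from the line on which they appear, so bracketing a region silences a check only inside it. A small standalone illustration (not from this repository):

    COUNTER = 0  # module level: pylint expects UPPER_CASE here

    # pylint: disable=invalid-name
    n = 1  # suppressed inside the disabled region
    # pylint: enable=invalid-name

    m = 2  # flagged again once the check is re-enabled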
@@ -22,6 +22,9 @@
 # UDT_EDIT_WRAPPER_TEMPLATE_RE: An extra boilerplate-detecting regex.
 # UDT_EDIT_WRAPPER_FILE_DESCRIPTION: The type of file being edited.
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import optparse
 import os
 import re
@@ -19,6 +19,9 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import json
 import optparse
 import sys
@@ -21,6 +21,9 @@
 #
 # ##################################################################
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import argparse
 import logging
 import re
@@ -18,6 +18,9 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import sys
 
 from debian.changelog import Changelog
@@ -59,9 +62,9 @@ def merge_changelog(left_changelog, right_changelog):
     right_blocks = iter(right_cl)
 
     clist = sorted(left_versions | right_versions, reverse=True)
-    ci = len(clist)
+    remaining = len(clist)
     for version in clist:
-        ci -= 1
+        remaining -= 1
        if version in left_versions:
            block = next(left_blocks)
            if version in right_versions:
@@ -71,7 +74,7 @@ def merge_changelog(left_changelog, right_changelog):
 
         assert block.version == version
 
-        Logger.info(str(block).strip() + ("\n" if ci else ""))
+        Logger.info(str(block).strip() + ("\n" if remaining else ""))
 
 
 def main():
@@ -29,6 +29,9 @@
 # configurations. For example, a symlink called pbuilder-hardy will assume
 # that the target distribution is always meant to be Ubuntu Hardy.
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import os
 import os.path
 import shutil
@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":
@@ -17,6 +17,9 @@
 # OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
 # PERFORMANCE OF THIS SOFTWARE.
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import optparse
 import sys
 
@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":
pull-pkg | 3
@@ -23,6 +23,9 @@
 #
 # ##################################################################
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":
@@ -6,6 +6,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -6,6 +6,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -6,6 +6,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -6,6 +6,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":
@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":

@@ -5,6 +5,9 @@
 #
 # See pull-pkg
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 from ubuntutools.pullpkg import PullPkg
 
 if __name__ == "__main__":
@@ -14,6 +14,9 @@
 # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import argparse
 import sys
 
@@ -14,6 +14,9 @@
 # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import collections
 import gzip
 import json
@@ -35,21 +38,21 @@ def load_index(url):
     and read it.
     """
     cachedir = os.path.expanduser("~/.cache/ubuntu-dev-tools")
-    fn = os.path.join(cachedir, "seeded.json.gz")
+    seeded = os.path.join(cachedir, "seeded.json.gz")
 
-    if not os.path.isfile(fn) or time.time() - os.path.getmtime(fn) > 60 * 60 * 2:
+    if not os.path.isfile(seeded) or time.time() - os.path.getmtime(seeded) > 60 * 60 * 2:
         if not os.path.isdir(cachedir):
             os.makedirs(cachedir)
-        urllib.request.urlretrieve(url, fn)
+        urllib.request.urlretrieve(url, seeded)
 
     try:
-        with gzip.open(fn, "r") as f:
+        with gzip.open(seeded, "r") as f:
             return json.load(f)
     except Exception as e:
         Logger.error(
             "Unable to parse seed data: %s. Deleting cached data, please try again.", str(e)
         )
-        os.unlink(fn)
+        os.unlink(seeded)
 
 
 def resolve_binaries(sources):
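
load_index() above treats the cached seeded.json.gz as fresh for two hours, comparing the file's mtime against the current time before re-downloading. The same policy in isolation — the function name and default are assumptions for illustration:

    import os
    import time

    def is_stale(path, max_age=2 * 60 * 60):
        # Missing file, or mtime older than max_age seconds, means re-download.
        return not os.path.isfile(path) or time.time() - os.path.getmtime(path) > max_age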
@@ -14,6 +14,9 @@
 # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import logging
 import optparse
 import os
@@ -187,8 +187,8 @@ def submit_bugreport(body, debdiff, deb_version, changelog):
 
 
 def check_reportbug_config():
-    fn = os.path.expanduser("~/.reportbugrc")
-    if os.path.exists(fn):
+    reportbugrc_filename = os.path.expanduser("~/.reportbugrc")
+    if os.path.exists(reportbugrc_filename):
         return
     email = ubu_email()[1]
     reportbugrc = (
@@ -210,7 +210,7 @@ no-cc
         % email
     )
 
-    with open(fn, "w") as f:
+    with open(reportbugrc_filename, "w") as f:
         f.write(reportbugrc)
 
     Logger.info(
ubuntu-build | 21
@@ -22,6 +22,9 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import sys
 from optparse import OptionGroup, OptionParser
 
@@ -145,14 +148,14 @@ def main():
     try:
         package = str(args[0]).lower()
         release = str(args[1]).lower()
-        op = str(args[2]).lower()
+        operation = str(args[2]).lower()
     except IndexError:
         opt_parser.print_help()
         sys.exit(1)
 
     # Check our operation.
-    if op not in ("rescore", "retry", "status"):
-        Logger.error("Invalid operation: %s." % op)
+    if operation not in ("rescore", "retry", "status"):
+        Logger.error("Invalid operation: %s." % operation)
         sys.exit(1)
 
     # If the user has specified an architecture to build, we only wish to
@@ -199,9 +202,9 @@ def main():
     # (retry) or buildd admins (rescore). Check if the proper permissions
     # are in place.
     me = PersonTeam.me
-    if op == "rescore":
+    if operation == "rescore":
         necessary_privs = me.isLpTeamMember("launchpad-buildd-admins")
-    if op == "retry":
+    if operation == "retry":
         necessary_privs = me.canUploadPackage(
             ubuntu_archive,
             distroseries,
@@ -210,11 +213,11 @@ def main():
             pocket=pocket,
         )
 
-    if op in ("rescore", "retry") and not necessary_privs:
+    if operation in ("rescore", "retry") and not necessary_privs:
         Logger.error(
             "You cannot perform the %s operation on a %s "
             "package as you do not have the permissions "
-            "to do this action." % (op, component)
+            "to do this action." % (operation, component)
         )
         sys.exit(1)
 
@@ -235,7 +238,7 @@ def main():
 
         done = True
         Logger.info("%s: %s." % (build.arch_tag, build.buildstate))
-        if op == "rescore":
+        if operation == "rescore":
             if build.can_be_rescored:
                 # FIXME: make priority an option
                 priority = 5000
@@ -243,7 +246,7 @@ def main():
                 build.rescore(score=priority)
             else:
                 Logger.info("Cannot rescore build on %s." % build.arch_tag)
-        if op == "retry":
+        if operation == "retry":
             if build.can_be_retried:
                 Logger.info("Retrying build on %s..." % build.arch_tag)
                 build.retry()
@@ -20,6 +20,9 @@
 #
 # ##################################################################
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import optparse
 import subprocess
 import sys
@@ -14,6 +14,9 @@
 # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import optparse
 import sys
 
@@ -7,7 +7,7 @@ import logging
 import sys
 
 
-def getLogger():
+def getLogger():  # pylint: disable=invalid-name
     """Get the logger instance for this module
 
     Quick guide for using this or not: if you want to call ubuntutools
@@ -102,12 +102,12 @@ class Dsc(debian.deb822.Dsc):
 
     def verify_file(self, pathname):
         "Verify that pathname matches the checksums in the dsc"
-        p = Path(pathname)
-        if not p.is_file():
+        path = Path(pathname)
+        if not path.is_file():
             return False
         alg, checksums = self.get_strongest_checksum()
-        size, digest = checksums[p.name]
-        return verify_file_checksum(p, alg, digest, size)
+        size, digest = checksums[path.name]
+        return verify_file_checksum(path, alg, digest, size)
 
     def compare_dsc(self, other):
         """Check whether any files in these two dscs that have the same name
@@ -256,7 +256,7 @@ class SourcePackage(ABC):
                 # log binary lookup failure, in case it provides hints
                 Logger.info(str(bpnfe))
                 # raise the original exception for the source lookup
-                raise pnfe
+                raise pnfe from None
 
             self.binary = self.source
             self.source = bpph.getSourcePackageName()
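
pylint's raise-missing-from check drives the `from` additions throughout this file: `raise new from error` records the original exception as __cause__, while `raise pnfe from None` (above) deliberately drops the secondary binary-lookup failure so only the source-lookup error surfaces. A self-contained sketch of the chained form, with hypothetical names:

    def lookup():
        raise KeyError("underlying failure")

    try:
        lookup()
    except KeyError as error:
        # Chained: the traceback prints the KeyError as the direct cause.
        raise RuntimeError("lookup failed") from error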
@@ -312,8 +312,8 @@ class SourcePackage(ABC):
         if self._dsc_source:
             raise RuntimeError("Internal error: we have a dsc file but dsc not set")
         urls = self._source_urls(self.dsc_name)
-        with tempfile.TemporaryDirectory() as d:
-            tmpdsc = Path(d) / self.dsc_name
+        with tempfile.TemporaryDirectory() as tmpdir:
+            tmpdsc = Path(tmpdir) / self.dsc_name
             self._download_file_from_urls(urls, tmpdsc)
             self._dsc = Dsc(tmpdsc.read_bytes())
             self._check_dsc_signature()
@@ -401,35 +401,35 @@ class SourcePackage(ABC):
             Logger.warning("Signature on %s could not be verified" % self.dsc_name)
 
     def _verify_file(self, pathname, dscverify=False, sha1sum=None, sha256sum=None, size=0):
-        p = Path(pathname)
-        if not p.exists():
+        path = Path(pathname)
+        if not path.exists():
             return False
-        if dscverify and not self.dsc.verify_file(p):
+        if dscverify and not self.dsc.verify_file(path):
             return False
         checksums = {}
         if sha1sum:
             checksums["SHA1"] = sha1sum
         if sha256sum:
             checksums["SHA256"] = sha256sum
-        if not verify_file_checksums(p, checksums, size):
+        if not verify_file_checksums(path, checksums, size):
             return False
         return True
 
     def _download_file(self, url, filename, size=0, dscverify=False, sha1sum=None, sha256sum=None):
         "Download url to filename; will be put in workdir unless filename is absolute path."
         if Path(filename).is_absolute():
-            p = Path(filename).expanduser().resolve()
+            path = Path(filename).expanduser().resolve()
         else:
-            p = self.workdir / filename
+            path = self.workdir / filename
 
         can_verify = any((dscverify, sha1sum, sha256sum))
-        if can_verify and self._verify_file(p, dscverify, sha1sum, sha256sum, size):
-            Logger.info(f"Using existing file {p}")
+        if can_verify and self._verify_file(path, dscverify, sha1sum, sha256sum, size):
+            Logger.info(f"Using existing file {path}")
             return True
 
-        download(url, p, size)
+        download(url, path, size)
 
-        return self._verify_file(p, dscverify, sha1sum, sha256sum, size)
+        return self._verify_file(path, dscverify, sha1sum, sha256sum, size)
 
     def _download_file_from_urls(
         self, urls, filename, size=0, dscverify=False, sha1sum=None, sha256sum=None
@@ -698,8 +698,8 @@ class PersonalPackageArchiveSourcePackage(UbuntuSourcePackage):
     def team(self):
         try:
             return PersonTeam.fetch(self._teamname)
-        except KeyError:
-            raise ValueError(f"No user/team '{self._teamname}' found on Launchpad")
+        except KeyError as error:
+            raise ValueError(f"No user/team '{self._teamname}' found on Launchpad") from error
 
     @functools.lru_cache()
     def getArchive(self):
@@ -823,9 +823,9 @@ class UbuntuCloudArchiveSourcePackage(PersonalPackageArchiveSourcePackage):
         if not any((release, pocket)):
             all_ppas = cls.getUbuntuCloudArchiveTeam().getPPAs()
             ppas = []
-            for r in cls.getUbuntuCloudArchiveReleaseNames():
-                for p in cls.VALID_POCKETS:
-                    name = f"{r}-{p}"
+            for ppa_release in cls.getUbuntuCloudArchiveReleaseNames():
+                for valid_pocket in cls.VALID_POCKETS:
+                    name = f"{ppa_release}-{valid_pocket}"
                     if name in all_ppas:
                         ppas.append(all_ppas[name])
             return ppas
@@ -868,27 +868,27 @@ class UbuntuCloudArchiveSourcePackage(PersonalPackageArchiveSourcePackage):
         release = release.lower().strip()
 
         # Cases 1 and 2
-        PATTERN1 = r"^(?P<ucarelease>[a-z]+)(?:-(?P<pocket>[a-z]+))?$"
+        pattern1 = r"^(?P<ucarelease>[a-z]+)(?:-(?P<pocket>[a-z]+))?$"
         # Cases 3 and 4
-        PATTERN2 = r"^(?P<ubunturelease>[a-z]+)-(?P<ucarelease>[a-z]+)(?:-(?P<pocket>[a-z]+))?$"
+        pattern2 = r"^(?P<ubunturelease>[a-z]+)-(?P<ucarelease>[a-z]+)(?:-(?P<pocket>[a-z]+))?$"
         # Case 5
-        PATTERN3 = r"^(?P<ubunturelease>[a-z]+)-(?P<pocket>[a-z]+)/(?P<ucarelease>[a-z]+)$"
+        pattern3 = r"^(?P<ubunturelease>[a-z]+)-(?P<pocket>[a-z]+)/(?P<ucarelease>[a-z]+)$"
 
-        for pattern in [PATTERN1, PATTERN2, PATTERN3]:
+        for pattern in [pattern1, pattern2, pattern3]:
             match = re.match(pattern, release)
             if match:
-                r = match.group("ucarelease")
-                p = match.group("pocket")
+                uca_release = match.group("ucarelease")
+                pocket = match.group("pocket")
                 # For UCA, there is no 'release' pocket, the default is 'updates'
-                if p and p == "release":
+                if pocket and pocket == "release":
                     Logger.warning(
                         "Ubuntu Cloud Archive does not use 'release' pocket,"
                         " using 'updates' instead"
                     )
-                    p = "updates"
-                if cls.isValidRelease(r) and (not p or p in cls.VALID_POCKETS):
-                    Logger.debug(f"Using Ubuntu Cloud Archive release '{r}'")
-                    return (r, p)
+                    pocket = "updates"
+                if cls.isValidRelease(uca_release) and (not pocket or pocket in cls.VALID_POCKETS):
+                    Logger.debug(f"Using Ubuntu Cloud Archive release '{uca_release}'")
+                    return (uca_release, pocket)
         raise SeriesNotFoundException(f"Ubuntu Cloud Archive release '{release}' not found")
 
     @classmethod
@@ -897,14 +897,14 @@ class UbuntuCloudArchiveSourcePackage(PersonalPackageArchiveSourcePackage):
             raise SeriesNotFoundException(f"Ubuntu Cloud Archive release '{release}' not found")
         if pocket and pocket not in cls.VALID_POCKETS:
             raise PocketDoesNotExistError(f"Ubuntu Cloud Archive pocket '{pocket}' is invalid")
-        DEFAULT = tuple(
+        default = tuple(
             cls.getUbuntuCloudArchivePPAs(release=release or cls.getDevelSeries())[0].name.split(
                 "-", maxsplit=1
             )
         )
         if not package:
             # not much we can do without a package name
-            return DEFAULT
+            return default
         checked_pocket = False
         for ppa in cls.getUbuntuCloudArchivePPAs(release=release):
             if pocket and pocket != ppa.name.partition("-")[2]:
@@ -918,10 +918,10 @@ class UbuntuCloudArchiveSourcePackage(PersonalPackageArchiveSourcePackage):
             if version:
                 params["version"] = version
             if ppa.getPublishedSources(**params):
-                (r, _, p) = ppa.name.partition("-")
-                return (r, p)
+                (ppa_release, _, ppa_pocket) = ppa.name.partition("-")
+                return (ppa_release, ppa_pocket)
         # package/version not found in any ppa
-        return DEFAULT
+        return default
 
 
 class _WebJSON(object):
@@ -986,9 +986,9 @@ class _Snapshot(_WebJSON):
         url = "/mr/package/{}/{}/srcfiles".format(name, version)
         try:
             response = self.load("{}?fileinfo=1".format(url))
-        except HTTPError:
+        except HTTPError as error:
             msg = "Package {} version {} not found"
-            raise PackageNotFoundException(msg.format(name, version))
+            raise PackageNotFoundException(msg.format(name, version)) from error
         result = response.get("result")
         info = response.get("fileinfo")
         if len(result) < 1:
@@ -998,11 +998,11 @@ class _Snapshot(_WebJSON):
         # this expects the 'component' to follow 'pool[-*]' in the path
         found_pool = False
         component = None
-        for s in path.split("/"):
+        for part in path.split("/"):
             if found_pool:
-                component = s
+                component = part
                 break
-            if s.startswith("pool"):
+            if part.startswith("pool"):
                 found_pool = True
         if not component:
             Logger.warning("could not determine component from path %s" % path)
@@ -1014,8 +1014,8 @@ class _Snapshot(_WebJSON):
     def _get_package(self, name, url, pkginit, version, sort_key):
         try:
             results = self.load("/mr/{}/{}/".format(url, name))["result"]
-        except HTTPError:
-            raise PackageNotFoundException("Package {} not found.".format(name))
+        except HTTPError as error:
+            raise PackageNotFoundException("Package {} not found.".format(name)) from error
 
         results = sorted(results, key=lambda r: r[sort_key], reverse=True)
         results = [pkginit(r) for r in results if version == r["version"]]
@@ -1168,7 +1168,7 @@ class SnapshotBinaryPackage(SnapshotPackage):
 
 
 class SnapshotFile(object):
-    def __init__(self, pkg_name, pkg_version, component, obj, h):
+    def __init__(self, pkg_name, pkg_version, component, obj, h):  # pylint: disable=invalid-name
         self.package_name = pkg_name
         self.package_version = pkg_version
         self.component = component
@@ -2,5 +2,6 @@
 # ubuntu-dev-tools Launchpad Python modules.
 #
 
+# pylint: disable=invalid-name
 service = "production"
 api_version = "devel"
@@ -242,9 +242,9 @@ class Distribution(BaseWrapper):
         res = self._archives.get(archive)
 
         if not res:
-            for a in self.archives:
-                if a.name == archive:
-                    res = Archive(a)
+            for archive_ in self.archives:
+                if archive_.name == archive:
+                    res = Archive(archive_)
                     self._archives[res.name] = res
                     break
 
@@ -271,9 +271,9 @@ class Distribution(BaseWrapper):
 
         try:
             series = DistroSeries(self().getSeries(name_or_version=name_or_version))
-        except HTTPError:
+        except HTTPError as error:
             message = "Release '%s' is unknown in '%s'." % (name_or_version, self.display_name)
-            raise SeriesNotFoundException(message)
+            raise SeriesNotFoundException(message) from error
 
         self._cache_series(series)
         return series
@@ -293,9 +293,9 @@ class Distribution(BaseWrapper):
         Returns a list of all DistroSeries objects.
         """
         if not self._have_all_series:
-            for s in Launchpad.load(self.series_collection_link).entries:
-                series = DistroSeries(s["self_link"])
-                self._cache_series(series)
+            for series in Launchpad.load(self.series_collection_link).entries:
+                series_link = DistroSeries(series["self_link"])
+                self._cache_series(series_link)
             self._have_all_series = True
 
         allseries = filter(lambda s: s.active, self._series.values())
@@ -346,9 +346,9 @@ class DistroSeries(BaseWrapper):
             try:
                 architecture = DistroArchSeries(self().getDistroArchSeries(archtag=archtag))
                 self._architectures[architecture.architecture_tag] = architecture
-            except HTTPError:
+            except HTTPError as error:
                 message = "Architecture %s is unknown." % archtag
-                raise ArchSeriesNotFoundException(message)
+                raise ArchSeriesNotFoundException(message) from error
         return self._architectures[archtag]
 
     def getPackageUploads(self, name=None, pocket=None, version=None, status="Unapproved"):
@@ -418,9 +418,9 @@ class PackageUpload(BaseWrapper):
             urls = self.binaryFileUrls()
             props = self.getBinaryProperties()
             self._binary_prop_dict = dict(zip(urls, props))
-            for (k, v) in copy(self._binary_prop_dict).items():
-                filename = os.path.basename(urlparse(k).path)
-                self._binary_prop_dict[filename] = v
+            for (key, value) in copy(self._binary_prop_dict).items():
+                filename = os.path.basename(urlparse(key).path)
+                self._binary_prop_dict[filename] = value
         return self._binary_prop_dict.get(filename_or_url, {})
 
 
@@ -583,9 +583,9 @@ class Archive(BaseWrapper):
         else:
             pockets = tuple(pocket)
 
-        for p in pockets:
-            if p not in POCKETS:
-                raise PocketDoesNotExistError("Pocket '%s' does not exist." % p)
+        for pocket_ in pockets:
+            if pocket_ not in POCKETS:
+                raise PocketDoesNotExistError("Pocket '%s' does not exist." % pocket_)
 
         if not status:
             if version:
@@ -599,9 +599,9 @@ class Archive(BaseWrapper):
         else:
             statuses = tuple(status)
 
-        for s in statuses:
-            if s not in STATUSES:
-                raise ValueError("Status '%s' is not valid." % s)
+        for status_ in statuses:
+            if status_ not in STATUSES:
+                raise ValueError("Status '%s' is not valid." % status_)
 
         dist = Distribution(self.distribution_link)
 
@@ -685,25 +685,25 @@ class Archive(BaseWrapper):
                     err_msg = "status %s not in (%s)" % (record.status, ",".join(statuses))
                     Logger.debug(skipmsg + err_msg)
                     continue
-                r = wrapper(record)
-                if binary and archtag and archtag != r.arch:
-                    err_msg = "arch %s does not match requested arch %s" % (r.arch, archtag)
+                release = wrapper(record)
+                if binary and archtag and archtag != release.arch:
+                    err_msg = "arch %s does not match requested arch %s" % (release.arch, archtag)
                     Logger.debug(skipmsg + err_msg)
                     continue
                 # results are ordered so first is latest
-                cache[index] = r
-                return r
+                cache[index] = release
+                return release
 
         version_with_epoch = None
         if version and version == Version(version).strip_epoch() and len(records) == 0:
             # a specific version was asked for, but we found none;
             # check if one exists with an epoch to give a hint in error msg
             for epoch in range(1, 9):
-                v = Version(version)
-                v.epoch = epoch
-                params["version"] = v.full_version
+                version_ = Version(version)
+                version_.epoch = epoch
+                params["version"] = version_.full_version
                 if len(getattr(self, function)(**params)) > 0:
-                    version_with_epoch = v.full_version
+                    version_with_epoch = version_.full_version
                     Logger.debug("Found version with epoch %s" % version_with_epoch)
                     break
 
@@ -957,12 +957,12 @@ class SourcePackagePublishingHistory(BaseWrapper):
                 Logger.warning(
                     "SPPH %s_%s has no sourceFileUrls" % (self.getPackageName(), self.getVersion())
                 )
-            for u in urls:
+            for url in urls:
                 # make sure mandatory fields are present
                 for field in ["url", "sha1", "sha256", "size"]:
-                    if field not in u:
-                        u[field] = None
-                u["filename"] = os.path.basename(urlparse(u["url"]).path)
+                    if field not in url:
+                        url[field] = None
+                url["filename"] = os.path.basename(urlparse(url["url"]).path)
             self._source_urls = urls
 
         if include_meta:
|
|||||||
if self.status in ["Pending", "Published"]:
|
if self.status in ["Pending", "Published"]:
|
||||||
# Published, great! Directly query the list of binaries
|
# Published, great! Directly query the list of binaries
|
||||||
binaries = map(BinaryPackagePublishingHistory, self._lpobject.getPublishedBinaries())
|
binaries = map(BinaryPackagePublishingHistory, self._lpobject.getPublishedBinaries())
|
||||||
for b in binaries:
|
for binary in binaries:
|
||||||
a = b.arch
|
arch_ = binary.arch
|
||||||
if a not in self._binaries:
|
if arch_ not in self._binaries:
|
||||||
self._binaries[a] = {}
|
self._binaries[arch_] = {}
|
||||||
self._binaries[a][b.binary_package_name] = b
|
self._binaries[arch_][binary.binary_package_name] = binary
|
||||||
else:
|
else:
|
||||||
# we have to go the long way :(
|
# we have to go the long way :(
|
||||||
Logger.info("Please wait, this may take some time...")
|
Logger.info("Please wait, this may take some time...")
|
||||||
@@ -1050,37 +1050,37 @@ class SourcePackagePublishingHistory(BaseWrapper):
                     # strip out the URL leading text.
                     filename = os.path.basename(urlparse(url).path)
                     # strip the file suffix
-                    (pkgname, _, e) = filename.rpartition(".")
+                    (pkgname, _, extension) = filename.rpartition(".")
                     # split into name, version, arch
-                    (n, v, a) = pkgname.rsplit("_", 2)
+                    (name_, _, arch_) = pkgname.rsplit("_", 2)
                     # arch 'all' has separate bpph for each real arch,
                     # but all point to the same binary url
-                    if a == "all":
-                        a = arch or host_architecture()
+                    if arch_ == "all":
+                        arch_ = arch or host_architecture()
                     # Only check the arch requested - saves time
-                    if arch and arch != a:
+                    if arch and arch != arch_:
                         continue
                     # Only check the name requested - saves time
-                    if name and not re.match(name, n):
+                    if name and not re.match(name, name_):
                         continue
                     # Only check the ext requested - saves time
-                    if ext and not re.match(ext, e):
+                    if ext and not re.match(ext, extension):
                         continue
                     # If we already have this BPPH, keep going
-                    if a in self._binaries and n in self._binaries[a]:
+                    if arch_ in self._binaries and name_ in self._binaries[arch_]:
                         continue
                     # we ignore the version, as it may be missing epoch
                     # also we can't use series, as some package versions
                     # span multiple series! (e.g. for different archs)
-                    params = {"name": n, "archtag": a, "version": self.getVersion()}
+                    params = {"name": name_, "archtag": arch_, "version": self.getVersion()}
                     try:
                         bpph = archive.getBinaryPackage(**params)
                     except PackageNotFoundException:
                         Logger.debug("Could not find pkg in archive: %s" % filename)
                         continue
-                    if a not in self._binaries:
-                        self._binaries[a] = {}
-                    self._binaries[a][n] = bpph
+                    if arch_ not in self._binaries:
+                        self._binaries[arch_] = {}
+                    self._binaries[arch_][name_] = bpph
 
         if not arch:
             bpphs = [b for a in self._binaries.values() for b in a.values()]
@@ -1215,21 +1215,21 @@ class BinaryPackagePublishingHistory(BaseWrapper):
         if not self._binary_urls:
             try:
                 urls = self._lpobject.binaryFileUrls(include_meta=True)
-            except AttributeError:
+            except AttributeError as error:
                 raise AttributeError(
                     "binaryFileUrls can only be found in lpapi "
                     "devel, not 1.0. Login using devel to have it."
-                )
+                ) from error
             if not urls:
                 Logger.warning(
                     "BPPH %s_%s has no binaryFileUrls" % (self.getPackageName(), self.getVersion())
                 )
-            for u in urls:
+            for url in urls:
                 # make sure mandatory fields are present
                 for field in ["url", "sha1", "sha256", "size"]:
-                    if field not in u:
-                        u[field] = None
-                u["filename"] = os.path.basename(urlparse(u["url"]).path)
+                    if field not in url:
+                        url[field] = None
+                url["filename"] = os.path.basename(urlparse(url["url"]).path)
             self._binary_urls = urls
 
         if include_meta:
@@ -1438,9 +1438,9 @@ class PersonTeam(BaseWrapper, metaclass=MetaPersonTeam):
         if pocket not in POCKETS:
             raise PocketDoesNotExistError("Pocket '%s' does not exist." % pocket)
 
-        canUpload = self._upload.get((archive, distroseries, pocket, package, component))
+        can_upload = self._upload.get((archive, distroseries, pocket, package, component))
 
-        if canUpload is None:
+        if can_upload is None:
             # checkUpload() throws an exception if the person can't upload
             try:
                 archive.checkUpload(
@@ -1450,16 +1450,16 @@ class PersonTeam(BaseWrapper, metaclass=MetaPersonTeam):
                     pocket=pocket,
                     sourcepackagename=package,
                 )
-                canUpload = True
+                can_upload = True
             except HTTPError as e:
                 if e.response.status == 403:
-                    canUpload = False
+                    can_upload = False
                 else:
                     raise e
             index = (archive, distroseries, pocket, package, component)
-            self._upload[index] = canUpload
+            self._upload[index] = can_upload
 
-        return canUpload
+        return can_upload
 
     def getPPAs(self):
         if self._ppas is None:
@@ -52,7 +52,7 @@ UPLOAD_QUEUE_STATUSES = ("New", "Unapproved", "Accepted", "Done", "Rejected")
 
 DOWNLOAD_BLOCKSIZE_DEFAULT = 8192
 
-_system_distribution_chain = []
+_SYSTEM_DISTRIBUTION_CHAIN = []
 
 
 class DownloadError(Exception):
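
pylint treats every module-level assignment as a constant and checks it against its const-rgx (UPPER_CASE by default), which is why the private cache list becomes _SYSTEM_DISTRIBUTION_CHAIN even though it is mutated in place. Mutation needs no `global` statement; only rebinding does. Illustrative only:

    _CACHE = []  # module-level name in UPPER_CASE satisfies invalid-name

    def remember(item):
        _CACHE.append(item)  # in-place mutation, no global statement required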
@@ -74,11 +74,11 @@ def system_distribution_chain():
     the distribution chain can't be determined, print an error message
     and return an empty list.
     """
-    global _system_distribution_chain
-    if len(_system_distribution_chain) == 0:
+    global _SYSTEM_DISTRIBUTION_CHAIN
+    if len(_SYSTEM_DISTRIBUTION_CHAIN) == 0:
         try:
             vendor = check_output(("dpkg-vendor", "--query", "Vendor"), encoding="utf-8").strip()
-            _system_distribution_chain.append(vendor)
+            _SYSTEM_DISTRIBUTION_CHAIN.append(vendor)
         except CalledProcessError:
             Logger.error("Could not determine what distribution you are running.")
             return []
@@ -89,7 +89,7 @@ def system_distribution_chain():
                 (
                     "dpkg-vendor",
                     "--vendor",
-                    _system_distribution_chain[-1],
+                    _SYSTEM_DISTRIBUTION_CHAIN[-1],
                     "--query",
                     "Parent",
                 ),
@@ -98,9 +98,9 @@ def system_distribution_chain():
             except CalledProcessError:
                 # Vendor has no parent
                 break
-            _system_distribution_chain.append(parent)
+            _SYSTEM_DISTRIBUTION_CHAIN.append(parent)
 
-    return _system_distribution_chain
+    return _SYSTEM_DISTRIBUTION_CHAIN
 
 
 def system_distribution():
@@ -138,16 +138,16 @@ def readlist(filename, uniq=True):
     Read a list of words from the indicated file. If 'uniq' is True, filter
     out duplicated words.
     """
-    p = Path(filename)
+    path = Path(filename)
 
-    if not p.is_file():
-        Logger.error(f"File {p} does not exist.")
+    if not path.is_file():
+        Logger.error(f"File {path} does not exist.")
         return False
 
-    content = p.read_text().replace("\n", " ").replace(",", " ")
+    content = path.read_text().replace("\n", " ").replace(",", " ")
 
     if not content.strip():
-        Logger.error(f"File {p} is empty.")
+        Logger.error(f"File {path} is empty.")
         return False
 
     items = [item for item in content.split() if item]
@@ -234,29 +234,31 @@ def verify_file_checksums(pathname, checksums={}, size=0):
 
     Returns True if all checks pass, False otherwise
     """
-    p = Path(pathname)
+    path = Path(pathname)
 
-    if not p.is_file():
-        Logger.error(f"File {p} not found")
+    if not path.is_file():
+        Logger.error(f"File {path} not found")
         return False
-    filesize = p.stat().st_size
+    filesize = path.stat().st_size
     if size and size != filesize:
-        Logger.error(f"File {p} incorrect size, got {filesize} expected {size}")
+        Logger.error(f"File {path} incorrect size, got {filesize} expected {size}")
         return False
 
     for (alg, checksum) in checksums.items():
-        h = hashlib.new(alg)
-        with p.open("rb") as f:
+        hash_ = hashlib.new(alg)
+        with path.open("rb") as f:
             while True:
-                block = f.read(h.block_size)
+                block = f.read(hash_.block_size)
                 if len(block) == 0:
                     break
-                h.update(block)
-        digest = h.hexdigest()
+                hash_.update(block)
+        digest = hash_.hexdigest()
         if digest == checksum:
-            Logger.debug(f"File {p} checksum ({alg}) verified: {checksum}")
+            Logger.debug(f"File {path} checksum ({alg}) verified: {checksum}")
         else:
-            Logger.error(f"File {p} checksum ({alg}) mismatch: got {digest} expected {checksum}")
+            Logger.error(
+                f"File {path} checksum ({alg}) mismatch: got {digest} expected {checksum}"
+            )
             return False
     return True
 
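
verify_file_checksums() hashes the file in blocks of hash_.block_size bytes rather than reading it whole. The same chunked-digest idiom as a standalone helper — the function name and block size are assumptions for illustration:

    import hashlib
    from pathlib import Path

    def file_digest(pathname, algorithm="sha256", blocksize=65536):
        # Hash the file in fixed-size chunks to bound memory use.
        hasher = hashlib.new(algorithm)
        with Path(pathname).open("rb") as handle:
            while block := handle.read(blocksize):
                hasher.update(block)
        return hasher.hexdigest()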
@@ -288,9 +290,13 @@ def extract_authentication(url):
 
     This returns a tuple in the form (url, username, password)
     """
-    u = urlparse(url)
-    if u.username or u.password:
-        return (u._replace(netloc=u.hostname).geturl(), u.username, u.password)
+    components = urlparse(url)
+    if components.username or components.password:
+        return (
+            components._replace(netloc=components.hostname).geturl(),
+            components.username,
+            components.password,
+        )
     return (url, None, None)
 
 
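
extract_authentication() relies on urllib.parse splitting userinfo out of the netloc: ParseResult exposes username, password, and hostname, and _replace(netloc=...) rebuilds the URL without the credentials. For example:

    from urllib.parse import urlparse

    parts = urlparse("https://user:secret@example.org/path")
    assert parts.username == "user" and parts.password == "secret"
    assert parts._replace(netloc=parts.hostname).geturl() == "https://example.org/path"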
@@ -339,21 +345,21 @@ def download(src, dst, size=0, *, blocksize=DOWNLOAD_BLOCKSIZE_DEFAULT):
     (src, username, password) = extract_authentication(src)
     auth = (username, password) if username or password else None
 
-    with tempfile.TemporaryDirectory() as d:
-        tmpdst = Path(d) / "dst"
+    with tempfile.TemporaryDirectory() as tmpdir:
+        tmpdst = Path(tmpdir) / "dst"
         try:
             with requests.get(src, stream=True, auth=auth) as fsrc, tmpdst.open("wb") as fdst:
                 fsrc.raise_for_status()
                 _download(fsrc, fdst, size, blocksize=blocksize)
-        except requests.exceptions.HTTPError as e:
-            if e.response is not None and e.response.status_code == 404:
-                raise NotFoundError(f"URL {src} not found: {e}")
-            raise DownloadError(e)
-        except requests.exceptions.ConnectionError as e:
+        except requests.exceptions.HTTPError as error:
+            if error.response is not None and error.response.status_code == 404:
+                raise NotFoundError(f"URL {src} not found: {error}") from error
+            raise DownloadError(error) from error
+        except requests.exceptions.ConnectionError as error:
             # This is likely a archive hostname that doesn't resolve, like 'ftpmaster.internal'
-            raise NotFoundError(f"URL {src} not found: {e}")
-        except requests.exceptions.RequestException as e:
-            raise DownloadError(e)
+            raise NotFoundError(f"URL {src} not found: {error}") from error
+        except requests.exceptions.RequestException as error:
+            raise DownloadError(error) from error
         shutil.move(tmpdst, dst)
     return dst
 
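Besides the renames, this hunk adds `from error` to every re-raise. Chaining sets __cause__ on the new exception, so tracebacks read "The above exception was the direct cause of..." instead of the misleading "During handling of the above exception, another exception occurred". A small sketch with stand-in exception classes:

    class DownloadError(Exception):
        pass

    def fetch():
        try:
            raise ConnectionResetError("connection reset by peer")
        except ConnectionResetError as error:
            # Explicit chaining keeps the original error attached as __cause__.
            raise DownloadError("download failed") from error

    try:
        fetch()
    except DownloadError as exc:
        print(repr(exc.__cause__))  # ConnectionResetError('connection reset by peer')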
@@ -440,8 +446,8 @@ def _download(fsrc, fdst, size, *, blocksize):
 
 
 def _download_text(src, binary, *, blocksize):
-    with tempfile.TemporaryDirectory() as d:
-        dst = Path(d) / "dst"
+    with tempfile.TemporaryDirectory() as tmpdir:
+        dst = Path(tmpdir) / "dst"
         download(src, dst, blocksize=blocksize)
         return dst.read_bytes() if binary else dst.read_text()
 
@@ -107,21 +107,21 @@ class PullPkg(object):
         unexpected errors will flow up to the caller.
         On success, this simply returns.
         """
-        Logger = ubuntutools_getLogger()
+        logger = ubuntutools_getLogger()
 
         try:
             cls(*args, **kwargs).pull()
             return
         except KeyboardInterrupt:
-            Logger.info("User abort.")
+            logger.info("User abort.")
         except (
             PackageNotFoundException,
             SeriesNotFoundException,
             PocketDoesNotExistError,
             InvalidDistroValueError,
             InvalidPullValueError,
-        ) as e:
-            Logger.error(str(e))
+        ) as error:
+            logger.error(str(error))
             sys.exit(errno.ENOENT)
 
     def __init__(self, *args, **kwargs):
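Lower-casing the local Logger here is more than cosmetics: the module already binds a module-level Logger, so a local variable of the same name shadows it and trips pylint's naming checks (local variables are expected to be snake_case). The situation, in a reduced form:

    import logging

    Logger = logging.getLogger(__name__)  # module-level, constant-style name

    def pull():
        logger = logging.getLogger("pullpkg")  # distinct lowercase local, no shadowing
        logger.info("pulling...")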
@@ -275,12 +275,12 @@ class PullPkg(object):
 
         if distro == DISTRO_PPA:
             # PPAs are part of Ubuntu distribution
-            d = Distribution(DISTRO_UBUNTU)
+            distribution = Distribution(DISTRO_UBUNTU)
         else:
-            d = Distribution(distro)
+            distribution = Distribution(distro)
 
         # let SeriesNotFoundException flow up
-        d.getSeries(release)
+        distribution.getSeries(release)
 
         Logger.debug("Using distro '%s' release '%s' pocket '%s'", distro, release, pocket)
         return (release, pocket)
@@ -340,12 +340,12 @@ class PullPkg(object):
         params["package"] = options["package"]
 
         if options["release"]:
-            (r, v, p) = self.parse_release_and_version(
+            (release, version, pocket) = self.parse_release_and_version(
                 distro, options["release"], options["version"]
             )
-            params["series"] = r
-            params["version"] = v
-            params["pocket"] = p
+            params["series"] = release
+            params["version"] = version
+            params["pocket"] = pocket
 
         if params["package"].endswith(".dsc") and not params["series"] and not params["version"]:
             params["dscfile"] = params["package"]
@@ -553,33 +553,33 @@ class PullPkg(object):
             raise PackageNotFoundException(msg)
 
         if pull == PULL_LIST:
-            for p in packages:
-                msg = "Found %s %s (ID %s)" % (p.package_name, p.package_version, p.id)
-                if p.display_arches:
-                    msg += " arch %s" % p.display_arches
+            for pkg in packages:
+                msg = "Found %s %s (ID %s)" % (pkg.package_name, pkg.package_version, pkg.id)
+                if pkg.display_arches:
+                    msg += " arch %s" % pkg.display_arches
                 Logger.info(msg)
-                url = p.changesFileUrl()
+                url = pkg.changesFileUrl()
                 if url:
                     Logger.info("Changes file:")
                     Logger.info(" %s", url)
                 else:
                     Logger.info("No changes file")
-                urls = p.sourceFileUrls()
+                urls = pkg.sourceFileUrls()
                 if urls:
                     Logger.info("Source files:")
                     for url in urls:
                         Logger.info(" %s", url)
                 else:
                     Logger.info("No source files")
-                urls = p.binaryFileUrls()
+                urls = pkg.binaryFileUrls()
                 if urls:
                     Logger.info("Binary files:")
                     for url in urls:
                         Logger.info(" %s", url)
-                        Logger.info(" { %s }" % p.binaryFileProperties(url))
+                        Logger.info(" { %s }" % pkg.binaryFileProperties(url))
                 else:
                     Logger.info("No binary files")
-                urls = p.customFileUrls()
+                urls = pkg.customFileUrls()
                 if urls:
                     Logger.info("Custom files:")
                     for url in urls:
@@ -593,18 +593,18 @@ class PullPkg(object):
             else:
                 msg += ", please specify the version"
             Logger.error("Available package versions/ids are:")
-            for p in packages:
-                Logger.error("%s %s (id %s)" % (p.package_name, p.package_version, p.id))
+            for pkg in packages:
+                Logger.error("%s %s (id %s)" % (pkg.package_name, pkg.package_version, pkg.id))
             raise PackageNotFoundException(msg)
 
-        p = packages[0]
+        pkg = packages[0]
 
-        urls = set(p.customFileUrls())
-        if p.changesFileUrl():
-            urls.add(p.changesFileUrl())
+        urls = set(pkg.customFileUrls())
+        if pkg.changesFileUrl():
+            urls.add(pkg.changesFileUrl())
 
         if pull == PULL_SOURCE:
-            urls |= set(p.sourceFileUrls())
+            urls |= set(pkg.sourceFileUrls())
             if not urls:
                 Logger.error("No source files to download")
             dscfile = None
@@ -636,7 +636,7 @@ class PullPkg(object):
         else:
             raise InvalidPullValueError("Invalid pull value %s" % pull)
 
-        urls |= set(p.binaryFileUrls())
+        urls |= set(pkg.binaryFileUrls())
         if not urls:
             Logger.error("No binary files to download")
         for url in urls:
@@ -149,9 +149,9 @@ def get_ubuntu_delta_changelog(srcpkg):
     changes = Changes(Http().request(changes_url)[1])
     for line in changes["Changes"].splitlines():
         line = line[1:]
-        m = topline.match(line)
-        if m:
-            distribution = m.group(3).split()[0].split("-")[0]
+        match = topline.match(line)
+        if match:
+            distribution = match.group(3).split()[0].split("-")[0]
             if debian_info.valid(distribution):
                 break
         if line.startswith(" "):
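The m → match rename reflects the usual re idiom of binding the match object and testing its truthiness in one step. A tiny self-contained sketch (the regex here is invented, not the real topline pattern):

    import re

    topline = re.compile(r"^(\S+) \((\S+)\) (\S+);")
    match = topline.match("hello (1.0-1ubuntu1) jammy; urgency=medium")
    if match:  # match objects are truthy on success, None otherwise
        distribution = match.group(3).split()[0].split("-")[0]
        print(distribution)  # jammy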
@@ -207,9 +207,9 @@ Content-Type: text/plain; charset=UTF-8
     while True:
         try:
             Logger.info("Connecting to %s:%s ...", mailserver_host, mailserver_port)
-            s = smtplib.SMTP(mailserver_host, mailserver_port)
+            smtp = smtplib.SMTP(mailserver_host, mailserver_port)
             break
-        except smtplib.SMTPConnectError as s:
+        except smtplib.SMTPConnectError as error:
             try:
                 # py2 path
                 # pylint: disable=unsubscriptable-object
@@ -217,8 +217,8 @@ Content-Type: text/plain; charset=UTF-8
                     "Could not connect to %s:%s: %s (%i)",
                     mailserver_host,
                     mailserver_port,
-                    s[1],
-                    s[0],
+                    error[1],
+                    error[0],
                 )
             except TypeError:
                 # pylint: disable=no-member
@@ -226,15 +226,15 @@ Content-Type: text/plain; charset=UTF-8
                     "Could not connect to %s:%s: %s (%i)",
                     mailserver_host,
                     mailserver_port,
-                    s.strerror,
-                    s.errno,
+                    error.strerror,
+                    error.errno,
                 )
-            if s.smtp_code == 421:
+            if error.smtp_code == 421:
                 confirmation_prompt(
                     message="This is a temporary error, press [Enter] "
                     "to retry. Press [Ctrl-C] to abort now."
                 )
-        except socket.error as s:
+        except socket.error as error:
             try:
                 # py2 path
                 # pylint: disable=unsubscriptable-object
@@ -242,8 +242,8 @@ Content-Type: text/plain; charset=UTF-8
                     "Could not connect to %s:%s: %s (%i)",
                     mailserver_host,
                     mailserver_port,
-                    s[1],
-                    s[0],
+                    error[1],
+                    error[0],
                 )
             except TypeError:
                 # pylint: disable=no-member
@@ -251,27 +251,27 @@ Content-Type: text/plain; charset=UTF-8
                     "Could not connect to %s:%s: %s (%i)",
                     mailserver_host,
                     mailserver_port,
-                    s.strerror,
-                    s.errno,
+                    error.strerror,
+                    error.errno,
                 )
             return
 
     if mailserver_user and mailserver_pass:
         try:
-            s.login(mailserver_user, mailserver_pass)
+            smtp.login(mailserver_user, mailserver_pass)
         except smtplib.SMTPAuthenticationError:
             Logger.error("Error authenticating to the server: invalid username and password.")
-            s.quit()
+            smtp.quit()
             return
         except smtplib.SMTPException:
             Logger.error("Unknown SMTP error.")
-            s.quit()
+            smtp.quit()
             return
 
     while True:
         try:
-            s.sendmail(myemailaddr, to, mail.encode("utf-8"))
-            s.quit()
+            smtp.sendmail(myemailaddr, to, mail.encode("utf-8"))
+            smtp.quit()
             os.remove(backup.name)
             Logger.info("Sync request mailed.")
             break
@@ -285,8 +285,8 @@ Content-Type: text/plain; charset=UTF-8
                 )
             else:
                 return
-        except smtplib.SMTPResponseException as e:
-            Logger.error("Error while sending: %i, %s", e.smtp_code, e.smtp_error)
+        except smtplib.SMTPResponseException as error:
+            Logger.error("Error while sending: %i, %s", error.smtp_code, error.smtp_error)
             return
         except smtplib.SMTPServerDisconnected:
             Logger.error("Server disconnected while sending the mail.")
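For orientation, the retry/except scaffolding above wraps the plain smtplib flow of connect, optional login, sendmail, quit. Stripped of the error handling, that flow is roughly (host, port, and addresses are placeholders):

    import smtplib

    smtp = smtplib.SMTP("mail.example.org", 25)
    try:
        smtp.login("user", "password")  # only when the server requires auth
        smtp.sendmail("me@example.org", ["you@example.org"],
                      b"Subject: sync request\n\nPlease sync.")
    finally:
        smtp.quit()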
@@ -57,11 +57,11 @@ class ExamplePackage(object):
         return "my content"
 
     def create(self):
-        with tempfile.TemporaryDirectory() as d:
-            self._create(Path(d))
+        with tempfile.TemporaryDirectory() as tmpdir:
+            self._create(Path(tmpdir))
 
-    def _create(self, d):
-        pkgdir = d / self.dirname
+    def _create(self, directory: Path):
+        pkgdir = directory / self.dirname
         pkgdir.mkdir()
         (pkgdir / self.content_filename).write_text(self.content_text)
 
@@ -80,13 +80,13 @@ class ExamplePackage(object):
             f"dpkg-source -b {self.dirname}".split(),
             check=True,
             env=self.env,
-            cwd=str(d),
+            cwd=str(directory),
             stdout=subprocess.DEVNULL,
             stderr=subprocess.DEVNULL,
         )
 
         # move tarballs and dsc to destdir
         self.destdir.mkdir(parents=True, exist_ok=True)
-        (d / self.orig.name).rename(self.orig)
-        (d / self.debian.name).rename(self.debian)
-        (d / self.dsc.name).rename(self.dsc)
+        (directory / self.orig.name).rename(self.orig)
+        (directory / self.debian.name).rename(self.debian)
+        (directory / self.dsc.name).rename(self.dsc)
@@ -26,9 +26,9 @@ from ubuntutools.test.example_package import ExamplePackage
 
 class BaseVerificationTestCase(unittest.TestCase):
     def setUp(self):
-        d = tempfile.TemporaryDirectory()
-        self.addCleanup(d.cleanup)
-        self.pkg = ExamplePackage(destdir=Path(d.name))
+        tmpdir = tempfile.TemporaryDirectory()
+        self.addCleanup(tmpdir.cleanup)
+        self.pkg = ExamplePackage(destdir=Path(tmpdir.name))
         self.pkg.create()
         self.dsc = ubuntutools.archive.Dsc(self.pkg.dsc.read_bytes())
 
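The test changes keep the TemporaryDirectory object alive (instead of using a with-block) so its cleanup can be registered with addCleanup, which runs even when setUp or the test itself fails. The pattern in isolation:

    import tempfile
    import unittest
    from pathlib import Path

    class ExampleTestCase(unittest.TestCase):
        def setUp(self):
            tmpdir = tempfile.TemporaryDirectory()
            self.addCleanup(tmpdir.cleanup)  # removed even if the test errors out
            self.workdir = Path(tmpdir.name)

        def test_workdir_exists(self):
            self.assertTrue(self.workdir.is_dir())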
@@ -65,9 +65,9 @@ class LocalSourcePackageTestCase(BaseVerificationTestCase):
 
     def setUp(self):
         super().setUp()
-        d = tempfile.TemporaryDirectory()
-        self.addCleanup(d.cleanup)
-        self.workdir = Path(d.name)
+        tmpdir = tempfile.TemporaryDirectory()
+        self.addCleanup(tmpdir.cleanup)
+        self.workdir = Path(tmpdir.name)
 
     def pull(self, **kwargs):
         """Do the pull from pkg dir to the workdir, return the SourcePackage"""
@@ -40,9 +40,9 @@ class ConfigTestCase(unittest.TestCase):
 
     def setUp(self):
         super(ConfigTestCase, self).setUp()
-        m = mock.mock_open()
-        m.side_effect = self._fake_open
-        patcher = mock.patch("builtins.open", m)
+        open_mock = mock.mock_open()
+        open_mock.side_effect = self._fake_open
+        patcher = mock.patch("builtins.open", open_mock)
         self.addCleanup(patcher.stop)
         patcher.start()
 
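mock.mock_open builds a mock suitable as a drop-in replacement for builtins.open; the setUp above additionally routes calls through a custom side_effect. A minimal sketch of the plain pattern, with invented file name and content:

    from unittest import mock

    open_mock = mock.mock_open(read_data="[devscripts]\n")
    with mock.patch("builtins.open", open_mock):
        with open("devscripts.conf") as f:  # served by the mock, not the filesystem
            print(f.read())                 # [devscripts]
    open_mock.assert_called_with("devscripts.conf")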
@@ -230,8 +230,6 @@ class UbuEmailTestCase(unittest.TestCase):
         try:
             os.environ["DEBFULLNAME"] = env_name
         except UnicodeEncodeError:
-            raise unittest.SkipTest(
-                "python interpreter is not running in an unicode capable locale"
-            )
+            self.skipTest("python interpreter is not running in an unicode capable locale")
         os.environ["DEBEMAIL"] = email = "joe@example.net"
         self.assertEqual(ubu_email(), (name, email))
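self.skipTest() is the TestCase-native way to skip from inside a running test; raising unittest.SkipTest directly works too, but reads like an error path. In reduced form:

    import os
    import unittest

    class LocaleTestCase(unittest.TestCase):
        def test_env_roundtrip(self):
            try:
                os.environ["DEBFULLNAME"] = "Jöe Dévloper"  # can fail under a non-UTF-8 locale
            except UnicodeEncodeError:
                self.skipTest("interpreter is not running in a unicode capable locale")
            self.assertEqual(os.environ["DEBFULLNAME"], "Jöe Dévloper")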
@@ -14,6 +14,9 @@
 # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
+# pylint: disable=invalid-name
+# pylint: enable=invalid-name
+
 import optparse
 import os
 import sys