parent
6bd359375d
commit
0393350b70
@ -1,441 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import subprocess
|
|
||||||
import io
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import yaml
|
|
||||||
import argparse
|
|
||||||
import logging
|
|
||||||
from datetime import datetime
|
|
||||||
import tarfile
|
|
||||||
import shutil
|
|
||||||
from git import Repo, GitCommandError
|
|
||||||
import tempfile
|
|
||||||
from concurrent.futures import ThreadPoolExecutor, wait, FIRST_COMPLETED
|
|
||||||
import fnmatch
|
|
||||||
import re
|
|
||||||
from debian.copyright import Header, Copyright
|
|
||||||
import uuid
|
|
||||||
from common import clean_old_logs
|
|
||||||
|
|
||||||
# Directory layout and uploader identity for the Lubuntu CI source-build run.
BASE_DIR = "/srv/lubuntu-ci/repos"
DEBFULLNAME = "Lugito"
DEBEMAIL = "info@lubuntu.me"
OUTPUT_DIR = os.path.join(BASE_DIR, "build_output")
# Lintian tags that are known noise for these packages; suppressed in
# run_source_lintian via --suppress-tags-from-file.
SUPPRESSED_LINTIAN_TAGS = [
    "orig-tarball-missing-upstream-signature",
    "package-has-long-file-name",
    "adopted-extended-field"
]
BASE_OUTPUT_DIR = "/srv/lubuntu-ci/output"
LOG_DIR = os.path.join(BASE_OUTPUT_DIR, "logs", "source_builds")
# Lintian results are collected in a per-run unique temp directory and
# merged into REAL_LINTIAN_DIR at the end (see publish_lintian).
BASE_LINTIAN_DIR = os.path.join(BASE_OUTPUT_DIR, f".lintian.tmp.{str(uuid.uuid4())[:8]}")
REAL_LINTIAN_DIR = os.path.join(BASE_OUTPUT_DIR, "lintian")

os.makedirs(LOG_DIR, exist_ok=True)
os.makedirs(OUTPUT_DIR, exist_ok=True)
os.makedirs(BASE_LINTIAN_DIR, exist_ok=True)

# One log file per run, named by UTC start time; output is mirrored to
# the console via the StreamHandler.
current_time = datetime.utcnow().strftime("%Y%m%dT%H%M%S")
log_file = os.path.join(LOG_DIR, f"{current_time}.log")
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
    handlers=[
        logging.FileHandler(log_file),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger("TimeBasedLogger")
|
|
||||||
|
|
||||||
def run_command(cmd, cwd=None, env=None, show_output=False):
    """Run *cmd* via subprocess, logging the invocation and outcome.

    Output is captured; with ``show_output`` the child's stdout/stderr
    are logged at INFO/WARNING.  Raises subprocess.CalledProcessError on
    a non-zero exit status, after logging the child's streams.
    """
    pretty = " ".join(cmd)
    logging.info(f"Executing: {pretty} in {cwd or 'current directory'}")
    try:
        proc = subprocess.run(
            cmd,
            cwd=cwd,
            env=env,
            check=True,
            capture_output=True,
            text=True,
        )
    except subprocess.CalledProcessError as exc:
        logging.error(f"Command failed: {pretty}")
        logging.error(exc.stderr)
        logging.error(exc.stdout)
        raise
    if show_output:
        # stdout is informational; anything on stderr is surfaced as a warning.
        for stream, emit in ((proc.stdout, logging.info), (proc.stderr, logging.warning)):
            if stream:
                emit(f"Output: {stream.strip()}")
    logging.info(f"Command succeeded: {pretty}")
|
|
||||||
|
|
||||||
def parse_version(changelog_path):
    """Derive the CI upload version from the first debian/changelog entry.

    Reads the version between parentheses on the first line, strips the
    Debian revision and any previous "+gitNNN"/"~suffix" decoration, and
    appends "+git<UTC timestamp>".  An epoch prefix ("1:") is preserved.

    Raises IndexError/FileNotFoundError (after logging) if the changelog
    is missing or malformed.
    """
    try:
        epoch = None

        with open(changelog_path, "r") as f:
            first_line = f.readline().strip()

        # e.g. "pkg (1:2.3-1ubuntu1) jammy; ..." -> "1:2.3-1ubuntu1"
        version_match = first_line.split("(")[1].split(")")[0]
        upstream_version = version_match

        if ":" in version_match:
            epoch, upstream_version = version_match.split(":", 1)
        # Drop the Debian revision ("-0ubuntu1", ...).
        upstream_version = upstream_version.split("-")[0]
        # Drop a previous snapshot suffix so "+git" parts do not stack.
        upstream_version = re.sub(r"(\+git[0-9]+)?(~[a-z]+)?$", "", upstream_version)

        # Use UTC for the snapshot stamp, consistent with the log-file
        # naming above (the original used local time here).
        current_date = datetime.utcnow().strftime("%Y%m%d%H%M")
        if epoch:
            version = f"{epoch}:{upstream_version}+git{current_date}"
            logging.info(f"Upstream version extracted: {epoch}:{upstream_version}")
            logging.info(f"Going to use: {version}")
        else:
            version = f"{upstream_version}+git{current_date}"
            logging.info(f"Upstream version extracted: {upstream_version}")
            logging.info(f"Going to use: {version}")

        return version
    except (IndexError, FileNotFoundError) as e:
        logging.error(f"Error parsing version from {changelog_path}: {e}")
        raise
|
|
||||||
|
|
||||||
def get_exclusions(packaging):
    """Return the Files-Excluded entries from debian/copyright, if any.

    Only the header paragraph of the copyright file carries the
    Files-Excluded field; an empty list is returned when it is absent.
    """
    copyright_path = os.path.join(packaging, "debian/copyright")
    with io.open(copyright_path, "rt", encoding="utf-8") as fh:
        parsed = Copyright(fh)
        for paragraph in parsed.all_paragraphs():
            if isinstance(paragraph, Header):
                # Stop at the first header paragraph, as the original did.
                return list(paragraph.files_excluded or [])
    return []
|
|
||||||
|
|
||||||
def create_tarball(name, source_dir, exclusions=[]):
|
|
||||||
tar_filename = f"{name}_MAIN.orig.tar.gz"
|
|
||||||
logging.info(f"Creating tarball: {tar_filename}")
|
|
||||||
exclusions.append(".git/")
|
|
||||||
|
|
||||||
def exclusion_func(tarinfo):
|
|
||||||
for exclusion in exclusions:
|
|
||||||
if exclusion in tarinfo.name:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return tarinfo
|
|
||||||
|
|
||||||
with tarfile.open(tar_filename, "w:gz") as tar:
|
|
||||||
tar.add(source_dir, arcname=os.path.basename(source_dir), filter=exclusion_func)
|
|
||||||
logging.info(f"Tarball created and compressed: {tar_filename}")
|
|
||||||
|
|
||||||
def update_changelog(packaging_dir, release, version, env):
    """Reset debian/changelog and add a CI entry for *release*/*version*."""
    name = os.path.basename(packaging_dir)
    logging.info(f"Updating changelog for {name} to version {version}-0ubuntu1~ppa1")
    # Discard any changelog edit left over from a previous release build.
    run_command(["git", "checkout", "debian/changelog"], cwd=packaging_dir)
    run_command(
        [
            "dch",
            "--distribution", release,
            "--package", name,
            "--newversion", f"{version}-0ubuntu1~ppa1",
            "--urgency", "low",
            "CI upload.",
        ],
        cwd=packaging_dir,
        env=env,
    )
|
|
||||||
|
|
||||||
def build_package(packaging_dir, env, large):
    """Build a source package with debuild and return the .changes path.

    *env* must carry an epoch-less ``VERSION`` plus DEBFULLNAME/DEBEMAIL.
    *large* switches the scratch directory from /tmp (tempfile) to a
    directory under OUTPUT_DIR for packages that do not fit in tmpfs.

    Raises FileNotFoundError if no ``_source.changes`` file is produced.
    """
    name = os.path.basename(packaging_dir)
    logging.info(f"Building source package for {name}")

    if large:
        temp_dir = os.path.join(OUTPUT_DIR, f".tmp_{name}_{env['VERSION']}")
        logging.warning(f"{name} is quite large and will not fit in /tmp, building at {temp_dir}")
        if not os.path.exists(temp_dir):
            os.mkdir(temp_dir)
    else:
        temp_dir = tempfile.mkdtemp()

    try:
        temp_packaging_dir = os.path.join(temp_dir, name)
        os.makedirs(temp_packaging_dir, exist_ok=True)
        shutil.copytree(packaging_dir + "/debian", temp_packaging_dir + "/debian")

        # debuild expects the orig tarball next to the source tree.
        tarball_name = f"{name}_{env['VERSION']}.orig.tar.gz"
        tarball_source = os.path.join(BASE_DIR, tarball_name)
        tarball_dest = os.path.join(temp_dir, tarball_name)
        shutil.copyfile(tarball_source, tarball_dest)

        cmd_build = ["debuild", "--no-lintian", "-S", "-d", "-sa", "-nc"]
        run_command(cmd_build, cwd=temp_packaging_dir, env=env)
        # Restore the changelog in the real packaging tree for the next release.
        run_command(["git", "checkout", "debian/changelog"], cwd=packaging_dir)

        # Copy every build product for this name/version into OUTPUT_DIR.
        pattern = f"{name}_{env['VERSION']}*"
        for filename in os.listdir(temp_dir):
            if fnmatch.fnmatch(filename, pattern):
                source_file = os.path.join(temp_dir, filename)
                dest_file = os.path.join(OUTPUT_DIR, filename)
                shutil.copyfile(source_file, dest_file)
                # The original logged a garbled "(unknown)" placeholder
                # here instead of the copied file's name.
                logging.info(f"Copied {filename} to {OUTPUT_DIR}")

        changes_files = [
            f for f in os.listdir(OUTPUT_DIR)
            if f.startswith(f"{name}_{env['VERSION']}") and f.endswith("_source.changes")
        ]
    finally:
        shutil.rmtree(temp_dir)

    if changes_files:
        changes_file = os.path.join(OUTPUT_DIR, changes_files[-1])
        logging.info(f"Built package, changes file: {changes_file}")
        return changes_file
    else:
        logging.error("No changes file found after build.")
        raise FileNotFoundError("Changes file not found.")
|
|
||||||
|
|
||||||
def load_config(config_path):
    """Load the YAML config, exiting the process if it is unusable.

    Both top-level sections, 'packages' and 'releases', are mandatory.
    """
    try:
        with open(config_path, "r") as fh:
            config = yaml.safe_load(fh)
        if "packages" not in config or "releases" not in config:
            raise ValueError("Config file must contain 'packages' and 'releases' sections.")
    except Exception as e:
        # Any failure here is fatal for the whole run.
        logging.error(f"Error loading config file: {e}")
        sys.exit(1)
    return config
|
|
||||||
|
|
||||||
def clone_or_update_repo(destination, repo_url, repo_branch=None):
    """Make *destination* an up-to-date clone of *repo_url*.

    An existing clone is reused only when its origin URL (and, if given,
    its branch) matches; on any mismatch or update failure the directory
    is removed and recloned.  Raises GitCommandError if the final clone
    or pull fails.
    """
    if os.path.exists(destination):
        logging.info(f"Repository already exists at {destination}, checking branch and remote URL.")
        try:
            repo = Repo(destination)

            current_remote_url = repo.remotes.origin.url
            if current_remote_url != repo_url:
                # Wrong origin: fall through to the clone below.
                logging.info(f"Remote URL differs for {destination}. Removing and recloning.")
                shutil.rmtree(destination)
            else:
                # Drop any local edits before comparing/switching branches.
                repo.git.reset("--hard", "HEAD")
                current_branch = repo.active_branch.name
                if repo_branch and current_branch != repo_branch:
                    logging.info(f"Branch differs for {destination}. Removing and recloning.")
                    shutil.rmtree(destination)
                else:
                    logging.info(f"Repository matches desired remote and branch, pulling updates.")
                    repo.git.checkout(repo_branch or current_branch)
                    try:
                        repo.remotes.origin.pull()
                        repo.submodule_update(recursive=True)
                        logging.info(f"Pulled latest changes for {destination}")
                    except GitCommandError as e:
                        if 'non-fast-forward' in str(e):
                            # History rewritten upstream: reclone below.
                            logging.error(f"Pull failed due to non-fast-forward update: {e}")
                            logging.info(f"Removing repository {destination} and cloning again.")
                            shutil.rmtree(destination)
                        else:
                            logging.error(f"Pull failed for {destination}: {e}")
                            raise
                    else:
                        # Pull succeeded: the repo is up to date, done.
                        return
        except Exception as e:
            # Any other failure (corrupt repo, detached HEAD, ...) also
            # falls back to a fresh clone.
            logging.error(f"Error updating repository {destination}: {e}")
            logging.info(f"Removing repository {destination} and cloning again.")
            shutil.rmtree(destination)
    try:
        logging.info(f"Cloning repository {repo_url} into {destination}")
        repo = Repo.clone_from(repo_url, destination, recurse_submodules=True)
        if repo_branch:
            repo.git.checkout(repo_branch)
            logging.info(f"Checked out {repo_branch} in {destination}")
    except GitCommandError as e:
        logging.error(f"Git clone failed for {repo_url}: {e}")
        raise
|
|
||||||
|
|
||||||
def publish_lintian():
    """Merge this run's Lintian output into the published lintian tree.

    Copies every file from the per-run BASE_LINTIAN_DIR into
    REAL_LINTIAN_DIR (preserving relative paths), then deletes the
    per-run directory.
    """
    if not os.path.exists(BASE_LINTIAN_DIR):
        return
    for root, _dirs, files in os.walk(BASE_LINTIAN_DIR):
        for fname in files:
            src = os.path.join(root, fname)
            # Mirror the path relative to the temp dir under the real dir.
            dest = os.path.join(REAL_LINTIAN_DIR, os.path.relpath(src, BASE_LINTIAN_DIR))
            os.makedirs(os.path.dirname(dest), exist_ok=True)
            shutil.copy2(src, dest)
    # The per-run scratch directory is no longer needed.
    shutil.rmtree(BASE_LINTIAN_DIR)
|
|
||||||
|
|
||||||
def run_source_lintian(name, sources_path):
    """Run lintian on a source package; append any output under BASE_LINTIAN_DIR/<name>."""
    logging.info(f"Running Lintian for {name}")
    with tempfile.NamedTemporaryFile(mode='w+', suffix='.txt') as suppress_file:
        # lintian reads the suppressed tags from a file, one per line.
        suppress_file.write("\n".join(SUPPRESSED_LINTIAN_TAGS))
        suppress_file.flush()
        suppress_path = suppress_file.name

        result = subprocess.run(
            [
                "lintian",
                "-EvIL",
                "+pedantic",
                "--suppress-tags-from-file",
                f"{suppress_path}",
                sources_path,
            ],
            capture_output=True,
            text=True,
        )

        stderr = result.stderr.strip() if result.stderr else None
        stdout = result.stdout.strip() if result.stdout else None

        # Collapse identical streams so the report is not duplicated.
        if stderr == stdout:
            lintian_output = stderr
        else:
            lintian_output = f"{stderr}\n{stdout}".strip()

        if lintian_output:
            pkgdir = os.path.join(BASE_LINTIAN_DIR, name)
            if not os.path.exists(pkgdir):
                os.mkdir(pkgdir)
            # Append: multiple releases of one package share this file.
            with open(os.path.join(pkgdir, "source.txt"), "a") as out:
                out.write(lintian_output)

    logging.info(f"Lintian run for {name} is complete")
|
|
||||||
|
|
||||||
def main():
    """Entry point: for every package in the YAML config, clone the
    upstream and packaging repos, build a source package per release,
    run Lintian, and (unless --skip-dput) upload via dput.

    Work is fanned out on a ThreadPoolExecutor; the nested closures
    share ``args``, ``releases``, ``executor`` and the ``futures`` set
    defined below.
    """
    parser = argparse.ArgumentParser(description="Automate Lubuntu package builds.")
    parser.add_argument("config", help="Path to the YAML configuration file.")
    parser.add_argument("--skip-dput", action="store_true", help="Skip the dput upload step.")
    parser.add_argument("--skip-cleanup", action="store_true", help="Skip removal of build_output.")
    args = parser.parse_args()

    config = load_config(args.config)
    packages = config["packages"]
    releases = config["releases"]

    os.makedirs(BASE_DIR, exist_ok=True)
    logging.info(f"Using base directory: {BASE_DIR}")
    # Relative file operations below (tarball creation) assume CWD == BASE_DIR.
    os.chdir(BASE_DIR)

    with ThreadPoolExecutor(max_workers=5) as executor:
        # Upload stage: dput the .changes files, then queue Lintian runs
        # for the development-release builds.
        def dput_source(name, upload_target, changes_files, devel_changes_files):
            if changes_files:
                hr_changes = ", ".join(changes_files)
                logging.info(f"Uploading {hr_changes} to {upload_target} using dput")
                cmd_upload = ["dput", upload_target] + changes_files
                run_command(cmd_upload, cwd=OUTPUT_DIR)
                logging.info(f"Completed upload of {hr_changes} to {upload_target}")

                # devel_changes_files may contain None entries (see
                # process_package); only real paths get a Lintian run.
                for file in devel_changes_files:
                    if file:
                        futures.add(executor.submit(run_source_lintian, name, file))

        # Stage 1: fetch sources, create the orig tarball, then queue
        # stage 2 (process_package) on the same executor.
        def prepare_package(pkg):
            name = pkg.get("name")
            if not name:
                logging.warning(f"Skipping package due to missing name: {pkg}")
                return
            upstream_url = pkg.get("upstream_url") or f"https://github.com/lxqt/{name}.git"
            upstream_destination = os.path.join(BASE_DIR, f"upstream-{name}")
            clone_or_update_repo(upstream_destination, upstream_url)
            packaging_url = pkg.get("packaging_url") or f"https://git.lubuntu.me/Lubuntu/{name}-packaging.git"
            # NOTE(review): this parses as
            # (pkg.get(...) or f"ubuntu/{releases[0]}") if releases else None,
            # so an explicit packaging_branch is dropped when releases is
            # empty — confirm that is intended.
            packaging_branch = pkg.get("packaging_branch") or f"ubuntu/{releases[0]}" if releases else None
            packaging_destination = os.path.join(BASE_DIR, name)
            clone_or_update_repo(packaging_destination, packaging_url, packaging_branch)
            exclusions = get_exclusions(packaging_destination)
            create_tarball(name, upstream_destination, exclusions)
            run_command(["update-maintainer"], cwd=packaging_destination)
            futures.add(executor.submit(process_package, pkg))

        # Stage 2: per-release changelog update and source build, then
        # either direct Lintian (--skip-dput) or upload + Lintian.
        def process_package(pkg):
            name = pkg.get("name")
            upload_target = pkg.get("upload_target", "ppa:lubuntu-ci/unstable-ci-proposed")

            if not name:
                logging.warning(f"Skipping package due to missing name: {pkg}")
                return []

            package_changes = []

            packaging_destination = os.path.join(BASE_DIR, name)
            changelog_path = os.path.join(packaging_destination, "debian", "changelog")
            version = parse_version(changelog_path)

            for release in releases:
                logging.info(f"Building {name} for {release}")
                try:
                    # Split off the epoch: it belongs in the changelog
                    # version but must not appear in file names.
                    epoch, release_version = None, version
                    if ":" in version:
                        epoch, release_version = version.split(":", 1)

                    release_version = f"{release_version}~{release}"
                    tarball_name = f"{name}_{release_version}.orig.tar.gz"
                    tarball_source = os.path.join(BASE_DIR, f"{name}_MAIN.orig.tar.gz")
                    tarball_dest = os.path.join(BASE_DIR, tarball_name)
                    shutil.copyfile(tarball_source, tarball_dest)
                    if epoch:
                        release_version = f"{epoch}:{release_version}"

                    env = os.environ.copy()
                    env["DEBFULLNAME"] = DEBFULLNAME
                    env["DEBEMAIL"] = DEBEMAIL
                    env["VERSION"] = release_version
                    env["UPLOAD_TARGET"] = upload_target

                    # Update changelog and build package
                    update_changelog(packaging_destination, release, release_version, env)
                    if epoch:
                        # build_package matches file names by VERSION, so
                        # strip the epoch again for the build step.
                        _, env["VERSION"] = release_version.split(":", 1)
                    changes_file = build_package(packaging_destination, env, pkg.get("large", False))
                    if changes_file:
                        package_changes.append((changes_file, env))
                        os.remove(os.path.join(BASE_DIR, tarball_name))

                except Exception as e:
                    # A failure for one release must not stop the others.
                    logging.error(f"Error processing package '{name}' for release '{release}': {e}")

            changes_files = [os.path.basename(cf) for cf, env in package_changes]
            # Only the development release (releases[0], matched by
            # substring) gets a Lintian run; other entries become None.
            devel_changes_files = set(os.path.join(OUTPUT_DIR, file) if releases[0] in file else None for file in changes_files)
            if args.skip_dput:
                for changes_file in devel_changes_files:
                    if changes_file:
                        futures.add(executor.submit(run_source_lintian, name, changes_file))
            else:
                if package_changes:
                    upload_target = package_changes[0][1]["UPLOAD_TARGET"]
                    futures.add(executor.submit(dput_source, name, upload_target, changes_files, devel_changes_files))

            os.remove(os.path.join(BASE_DIR, f"{name}_MAIN.orig.tar.gz"))

        # Seed the pipeline; worker tasks append follow-up futures to
        # this set while the loop below drains it.
        # NOTE(review): concurrent add/remove on a plain set relies on
        # CPython's GIL-level atomicity of set operations — confirm
        # before porting to another runtime.
        futures = set(executor.submit(prepare_package, pkg) for pkg in packages)

        while futures:
            done, not_done = wait(futures, return_when=FIRST_COMPLETED)

            for future in done:
                try:
                    result = future.result()
                except Exception as e:
                    # Task errors are logged (with traceback) but do not
                    # abort the remaining work.
                    logging.exception("Task generated an exception")
                finally:
                    futures.remove(future)

    if not args.skip_cleanup:
        shutil.rmtree(OUTPUT_DIR)
    logging.info("Publishing Lintian output...")
    publish_lintian()
    clean_old_logs(LOG_DIR)

    logging.info("Script completed successfully.")


if __name__ == "__main__":
    main()
|
|
@ -0,0 +1,25 @@
|
|||||||
|
# Build script for the Lubuntu CI source-package builder (C++ port).
cmake_minimum_required(VERSION 3.21)
project(build-packages CXX)

set(CMAKE_CXX_STANDARD 23)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)

# RPATH settings must precede target creation: CMAKE_* variables only
# seed the corresponding target properties at add_library/add_executable
# time, so the original (which set them after the targets) had no effect.
set(CMAKE_INSTALL_RPATH "$ORIGIN/lib")
set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)

find_package(yaml-cpp REQUIRED)
find_package(PkgConfig REQUIRED)
pkg_check_modules(LIBGIT2 REQUIRED IMPORTED_TARGET libgit2)

add_library(common SHARED common.cpp)
target_link_libraries(common yaml-cpp)

add_library(update_maintainer SHARED update_maintainer.cpp)

add_executable(build-packages main.cpp)
target_link_libraries(build-packages common update_maintainer PkgConfig::LIBGIT2 yaml-cpp)

install(TARGETS common LIBRARY DESTINATION lib)
install(TARGETS update_maintainer LIBRARY DESTINATION lib)
install(TARGETS build-packages RUNTIME DESTINATION .)
|
@ -0,0 +1,116 @@
|
|||||||
|
#include "common.h"
|
||||||
|
#include <iostream>
|
||||||
|
#include <fstream>
|
||||||
|
#include <sstream>
|
||||||
|
#include <stdexcept>
|
||||||
|
#include <cstdio>
|
||||||
|
#include <cstdlib>
|
||||||
|
#include <filesystem>
|
||||||
|
#include <chrono>
|
||||||
|
#include <regex>
|
||||||
|
|
||||||
|
namespace fs = std::filesystem;
|
||||||
|
|
||||||
|
// Console logging helpers for this translation unit: INFO to stdout.
static void log_info(const std::string &msg) {
    std::cout << "[INFO] " << msg << "\n";
}

// ERROR goes to stderr.
static void log_error(const std::string &msg) {
    std::cerr << "[ERROR] " << msg << "\n";
}
|
||||||
|
|
||||||
|
// Derive the CI upload version from the first debian/changelog line:
// take the text inside the parentheses, keep any epoch, drop the Debian
// revision and any previous "+gitNNN"/"~suffix" decoration, and append
// "+git<UTC timestamp>".  C++ port of the Python parse_version().
// Throws std::runtime_error if the file is missing or malformed.
std::string parse_version(const fs::path &changelog_path) {
    if (!fs::exists(changelog_path)) {
        throw std::runtime_error("Changelog not found: " + changelog_path.string());
    }
    std::ifstream f(changelog_path);
    if (!f) throw std::runtime_error("Unable to open changelog");
    std::string first_line;
    std::getline(f, first_line);
    f.close();

    // e.g. "pkg (1:2.3-1ubuntu1) jammy; ..." -> "1:2.3-1ubuntu1"
    size_t start = first_line.find('(');
    size_t end = first_line.find(')');
    if (start == std::string::npos || end == std::string::npos) {
        throw std::runtime_error("Invalid changelog format");
    }
    std::string version_match = first_line.substr(start+1, end - (start+1));

    // Split "epoch:upstream" when an epoch is present.
    std::string epoch;
    std::string upstream_version = version_match;
    if (auto pos = version_match.find(':'); pos != std::string::npos) {
        epoch = version_match.substr(0, pos);
        upstream_version = version_match.substr(pos+1);
    }
    // Drop the Debian revision ("-0ubuntu1", ...).
    if (auto pos = upstream_version.find('-'); pos != std::string::npos) {
        upstream_version = upstream_version.substr(0, pos);
    }

    // Strip a previous snapshot suffix so "+git" parts do not stack.
    std::regex git_regex("(\\+git[0-9]+)?(~[a-z]+)?$");
    upstream_version = std::regex_replace(upstream_version, git_regex, "");

    // UTC timestamp, minute resolution.
    // NOTE(review): std::gmtime uses shared static storage and is not
    // thread-safe — confirm this is only called from one thread.
    auto t = std::time(nullptr);
    std::tm tm = *std::gmtime(&t);
    char buf[32];
    std::strftime(buf, sizeof(buf), "%Y%m%d%H%M", &tm);
    std::string current_date = buf;

    std::string version;
    if (!epoch.empty()) {
        version = epoch + ":" + upstream_version + "+git" + current_date;
    } else {
        version = upstream_version + "+git" + current_date;
    }

    return version;
}
|
||||||
|
|
||||||
|
// Run *cmd* through the shell (std::system), optionally from *cwd*.
// Throws std::runtime_error on a non-zero exit status.
// NOTE(review): arguments are joined without any quoting, so paths with
// spaces or shell metacharacters will break or be interpreted by the
// shell — confirm all inputs are trusted before reusing this.
void run_command(const std::vector<std::string> &cmd, const std::optional<fs::path> &cwd, bool show_output) {
    // Join argv into a single shell command line.
    std::string full_cmd;
    for (const auto &c : cmd) {
        full_cmd += c + " ";
    }
    if (cwd) {
        // Prefix a cd so the command runs from the requested directory.
        full_cmd = "cd " + cwd->string() + " && " + full_cmd;
    }
    log_info("Executing: " + full_cmd);
    int ret = std::system(full_cmd.c_str());
    if (ret != 0) {
        log_error("Command failed: " + full_cmd);
        throw std::runtime_error("Command failed");
    }
    if (show_output) {
        // The child's output already went straight to our stdout/stderr;
        // this only records that the command succeeded.
        std::cout << "[INFO] Command succeeded: " + full_cmd << "\n";
    }
}
|
||||||
|
|
||||||
|
// Delete regular files in *log_dir* older than *max_age_seconds*.
// The original removed entries while a fs::directory_iterator was still
// live, which is unspecified behavior; collect the stale paths first,
// then remove them in a second pass.
void clean_old_logs(const fs::path &log_dir, int max_age_seconds) {
    const auto now = std::chrono::system_clock::now();
    std::vector<fs::path> stale;
    for (const auto &entry : fs::directory_iterator(log_dir)) {
        if (!fs::is_regular_file(entry)) {
            continue;
        }
        auto ftime = fs::last_write_time(entry);
        // Convert file_clock time to system_clock for the age comparison.
        auto sctp = decltype(ftime)::clock::to_sys(ftime);
        auto age = std::chrono::duration_cast<std::chrono::seconds>(now - sctp).count();
        if (age > max_age_seconds) {
            stale.push_back(entry.path());
        }
    }
    for (const auto &p : stale) {
        fs::remove(p);
    }
}
|
||||||
|
|
||||||
|
// Create "<name>_MAIN.orig.tar.gz" from *source_dir* by shelling out to
// tar, skipping *exclusions* and the git metadata.  The command runs
// from the parent directory so the archive contains the bare directory
// name, and the tarball lands in the process's current directory... in
// practice via run_command's "cd <parent> &&" prefix.
void create_tarball(const std::string &name, const fs::path &source_dir, const std::vector<std::string> &exclusions) {
    const std::string tar_filename = name + "_MAIN.orig.tar.gz";
    std::cout << "[INFO] Creating tarball: " << tar_filename << "\n";

    std::vector<std::string> cmd{"tar"};
    for (const auto &pattern : exclusions) {
        cmd.push_back("--exclude=" + pattern);
    }
    // The git metadata never belongs in an orig tarball.
    cmd.push_back("--exclude=.git/");
    cmd.push_back("-czf");
    cmd.push_back(tar_filename);
    cmd.push_back(fs::path(source_dir).filename().string());

    run_command(cmd, source_dir.parent_path());
    std::cout << "[INFO] Tarball created and compressed: " << tar_filename << "\n";
}
|
@ -0,0 +1,10 @@
|
|||||||
|
#pragma once
#include <string>
#include <vector>
#include <filesystem>
#include <optional>

// Shared helpers for the Lubuntu CI build tools (implemented in common.cpp).

// Derive the CI upload version ("<upstream>+git<UTC stamp>", keeping any
// epoch) from the first line of a debian/changelog file.
std::string parse_version(const std::filesystem::path &changelog_path);
// Run a shell command, optionally from *cwd*; throws std::runtime_error
// on failure.
void run_command(const std::vector<std::string> &cmd, const std::optional<std::filesystem::path> &cwd = std::nullopt, bool show_output=false);
// Delete regular files in *log_dir* older than *max_age_seconds*.
void clean_old_logs(const std::filesystem::path &log_dir, int max_age_seconds=86400);
// Create "<name>_MAIN.orig.tar.gz" from *source_dir*, skipping *exclusions*.
void create_tarball(const std::string &name, const std::filesystem::path &source_dir, const std::vector<std::string> &exclusions);
|
@ -0,0 +1,644 @@
|
|||||||
|
#include "common.h"
#include "update_maintainer.h"

#include <algorithm>
#include <chrono>
#include <cstdio>
#include <cstdlib>
#include <ctime>
#include <filesystem>
#include <fstream>
#include <future>
#include <iostream>
#include <iterator>
#include <map>
#include <mutex>
#include <optional>
#include <regex>
#include <sstream>
#include <stdexcept>
#include <string>
#include <thread>
#include <unordered_set>
#include <vector>

#include <yaml-cpp/yaml.h>

#include <git2.h>
|
||||||
|
|
||||||
|
namespace fs = std::filesystem;
|
||||||
|
|
||||||
|
// Filesystem layout and uploader identity (mirrors the Python script).
static const std::string BASE_DIR = "/srv/lubuntu-ci/repos";
static const std::string DEBFULLNAME = "Lugito";
static const std::string DEBEMAIL = "info@lubuntu.me";
static const std::string OUTPUT_DIR = BASE_DIR + "/build_output";
// Lintian tags that are known noise for these packages; suppressed when
// lintian is invoked.
static const std::vector<std::string> SUPPRESSED_LINTIAN_TAGS = {
    "orig-tarball-missing-upstream-signature",
    "package-has-long-file-name",
    "adopted-extended-field"
};
static const std::string BASE_OUTPUT_DIR = "/srv/lubuntu-ci/output";
static const std::string LOG_DIR = BASE_OUTPUT_DIR + "/logs/source_builds";
// Per-run scratch dir for Lintian results; set at startup (elsewhere in
// this file) and published into REAL_LINTIAN_DIR at the end of the run.
static std::string BASE_LINTIAN_DIR;
static const std::string REAL_LINTIAN_DIR = BASE_OUTPUT_DIR + "/lintian";
// Defaults; presumably overridden from the command line — the code that
// sets them is elsewhere in this file.
static std::string urgency_level_override = "low";
static int worker_count = 5;

// Run log; log_all() tees every message into this stream when it is open.
static std::ofstream log_file_stream;
|
||||||
|
|
||||||
|
static void log_all(const std::string &msg, bool is_error=false) {
|
||||||
|
if (is_error) {
|
||||||
|
std::cerr << msg;
|
||||||
|
} else {
|
||||||
|
std::cout << msg;
|
||||||
|
}
|
||||||
|
if (log_file_stream.is_open()) {
|
||||||
|
log_file_stream << msg;
|
||||||
|
log_file_stream.flush();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Severity-tagged wrappers around log_all().  Note warnings go to
// stdout (is_error=false), matching the Python logging handler setup.
static void log_info(const std::string &msg) {
    log_all("[INFO] " + msg + "\n");
}

static void log_warning(const std::string &msg) {
    log_all("[WARN] " + msg + "\n", false);
}

static void log_error(const std::string &msg) {
    log_all("[ERROR] " + msg + "\n", true);
}
|
||||||
|
|
||||||
|
// Run a shell command, capturing its stdout via popen and printing the
// captured output only when the command fails.  Throws
// std::runtime_error if the pipe cannot be opened or the exit status is
// non-zero.
// NOTE(review): the command string is unquoted shell input (same caveat
// as run_command), and stderr is not redirected into the pipe, so a
// failing command's stderr goes straight to the console, not the log.
static void run_command_silent_on_success(const std::vector<std::string> &cmd, const std::optional<fs::path> &cwd = std::nullopt) {
    std::string full_cmd;
    for (auto &c: cmd) full_cmd += c + " ";
    std::string exec_cmd = full_cmd;
    if(cwd) exec_cmd = "cd " + cwd->string() + " && " + exec_cmd;

    log_info("Executing: " + full_cmd);

    FILE* pipe = popen(exec_cmd.c_str(), "r");
    if(!pipe) {
        log_error("Failed to run: " + full_cmd);
        throw std::runtime_error("Command failed");
    }
    // Drain the child's stdout so it can be shown on failure.
    std::stringstream ss;
    {
        char buffer[256];
        while(fgets(buffer,256,pipe)) {
            ss << buffer;
        }
    }
    int ret = pclose(pipe);
    if (ret != 0) {
        log_error("Command failed: " + full_cmd);
        log_error("Output:\n" + ss.str());
        throw std::runtime_error("Command failed");
    }
}
|
||||||
|
|
||||||
|
// Initialize libgit2 once
|
||||||
|
// Initialize libgit2 exactly once for the whole process (thread-safe
// via std::call_once; requires <mutex>).
static void git_init_once() {
    static std::once_flag flag;
    std::call_once(flag, [](){
        git_libgit2_init();
    });
}
|
||||||
|
|
||||||
|
// Ensure *repo_path* holds a checkout of *repo_url* (optionally of
// *branch*).  An existing clone is reused and fetched when its origin
// URL matches; otherwise it is removed and recloned.  Mirrors the
// Python clone_or_update_repo().  Throws std::runtime_error when the
// clone or the post-clone branch checkout fails.
static void git_fetch_and_checkout(const fs::path &repo_path, const std::string &repo_url, const std::optional<std::string> &branch) {
    git_init_once();
    git_repository* repo = nullptr;
    bool need_clone = false;
    if(fs::exists(repo_path)) {
        int err = git_repository_open(&repo, repo_path.string().c_str());
        if(err<0) {
            log_warning("Cannot open repo at " + repo_path.string() + ", recloning");
            fs::remove_all(repo_path);
            need_clone = true;
        }
    } else {
        need_clone = true;
    }

    if(!need_clone && repo!=nullptr) {
        git_remote* remote = nullptr;
        int err = git_remote_lookup(&remote, repo, "origin");
        if(err<0) {
            log_warning("No origin remote? Recloning");
            git_repository_free(repo);
            fs::remove_all(repo_path);
            need_clone = true;
        } else {
            const char* url = git_remote_url(remote);
            if(!url || repo_url!=url) {
                // Different origin: start over with a fresh clone.
                log_info("Remote URL differs. Removing and recloning.");
                git_remote_free(remote);
                git_repository_free(repo);
                fs::remove_all(repo_path);
                need_clone = true;
            } else {
                // Same origin: fetch, then (optionally) force-check-out
                // the remote-tracking branch, detached.
                git_remote_free(remote);
                git_remote* origin = nullptr;
                git_remote_lookup(&origin, repo, "origin");
                git_fetch_options fetch_opts = GIT_FETCH_OPTIONS_INIT;
                // NOTE(review): a fetch failure is silently ignored; we
                // fall back to whatever was fetched previously.
                git_remote_fetch(origin, nullptr, &fetch_opts, nullptr);
                git_remote_free(origin);

                if(branch) {
                    git_reference* ref = nullptr;
                    std::string fullbranch = "refs/remotes/origin/" + *branch;
                    if(git_reference_lookup(&ref, repo, fullbranch.c_str())==0) {
                        git_object* target = nullptr;
                        git_reference_peel(&target, ref, GIT_OBJECT_COMMIT);
                        git_checkout_options co_opts = GIT_CHECKOUT_OPTIONS_INIT;
                        co_opts.checkout_strategy = GIT_CHECKOUT_FORCE;
                        git_checkout_tree(repo, target, &co_opts);
                        git_reference_free(ref);
                        git_repository_set_head_detached(repo, git_object_id(target));
                        git_object_free(target);
                    } else {
                        log_error("Branch " + *branch + " not found, recloning");
                        git_repository_free(repo);
                        // Fix: the original freed *repo* here and then
                        // freed it again unconditionally below — a
                        // double free.  Null it so the guard below skips it.
                        repo = nullptr;
                        fs::remove_all(repo_path);
                        need_clone = true;
                    }
                }
                if(repo) {
                    git_repository_free(repo);
                }
            }
        }
    }

    if(need_clone) {
        git_clone_options clone_opts = GIT_CLONE_OPTIONS_INIT;
        git_checkout_options co_opts = GIT_CHECKOUT_OPTIONS_INIT;
        co_opts.checkout_strategy = GIT_CHECKOUT_FORCE;
        clone_opts.checkout_opts = co_opts;
        git_repository* newrepo = nullptr;
        int err = git_clone(&newrepo, repo_url.c_str(), repo_path.string().c_str(), &clone_opts);
        if(err<0) {
            const git_error* e = git_error_last();
            log_error(std::string("Git clone failed: ")+(e?e->message:"unknown"));
            throw std::runtime_error("Git clone failed");
        }
        if(branch) {
            git_reference* ref = nullptr;
            std::string fullbranch = "refs/remotes/origin/" + *branch;
            if(git_reference_lookup(&ref, newrepo, fullbranch.c_str())==0) {
                git_object* target = nullptr;
                git_reference_peel(&target, ref, GIT_OBJECT_COMMIT);
                git_checkout_options branch_co_opts = GIT_CHECKOUT_OPTIONS_INIT;
                branch_co_opts.checkout_strategy = GIT_CHECKOUT_FORCE;
                git_checkout_tree(newrepo, target, &branch_co_opts);
                git_reference_free(ref);
                git_repository_set_head_detached(newrepo, git_object_id(target));
                git_object_free(target);
            } else {
                log_error("Git checkout of branch " + *branch + " failed after clone.");
                git_repository_free(newrepo);
                throw std::runtime_error("Branch checkout failed");
            }
        }
        git_repository_free(newrepo);
    }
}
|
||||||
|
|
||||||
|
// Load the YAML configuration at *config_path* and verify that the two
// mandatory top-level sections ("packages" and "releases") are present.
// Throws std::runtime_error when either section is missing.
static YAML::Node load_config(const fs::path &config_path) {
    YAML::Node config = YAML::LoadFile(config_path.string());
    const bool has_required_sections = config["packages"] && config["releases"];
    if (!has_required_sections) {
        throw std::runtime_error("Config file must contain 'packages' and 'releases' sections.");
    }
    return config;
}
|
||||||
|
|
||||||
|
static void publish_lintian() {
|
||||||
|
if(!BASE_LINTIAN_DIR.empty() && fs::exists(BASE_LINTIAN_DIR)) {
|
||||||
|
for (auto &p : fs::recursive_directory_iterator(BASE_LINTIAN_DIR)) {
|
||||||
|
if (fs::is_regular_file(p)) {
|
||||||
|
fs::path rel = fs::relative(p.path(), BASE_LINTIAN_DIR);
|
||||||
|
fs::path dest = fs::path(REAL_LINTIAN_DIR) / rel;
|
||||||
|
fs::create_directories(dest.parent_path());
|
||||||
|
std::error_code ec;
|
||||||
|
fs::copy_file(p.path(), dest, fs::copy_options::overwrite_existing, ec);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fs::remove_all(BASE_LINTIAN_DIR);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Define get_exclusions here before usage
|
||||||
|
// Parse the Files-Excluded field from <packaging>/debian/copyright and
// return each listed pattern. Returns an empty vector when the file or the
// field is absent.
//
// BUGFIX/generalization: per the machine-readable debian/copyright format
// (copyright-format 1.0), Files-Excluded is a whitespace-separated list that
// may be folded over several continuation lines (lines beginning with a
// space or tab). The previous version only collected tokens on the field's
// first line; continuation lines are now collected as well. Single-line
// fields behave exactly as before.
static std::vector<std::string> get_exclusions(const fs::path &packaging) {
    std::vector<std::string> exclusions;
    fs::path cpr = packaging / "debian" / "copyright";
    if(!fs::exists(cpr)) return exclusions;

    std::ifstream f(cpr);
    if(!f) return exclusions;
    std::string line;
    bool in_field = false;  // true once "Files-Excluded:" has been seen
    while(std::getline(f,line)) {
        if (!in_field) {
            if (line.find("Files-Excluded:") != std::string::npos) {
                size_t pos = line.find(':');
                // Tokens on the same line as the field name.
                std::istringstream iss(line.substr(pos+1));
                std::string token;
                while(iss>>token) {
                    exclusions.push_back(token);
                }
                in_field = true;
            }
        } else {
            // A folded field continues on lines starting with whitespace;
            // anything else ends the field.
            if (line.empty() || (line[0] != ' ' && line[0] != '\t')) break;
            std::istringstream iss(line);
            std::string token;
            while(iss>>token) {
                exclusions.push_back(token);
            }
        }
    }
    return exclusions;
}
|
||||||
|
|
||||||
|
// Entry point for the CI source-package builder.
//
// Usage: builder [--skip-dput] [--skip-cleanup] [--urgency-level=LVL]
//                [--workers=N] CONFIG.yaml
//
// For each package in the YAML config this clones/refreshes the upstream and
// packaging git repositories, regenerates the orig tarball, builds a source
// package per configured release, optionally uploads with dput and runs
// Lintian, then publishes the Lintian results and prunes old logs.
int main(int argc, char** argv) {
    // Ensure the output directories exist before any logging or builds.
    fs::create_directories(LOG_DIR);
    fs::create_directories(OUTPUT_DIR);

    // UTC timestamp used for both the log-file name and the lintian tmp dir.
    auto now = std::time(nullptr);
    std::tm tm = *std::gmtime(&now);
    char buf[32];
    std::strftime(buf, sizeof(buf), "%Y%m%dT%H%M%S", &tm);
    std::string current_time = buf;

    // Lintian results are staged in a per-run temp dir and published at the
    // end. NOTE(review): "uuid_part" is actually the date portion of the
    // timestamp, not a UUID — confirm naming intent.
    std::string uuid_part = current_time.substr(0,8);
    BASE_LINTIAN_DIR = BASE_OUTPUT_DIR + "/.lintian.tmp." + uuid_part;
    fs::create_directories(BASE_LINTIAN_DIR);

    // Open the per-run log file; the log_* helpers presumably write to
    // log_file_stream (defined elsewhere in this file).
    fs::path log_file = fs::path(LOG_DIR) / (current_time + ".log");
    log_file_stream.open(log_file);
    if(!log_file_stream.is_open()) {
        std::cerr<<"[ERROR] Unable to open log file.\n";
        return 1;
    }

    // ---- Command-line parsing -------------------------------------------
    bool skip_dput = false;
    bool skip_cleanup = false;
    std::string config_path;
    for(int i=1; i<argc; i++) {
        std::string arg=argv[i];
        if(arg=="--skip-dput") {
            skip_dput=true;
        } else if(arg=="--skip-cleanup") {
            skip_cleanup=true;
        } else if(arg.rfind("--urgency-level=",0)==0) {
            urgency_level_override = arg.substr(std::string("--urgency-level=").size());
        } else if(arg.rfind("--workers=",0)==0) {
            // NOTE(review): worker_count is clamped but not visibly consumed
            // below (std::async fan-out is unbounded here) — confirm its user.
            worker_count = std::stoi(arg.substr(std::string("--workers=").size()));
            if(worker_count<1) worker_count=1;
        } else if(config_path.empty()) {
            // First non-flag argument is the config file path.
            config_path = arg;
        }
    }

    if(config_path.empty()) {
        log_error("No config file specified.");
        return 1;
    }

    // Debian tooling (dch, debuild) reads these from the environment.
    setenv("DEBFULLNAME", DEBFULLNAME.c_str(),1);
    setenv("DEBEMAIL", DEBEMAIL.c_str(),1);

    YAML::Node config;
    try {
        config = load_config(config_path);
    } catch (std::exception &e) {
        log_error(std::string("Error loading config file: ")+e.what());
        return 1;
    }

    auto packages = config["packages"];
    auto releases = config["releases"];

    fs::current_path(BASE_DIR);

    // Packaging branch: explicit per-package override, otherwise
    // "ubuntu/<first release>", otherwise none.
    auto get_packaging_branch = [&](const YAML::Node &pkg)->std::optional<std::string>{
        if(pkg["packaging_branch"] && pkg["packaging_branch"].IsScalar()) {
            return pkg["packaging_branch"].as<std::string>();
        } else if (releases.size()>0) {
            return "ubuntu/" + releases[0].as<std::string>();
        }
        return std::nullopt;
    };

    // Derive a fresh CI version from debian/changelog: strip any previous
    // "+gitNNN"/"~suffix" from the upstream part, append "+git<UTC now>",
    // and keep the epoch (if any).
    auto parse_version = [&](const fs::path &changelog_path){
        std::ifstream f(changelog_path);
        if(!f) throw std::runtime_error("Changelog not found: " + changelog_path.string());
        std::string first_line;
        std::getline(f, first_line);
        // First changelog line: "pkg (version) dist; urgency=...".
        size_t start = first_line.find('(');
        size_t end = first_line.find(')');
        if(start==std::string::npos||end==std::string::npos) throw std::runtime_error("Invalid changelog format");
        std::string version_match = first_line.substr(start+1,end-(start+1));
        std::string epoch;
        std::string upstream_version = version_match;
        // Split off the epoch ("1:2.3-4" -> epoch "1").
        if(auto pos=version_match.find(':'); pos!=std::string::npos) {
            epoch = version_match.substr(0,pos);
            upstream_version = version_match.substr(pos+1);
        }
        // Drop the Debian revision ("-0ubuntu1...").
        if(auto pos=upstream_version.find('-'); pos!=std::string::npos) {
            upstream_version=upstream_version.substr(0,pos);
        }
        // Remove a previous CI suffix such as "+git202401011200" or "~jammy".
        std::regex git_regex("(\\+git[0-9]+)?(~[a-z]+)?$");
        upstream_version = std::regex_replace(upstream_version, git_regex, "");
        auto t = std::time(nullptr);
        std::tm tm = *std::gmtime(&t);
        char buf[32];
        std::strftime(buf, sizeof(buf), "%Y%m%d%H%M", &tm);
        std::string current_date = buf;
        std::string version;
        if(!epoch.empty()) {
            version = epoch + ":" + upstream_version + "+git" + current_date;
        } else {
            version = upstream_version + "+git" + current_date;
        }
        return version;
    };

    // Run lintian on a built source package. Any output (pass or fail) is
    // appended to <BASE_LINTIAN_DIR>/<name>/source.txt for later publication.
    auto run_source_lintian = [&](const std::string &name, const fs::path &source_path){
        log_info("Running Lintian for " + name);
        // Suppressed-tags list goes to a temp file for --suppress-tags-from-file.
        fs::path temp_file = fs::temp_directory_path() / ("lintian_suppress_" + name + ".txt");
        {
            std::ofstream of(temp_file);
            for (auto &tag: SUPPRESSED_LINTIAN_TAGS) {
                of<<tag<<"\n";
            }
        }
        std::string cmd = "lintian -EvIL +pedantic --suppress-tags-from-file " + temp_file.string() + " " + source_path.string() + " 2>&1";
        FILE* pipe = popen(cmd.c_str(),"r");
        std::stringstream ss;
        if(pipe) {
            char buffer[256];
            while(fgets(buffer,256,pipe)) {
                ss<<buffer;
            }
            int ret = pclose(pipe);
            fs::remove(temp_file);
            // Non-zero exit additionally goes to the error log; in both
            // branches any captured output is recorded for publication.
            if(ret!=0) {
                log_error("Lintian failed:\n"+ss.str());
                if(!ss.str().empty()) {
                    fs::path pkgdir = fs::path(BASE_LINTIAN_DIR)/name;
                    fs::create_directories(pkgdir);
                    std::ofstream out(pkgdir/"source.txt",std::ios::app);
                    out<<ss.str()<<"\n";
                }
            } else {
                if(!ss.str().empty()) {
                    fs::path pkgdir = fs::path(BASE_LINTIAN_DIR)/name;
                    fs::create_directories(pkgdir);
                    std::ofstream out(pkgdir/"source.txt",std::ios::app);
                    out<<ss.str()<<"\n";
                }
            }
        } else {
            fs::remove(temp_file);
            log_error("Failed to run lintian");
        }
        log_info("Lintian run for " + name + " is complete");
    };

    // Upload the .changes files with dput, then lintian-check the
    // development-series changes files. Upload errors are swallowed here
    // because run_command_silent_on_success already logs them.
    auto dput_source = [&](const std::string &name, const std::string &upload_target, const std::vector<std::string> &changes_files, const std::vector<std::string> &devel_changes_files){
        if(!changes_files.empty()) {
            std::string hr_changes;
            for(auto &c: changes_files) hr_changes += c+" ";
            log_info("Uploading "+hr_changes+"to "+upload_target+" using dput");
            std::vector<std::string> cmd = {"dput",upload_target};
            for(auto &c: changes_files) cmd.push_back(c);
            try {
                run_command_silent_on_success(cmd, OUTPUT_DIR);
                log_info("Completed upload of changes to "+upload_target);
                for(auto &file: devel_changes_files) {
                    if(!file.empty()) {
                        run_source_lintian(name, file);
                    }
                }
            } catch (...) {
                // error logged already
            }
        }
    };

    // Reset debian/changelog to HEAD, then write a fresh CI entry via dch.
    auto update_changelog = [&](const fs::path &packaging_dir, const std::string &release, const std::string &version_with_epoch){
        std::string name = packaging_dir.filename().string();
        log_info("Updating changelog for " + name + " to version " + version_with_epoch + "-0ubuntu1~ppa1");
        run_command_silent_on_success({"git","checkout","debian/changelog"}, packaging_dir);
        std::vector<std::string> cmd={
            "dch","--distribution",release,"--package",name,"--newversion",version_with_epoch+"-0ubuntu1~ppa1","--urgency",urgency_level_override,"CI upload."
        };
        run_command_silent_on_success(cmd, packaging_dir);
    };

    // Build a source package with debuild in a scratch directory, copy the
    // artifacts into OUTPUT_DIR, and return the _source.changes path.
    // "large" packages stage under OUTPUT_DIR instead of the system tmpdir.
    auto build_package = [&](const fs::path &packaging_dir, const std::map<std::string,std::string> &env_vars, bool large) {
        std::string name = packaging_dir.filename().string();
        log_info("Building source package for " + name);
        fs::path temp_dir;
        if(large) {
            temp_dir = fs::path(OUTPUT_DIR)/(".tmp_"+name+"_"+env_vars.at("VERSION"));
            fs::create_directories(temp_dir);
            log_warning(name+" is quite large and will not fit in /tmp, building at "+temp_dir.string());
        } else {
            temp_dir = fs::temp_directory_path()/("tmp_build_"+name+"_"+env_vars.at("VERSION"));
            fs::create_directories(temp_dir);
        }

        // Only debian/ is copied next to the orig tarball; debuild
        // reconstructs the source tree from the tarball itself.
        std::error_code ec;
        fs::path temp_packaging_dir = temp_dir/name;
        fs::create_directories(temp_packaging_dir,ec);
        fs::copy(packaging_dir/"debian", temp_packaging_dir/"debian", fs::copy_options::recursive, ec);

        std::string tarball_name = name+"_"+env_vars.at("VERSION")+".orig.tar.gz";
        fs::path tarball_source = fs::path(BASE_DIR)/(name+"_MAIN.orig.tar.gz");
        fs::path tarball_dest = temp_dir/tarball_name;
        fs::copy_file(tarball_source,tarball_dest,fs::copy_options::overwrite_existing,ec);

        // Export the build environment (DEBFULLNAME, VERSION, ...).
        for (auto &e: env_vars) {
            setenv(e.first.c_str(), e.second.c_str(),1);
        }

        // -S source-only, -d skip build-dep check, -sa include orig tarball,
        // -nc no pre-clean; lintian runs separately afterwards.
        std::vector<std::string> cmd_build={"debuild","--no-lintian","-S","-d","-sa","-nc"};
        try {
            run_command_silent_on_success(cmd_build,temp_packaging_dir);
            run_command_silent_on_success({"git","checkout","debian/changelog"}, packaging_dir);
        } catch(...) {
            fs::remove_all(temp_dir,ec);
            throw;
        }

        // Copy every artifact named <name>_<version>* into OUTPUT_DIR.
        std::string pattern = name+"_"+env_vars.at("VERSION");
        for(auto &entry: fs::directory_iterator(temp_dir)) {
            std::string fname=entry.path().filename().string();
            if(fname.rfind(pattern,0)==0) {
                fs::path dest=fs::path(OUTPUT_DIR)/fname;
                fs::copy_file(entry.path(),dest,fs::copy_options::overwrite_existing,ec);
                log_info("Copied "+fname+" to "+OUTPUT_DIR);
            }
        }

        // Locate the generated _source.changes in OUTPUT_DIR.
        std::string changes_file;
        for(auto &entry : fs::directory_iterator(OUTPUT_DIR)) {
            std::string fname=entry.path().filename().string();
            if(fname.rfind(name+"_"+env_vars.at("VERSION"),0)==0 && fname.ends_with("_source.changes")) {
                changes_file=entry.path().string();
            }
        }

        fs::remove_all(temp_dir,ec);

        if(changes_file.empty()) {
            log_error("No changes file found after build.");
            throw std::runtime_error("Changes file not found");
        }
        log_info("Built package, changes file: "+changes_file);
        return changes_file;
    };

    // Build one source package per configured release, then upload (or,
    // with --skip-dput, only lintian-check) the results.
    auto process_package = [&](const YAML::Node &pkg){
        std::string name = pkg["name"] ? pkg["name"].as<std::string>() : "";
        std::string upload_target = pkg["upload_target"] ? pkg["upload_target"].as<std::string>() : "ppa:lubuntu-ci/unstable-ci-proposed";
        if(name.empty()) {
            log_warning("Skipping package due to missing name.");
            return;
        }
        fs::path packaging_destination = fs::path(BASE_DIR)/name;
        fs::path changelog_path = packaging_destination/"debian"/"changelog";
        std::string version = parse_version(changelog_path);

        bool large = pkg["large"] ? pkg["large"].as<bool>() : false;
        std::vector<std::pair<std::string,std::map<std::string,std::string>>> built_changes;

        // Separate the epoch; file names never carry it.
        std::string epoch;
        std::string version_no_epoch=version;
        if(auto pos=version.find(':');pos!=std::string::npos) {
            epoch=version.substr(0,pos);
            version_no_epoch=version.substr(pos+1);
        }

        for (auto rel : releases) {
            std::string release = rel.as<std::string>();
            log_info("Building "+name+" for "+release);

            // Per-release version: "<upstream>+gitNNN~<release>".
            std::string release_version_no_epoch = version_no_epoch + "~" + release;
            fs::path tarball_source = fs::path(BASE_DIR)/(name+"_MAIN.orig.tar.gz");
            fs::path tarball_dest = fs::path(BASE_DIR)/(name+"_"+release_version_no_epoch+".orig.tar.gz");
            fs::copy_file(tarball_source,tarball_dest,fs::copy_options::overwrite_existing);

            std::string version_for_dch = epoch.empty()? release_version_no_epoch : (epoch+":"+release_version_no_epoch);

            std::map<std::string,std::string> env_map;
            env_map["DEBFULLNAME"]=DEBFULLNAME;
            env_map["DEBEMAIL"]=DEBEMAIL;
            env_map["VERSION"]=release_version_no_epoch;
            env_map["UPLOAD_TARGET"]=upload_target;

            try {
                update_changelog(packaging_destination, release, version_for_dch);
                std::string changes_file = build_package(packaging_destination, env_map, large);
                if(!changes_file.empty()) {
                    built_changes.push_back({changes_file,env_map});
                }
            } catch(std::exception &e) {
                // A failure for one release must not abort the others.
                log_error("Error processing package '"+name+"' for release '"+release+"': "+std::string(e.what()));
            }

            fs::remove(tarball_dest);
        }

        std::vector<std::string> changes_files;
        for(auto &bc: built_changes) {
            fs::path cf(bc.first);
            changes_files.push_back(cf.filename().string());
        }

        // Only the changes file for the first (development) release gets a
        // lintian run; other releases contribute an empty placeholder.
        std::unordered_set<std::string> devel_changes_files;
        if(releases.size()>0) {
            std::string first_release = releases[0].as<std::string>();
            for (auto &f: changes_files) {
                if(f.find("~"+first_release)!=std::string::npos) {
                    devel_changes_files.insert((fs::path(OUTPUT_DIR)/f).string());
                } else {
                    devel_changes_files.insert(std::string());
                }
            }
        }

        if(built_changes.empty()) return;

        // Re-export in case a concurrent task cleared the environment.
        if(getenv("DEBFULLNAME")==nullptr) setenv("DEBFULLNAME",DEBFULLNAME.c_str(),1);
        if(getenv("DEBEMAIL")==nullptr) setenv("DEBEMAIL",DEBEMAIL.c_str(),1);

        if(skip_dput) {
            for (auto &file : devel_changes_files) {
                if(!file.empty()) {
                    run_source_lintian(name,file);
                }
            }
        } else {
            std::string real_upload_target = built_changes[0].second.at("UPLOAD_TARGET");
            dput_source(name, real_upload_target, changes_files, std::vector<std::string>(devel_changes_files.begin(), devel_changes_files.end()));
        }

        // The shared MAIN tarball is per-run; remove it once processed.
        fs::remove(fs::path(BASE_DIR)/(name+"_MAIN.orig.tar.gz"));
    };

    // Clone/refresh the upstream and packaging repos, normalize the
    // maintainer field, regenerate the orig tarball, then build + upload.
    auto prepare_package = [&](const YAML::Node &pkg){
        std::string name = pkg["name"] ? pkg["name"].as<std::string>() : "";
        if(name.empty()) {
            log_warning("Skipping package due to missing name.");
            return;
        }

        std::string upstream_url = pkg["upstream_url"] ? pkg["upstream_url"].as<std::string>() : ("https://github.com/lxqt/"+name+".git");
        fs::path upstream_destination = fs::path(BASE_DIR)/("upstream-"+name);
        std::optional<std::string> packaging_branch = get_packaging_branch(pkg);
        std::string packaging_url = pkg["packaging_url"] ? pkg["packaging_url"].as<std::string>() : ("https://git.lubuntu.me/Lubuntu/"+name+"-packaging.git");
        fs::path packaging_destination = fs::path(BASE_DIR)/name;

        try {
            git_fetch_and_checkout(upstream_destination, upstream_url, std::nullopt);
        } catch(...) {
            log_error("Failed to prepare upstream repo for "+name);
            return;
        }

        try {
            git_fetch_and_checkout(packaging_destination, packaging_url, packaging_branch);
        } catch(...) {
            log_error("Failed to prepare packaging repo for "+name);
            return;
        }

        try {
            update_maintainer((packaging_destination/"debian").string(), false);
        } catch(std::exception &e) {
            // Non-fatal: the build proceeds with the original maintainer.
            log_warning("update_maintainer: "+std::string(e.what())+" for "+name);
        }

        auto exclusions = get_exclusions(packaging_destination);
        create_tarball(name, upstream_destination, exclusions);

        process_package(pkg);
    };

    // One async task per package; all packages run concurrently.
    std::vector<std::future<void>> futures;
    for(auto pkg: packages) {
        futures.push_back(std::async(std::launch::async, prepare_package, pkg));
    }

    for(auto &fut: futures) {
        try {
            fut.get();
        } catch(std::exception &e) {
            log_error(std::string("Task generated an exception: ")+e.what());
        }
    }

    // Unless --skip-cleanup was given, remove the staged build artifacts.
    if(!skip_cleanup) {
        fs::remove_all(OUTPUT_DIR);
    }
    log_info("Publishing Lintian output...");
    publish_lintian();
    clean_old_logs(fs::path(LOG_DIR));

    log_info("Script completed successfully.");
    return 0;
}
|
@ -0,0 +1,204 @@
|
|||||||
|
#include "update_maintainer.h"
|
||||||
|
#include <filesystem>
|
||||||
|
#include <fstream>
|
||||||
|
#include <iostream>
|
||||||
|
#include <regex>
|
||||||
|
#include <stdexcept>
|
||||||
|
#include <string>
|
||||||
|
#include <optional>
|
||||||
|
|
||||||
|
namespace fs = std::filesystem;
|
||||||
|
|
||||||
|
// Historical Ubuntu maintainer list addresses. A package whose Maintainer
// matches one of these (case-insensitively) is reset directly to
// UBUNTU_MAINTAINER without preserving an Original-Maintainer field.
static const char* PREVIOUS_UBUNTU_MAINTAINERS[] = {
    "ubuntu core developers <ubuntu-devel@lists.ubuntu.com>",
    "ubuntu core developers <ubuntu-devel-discuss@lists.ubuntu.com>",
    "ubuntu motu developers <ubuntu-motu@lists.ubuntu.com>"
};
// Address written into the Maintainer field for Ubuntu-targeted packages.
static const char* UBUNTU_MAINTAINER = "Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>";
|
||||||
|
|
||||||
|
// Error type for all failures while updating the maintainer field; carries
// a human-readable message via std::runtime_error.
class MaintainerUpdateException : public std::runtime_error {
public:
    using std::runtime_error::runtime_error;
};
|
||||||
|
|
||||||
|
// Locate the packaging control file inside *debian_dir*.
// Prefers "control.in" (the template) over "control"; returns std::nullopt
// when neither file exists.
static std::optional<fs::path> find_control_file(const fs::path &debian_dir) {
    for (const char* candidate : {"control.in", "control"}) {
        fs::path candidate_path = debian_dir / candidate;
        if (fs::exists(candidate_path)) {
            return candidate_path;
        }
    }
    return std::nullopt;
}
|
||||||
|
|
||||||
|
static fs::path find_changelog_file(const fs::path &debian_dir) {
|
||||||
|
fs::path changelog = debian_dir / "changelog";
|
||||||
|
if (!fs::exists(changelog)) {
|
||||||
|
throw MaintainerUpdateException("No changelog file found");
|
||||||
|
}
|
||||||
|
return changelog;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Report whether debian/rules manages the XSBC-Original-Maintainer field
// itself (in which case the control file must be left untouched).
// Returns false when the rules file is missing or unreadable.
static bool xsbc_managed_by_rules(const fs::path &debian_dir) {
    fs::path rules = debian_dir / "rules";
    if (!fs::exists(rules)) return false;
    std::ifstream rules_stream(rules);
    for (std::string line; std::getline(rules_stream, line); ) {
        if (line.find("XSBC-Original-") != std::string::npos) {
            return true;
        }
    }
    return false;
}
|
||||||
|
|
||||||
|
static std::string get_distribution(const fs::path &changelog_file) {
|
||||||
|
// parse first line of changelog: "package (version) dist; urgency=..."
|
||||||
|
// dist is the token after ')'
|
||||||
|
std::ifstream f(changelog_file);
|
||||||
|
if(!f) throw MaintainerUpdateException("Unable to open changelog.");
|
||||||
|
std::string first_line;
|
||||||
|
std::getline(f, first_line);
|
||||||
|
// Format: "pkg (ver) dist; urgency=..."
|
||||||
|
// find ') '
|
||||||
|
size_t pos = first_line.find(')');
|
||||||
|
if(pos == std::string::npos) throw MaintainerUpdateException("Invalid changelog format");
|
||||||
|
// after ') ', next token is distribution until space
|
||||||
|
// skip ')'
|
||||||
|
pos++;
|
||||||
|
while(pos < first_line.size() && std::isspace((unsigned char)first_line[pos])) pos++;
|
||||||
|
// now read until space or ';'
|
||||||
|
size_t start = pos;
|
||||||
|
while(pos < first_line.size() && !std::isspace((unsigned char)first_line[pos]) && first_line[pos] != ';') pos++;
|
||||||
|
std::string dist = first_line.substr(start, pos - start);
|
||||||
|
// remove -proposed-updates etc
|
||||||
|
size_t dashpos = dist.find('-');
|
||||||
|
if (dashpos != std::string::npos) {
|
||||||
|
dist = dist.substr(0, dashpos);
|
||||||
|
}
|
||||||
|
return dist;
|
||||||
|
}
|
||||||
|
|
||||||
|
static std::string read_file(const fs::path &p) {
|
||||||
|
std::ifstream f(p);
|
||||||
|
if(!f) throw MaintainerUpdateException("Cannot read file: " + p.string());
|
||||||
|
std::stringstream ss;
|
||||||
|
ss << f.rdbuf();
|
||||||
|
return ss.str();
|
||||||
|
}
|
||||||
|
|
||||||
|
static void write_file(const fs::path &p, const std::string &content) {
|
||||||
|
std::ofstream f(p);
|
||||||
|
if(!f) throw MaintainerUpdateException("Cannot write file: " + p.string());
|
||||||
|
f << content;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Search *content* for the first line matching *field_regex* — a multiline
// pattern with exactly one capture group — and return the captured value,
// or std::nullopt when no line matches.
static std::optional<std::string> get_field(const std::string &content, const std::string &field_regex) {
    const std::regex pattern(field_regex, std::regex_constants::multiline);
    std::smatch match;
    if (!std::regex_search(content, match, pattern)) {
        return std::nullopt;
    }
    return match[1].str();
}
|
||||||
|
|
||||||
|
// Replace every line of *content* matching the multiline pattern
// *field_regex* with *new_line*, returning the rewritten text.
static std::string set_field(const std::string &content, const std::string &field_regex, const std::string &new_line) {
    const std::regex pattern(field_regex, std::regex_constants::multiline);
    return std::regex_replace(content, pattern, new_line);
}
|
||||||
|
|
||||||
|
static void update_maintainer_file(const fs::path &control_file, const std::string &distribution, bool verbose) {
|
||||||
|
std::string c = read_file(control_file);
|
||||||
|
|
||||||
|
auto original_maintainer = get_field(c, "^Maintainer:\\s?(.*)$");
|
||||||
|
if(!original_maintainer) {
|
||||||
|
throw MaintainerUpdateException("No Maintainer field found");
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string om = *original_maintainer;
|
||||||
|
std::string om_lower = om;
|
||||||
|
for (auto &ch : om_lower) ch = (char)std::tolower((unsigned char)ch);
|
||||||
|
|
||||||
|
// Check previous ubuntu maintainers
|
||||||
|
for (auto &pm : PREVIOUS_UBUNTU_MAINTAINERS) {
|
||||||
|
std::string pm_lower = pm;
|
||||||
|
for (auto &ch: pm_lower) ch=(char)std::tolower((unsigned char)ch);
|
||||||
|
if(pm_lower == om_lower) {
|
||||||
|
if(verbose) {
|
||||||
|
std::cout<<"The old maintainer was: "<<om<<"\n";
|
||||||
|
std::cout<<"Resetting as: "<<UBUNTU_MAINTAINER<<"\n";
|
||||||
|
}
|
||||||
|
// just set maintainer
|
||||||
|
std::regex maint_re("^Maintainer:\\s?.*$", std::regex_constants::multiline);
|
||||||
|
c = std::regex_replace(c, maint_re, "Maintainer: " + std::string(UBUNTU_MAINTAINER));
|
||||||
|
write_file(control_file, c);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If ends with ubuntu.com, do nothing
|
||||||
|
{
|
||||||
|
std::string lower_om = om_lower;
|
||||||
|
if (lower_om.rfind("ubuntu.com>", lower_om.size()-11) != std::string::npos) {
|
||||||
|
if(verbose) {
|
||||||
|
std::cout<<"The Maintainer email is ubuntu.com address. Doing nothing.\n";
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Debian distributions: stable, testing, unstable, experimental
|
||||||
|
if(distribution=="stable"||distribution=="testing"||distribution=="unstable"||distribution=="experimental") {
|
||||||
|
if(verbose) {
|
||||||
|
std::cout<<"The package targets Debian. Doing nothing.\n";
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// set XSBC-Original-Maintainer if needed
|
||||||
|
auto orig_field = get_field(c, "^(?:[XSBC]*-)?Original-Maintainer:\\s?(.*)$");
|
||||||
|
if(orig_field && verbose) {
|
||||||
|
std::cout<<"Overwriting original maintainer: "<< *orig_field <<"\n";
|
||||||
|
}
|
||||||
|
|
||||||
|
if(verbose) {
|
||||||
|
std::cout<<"The original maintainer is: "<< om <<"\n";
|
||||||
|
std::cout<<"Resetting as: "<<UBUNTU_MAINTAINER<<"\n";
|
||||||
|
}
|
||||||
|
|
||||||
|
// set original maint
|
||||||
|
if(orig_field) {
|
||||||
|
// pattern to replace original maint
|
||||||
|
std::regex orig_re("^(?:[XSBC]*-)?Original-Maintainer:.*$", std::regex_constants::multiline);
|
||||||
|
c = std::regex_replace(c, orig_re, "XSBC-Original-Maintainer: " + om);
|
||||||
|
} else {
|
||||||
|
// insert after Maintainer line
|
||||||
|
std::regex maint_re("^(Maintainer:.*)$", std::regex_constants::multiline);
|
||||||
|
c = std::regex_replace(c, maint_re, "$1\nXSBC-Original-Maintainer: " + om);
|
||||||
|
}
|
||||||
|
|
||||||
|
// now set maint
|
||||||
|
{
|
||||||
|
std::regex maint_re("^Maintainer:\\s?.*$", std::regex_constants::multiline);
|
||||||
|
c = std::regex_replace(c, maint_re, "Maintainer: " + std::string(UBUNTU_MAINTAINER));
|
||||||
|
}
|
||||||
|
|
||||||
|
write_file(control_file, c);
|
||||||
|
}
|
||||||
|
|
||||||
|
void update_maintainer(const std::string &debian_directory, bool verbose) {
|
||||||
|
fs::path debian_dir(debian_directory);
|
||||||
|
auto control_file = find_control_file(debian_dir);
|
||||||
|
if(!control_file) {
|
||||||
|
throw MaintainerUpdateException("No control file found");
|
||||||
|
}
|
||||||
|
fs::path changelog = find_changelog_file(debian_dir);
|
||||||
|
if(xsbc_managed_by_rules(debian_dir)) {
|
||||||
|
if(verbose) {
|
||||||
|
std::cout<<"XSBC-Original is managed by rules. Doing nothing.\n";
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string distribution = get_distribution(changelog);
|
||||||
|
|
||||||
|
update_maintainer_file(*control_file, distribution, verbose);
|
||||||
|
}
|
@ -0,0 +1,4 @@
|
|||||||
|
#pragma once

#include <string>

// Rewrite the Maintainer field of the Debian control file found in
// *debian_directory*, preserving the previous maintainer as
// XSBC-Original-Maintainer when required (update-maintainer(1) semantics).
// When *verbose* is true, progress messages are printed to stdout.
void update_maintainer(const std::string &debian_directory, bool verbose=false);
|
Loading…
Reference in new issue