MEGAcommit adding a FULL web interface

main
Simon Quigley 2 weeks ago
parent d63f3b8d79
commit 9ff2bd6fcd

@@ -1,63 +1,141 @@
cmake_minimum_required(VERSION 3.21)
project(lubuntuci CXX)
cmake_minimum_required(VERSION 3.16)
project(lubuntu_ci_all CXX)
set(CMAKE_AUTOMOC ON)
set(CMAKE_CXX_STANDARD 23)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_BUILD_TYPE Debug CACHE STRING "Build type" FORCE)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
#
# Allow the user to override LAUNCHPAD_CPP_INCLUDE_DIR/LAUNCHPAD_CPP_LIBRARY
#
if (NOT DEFINED LAUNCHPAD_CPP_INCLUDE_DIR)
set(LAUNCHPAD_CPP_INCLUDE_DIR "/srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp")
endif()
if (NOT DEFINED LAUNCHPAD_CPP_LIBRARY)
set(LAUNCHPAD_CPP_LIBRARY "/srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so")
endif()
find_package(yaml-cpp REQUIRED)
find_package(Qt6 REQUIRED COMPONENTS Core HttpServer Sql)
find_package(PkgConfig REQUIRED)
pkg_check_modules(LIBGIT2 REQUIRED IMPORTED_TARGET libgit2)
find_package(CURL REQUIRED)
find_library(UUID_LIB uuid)
find_package(ZLIB REQUIRED)
find_package(yaml-cpp REQUIRED)
pkg_check_modules(LIBARCHIVE REQUIRED libarchive)
include_directories(${LIBARCHIVE_INCLUDE_DIRS})
link_directories(${LIBARCHIVE_LIBRARY_DIRS})
add_definitions(${LIBARCHIVE_CFLAGS_OTHER})
pkg_check_modules(LIBGIT2 REQUIRED libgit2)
find_package(ZLIB REQUIRED)
find_package(CURL REQUIRED)
set(UUID_LIB "uuid")
include_directories(/srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp)
#
# 1. The main library: lubuntuci_lib
#
add_library(lubuntuci_lib SHARED
common.cpp
utilities.cpp
ci_logic.cpp
ci_database_objs.cpp
lubuntuci_lib.cpp
task_queue.cpp
template_renderer.cpp
web_server.cpp
sources_parser.cpp
naive_bayes_classifier.cpp
)
add_library(lubuntuci SHARED common.cpp utilities.cpp)
target_include_directories(lubuntuci PUBLIC ${CMAKE_CURRENT_SOURCE_DIR} ${LIBARCHIVE_INCLUDE_DIRS})
target_link_libraries(lubuntuci PUBLIC yaml-cpp::yaml-cpp PRIVATE CURL::libcurl ${LIBARCHIVE_LIBRARIES} /srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so)
target_include_directories(lubuntuci_lib PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}
"${LAUNCHPAD_CPP_INCLUDE_DIR}"
)
add_library(update_maintainer_lib STATIC update-maintainer-lib.cpp)
target_include_directories(update_maintainer_lib PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp)
target_link_libraries(update_maintainer_lib PRIVATE lubuntuci yaml-cpp::yaml-cpp CURL::libcurl)
target_link_libraries(lubuntuci_lib
Qt6::Core
Qt6::HttpServer
Qt6::Sql
yaml-cpp
${LIBARCHIVE_LIBRARIES}
${LIBGIT2_LIBRARIES}
"${LAUNCHPAD_CPP_LIBRARY}"
ZLIB::ZLIB
CURL::libcurl
${UUID_LIB}
)
add_executable(update-maintainer update-maintainer.cpp)
target_link_libraries(update-maintainer PRIVATE update_maintainer_lib)
#
# 2. The update-maintainer-lib library
#
add_library(update_maintainer_lib STATIC
update-maintainer-lib.cpp
)
target_include_directories(update_maintainer_lib PRIVATE
"${LAUNCHPAD_CPP_INCLUDE_DIR}"
)
target_link_libraries(update_maintainer_lib
lubuntuci_lib
yaml-cpp
CURL::libcurl
${LIBARCHIVE_LIBRARIES}
${LIBGIT2_LIBRARIES}
ZLIB::ZLIB
"${LAUNCHPAD_CPP_LIBRARY}"
${UUID_LIB}
)
add_executable(build-packages build-packages.cpp)
target_include_directories(build-packages PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp)
target_link_libraries(build-packages PRIVATE lubuntuci PkgConfig::LIBGIT2 update_maintainer_lib yaml-cpp::yaml-cpp)
#
# 3. Build each executable
#
add_executable(fetch-indexes fetch-indexes.cpp utilities.cpp)
target_include_directories(fetch-indexes PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp)
target_link_libraries(fetch-indexes PRIVATE lubuntuci CURL::libcurl yaml-cpp::yaml-cpp ZLIB::ZLIB /srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so)
add_executable(update-maintainer update-maintainer.cpp)
target_link_libraries(update-maintainer
lubuntuci_lib
update_maintainer_lib
yaml-cpp
${LIBARCHIVE_LIBRARIES}
${LIBGIT2_LIBRARIES}
ZLIB::ZLIB
CURL::libcurl
"${LAUNCHPAD_CPP_LIBRARY}"
${UUID_LIB}
)
add_executable(lintian-ppa lintian-ppa.cpp)
target_include_directories(lintian-ppa PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp)
target_link_libraries(lintian-ppa PRIVATE lubuntuci ${UUID_LIB} /srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so)
set_target_properties(lubuntuci build-packages fetch-indexes update-maintainer lintian-ppa PROPERTIES
BUILD_WITH_INSTALL_RPATH TRUE
INSTALL_RPATH "$ORIGIN/lib"
target_link_libraries(lintian-ppa
lubuntuci_lib
yaml-cpp
${LIBARCHIVE_LIBRARIES}
${LIBGIT2_LIBRARIES}
"${LAUNCHPAD_CPP_LIBRARY}"
ZLIB::ZLIB
CURL::libcurl
${UUID_LIB}
)
install(TARGETS lubuntuci
LIBRARY DESTINATION lib
add_executable(fetch-indexes fetch-indexes.cpp)
target_link_libraries(fetch-indexes
lubuntuci_lib
yaml-cpp
${LIBARCHIVE_LIBRARIES}
${LIBGIT2_LIBRARIES}
"${LAUNCHPAD_CPP_LIBRARY}"
ZLIB::ZLIB
CURL::libcurl
${UUID_LIB}
)
install(TARGETS build-packages fetch-indexes update-maintainer lintian-ppa
RUNTIME DESTINATION .
add_executable(web_ui main.cpp)
target_link_libraries(web_ui
lubuntuci_lib
yaml-cpp
${LIBARCHIVE_LIBRARIES}
${LIBGIT2_LIBRARIES}
ZLIB::ZLIB
CURL::libcurl
"${LAUNCHPAD_CPP_LIBRARY}"
${UUID_LIB}
)
install(FILES common.h update-maintainer-lib.h utilities.h
DESTINATION include/lubuntuci
)
#
# Copy templates
#
file(COPY ../templates
DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
file(COPY ../static
DESTINATION ${CMAKE_CURRENT_BINARY_DIR})

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,269 @@
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#ifndef CI_DATABASE_OBJS_H
#define CI_DATABASE_OBJS_H
#include <chrono>
#include <iostream>
#include <memory>
#include <string>
#include <vector>
#include <mutex>
#include <QDateTime>
#include <QSqlDatabase>
#include <yaml-cpp/yaml.h>
#include "common.h"
class Person {
public:
int id;
std::string username;
std::string logo_url;
Person(int id = 0, const std::string username = "", const std::string logo_url = "");
};
class Release {
public:
int id;
int version;
std::string codename;
bool isDefault;
Release(int id = 0, int version = 0, const std::string& codename = "", bool isDefault = false);
std::vector<Release> get_releases(QSqlDatabase& p_db);
Release get_release_by_id(QSqlDatabase& p_db, int id);
bool set_releases(QSqlDatabase& p_db, YAML::Node& releases);
};
class Package {
public:
int id;
std::string name;
bool large;
std::string upstream_browser;
std::string packaging_browser;
std::string upstream_url;
std::string packaging_branch;
std::string packaging_url;
Package(int id = 0, const std::string& name = "", bool large = false, const std::string& upstream_url = "", const std::string& packaging_branch = "", const std::string& packaging_url = "");
std::vector<Package> get_packages(QSqlDatabase& p_db);
Package get_package_by_id(QSqlDatabase& p_db, int id);
bool set_packages(QSqlDatabase& p_db, YAML::Node& packages);
private:
std::string transform_url(const std::string& url);
};
class Branch {
public:
int id;
std::string name;
std::string upload_target;
std::string upload_target_ssh;
Branch(int id = 0, const std::string& name = "", const std::string& upload_target = "", const std::string& upload_target_ssh = "");
std::vector<Branch> get_branches(QSqlDatabase& p_db);
Branch get_branch_by_id(QSqlDatabase& p_db, int id);
};
class GitCommit {
public:
int id = 0;
std::string commit_hash;
std::string commit_summary;
std::string commit_message;
std::chrono::zoned_time<std::chrono::seconds> commit_datetime;
std::string commit_author;
std::string commit_committer;
GitCommit(
QSqlDatabase& p_db,
const std::string& commit_hash = "",
const std::string& commit_summary = "",
const std::string& commit_message = "",
const std::chrono::zoned_time<std::chrono::seconds>& commit_datetime = std::chrono::zoned_time<std::chrono::seconds>(),
const std::string& commit_author = "",
const std::string& commit_committer = ""
);
GitCommit(
const int id = 0,
const std::string& commit_hash = "",
const std::string& commit_summary = "",
const std::string& commit_message = "",
const std::chrono::zoned_time<std::chrono::seconds>& commit_datetime = std::chrono::zoned_time<std::chrono::seconds>(),
const std::string& commit_author = "",
const std::string& commit_committer = ""
);
GitCommit get_commit_by_id(QSqlDatabase& p_db, int id);
std::optional<GitCommit> get_commit_by_hash(QSqlDatabase& p_db, const std::string commit_hash);
private:
std::chrono::zoned_time<std::chrono::seconds> convert_timestr_to_zonedtime(const std::string& datetime_str);
};
class JobStatus {
public:
int id;
int build_score;
std::string name;
std::string display_name;
JobStatus(QSqlDatabase& p_db, int id);
};
class PackageConf {
public:
int id = 0;
std::shared_ptr<Package> package;
std::shared_ptr<Release> release;
std::shared_ptr<Branch> branch;
std::shared_ptr<GitCommit> packaging_commit = std::make_shared<GitCommit>();
std::shared_ptr<GitCommit> upstream_commit = std::make_shared<GitCommit>();
std::string upstream_version;
int ppa_revision = 1;
bool operator<(const PackageConf& other) const {
if (package->id != other.package->id)
return package->id < other.package->id;
if (release->id != other.release->id)
return release->id < other.release->id;
if (branch->id != other.branch->id)
return branch->id < other.branch->id;
return id < other.id;
}
bool operator==(const PackageConf& other) const {
// Intentionally leave out our ID
return package->id == other.package->id &&
release->id == other.release->id &&
branch->id == other.branch->id;
}
PackageConf(int id = 0, std::shared_ptr<Package> package = NULL, std::shared_ptr<Release> release = NULL, std::shared_ptr<Branch> branch = NULL,
std::shared_ptr<GitCommit> packaging_commit = NULL, std::shared_ptr<GitCommit> upstream_commit = NULL);
std::vector<std::shared_ptr<PackageConf>> get_package_confs(QSqlDatabase& p_db, std::map<std::string, std::shared_ptr<JobStatus>> jobstatus_map);
std::vector<std::shared_ptr<PackageConf>> get_package_confs_by_package_name(QSqlDatabase& p_db,
std::vector<std::shared_ptr<PackageConf>> packageconfs,
const std::string& package_name);
void assign_task(std::shared_ptr<JobStatus> jobstatus, std::shared_ptr<Task> task_ptr, std::weak_ptr<PackageConf> packageconf_ptr);
int successful_task_count();
int total_task_count();
std::shared_ptr<Task> get_task_by_jobstatus(std::shared_ptr<JobStatus> jobstatus);
bool set_package_confs(QSqlDatabase& p_db);
bool set_commit_id(const std::string& _commit_id = "");
bool set_commit_time(const std::chrono::zoned_time<std::chrono::seconds>& _commit_time = std::chrono::zoned_time<std::chrono::seconds>{});
void sync(QSqlDatabase& p_db);
bool can_check_source_upload();
bool can_check_builds();
struct PackageConfPlain {
int package_id;
int release_id;
int branch_id;
bool operator<(const PackageConf::PackageConfPlain& other) const {
if (package_id != other.package_id)
return package_id < other.package_id;
if (release_id != other.release_id)
return release_id < other.release_id;
return branch_id < other.branch_id;
}
bool operator==(const PackageConf::PackageConfPlain& other) const {
return package_id == other.package_id &&
release_id == other.release_id &&
branch_id == other.branch_id;
}
};
private:
std::unordered_map<std::shared_ptr<JobStatus>, std::shared_ptr<Task>> jobstatus_task_map_;
std::unique_ptr<std::mutex> task_mutex_ = std::make_unique<std::mutex>();
};
class Task {
public:
int id;
int build_score = 0;
bool successful;
std::int64_t queue_time = 0;
std::int64_t start_time = 0;
std::int64_t finish_time = 0;
std::function<void(std::shared_ptr<Log> log)> func;
std::shared_ptr<Log> log;
std::shared_ptr<JobStatus> jobstatus;
std::weak_ptr<PackageConf> parent_packageconf;
bool is_running;
Task(QSqlDatabase& p_db, std::shared_ptr<JobStatus> jobstatus, std::int64_t time, std::shared_ptr<PackageConf> packageconf);
Task();
std::set<std::shared_ptr<Task>> get_completed_tasks(QSqlDatabase& p_db, std::vector<std::shared_ptr<PackageConf>> packageconfs, std::map<std::string, std::shared_ptr<JobStatus>> job_statuses, int page, int per_page);
void save(QSqlDatabase& p_db, int _packageconf_id = 0);
std::shared_ptr<PackageConf> get_parent_packageconf() const {
return parent_packageconf.lock();
}
struct TaskComparator {
bool operator()(const std::shared_ptr<Task>& lhs, const std::shared_ptr<Task>& rhs) const {
return Task::compare(lhs, rhs);
}
};
// Custom comparator for task ordering
bool operator<(const Task& other) const {
if (build_score != other.build_score) {
return build_score < other.build_score;
} else if (queue_time != other.queue_time) {
return queue_time < other.queue_time;
} else if (start_time != other.start_time) {
return start_time < other.start_time;
} else if (finish_time != other.finish_time) {
return finish_time < other.finish_time;
}
return true;
}
bool operator<(const std::shared_ptr<Task>& other) const {
if (build_score != other->build_score) {
return build_score < other->build_score;
} else if (queue_time != other->queue_time) {
return queue_time < other->queue_time;
} else if (start_time != other->start_time) {
return start_time < other->start_time;
} else if (finish_time != other->finish_time) {
return finish_time < other->finish_time;
}
return true;
}
static bool compare(const std::shared_ptr<Task>& lhs, const std::shared_ptr<Task>& rhs);
};
inline size_t qHash(const PackageConf::PackageConfPlain& key, size_t seed = 0) {
size_t res = 0;
res ^= std::hash<int>()(key.package_id) + 0x9e3779b9 + (res << 6) + (res >> 2);
res ^= std::hash<int>()(key.release_id) + 0x9e3779b9 + (res << 6) + (res >> 2);
res ^= std::hash<int>()(key.branch_id) + 0x9e3779b9 + (res << 6) + (res >> 2);
return res;
}
#endif // CI_DATABASE_OBJS_H
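
For reference, the free qHash() above uses the boost-style hash-combine constant 0x9e3779b9. Below is a minimal, self-contained sketch of the same combine step with a simplified stand-in for PackageConf::PackageConfPlain; the names ConfKey and hash_conf are illustrative only and not part of this header.

#include <cstddef>
#include <functional>
#include <iostream>
#include <unordered_set>

// Simplified stand-in for PackageConf::PackageConfPlain.
struct ConfKey {
    int package_id, release_id, branch_id;
    bool operator==(const ConfKey&) const = default;
};

// Same combine step as qHash() above: fold each field into the running seed.
static size_t hash_conf(const ConfKey& k) {
    size_t res = 0;
    for (int v : {k.package_id, k.release_id, k.branch_id})
        res ^= std::hash<int>()(v) + 0x9e3779b9 + (res << 6) + (res >> 2);
    return res;
}

struct ConfKeyHash {
    size_t operator()(const ConfKey& k) const { return hash_conf(k); }
};

int main() {
    std::unordered_set<ConfKey, ConfKeyHash> seen;
    seen.insert({1, 2, 3});
    std::cout << seen.count({1, 2, 3}) << "\n";  // prints 1
}
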

File diff suppressed because it is too large

@@ -0,0 +1,133 @@
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
// cpp/ci_logic.h
// [License Header as in original]
#ifndef CI_LOGIC_H
#define CI_LOGIC_H
#include "ci_database_objs.h"
#include "task_queue.h"
#include <string>
#include <vector>
#include <optional>
#include <filesystem>
#include <mutex>
#include <queue>
#include <thread>
#include <functional>
#include <condition_variable>
#include <QSqlDatabase>
#include <yaml-cpp/yaml.h>
struct CiProject;
/**
* Data describing one package to pull/build/etc.
*/
struct CiProject {
std::string name;
std::string version;
std::string time;
std::string upload_target;
std::string upstream_url;
std::string packaging_url;
std::optional<std::string> packaging_branch;
std::filesystem::path main_tarball;
bool large = false;
// These get populated during build
std::vector<std::string> changes_files;
std::vector<std::string> devel_changes_files;
};
class CiLogic {
public:
// Initialize global configurations
void init_global();
// Load YAML configuration from a given path
YAML::Node load_yaml_config(const std::filesystem::path &config_path);
// Convert a YAML node to a CiProject structure
CiProject yaml_to_project(const YAML::Node &pkg_node);
// Clone or fetch a git repository
void clone_or_fetch(const std::filesystem::path &repo_dir, const std::string &repo_url, const std::optional<std::string> &branch, std::shared_ptr<Log> log = NULL);
bool pull_project(std::shared_ptr<PackageConf> &proj, std::shared_ptr<Log> log = NULL);
bool create_project_tarball(std::shared_ptr<PackageConf> &proj, std::shared_ptr<Log> log = NULL);
std::tuple<bool, std::set<std::string>> build_project(std::shared_ptr<PackageConf> proj, std::shared_ptr<Log> log = NULL);
bool upload_and_lint(std::shared_ptr<PackageConf> &proj, const std::set<std::string> changes_files, bool skip_dput, std::shared_ptr<Log> log = NULL);
// Perform cleanup and summarize the build process
void do_summary(bool skip_cleanup);
// Process the entire pipeline for a given PackageConf ID
void process_entire_pipeline(std::shared_ptr<PackageConf> &proj, bool skip_dput, bool skip_cleanup);
// Retrieve all PackageConf entries from the database
std::vector<std::shared_ptr<PackageConf>> get_config(const std::string &repo_name = "", int page = 0, int per_page = 0, const std::string& sort_by = "", const std::string& sort_order = "");
// Function to enqueue tasks
void enqueue(std::function<void()> task);
// Fetch logs for a specific PackageConf ID
std::string get_logs_for_repo_conf(int package_conf_id);
std::map<std::string, std::shared_ptr<JobStatus>> get_job_statuses();
std::vector<std::shared_ptr<PackageConf>> get_packageconfs();
std::shared_ptr<PackageConf> get_packageconf_by_id(int id);
std::vector<std::shared_ptr<PackageConf>> get_packageconfs_by_ids(std::set<int> ids);
void set_packageconfs(std::vector<std::shared_ptr<PackageConf>> _pkgconfs);
void sync(std::shared_ptr<PackageConf> pkgconf);
QSqlDatabase get_thread_connection();
std::string queue_pull_tarball(std::vector<std::shared_ptr<PackageConf>> repos,
std::unique_ptr<TaskQueue>& task_queue,
const std::map<std::string, std::shared_ptr<JobStatus>> job_statuses);
std::vector<Release> releases;
std::vector<Package> packages;
std::vector<Branch> branches;
private:
// Initialize the database
bool init_database(const QString& connectionName = "LubuntuCIConnection",
const QString& databasePath = "/srv/lubuntu-ci/repos/ci-tools/lubuntu_ci.db");
void debuild_package(const fs::path &packaging_dir, std::shared_ptr<Log> log);
QSqlDatabase p_db;
mutable std::mutex connection_mutex_;
mutable std::mutex packageconfs_mutex_;
std::vector<std::shared_ptr<PackageConf>> packageconfs;
std::map<std::string, std::shared_ptr<JobStatus>> _cached_job_statuses;
struct package_conf_item {
std::shared_ptr<PackageConf> first_pkgconf;
std::shared_ptr<Task> first_pull_task = std::make_shared<Task>();
std::shared_ptr<Task> first_tarball_task = std::make_shared<Task>();
std::shared_ptr<GitCommit> packaging_commit = std::make_shared<GitCommit>();
std::shared_ptr<GitCommit> upstream_commit = std::make_shared<GitCommit>();
};
};
#endif // CI_LOGIC_H
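
As a rough illustration of the shape of data yaml_to_project() consumes, the sketch below fills a CiProject-like struct from a yaml-cpp node. The key names ("name", "upstream_url", "packaging_url", "packaging_branch", "large") and the ProjectSketch/from_yaml names are assumptions for illustration, not the project's actual config schema.

#include <yaml-cpp/yaml.h>
#include <iostream>
#include <optional>
#include <string>

// Local stand-in mirroring a few CiProject fields from ci_logic.h.
struct ProjectSketch {
    std::string name, upstream_url, packaging_url;
    std::optional<std::string> packaging_branch;
    bool large = false;
};

static ProjectSketch from_yaml(const YAML::Node& n) {
    ProjectSketch p;
    p.name          = n["name"].as<std::string>();
    p.upstream_url  = n["upstream_url"].as<std::string>("");
    p.packaging_url = n["packaging_url"].as<std::string>("");
    if (n["packaging_branch"])
        p.packaging_branch = n["packaging_branch"].as<std::string>();
    p.large = n["large"].as<bool>(false);
    return p;
}

int main() {
    YAML::Node node = YAML::Load(
        "name: example-package\nupstream_url: https://example.org/example.git\nlarge: true\n");
    ProjectSketch p = from_yaml(node);
    std::cout << p.name << (p.large ? " (large)" : "") << "\n";
}
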

@@ -1,4 +1,4 @@
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@@ -14,110 +14,146 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#include "common.h"
#include "utilities.h"
#include "/usr/include/archive.h"
#include "/usr/include/archive_entry.h"
#include <chrono>
#include <archive_entry.h>
#include <iostream>
#include <fstream>
#include <sstream>
#include <stdexcept>
#include <cstdio>
#include <cstdlib>
#include <filesystem>
#include <chrono>
#include <regex>
#include <chrono>
#include <ctime>
#include <mutex>
#include <unordered_set>
#include <QProcess>
namespace fs = std::filesystem;
// Define the global 'verbose' variable
bool verbose = false;
static void log_info(const std::string &msg) {
// Logger function implementations
void log_info(const std::string &msg) {
std::cout << "[INFO] " << msg << "\n";
}
static void log_error(const std::string &msg) {
void log_warning(const std::string &msg) {
std::cerr << "[WARNING] " << msg << "\n";
}
void log_error(const std::string &msg) {
std::cerr << "[ERROR] " << msg << "\n";
}
std::string parse_version(const fs::path &changelog_path) {
if (!fs::exists(changelog_path)) {
throw std::runtime_error("Changelog not found: " + changelog_path.string());
}
std::ifstream f(changelog_path);
if (!f) throw std::runtime_error("Unable to open changelog");
std::string first_line;
std::getline(f, first_line);
f.close();
size_t start = first_line.find('(');
size_t end = first_line.find(')');
if (start == std::string::npos || end == std::string::npos) {
throw std::runtime_error("Invalid changelog format");
void log_verbose(const std::string &msg) {
if (verbose) {
std::cout << "[VERBOSE] " << msg << "\n";
}
std::string version_match = first_line.substr(start+1, end - (start+1));
}
std::string epoch;
std::string upstream_version = version_match;
if (auto pos = version_match.find(':'); pos != std::string::npos) {
epoch = version_match.substr(0, pos);
upstream_version = version_match.substr(pos+1);
}
if (auto pos = upstream_version.find('-'); pos != std::string::npos) {
upstream_version = upstream_version.substr(0, pos);
}
namespace fs = std::filesystem;
std::regex git_regex("(\\+git[0-9]+)?(~[a-z]+)?$");
upstream_version = std::regex_replace(upstream_version, git_regex, "");
bool run_command(const std::vector<std::string> &cmd,
const std::optional<std::filesystem::path> &cwd,
bool show_output,
std::shared_ptr<Log> log) {
if (cmd.empty()) {
throw std::runtime_error("Command is empty");
}
auto t = std::time(nullptr);
std::tm tm = *std::gmtime(&t);
char buf[32];
std::strftime(buf, sizeof(buf), "%Y%m%d%H%M", &tm);
std::string current_date = buf;
QProcess process;
std::string version;
if (!epoch.empty()) {
version = epoch + ":" + upstream_version + "+git" + current_date;
} else {
version = upstream_version + "+git" + current_date;
// Set the working directory if provided
if (cwd) {
process.setWorkingDirectory(QString::fromStdString(cwd->string()));
}
return version;
}
// Set up the environment (if needed)
QProcessEnvironment env = QProcessEnvironment::systemEnvironment();
process.setProcessEnvironment(env);
void run_command(const std::vector<std::string> &cmd, const std::optional<fs::path> &cwd, bool show_output) {
std::string full_cmd;
for (const auto &c : cmd) {
full_cmd += c + " ";
// Extract executable and arguments
QString program = QString::fromStdString(cmd[0]);
QStringList arguments;
for (size_t i = 1; i < cmd.size(); ++i) {
arguments << QString::fromStdString(cmd[i]);
}
if (cwd) {
full_cmd = "cd " + cwd->string() + " && " + full_cmd;
// Start the command
process.start(program, arguments);
if (!process.waitForStarted()) {
throw std::runtime_error("Failed to start the command: " + program.toStdString());
}
log_info("Executing: " + full_cmd);
int ret = std::system(full_cmd.c_str());
if (ret != 0) {
log_error("Command failed: " + full_cmd);
throw std::runtime_error("Command failed");
// Stream output while the process is running
while (process.state() == QProcess::Running) {
if (process.waitForReadyRead()) {
QByteArray output = process.readAllStandardOutput();
QByteArray error = process.readAllStandardError();
if (log) {
log->append(output.toStdString());
log->append(error.toStdString());
}
if (show_output) {
std::cout << output.toStdString();
std::cerr << error.toStdString();
}
}
}
if (show_output) {
std::cout << "[INFO] Command succeeded: " + full_cmd << "\n";
// Wait for the process to finish
process.waitForFinished();
// Capture return code and errors
if (process.exitStatus() != QProcess::NormalExit || process.exitCode() != 0) {
QByteArray error_output = process.readAllStandardError();
std::string error_message = "Command failed with exit code: " + std::to_string(process.exitCode());
if (!error_output.isEmpty()) {
error_message += "\nError Output: " + error_output.toStdString();
}
throw std::runtime_error(error_message);
}
return true;
}
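
A trimmed, standalone sketch of the QProcess pattern run_command() uses above: start the process, stream stdout/stderr while it is running, then check the exit status. The command being run here is only an example.

#include <QProcess>
#include <iostream>

int main() {
    QProcess proc;
    proc.start("ls", {"-l", "/tmp"});            // example command only
    if (!proc.waitForStarted())
        return 1;
    while (proc.state() == QProcess::Running) {  // stream output as it arrives
        if (proc.waitForReadyRead()) {
            std::cout << proc.readAllStandardOutput().toStdString();
            std::cerr << proc.readAllStandardError().toStdString();
        }
    }
    proc.waitForFinished();
    return (proc.exitStatus() == QProcess::NormalExit) ? proc.exitCode() : 1;
}
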
void clean_old_logs(const fs::path &log_dir, int max_age_seconds) {
auto now = std::chrono::system_clock::now();
for (auto &entry : fs::directory_iterator(log_dir)) {
if (fs::is_regular_file(entry)) {
auto ftime = fs::last_write_time(entry);
auto sctp = decltype(ftime)::clock::to_sys(ftime);
auto age = std::chrono::duration_cast<std::chrono::seconds>(now - sctp).count();
if (age > max_age_seconds) {
fs::remove(entry);
// Function to extract excluded files from a copyright file
std::vector<std::string> extract_files_excluded(const std::string& filepath) {
std::ifstream file(filepath);
if (!file.is_open()) {
throw std::runtime_error("Failed to open file: " + filepath);
}
std::vector<std::string> files_excluded;
std::string line;
std::regex files_excluded_pattern(R"(Files-Excluded:\s*(.*))");
bool in_files_excluded = false;
while (std::getline(file, line)) {
if (std::regex_match(line, files_excluded_pattern)) {
in_files_excluded = true;
std::smatch match;
if (std::regex_search(line, match, files_excluded_pattern) && match.size() > 1) {
files_excluded.emplace_back(match[1]);
}
} else if (in_files_excluded) {
if (!line.empty() && (line[0] == ' ' || line[0] == '\t')) {
files_excluded.emplace_back(line.substr(1));
} else {
break; // End of Files-Excluded block
}
}
}
}
return files_excluded;
}
void create_tarball(const std::string& tarballPath, const std::string& directory, const std::vector<std::string>& exclusions) {
std::cout << "[INFO] Creating tarball: " << tarballPath << std::endl;
// Function to create a tarball
void create_tarball(const std::string& tarballPath, const std::string& directory, const std::vector<std::string>& exclusions, std::shared_ptr<Log> log) {
log->append("Creating tarball: " + tarballPath);
struct archive* a = archive_write_new();
if (!a) {
@@ -145,43 +181,69 @@ void create_tarball(const std::string& tarballPath, const std::string& directory
throw std::runtime_error(err);
}
for (auto it = fs::recursive_directory_iterator(directory, fs::directory_options::follow_directory_symlink | fs::directory_options::skip_permission_denied);
// Initialize a set to track added relative paths to prevent duplication
std::unordered_set<std::string> added_paths;
// Iterate through the directory recursively without following symlinks
for (auto it = fs::recursive_directory_iterator(
directory,
fs::directory_options::skip_permission_denied);
it != fs::recursive_directory_iterator(); ++it) {
const auto& path = it->path();
std::error_code ec;
fs::path relativePath = fs::relative(path, directory, ec);
fs::path relative_path = fs::relative(path, directory, ec);
if (ec) {
log_error("Failed to compute relative path for: " + path.string() + " Error: " + ec.message());
log->append("Failed to compute relative path for: " + path.string() + " Error: " + ec.message());
continue;
}
bool excluded = std::any_of(exclusions.begin(), exclusions.end(), [&relativePath](const std::string& exclusion) {
return relativePath.string().find(exclusion) != std::string::npos;
// Normalize the relative path to avoid discrepancies
fs::path normalized_relative_path = relative_path.lexically_normal();
std::string relative_path_str = normalized_relative_path.string();
// Check if this path has already been added
if (!added_paths.insert(relative_path_str).second) {
log->append("Duplicate path detected and skipped: " + relative_path_str);
continue; // Skip adding this duplicate path
}
// Exclusion logic (if any exclusions are provided)
bool excluded = std::any_of(exclusions.begin(), exclusions.end(), [&relative_path_str](const std::string& exclusion) {
return relative_path_str.find(exclusion) != std::string::npos;
});
if (excluded) { continue; }
fs::file_status fstatus = it->symlink_status(ec);
if (ec) {
log_error("Failed to get file status for: " + path.string() + " Error: " + ec.message());
log->append("Failed to get file status for: " + path.string() + " Error: " + ec.message());
continue;
}
struct archive_entry* entry = archive_entry_new();
if (!entry) {
log_error("Failed to create archive entry for: " + path.string());
log->append("Failed to create archive entry for: " + path.string());
archive_write_free(a);
throw std::runtime_error("Failed to create archive entry.");
}
archive_entry_set_pathname(entry, relativePath.c_str());
std::string entry_path = relative_path_str;
if (fs::is_directory(fstatus)) {
// Ensure the directory pathname ends with '/'
if (!entry_path.empty() && entry_path.back() != '/') {
entry_path += '/';
}
archive_entry_set_pathname(entry, entry_path.c_str());
} else {
archive_entry_set_pathname(entry, entry_path.c_str());
}
// Set file type, permissions, and size
if (fs::is_regular_file(fstatus)) {
// Regular file
uintmax_t filesize = fs::file_size(path, ec);
if (ec) {
log_error("Cannot get file size for: " + path.string() + " Error: " + ec.message());
log->append("Cannot get file size for: " + path.string() + " Error: " + ec.message());
archive_entry_free(entry);
continue;
}
@@ -192,7 +254,7 @@ void create_tarball(const std::string& tarballPath, const std::string& directory
else if (fs::is_symlink(fstatus)) {
fs::path target = fs::read_symlink(path, ec);
if (ec) {
log_error("Cannot read symlink for: " + path.string() + " Error: " + ec.message());
log->append("Cannot read symlink for: " + path.string() + " Error: " + ec.message());
archive_entry_free(entry);
continue;
}
@@ -206,7 +268,7 @@ void create_tarball(const std::string& tarballPath, const std::string& directory
archive_entry_set_perm(entry, static_cast<mode_t>(fstatus.permissions()));
}
else {
log_error("Unsupported file type for: " + path.string());
log->append("Unsupported file type for: " + path.string());
archive_entry_free(entry);
continue;
}
@@ -215,18 +277,18 @@ void create_tarball(const std::string& tarballPath, const std::string& directory
fs::file_time_type ftime = fs::last_write_time(path, ec);
std::time_t mtime;
if (ec) {
log_error("Failed to get last write time for: " + path.string() + " Error: " + ec.message());
log->append("Failed to get last write time for: " + path.string() + " Error: " + ec.message());
// Obtain current UTC time as fallback
auto now = std::chrono::system_clock::now();
mtime = std::chrono::system_clock::to_time_t(now);
log_info("Setting default mtime (current UTC time) for: " + path.string());
log->append("Setting default mtime (current UTC time) for: " + path.string());
} else {
mtime = to_time_t(ftime);
}
archive_entry_set_mtime(entry, mtime, 0);
if (archive_write_header(a, entry) != ARCHIVE_OK) {
log_error("Failed to write header for: " + path.string() + " Error: " + archive_error_string(a));
log->append("Failed to write header for: " + path.string() + " Error: " + archive_error_string(a));
archive_entry_free(entry);
continue;
}
@@ -234,7 +296,7 @@ void create_tarball(const std::string& tarballPath, const std::string& directory
if (fs::is_regular_file(fstatus)) {
std::ifstream fileStream(path, std::ios::binary);
if (!fileStream) {
log_error("Failed to open file for reading: " + path.string());
log->append("Failed to open file for reading: " + path.string());
archive_entry_free(entry);
continue;
}
@@ -246,14 +308,14 @@ void create_tarball(const std::string& tarballPath, const std::string& directory
std::streamsize bytesRead = fileStream.gcount();
if (bytesRead > 0) {
if (archive_write_data(a, buffer, static_cast<size_t>(bytesRead)) < 0) {
log_error("Failed to write data for: " + path.string() + " Error: " + archive_error_string(a));
log->append("Failed to write data for: " + path.string() + " Error: " + archive_error_string(a));
break;
}
}
}
if (fileStream.bad()) {
log_error("Error reading file: " + path.string());
log->append("Error reading file: " + path.string());
}
}
@@ -273,23 +335,5 @@ void create_tarball(const std::string& tarballPath, const std::string& directory
throw std::runtime_error(err);
}
std::cout << "[INFO] Tarball created and compressed: " << tarballPath << std::endl;
}
std::string get_current_utc_time() {
auto now = std::chrono::system_clock::now();
std::time_t now_time = std::chrono::system_clock::to_time_t(now);
std::tm tm_utc;
gmtime_r(&now_time, &tm_utc);
char buf[20];
std::strftime(buf, sizeof(buf), "%Y-%m-%dT%H:%M:%S", &tm_utc);
return std::string(buf);
}
std::time_t to_time_t(const fs::file_time_type& ftime) {
using namespace std::chrono;
// Convert to system_clock time_point
auto sctp = time_point_cast<system_clock::duration>(ftime - fs::file_time_type::clock::now()
+ system_clock::now());
return system_clock::to_time_t(sctp);
log->append("Tarball created and compressed: " + tarballPath);
}
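
The create_tarball() changes above keep libarchive's usual per-entry sequence: write the header, then the file data. A minimal standalone sketch of that sequence, writing a single in-memory file into a .tar.gz (the path and contents are made up for illustration):

#include <archive.h>
#include <archive_entry.h>
#include <string>

int main() {
    const std::string data = "hello\n";

    struct archive* a = archive_write_new();
    archive_write_add_filter_gzip(a);
    archive_write_set_format_pax_restricted(a);       // portable tar format
    archive_write_open_filename(a, "example.tar.gz");

    struct archive_entry* e = archive_entry_new();
    archive_entry_set_pathname(e, "hello.txt");
    archive_entry_set_size(e, data.size());
    archive_entry_set_filetype(e, AE_IFREG);
    archive_entry_set_perm(e, 0644);

    archive_write_header(a, e);                        // header first...
    archive_write_data(a, data.data(), data.size());   // ...then the contents
    archive_entry_free(e);

    archive_write_close(a);
    archive_write_free(a);
    return 0;
}
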

@@ -13,24 +13,73 @@
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#pragma once
#ifndef COMMON_H
#define COMMON_H
#include "utilities.h"
#include <string>
#include <vector>
#include <filesystem>
#include <optional>
#include <semaphore>
std::string parse_version(const std::filesystem::path &changelog_path);
void run_command(const std::vector<std::string> &cmd, const std::optional<std::filesystem::path> &cwd = std::nullopt, bool show_output=false);
void clean_old_logs(const std::filesystem::path &log_dir, int max_age_seconds=86400);
void create_tarball(const std::string& tarballPath, const std::string& directory, const std::vector<std::string>& exclusions);
std::string get_current_utc_time();
std::time_t to_time_t(const std::filesystem::file_time_type& ftime);
static std::counting_semaphore<5> semaphore(5);
struct semaphore_guard {
std::counting_semaphore<5> &sem;
semaphore_guard(std::counting_semaphore<5> &s) : sem(s) { sem.acquire(); }
~semaphore_guard() { sem.release(); }
#include <filesystem>
#include <shared_mutex>
#include <mutex>
#include <vector>
#include <regex>
namespace fs = std::filesystem;
class Task;
class Log {
private:
std::string data = "";
mutable std::shared_mutex lock_;
std::weak_ptr<Task> task_context_;
public:
void append(const std::string& str) {
std::unique_lock lock(lock_);
if (str.empty()) { return; }
data += std::format("[{}] {}", get_current_utc_time("%Y-%m-%dT%H:%M:%SZ"), str.ends_with('\n') ? str : str + '\n');
}
void set_log(const std::string& str) {
std::unique_lock lock(lock_);
data = str;
}
std::string get() const {
std::shared_lock lock(lock_);
return std::regex_replace(data, std::regex(R"(^\s+)"), "");
}
void assign_task_context(std::shared_ptr<Task> task) {
task_context_ = task;
}
std::shared_ptr<Task> get_task_context() const {
return task_context_.lock();
}
};
// Logger functions
extern bool verbose;
void log_info(const std::string &msg);
void log_warning(const std::string &msg);
void log_error(const std::string &msg);
void log_verbose(const std::string &msg);
// Function to run a command with optional working directory and show output
bool run_command(const std::vector<std::string> &cmd,
const std::optional<fs::path> &cwd = std::nullopt,
bool show_output = false,
std::shared_ptr<Log> log = nullptr);
// Function to extract excluded files from a copyright file
std::vector<std::string> extract_files_excluded(const std::string& filepath);
// Function to create a tarball
void create_tarball(const std::string& tarballPath,
const std::string& directory,
const std::vector<std::string>& exclusions,
std::shared_ptr<Log> log = nullptr);
#endif // COMMON_H
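
The Log class above guards its buffer with a std::shared_mutex so concurrent readers can call get() while writers serialize in append(). A self-contained sketch of the same reader/writer pattern, simplified (no timestamping or task context):

#include <iostream>
#include <shared_mutex>
#include <string>
#include <thread>
#include <vector>

class MiniLog {
    std::string data;
    mutable std::shared_mutex lock_;
public:
    void append(const std::string& s) {
        std::unique_lock lock(lock_);   // exclusive: one writer at a time
        data += s + '\n';
    }
    std::string get() const {
        std::shared_lock lock(lock_);   // shared: concurrent readers allowed
        return data;
    }
};

int main() {
    MiniLog log;
    std::vector<std::thread> writers;
    for (int i = 0; i < 4; ++i)
        writers.emplace_back([&log, i] { log.append("worker " + std::to_string(i)); });
    for (auto& t : writers) t.join();
    std::cout << log.get();
}
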

@@ -54,7 +54,6 @@ void processRelease(const std::string& release, const YAML::Node& config);
void refresh(const std::string& url, const std::string& pocket, const std::string& britneyCache, std::mutex& logMutex);
int executeAndLog(const std::string& command);
// Change global_lp_opt to match login() return type
static std::optional<std::shared_ptr<launchpad>> global_lp_opt;
static launchpad* global_lp = nullptr;
@@ -486,7 +485,7 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) {
std::string DEST = BRITNEY_DATADIR + RELEASE + "-proposed";
fs::create_directories(DEST);
fs::create_directories(fs::path(BRITNEY_DATADIR) / (RELEASE + "-proposed") / "state");
writeFile(fs::path(BRITNEY_DATADIR) / (RELEASE + "-proposed") / "state" / "age-policy-dates", "");
write_file(fs::path(BRITNEY_DATADIR) / (RELEASE + "-proposed") / "state" / "age-policy-dates", "");
fs::remove(fs::path(DEST) / "Hints");
fs::create_symlink(BRITNEY_HINTDIR, fs::path(DEST) / "Hints");
@@ -495,39 +494,39 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) {
std::string sourcesContent;
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) {
if (p.path().filename() == "Sources.gz") {
sourcesContent += decompressGzip(p.path());
sourcesContent += decompress_gzip(p.path());
}
}
writeFile(fs::path(DEST) / "Sources", sourcesContent);
write_file(fs::path(DEST) / "Sources", sourcesContent);
for (const auto& arch : ARCHES) {
std::string packagesContent;
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) {
if (p.path().filename() == "Packages.gz" && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) {
packagesContent += decompressGzip(p.path());
packagesContent += decompress_gzip(p.path());
}
}
writeFile(fs::path(DEST) / ("Packages_" + arch), packagesContent);
write_file(fs::path(DEST) / ("Packages_" + arch), packagesContent);
}
for (const auto& arch : PORTS_ARCHES) {
std::string packagesContent;
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) {
if (p.path().filename() == "Packages.gz" && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) {
packagesContent += decompressGzip(p.path());
packagesContent += decompress_gzip(p.path());
}
}
writeFile(fs::path(DEST) / ("Packages_" + arch), packagesContent);
write_file(fs::path(DEST) / ("Packages_" + arch), packagesContent);
}
writeFile(fs::path(DEST) / "Blocks", "");
writeFile(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", "");
write_file(fs::path(DEST) / "Blocks", "");
write_file(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", "");
}
{
DEST = BRITNEY_DATADIR + RELEASE;
fs::create_directories(DEST);
fs::create_directories(fs::path(BRITNEY_DATADIR) / RELEASE / "state");
writeFile(fs::path(BRITNEY_DATADIR) / RELEASE / "state" / "age-policy-dates", "");
write_file(fs::path(BRITNEY_DATADIR) / RELEASE / "state" / "age-policy-dates", "");
fs::remove(fs::path(DEST) / "Hints");
fs::create_symlink(BRITNEY_HINTDIR, fs::path(DEST) / "Hints");
@@ -536,45 +535,45 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) {
std::string sourcesContent;
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) {
if (p.path().filename() == "Sources.gz" && p.path().string().find(RELEASE) != std::string::npos) {
sourcesContent += decompressGzip(p.path());
sourcesContent += decompress_gzip(p.path());
}
}
writeFile(fs::path(DEST) / "Sources", sourcesContent);
regexReplaceInFile(fs::path(DEST) / "Sources", "Section: universe/", "Section: ");
write_file(fs::path(DEST) / "Sources", sourcesContent);
regex_replace_in_file(fs::path(DEST) / "Sources", "Section: universe/", "Section: ");
}
for (const auto& arch : ARCHES) {
std::string packagesContent;
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) {
if (p.path().filename() == "Packages.gz" && p.path().string().find(RELEASE) != std::string::npos && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) {
packagesContent += decompressGzip(p.path());
packagesContent += decompress_gzip(p.path());
}
}
fs::path packagesFilePath = fs::path(DEST) / ("Packages_" + arch);
writeFile(packagesFilePath, packagesContent);
regexReplaceInFile(packagesFilePath, "Section: universe/", "Section: ");
write_file(packagesFilePath, packagesContent);
regex_replace_in_file(packagesFilePath, "Section: universe/", "Section: ");
}
for (const auto& arch : PORTS_ARCHES) {
std::string packagesContent;
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) {
if (p.path().filename() == "Packages.gz" && p.path().string().find(RELEASE) != std::string::npos && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) {
packagesContent += decompressGzip(p.path());
packagesContent += decompress_gzip(p.path());
}
}
fs::path packagesFilePath = fs::path(DEST) / ("Packages_" + arch);
writeFile(packagesFilePath, packagesContent);
regexReplaceInFile(packagesFilePath, "Section: universe/", "Section: ");
write_file(packagesFilePath, packagesContent);
regex_replace_in_file(packagesFilePath, "Section: universe/", "Section: ");
}
writeFile(fs::path(DEST) / "Blocks", "");
writeFile(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", "");
write_file(fs::path(DEST) / "Blocks", "");
write_file(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", "");
}
{
std::string configContent = readFile(BRITNEY_CONF);
std::string configContent = read_file(BRITNEY_CONF);
configContent = std::regex_replace(configContent, std::regex("%\\{SERIES\\}"), RELEASE);
writeFile("britney.conf", configContent);
write_file("britney.conf", configContent);
}
std::cout << "Running britney..." << std::endl;
@@ -769,5 +768,5 @@ void refresh(const std::string& url, const std::string& pocket, const std::strin
fs::path outputPath = dir / urlPath.filename();
downloadFileWithTimestamping(url, outputPath, logFilePath, logMutex);
download_file_with_timestamping(url, outputPath, logFilePath, logMutex);
}
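
decompress_gzip() and write_file(), the renamed helpers used throughout this hunk, are defined elsewhere and not shown in this diff. One plausible way to implement a gzip-to-string helper with zlib's gzFile API is sketched below; this is an assumption for illustration, not the project's actual implementation.

#include <zlib.h>
#include <stdexcept>
#include <string>

// Decompress a .gz file into a std::string using zlib's gzFile interface.
std::string decompress_gzip_sketch(const std::string& path) {
    gzFile f = gzopen(path.c_str(), "rb");
    if (!f) throw std::runtime_error("Cannot open " + path);
    std::string out;
    char buf[8192];
    int n = 0;
    while ((n = gzread(f, buf, sizeof(buf))) > 0)
        out.append(buf, static_cast<size_t>(n));
    gzclose(f);
    if (n < 0) throw std::runtime_error("gzread failed on " + path);
    return out;
}
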

@@ -1,481 +1,30 @@
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#include "common.h"
#include "utilities.h"
#include "launchpad.h"
#include "archive.h"
#include "distribution.h"
#include "distro_series.h"
#include "person.h"
#include "ci_logic.h"
#include <yaml-cpp/yaml.h>
#include <filesystem>
#include <iostream>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>
#include <filesystem>
#include <mutex>
#include <thread>
#include <future>
#include <condition_variable>
#include <queue>
#include <chrono>
#include <ctime>
#include <getopt.h>
#include <regex>
#include <uuid/uuid.h>
#include <cstdlib>
#include <cstdio>
namespace fs = std::filesystem;
// Global variables for logging
std::mutex logMutex;
std::ofstream globalLogFile;
// Function to log informational messages
void log_info_custom(const std::string &msg) {
std::lock_guard<std::mutex> lock(logMutex);
if (globalLogFile.is_open()) {
auto now = std::chrono::system_clock::now();
std::time_t now_c = std::chrono::system_clock::to_time_t(now);
char timebuf[20];
std::strftime(timebuf, sizeof(timebuf), "%Y-%m-%d %H:%M:%S", std::gmtime(&now_c));
globalLogFile << timebuf << " - INFO - " << msg << "\n";
globalLogFile.flush();
}
}
// Function to log error messages
void log_error_custom(const std::string &msg) {
std::lock_guard<std::mutex> lock(logMutex);
if (globalLogFile.is_open()) {
auto now = std::chrono::system_clock::now();
std::time_t now_c = std::chrono::system_clock::to_time_t(now);
char timebuf[20];
std::strftime(timebuf, sizeof(timebuf), "%Y-%m-%d %H:%M:%S", std::gmtime(&now_c));
globalLogFile << timebuf << " - ERROR - " << msg << "\n";
globalLogFile.flush();
}
}
// Function to parse command-line arguments
struct Arguments {
std::string user;
std::string ppa;
std::optional<std::string> ppa2;
std::optional<std::string> override_output;
};
Arguments parseArguments(int argc, char* argv[]) {
Arguments args;
int opt;
bool showHelp = false;
static struct option long_options[] = {
{"user", required_argument, 0, 'u'},
{"ppa", required_argument, 0, 'p'},
{"ppa2", required_argument, 0, '2'},
{"override-output", required_argument, 0, 'o'},
{"help", no_argument, 0, 'h'},
{0, 0, 0, 0}
};
while ((opt = getopt_long(argc, argv, "u:p:2:o:h", long_options, nullptr)) != -1) {
switch (opt) {
case 'u':
args.user = optarg;
break;
case 'p':
args.ppa = optarg;
break;
case '2':
args.ppa2 = optarg;
break;
case 'o':
args.override_output = optarg;
break;
case 'h':
default:
std::cout << "Usage: " << argv[0] << " --user <user> --ppa <ppa> [--ppa2 <ppa2>] [--override-output <path>]\n";
exit(0);
}
}
if (args.user.empty() || args.ppa.empty()) {
std::cerr << "Error: --user and --ppa are required arguments.\n";
std::cout << "Usage: " << argv[0] << " --user <user> --ppa <ppa> [--ppa2 <ppa2>] [--override-output <path>]\n";
exit(1);
}
return args;
}
// Function to parse the Changes file and extract Source and Architecture
struct ChangesInfo {
std::string source;
std::string architecture;
};
std::optional<ChangesInfo> parse_changes_file(const fs::path& changesPath) {
if (!fs::exists(changesPath)) {
log_error_custom("Changelog not found: " + changesPath.string());
return std::nullopt;
}
std::ifstream infile(changesPath);
if (!infile.is_open()) {
log_error_custom("Unable to open changelog: " + changesPath.string());
return std::nullopt;
}
ChangesInfo info;
std::string line;
while (std::getline(infile, line)) {
if (line.empty())
break; // End of headers
if (line.find("Source:") == 0) {
info.source = line.substr(7);
// Trim whitespace
info.source.erase(0, info.source.find_first_not_of(" \t"));
}
if (line.find("Architecture:") == 0) {
info.architecture = line.substr(13);
// Trim whitespace
info.architecture.erase(0, info.architecture.find_first_not_of(" \t"));
}
}
infile.close();
#include <string>
if (info.source.empty() || info.architecture.empty()) {
log_error_custom("Invalid changelog format in: " + changesPath.string());
return std::nullopt;
int main(int argc, char** argv) {
if (argc<2) {
std::cerr << "Usage: lintian-ppa <some.changes> [--verbose]\n";
return 1;
}
return info;
}
// Function to run lintian and capture its output
std::optional<std::string> run_lintian(const fs::path& changesPath) {
std::vector<std::string> lintianCmd = {"lintian", "-EvIL", "+pedantic", changesPath.filename().string()};
try {
// Redirect stdout and stderr to capture output
std::string command = "lintian -EvIL +pedantic \"" + changesPath.string() + "\"";
std::array<char, 128> buffer;
std::string result;
std::unique_ptr<FILE, decltype(&pclose)> pipe(popen(command.c_str(), "r"), pclose);
if (!pipe) {
log_error_custom("Failed to run lintian command.");
return std::nullopt;
}
while (fgets(buffer.data(), buffer.size(), pipe.get()) != nullptr) {
result += buffer.data();
for (int i=1; i<argc; i++) {
std::string arg = argv[i];
if (arg=="--verbose" || arg=="-v") {
verbose = true;
}
return result;
} catch (...) {
log_error_custom("Exception occurred while running lintian.");
return std::nullopt;
}
}
// Function to process a single changes file URL
void process_sources(const std::string& url, const fs::path& baseOutputDir, const fs::path& lintianTmpDir) {
// Generate a unique temporary directory
uuid_t uuid_bytes;
uuid_generate(uuid_bytes); // Correctly call with one argument
char uuid_cstr[37]; // UUIDs are 36 characters plus null terminator
uuid_unparse_lower(uuid_bytes, uuid_cstr); // Convert to string
std::string uuid_str = std::string(uuid_cstr).substr(0, 8); // Extract first 8 characters
std::string tmpdir = (baseOutputDir / ("lintian_tmp_" + uuid_str)).string();
// Create temporary directory
fs::create_directories(tmpdir);
// Extract the changes file name from URL
std::string changes_file = url.substr(url.find_last_of('/') + 1);
log_info_custom("Downloading " + changes_file + " via dget.");
// Run dget -u <url> in the temporary directory
std::vector<std::string> dgetCmd = {"dget", "-u", url};
try {
run_command(dgetCmd, tmpdir);
} catch (const std::exception& e) {
log_error_custom("dget command failed for URL: " + url);
fs::remove_all(tmpdir);
return;
}
// Parse the Changes file
fs::path changesPath = fs::path(tmpdir) / changes_file;
auto changesInfoOpt = parse_changes_file(changesPath);
if (!changesInfoOpt.has_value()) {
fs::remove_all(tmpdir);
return;
}
ChangesInfo changesInfo = changesInfoOpt.value();
// Handle Architecture field
std::string arch = changesInfo.architecture;
arch = std::regex_replace(arch, std::regex("all"), "");
arch = std::regex_replace(arch, std::regex("_translations"), "");
std::istringstream iss(arch);
std::string arch_clean;
iss >> arch_clean;
if (arch_clean.empty()) {
fs::remove_all(tmpdir);
return;
}
log_info_custom("Running Lintian for " + changesInfo.source + " on " + arch_clean);
// Run lintian and capture output
auto lintianOutputOpt = run_lintian(changesPath);
if (!lintianOutputOpt.has_value()) {
fs::remove_all(tmpdir);
return;
}
std::string lintianOutput = lintianOutputOpt.value();
// Write lintian output to lintian_tmp/source/<arch>.txt
fs::path outputPath = lintianTmpDir / changesInfo.source;
fs::create_directories(outputPath);
fs::path archOutputFile = outputPath / (arch_clean + ".txt");
try {
writeFile(archOutputFile, lintianOutput);
} catch (const std::exception& e) {
log_error_custom("Failed to write lintian output for " + changesInfo.source + " on " + arch_clean);
}
std::string changes_path = argv[1];
// Remove temporary directory
fs::remove_all(tmpdir);
}
// Function to perform rsync-like copy
void rsync_copy(const fs::path& source, const fs::path& destination) {
try {
if (!fs::exists(destination)) {
fs::create_directories(destination);
}
for (const auto& entry : fs::recursive_directory_iterator(source)) {
const auto& path = entry.path();
auto relativePath = fs::relative(path, source);
fs::path destPath = destination / relativePath;
if (fs::is_symlink(path)) {
if (fs::exists(destPath) || fs::is_symlink(destPath)) {
fs::remove(destPath);
}
auto target = fs::read_symlink(path);
fs::create_symlink(target, destPath);
} else if (fs::is_directory(path)) {
fs::create_directories(destPath);
} else if (fs::is_regular_file(path)) {
fs::copy_file(path, destPath, fs::copy_options::overwrite_existing);
}
}
} catch (const std::exception& e) {
log_error_custom("rsync_copy failed from " + source.string() + " to " + destination.string() + ": " + e.what());
}
}
int main(int argc, char* argv[]) {
// Parse command-line arguments
Arguments args = parseArguments(argc, argv);
// Set BASE_OUTPUT_DIR
std::string BASE_OUTPUT_DIR = "/srv/lubuntu-ci/output/";
if (args.override_output.has_value()) {
BASE_OUTPUT_DIR = args.override_output.value();
}
// Set LOG_DIR
fs::path LOG_DIR = fs::path(BASE_OUTPUT_DIR) / "logs" / "lintian";
fs::create_directories(LOG_DIR);
// Create log file with current UTC timestamp
auto now = std::chrono::system_clock::now();
std::time_t now_c = std::chrono::system_clock::to_time_t(now);
char timestamp[20];
std::strftime(timestamp, sizeof(timestamp), "%Y%m%dT%H%M%S", std::gmtime(&now_c));
fs::path logFilePath = LOG_DIR / (std::string(timestamp) + ".log");
// Open global log file
globalLogFile.open(logFilePath, std::ios::app);
if (!globalLogFile.is_open()) {
std::cerr << "Error: Unable to open log file: " << logFilePath << std::endl;
return 1;
}
log_info_custom("Starting lintian-ppa.");
// Authenticate with Launchpad
log_info_custom("Logging into Launchpad...");
auto lp_opt = launchpad::login();
if (!lp_opt.has_value()) {
std::cerr << "Failed to authenticate with Launchpad.\n";
return 1;
}
auto lp = lp_opt.value().get();
auto ubuntu_opt = lp->distributions["ubuntu"];
distribution ubuntu = ubuntu_opt.value();
// FIXME
//auto ds_opt = ubuntu.current_series;
auto ds_opt = ubuntu.getSeries("plucky");
if (!ds_opt) {
std::cerr << "Failed to get current_series.\n";
return 1;
}
auto current_series = ds_opt;
// Retrieve user and PPA
auto user_opt = lp->people[args.user];
person user = user_opt.value();
auto ppa_opt = user.getPPAByName(ubuntu, args.ppa);
if (!ppa_opt.has_value()) {
log_error_custom("Failed to retrieve PPA: " + args.ppa);
return 1;
}
archive ppa = ppa_opt.value();
log_info_custom("Retrieved PPA: " + args.ppa);
std::optional<archive> ppa2_opt;
if (args.ppa2.has_value()) {
auto ppa2_found = user.getPPAByName(ubuntu, args.ppa2.value());
if (!ppa2_found.has_value()) {
log_error_custom("Failed to retrieve PPA2: " + args.ppa2.value());
if (!run_command({"lintian", "-EvIL", "+pedantic", changes_path}, std::nullopt, false)) {
return 1;
}
ppa2_opt = ppa2_found.value();
log_info_custom("Retrieved PPA2: " + args.ppa2.value());
}
// Set up lintian directories
fs::path lintianDir = fs::path(BASE_OUTPUT_DIR) / "lintian";
fs::path lintianTmpDir;
{
std::string uuid_str;
uuid_t uuid_bytes;
uuid_generate(uuid_bytes);
char uuid_cstr[37];
uuid_unparse(uuid_bytes, uuid_cstr);
uuid_str = std::string(uuid_cstr);
// Truncate UUID to first 8 characters
uuid_str = uuid_str.substr(0, 8);
lintianTmpDir = fs::path(BASE_OUTPUT_DIR) / ("lintian_tmp_" + uuid_str);
}
fs::create_directories(lintianDir);
fs::create_directories(lintianTmpDir);
// Initialize a vector to hold all threads
std::vector<std::thread> threads;
// Mutex for managing the published sources iterator
std::mutex sourcesMutex;
// Function to iterate over published sources and enqueue tasks
auto main_source_iter = [&](std::vector<std::thread>& threadsRef) {
// Path to .LAST_RUN file
fs::path lastRunFile = lintianDir / ".LAST_RUN";
std::chrono::system_clock::time_point lastRunTime = std::chrono::system_clock::now() - std::chrono::hours(24*365);
if (fs::exists(lastRunFile)) {
std::ifstream infile(lastRunFile);
if (infile.is_open()) {
std::string lastRunStr;
std::getline(infile, lastRunStr);
infile.close();
std::tm tm = {};
std::istringstream ss(lastRunStr);
ss >> std::get_time(&tm, "%Y-%m-%dT%H:%M:%S");
if (!ss.fail()) {
lastRunTime = std::chrono::system_clock::from_time_t(timegm(&tm));
log_info_custom("Last run time: " + lastRunStr);
} else {
log_error_custom("Invalid format in .LAST_RUN file.");
}
}
} else {
log_info_custom(".LAST_RUN file does not exist. Using default last run time.");
}
// Update .LAST_RUN with current time
{
std::ofstream outfile(lastRunFile, std::ios::trunc);
if (outfile.is_open()) {
auto currentTime = std::chrono::system_clock::now();
std::time_t currentTime_c = std::chrono::system_clock::to_time_t(currentTime);
char timebuf[20];
std::strftime(timebuf, sizeof(timebuf), "%Y-%m-%dT%H:%M:%S", std::gmtime(&currentTime_c));
outfile << timebuf;
outfile.close();
log_info_custom("Updated .LAST_RUN with current time: " + std::string(timebuf));
} else {
log_error_custom("Failed to update .LAST_RUN file.");
}
}
// Iterate over published sources
for (const auto& source : ppa.getPublishedSources("", "", current_series, false, true, "", "", "Published", "")) {
for (const auto& build : source.getBuilds()) {
if (build.buildstate == "Successfully built") {
// Assuming build.datebuilt is a std::chrono::system_clock::time_point
if (build.datebuilt >= lastRunTime) {
// Enqueue the process_sources task using semaphore and threads
threadsRef.emplace_back([=]() {
semaphore_guard guard(semaphore);
process_sources(build.changesfile_url, fs::path(BASE_OUTPUT_DIR), lintianTmpDir);
});
}
}
}
}
};
// Start the main_source_iter and enqueue tasks
main_source_iter(threads);
// Wait for all threads to complete
for(auto &t : threads) {
if(t.joinable()) {
t.join();
}
}
log_info_custom("All lintian tasks completed. Syncing temporary lintian data to final directory.");
rsync_copy(lintianTmpDir, lintianDir);
// Remove temporary lintian directory
fs::remove_all(lintianTmpDir);
// Clean old logs
clean_old_logs(LOG_DIR, 86400); // 1 day in seconds, adjust as needed
log_info_custom("Lintian-ppa processing completed successfully.");
// Close the global log file
if (globalLogFile.is_open()) {
globalLogFile.close();
} catch(...) {
log_error("Lintian reported some issues with " + changes_path);
}
return 0;

@@ -0,0 +1,92 @@
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#include "lubuntuci_lib.h"
#include "ci_logic.h"
#include "common.h"
#include <yaml-cpp/yaml.h>
#include <filesystem>
#include <iostream>
#include <vector>
#include <string>
#include <mutex>
#include <git2.h>
namespace fs = std::filesystem;
/**
* list_known_repos():
* Make sure we call CiLogic::init_global() before reading
* the config, otherwise the config node will be empty.
*/
std::vector<std::shared_ptr<PackageConf>> LubuntuCI::list_known_repos(int page, int per_page, const std::string& sort_by, const std::string& sort_order)
{
cilogic.init_global();
if (page == 0 || per_page == 0 || sort_by.empty() || sort_order.empty()) { return cilogic.get_config(); }
return cilogic.get_config("", page, per_page, sort_by, sort_order);
}
/**
 * pull_repo():
 * - Calls init_global() before reading the config; list_known_repos()
 *   or build_repo() may already have done so, but calling it again is safe.
 */
bool LubuntuCI::pull_repo(const std::string &repo_name, std::shared_ptr<Log> log)
{
log->append("Ensuring the global config is initialized...\n");
cilogic.init_global();
log->append("Global config is initialized. Getting the configs for the package name...\n");
auto pkgconfs = cilogic.get_config(repo_name);
log->append("Configs retrieved. Performing the pull...\n");
return cilogic.pull_project(pkgconfs.at(0), log);
}
/**
* create_project_tarball
*/
bool LubuntuCI::create_project_tarball(const std::string &repo_name, std::shared_ptr<Log> log)
{
cilogic.init_global();
log->append("Global config is initialized. Getting the configs for the package name...\n");
auto pkgconfs = cilogic.get_config(repo_name);
log->append("Configs retrieved. Performing the tarball creation...\n");
return cilogic.create_project_tarball(pkgconfs.at(0), log);
}
/**
* build_repo():
* - Also safely calls init_global().
* - Reads skip_dput from config if present (default = false).
*/
bool LubuntuCI::build_repo(const std::string &repo_name, std::shared_ptr<Log> log)
{
cilogic.init_global();
bool success = true;
for (auto pkgconf : cilogic.get_config(repo_name)) {
const auto [build_success, changes_files] = cilogic.build_project(pkgconf, log);
success = success && build_success && cilogic.upload_and_lint(pkgconf, changes_files, false);
}
return success;
}
/**
* get_repo_log():
* - Directly opens the repo in /srv/lubuntu-ci/repos/<repo_name>
* - Reads HEAD commit message
*/
std::string LubuntuCI::get_repo_log(const std::string &repo_name)
{
    // FIXME: unused
    (void)repo_name;
    return {};
}
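As a rough usage sketch (not part of this commit), the LubuntuCI facade above could be driven as follows; the repository name "krita", the sort key "name", and the assumption that Log is default-constructible are placeholders, not things this diff guarantees:
#include "lubuntuci_lib.h"
#include <memory>
int example_usage() {
    LubuntuCI ci;
    // init_global() is invoked internally by each call below.
    auto all_repos = ci.list_known_repos();                  // full, unpaginated list
    auto page_one  = ci.list_known_repos(1, 25, "name", "asc"); // "name"/"asc" are assumed sort args
    auto log = std::make_shared<Log>();                      // assumes Log() exists
    if (!ci.pull_repo("krita", log)) return 1;
    if (!ci.create_project_tarball("krita", log)) return 1;
    return ci.build_repo("krita", log) ? 0 : 1;
}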

@ -0,0 +1,53 @@
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#ifndef LUBUNTUCI_LIB_H
#define LUBUNTUCI_LIB_H
#include <string>
#include <vector>
#include "ci_logic.h"
class LubuntuCI {
public:
/**
* List all known repositories from the merged config.
*/
std::vector<std::shared_ptr<PackageConf>> list_known_repos(int page = 0,
int per_page = 0,
const std::string& sort_by = "",
const std::string& sort_order = "");
/**
* Pull a specific repository by name (returns true on success).
*/
    bool pull_repo(const std::string &repo_name, std::shared_ptr<Log> log = nullptr);
bool create_project_tarball(const std::string &repo_name, std::shared_ptr<Log> log);
/**
* Build a specific repository by name (returns true on success).
*/
    bool build_repo(const std::string &repo_name, std::shared_ptr<Log> log = nullptr);
/**
* Retrieve the most recent commit log from a named repo.
*/
std::string get_repo_log(const std::string &repo_name);
CiLogic cilogic = CiLogic();
};
#endif // LUBUNTUCI_LIB_H

@ -13,7 +13,21 @@
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#pragma once
#include <string>
#include <QCoreApplication>
#include <iostream>
#include "web_server.h"
void update_maintainer(const std::string &debian_directory, bool verbose);
int main(int argc, char *argv[])
{
QCoreApplication app(argc, argv);
WebServer server;
    // Bind to port 80 if running as root or with CAP_NET_BIND_SERVICE,
    // or to 8080 if unprivileged.
if (!server.start_server(8080)) {
std::cerr << "[ERROR] Failed to start server on port 8080\n";
return 1;
}
return app.exec();
}

@ -0,0 +1,314 @@
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#include "naive_bayes_classifier.h"
#include <curl/curl.h>
#include <zlib.h>
#include <algorithm>
#include <cctype>
#include <iostream>
#include <vector>
#include <numeric>
#include <cmath>
#include <cstring> // for std::memset
/******************************************************************************
* Constructor / Destructor
*****************************************************************************/
naive_bayes_classifier::naive_bayes_classifier() = default;
naive_bayes_classifier::~naive_bayes_classifier() = default;
/******************************************************************************
* reset
*****************************************************************************/
void naive_bayes_classifier::reset() {
word_freqs_.clear();
category_freqs_.clear();
vocabulary_.clear();
token_categories_map_.clear();
total_samples_ = 0.0;
}
/******************************************************************************
* train_from_url
*****************************************************************************/
bool naive_bayes_classifier::train_from_url(const std::string &url, const std::string &category) {
streaming_context ctx;
ctx.classifier = this;
ctx.is_prediction_mode = false;
ctx.category = category;
bool ok = fetch_and_inflate_gz(url, &naive_bayes_classifier::train_write_cb, &ctx);
if (!ok) {
std::cerr << "Error: train_from_url failed for " << url << std::endl;
return false;
}
category_freqs_[category]++;
total_samples_++;
return true;
}
/******************************************************************************
* predict_from_url
*****************************************************************************/
std::optional<std::string> naive_bayes_classifier::predict_from_url(const std::string &url) const {
streaming_context ctx;
ctx.classifier = const_cast<naive_bayes_classifier*>(this);
ctx.is_prediction_mode = true;
bool ok = fetch_and_inflate_gz(url, &naive_bayes_classifier::predict_write_cb, &ctx);
if (!ok) {
return std::nullopt;
}
std::string best_cat = compute_best_category(ctx.prediction_tokens);
return best_cat;
}
/******************************************************************************
* prune_common_tokens
*****************************************************************************/
void naive_bayes_classifier::prune_common_tokens() {
if (category_freqs_.empty()) {
return;
}
size_t category_count = category_freqs_.size();
std::vector<std::string> tokens_to_remove_vec;
tokens_to_remove_vec.reserve(vocabulary_.size());
for (const auto &[token, cats_set] : token_categories_map_) {
if (cats_set.size() == category_count) {
tokens_to_remove_vec.push_back(token);
}
}
for (const auto &tk : tokens_to_remove_vec) {
vocabulary_.erase(tk);
for (auto &cat_map : word_freqs_) {
cat_map.second.erase(tk);
}
token_categories_map_.erase(tk);
}
std::cout << "Pruned " << tokens_to_remove_vec.size()
<< " common tokens that appeared in all categories.\n";
}
/******************************************************************************
* train_token
*****************************************************************************/
void naive_bayes_classifier::train_token(const std::string &category, const std::string &token) {
if (token.empty()) return;
word_freqs_[category][token]++;
vocabulary_[token] = true;
token_categories_map_[token].insert(category);
}
/******************************************************************************
* compute_best_category
*****************************************************************************/
std::string naive_bayes_classifier::compute_best_category(const token_counts_t &tokens) const {
if (category_freqs_.empty() || total_samples_ <= 0.0) {
return "Unknown";
}
double best_score = -1e308;
std::string best_cat = "Unknown";
for (const auto &[cat, cat_count] : category_freqs_) {
double prior_log = std::log(cat_count / total_samples_);
double total_cat_words = 0.0;
auto cat_iter = word_freqs_.find(cat);
if (cat_iter != word_freqs_.end()) {
total_cat_words = std::accumulate(
cat_iter->second.begin(),
cat_iter->second.end(),
0.0,
[](double sum, const auto &p){ return sum + p.second; }
);
}
double score = prior_log;
for (const auto &[tk, freq] : tokens) {
double word_count = 0.0;
if (cat_iter != word_freqs_.end()) {
auto w_it = cat_iter->second.find(tk);
if (w_it != cat_iter->second.end()) {
word_count = w_it->second;
}
}
double smoothed = (word_count + 1.0) / (total_cat_words + vocabulary_.size());
score += freq * std::log(smoothed);
}
if (score > best_score) {
best_score = score;
best_cat = cat;
}
}
return best_cat;
}
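// Worked example of the scoring above (illustrative numbers only): with two
// categories A and B, 3 training logs for A and 1 for B, a 10-token vocabulary,
// and the token "error" seen 4 times among A's 20 words and 0 times among B's
// 5 words, a one-token document {"error": 1} scores
//   A: log(3/4) + 1*log((4+1)/(20+10)) = -0.288 + log(0.1667) ~= -2.08
//   B: log(1/4) + 1*log((0+1)/( 5+10)) = -1.386 + log(0.0667) ~= -4.09
// so A wins. The +1 / +|V| terms are Laplace smoothing, which keeps a token
// unseen in one category from driving that category's score to -infinity.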
/******************************************************************************
* chunk_to_tokens
*****************************************************************************/
std::generator<std::string> naive_bayes_classifier::chunk_to_tokens(
const std::string &chunk, std::string &partial_token)
{
for (char c : chunk) {
if (std::isalpha(static_cast<unsigned char>(c))) {
partial_token.push_back(static_cast<char>(std::tolower(static_cast<unsigned char>(c))));
} else {
if (!partial_token.empty()) {
co_yield partial_token;
partial_token.clear();
}
}
}
// leftover partial_token remains if chunk ends mid-token
}
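// Example (illustrative): feeding the chunk "Hello, wor" yields "hello" and
// leaves partial_token == "wor"; a following chunk "ld!" then yields "world".
// This is what lets tokenization span arbitrary decompressed chunk boundaries
// without buffering whole logs.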
/******************************************************************************
* train_write_cb
*****************************************************************************/
size_t naive_bayes_classifier::train_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata) {
auto ctx = static_cast<streaming_context*>(userdata);
if (!ctx || !ctx->classifier || ctx->is_prediction_mode) {
return 0;
}
size_t bytes = size * nmemb;
std::string chunk(ptr, bytes);
for (auto &&tk : chunk_to_tokens(chunk, ctx->partial_token)) {
ctx->classifier->train_token(ctx->category, tk);
}
return bytes;
}
/******************************************************************************
* predict_write_cb
*****************************************************************************/
size_t naive_bayes_classifier::predict_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata) {
auto ctx = static_cast<streaming_context*>(userdata);
if (!ctx || !ctx->classifier || !ctx->is_prediction_mode) {
return 0;
}
size_t bytes = size * nmemb;
std::string chunk(ptr, bytes);
for (auto &&tk : chunk_to_tokens(chunk, ctx->partial_token)) {
ctx->prediction_tokens[tk]++;
}
return bytes;
}
/******************************************************************************
* fetch_and_inflate_gz
*****************************************************************************/
struct inflating_context {
naive_bayes_classifier::streaming_context *user_ctx;
size_t (*callback)(char*, size_t, size_t, void*);
z_stream strm;
std::string decompress_buffer;
inflating_context() {
std::memset(&strm, 0, sizeof(strm));
strm.zalloc = Z_NULL;
strm.zfree = Z_NULL;
strm.opaque = Z_NULL;
inflateInit2(&strm, 16 + MAX_WBITS);
decompress_buffer.resize(64 * 1024);
}
~inflating_context() {
inflateEnd(&strm);
}
};
static size_t curl_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata) {
auto *inf_ctx = static_cast<inflating_context*>(userdata);
size_t total_in = size * nmemb;
inf_ctx->strm.avail_in = static_cast<uInt>(total_in);
inf_ctx->strm.next_in = reinterpret_cast<unsigned char*>(ptr);
while (inf_ctx->strm.avail_in > 0) {
inf_ctx->strm.avail_out = static_cast<uInt>(inf_ctx->decompress_buffer.size());
inf_ctx->strm.next_out = reinterpret_cast<unsigned char*>(&inf_ctx->decompress_buffer[0]);
int ret = inflate(&inf_ctx->strm, Z_NO_FLUSH);
if (ret == Z_STREAM_ERROR || ret == Z_MEM_ERROR || ret == Z_DATA_ERROR) {
std::cerr << "zlib inflate error: " << inf_ctx->strm.msg << std::endl;
return 0;
}
size_t have = inf_ctx->decompress_buffer.size() - inf_ctx->strm.avail_out;
if (have > 0) {
size_t written = inf_ctx->callback(
&inf_ctx->decompress_buffer[0],
1,
have,
inf_ctx->user_ctx
);
if (written < have) {
return 0;
}
}
}
return total_in;
}
bool naive_bayes_classifier::fetch_and_inflate_gz(
const std::string &url,
size_t (*callback)(char*, size_t, size_t, void*),
void *user_context)
{
CURL *curl = curl_easy_init();
if (!curl) {
std::cerr << "Error: curl_easy_init failed.\n";
return false;
}
inflating_context inf_ctx;
inf_ctx.callback = callback;
inf_ctx.user_ctx = static_cast<streaming_context*>(user_context);
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, curl_write_cb);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &inf_ctx);
CURLcode res = curl_easy_perform(curl);
if (res != CURLE_OK) {
std::cerr << "cURL error fetching " << url << ": "
<< curl_easy_strerror(res) << std::endl;
curl_easy_cleanup(curl);
return false;
}
curl_easy_cleanup(curl);
auto *ctx = static_cast<streaming_context*>(user_context);
if (!ctx->partial_token.empty()) {
if (!ctx->is_prediction_mode) {
ctx->classifier->train_token(ctx->category, ctx->partial_token);
} else {
ctx->prediction_tokens[ctx->partial_token]++;
}
ctx->partial_token.clear();
}
return true;
}

@ -0,0 +1,124 @@
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#ifndef NAIVE_BAYES_CLASSIFIER_H
#define NAIVE_BAYES_CLASSIFIER_H
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <optional>
#include <generator> // C++23 std::generator
#include <cmath>
/******************************************************************************
* Type aliases
*****************************************************************************/
using token_counts_t = std::unordered_map<std::string, double>;
using category_counts_t = std::unordered_map<std::string, double>;
/******************************************************************************
* naive_bayes_classifier
*
* A streaming-only Naive Bayes text classifier. It fetches .gz logs via cURL,
* decompresses them chunk by chunk, tokenizes, and trains or predicts
* incrementally without storing entire logs in memory.
*****************************************************************************/
class naive_bayes_classifier {
public:
naive_bayes_classifier();
~naive_bayes_classifier();
/**************************************************************************
* train_from_url
*
* Streams the .gz log from 'url', decompresses chunk by chunk, extracts
* tokens, and updates frequency counts for 'category'.
**************************************************************************/
bool train_from_url(const std::string &url, const std::string &category);
/**************************************************************************
* predict_from_url
*
* Streams the .gz log from 'url', decompresses, extracts tokens, and
* returns the most likely category. Returns std::nullopt if there's an error.
**************************************************************************/
std::optional<std::string> predict_from_url(const std::string &url) const;
/**************************************************************************
* prune_common_tokens
*
* Removes tokens that appear in *all* categories from the vocabulary_
* and per-category frequencies, reducing noise from universal tokens.
**************************************************************************/
void prune_common_tokens();
/**************************************************************************
* reset
*
* Clears all training data (word_freqs_, category_freqs_, etc.).
**************************************************************************/
void reset();
double total_samples() const { return total_samples_; }
size_t vocabulary_size() const { return vocabulary_.size(); }
public:
/**************************************************************************
* streaming_context
*
* Declared *public* so that external structures (like inflating_context)
* can refer to it. Tracks the current partial token, mode, etc.
**************************************************************************/
struct streaming_context {
naive_bayes_classifier *classifier = nullptr;
bool is_prediction_mode = false;
std::string category; // used if training
token_counts_t prediction_tokens;
std::string partial_token;
};
private:
/**************************************************************************
* Data
**************************************************************************/
std::unordered_map<std::string, token_counts_t> word_freqs_; // cat->(word->freq)
category_counts_t category_freqs_; // cat->count of logs
std::unordered_map<std::string, bool> vocabulary_; // global set of words
double total_samples_ = 0.0;
// For pruning, track which categories each token has appeared in
std::unordered_map<std::string, std::unordered_set<std::string>> token_categories_map_;
/**************************************************************************
* Internal methods
**************************************************************************/
void train_token(const std::string &category, const std::string &token);
std::string compute_best_category(const token_counts_t &tokens) const;
static std::generator<std::string> chunk_to_tokens(const std::string &chunk,
std::string &partial_token);
// Callback for training vs. predicting
static size_t train_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata);
static size_t predict_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata);
// cURL + zlib-based streaming
static bool fetch_and_inflate_gz(const std::string &url,
size_t (*callback)(char*, size_t, size_t, void*),
void *user_context);
};
#endif // NAIVE_BAYES_CLASSIFIER_H
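A minimal usage sketch of the class above; the URLs and the "pass"/"fail" category names are placeholders, not endpoints or labels defined by this commit:
#include "naive_bayes_classifier.h"
#include <iostream>
int classify_example() {
    naive_bayes_classifier clf;
    // Each .gz log is streamed, inflated, and tokenized chunk by chunk.
    clf.train_from_url("https://example.org/logs/ok/build1.log.gz", "pass");
    clf.train_from_url("https://example.org/logs/bad/build2.log.gz", "fail");
    clf.prune_common_tokens();  // drop tokens that appear in every category
    if (auto cat = clf.predict_from_url("https://example.org/logs/new/build3.log.gz"))
        std::cout << "Predicted category: " << *cat << '\n';
    else
        std::cout << "Prediction failed (download or inflate error)\n";
    return 0;
}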

@ -0,0 +1,581 @@
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#include "sources_parser.h"
#include "utilities.h"
#include "/usr/include/archive.h"
#include <archive_entry.h>
#include <curl/curl.h>
#include <algorithm>
#include <cctype>
#include <iostream>
#include <regex>
#include <sstream>
#include <stdexcept>
#include <map>
#include <set>
#include <vector>
#include <optional>
#include <fstream>
#include <ranges>
#include <QtCore/QJsonArray>
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonObject>
namespace SourcesParser {
// Function to write data fetched by libcurl into a std::vector<char>
size_t WriteCallback(void* contents, size_t size, size_t nmemb, void* userp) {
size_t totalSize = size * nmemb;
auto* buffer = static_cast<std::vector<char>*>(userp);
buffer->insert(buffer->end(), static_cast<char*>(contents), static_cast<char*>(contents) + totalSize);
return totalSize;
}
// Function to parse dependency relations
std::vector<std::vector<PackageInfo::ParsedRelation>> parse_relations(const std::string& raw) {
std::vector<std::vector<PackageInfo::ParsedRelation>> result;
// Split by comma to get top-level dependencies
std::regex comma_sep_RE(R"(\s*,\s*)");
std::sregex_token_iterator comma_it(raw.begin(), raw.end(), comma_sep_RE, -1);
std::sregex_token_iterator comma_end;
for (; comma_it != comma_end; ++comma_it) {
std::string top_dep = comma_it->str();
// Split by pipe to get alternative dependencies
std::regex pipe_sep_RE(R"(\s*\|\s*)");
std::sregex_token_iterator pipe_it(top_dep.begin(), top_dep.end(), pipe_sep_RE, -1);
std::sregex_token_iterator pipe_end;
std::vector<PackageInfo::ParsedRelation> alternatives;
for (; pipe_it != pipe_end; ++pipe_it) {
std::string dep = pipe_it->str();
// Remove any version constraints or architecture qualifiers
size_t pos_space = dep.find(' ');
size_t pos_paren = dep.find('(');
size_t pos = std::string::npos;
if (pos_space != std::string::npos && pos_paren != std::string::npos) {
pos = std::min(pos_space, pos_paren);
}
else if (pos_space != std::string::npos) {
pos = pos_space;
}
else if (pos_paren != std::string::npos) {
pos = pos_paren;
}
if (pos != std::string::npos) {
dep = dep.substr(0, pos);
}
// Trim whitespace
dep.erase(dep.find_last_not_of(" \t\n\r\f\v") + 1);
dep.erase(0, dep.find_first_not_of(" \t\n\r\f\v"));
// Handle architecture qualifiers (e.g., "libc6 (>= 2.27)")
std::regex arch_RE(R"(^([a-zA-Z0-9+\-\.]+)(?:\s*\(\s*([a-zA-Z]+)\s*([<>=]+)\s*([0-9a-zA-Z:\-+~.]+)\s*\))?$)");
std::smatch match;
if (std::regex_match(dep, match, arch_RE)) {
PackageInfo::ParsedRelation pr;
pr.name = match[1];
if (match[2].matched && match[3].matched && match[4].matched) {
// If architecture qualifier exists, store it
pr.archqual = match[2].str() + match[3].str() + match[4].str();
}
if (match[3].matched && match[4].matched) {
// Store version constraints
pr.version = std::make_pair(match[3].str(), match[4].str());
}
alternatives.push_back(pr);
}
else {
// If regex does not match, include raw dependency without qualifiers
dep = remove_suffix(dep, ":any");
dep = remove_suffix(dep, ":native");
PackageInfo::ParsedRelation pr;
pr.name = dep;
alternatives.push_back(pr);
std::cerr << "Warning: Cannot parse dependency relation \"" << dep << "\", returning it raw.\n";
}
}
if (!alternatives.empty()) {
result.push_back(alternatives);
}
}
return result;
}
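// Example (illustrative): parse_relations("debhelper-compat (= 13), pkg-kde-tools | cmake")
// returns two groups: { {name="debhelper-compat"} } and
// { {name="pkg-kde-tools"}, {name="cmake"} }. Commas separate AND-groups,
// pipes separate OR-alternatives within a group, and version constraints in
// parentheses are stripped before the name is stored.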
// Function to download, decompress, and parse the Sources.gz data
std::optional<std::vector<PackageInfo>> fetch_and_parse_sources(const std::string& url) {
CURL* curl = curl_easy_init();
if (!curl) {
std::cerr << "Failed to initialize CURL.\n";
return std::nullopt;
}
std::vector<char> downloadedData;
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &downloadedData);
// Follow redirects if any
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
// Set a user agent
curl_easy_setopt(curl, CURLOPT_USERAGENT, "SourcesParser/1.0");
CURLcode res = curl_easy_perform(curl);
if (res != CURLE_OK) {
std::cerr << "CURL download error (Sources.gz): " << curl_easy_strerror(res) << "\n";
curl_easy_cleanup(curl);
return std::nullopt;
}
curl_easy_cleanup(curl);
// Initialize libarchive
struct archive* a = archive_read_new();
archive_read_support_filter_gzip(a);
archive_read_support_format_raw(a);
if (archive_read_open_memory(a, downloadedData.data(), downloadedData.size()) != ARCHIVE_OK) {
std::cerr << "Failed to open Sources.gz archive: " << archive_error_string(a) << "\n";
archive_read_free(a);
return std::nullopt;
}
struct archive_entry* entry;
std::string decompressedData;
// Read all entries (though there should typically be only one)
while (archive_read_next_header(a, &entry) == ARCHIVE_OK) {
const void* buff;
size_t size;
la_int64_t offset;
while (true) {
int r = archive_read_data_block(a, &buff, &size, &offset);
if (r == ARCHIVE_EOF)
break;
if (r != ARCHIVE_OK) {
std::cerr << "Error during decompression (Sources.gz): " << archive_error_string(a) << "\n";
archive_read_free(a);
return std::nullopt;
}
decompressedData.append(static_cast<const char*>(buff), size);
}
}
archive_read_free(a);
// Parse the decompressed data
std::vector<PackageInfo> packages;
std::istringstream stream(decompressedData);
std::string line;
PackageInfo currentPackage;
bool in_entry = false;
while (std::getline(stream, line)) {
if (line.empty()) {
if (in_entry && !currentPackage.Package.empty()) {
// Finalize BuildDependsParsed
currentPackage.BuildDependsParsed = parse_relations(currentPackage.BuildDepends);
packages.push_back(currentPackage);
currentPackage = PackageInfo();
in_entry = false;
}
continue;
}
in_entry = true;
if (line.find("Build-Depends:") == 0) {
currentPackage.BuildDepends = line.substr(strlen("Build-Depends: "));
// Continue reading lines that start with a space or tab
while (std::getline(stream, line)) {
if (line.empty() || (!std::isspace(static_cast<unsigned char>(line[0]))))
break;
currentPackage.BuildDepends += " " + line.substr(1);
}
// If the last read line is not a continuation, process it in the next iteration
if (!line.empty() && !std::isspace(static_cast<unsigned char>(line[0]))) {
stream.seekg(-static_cast<int>(line.length()) - 1, std::ios_base::cur);
}
continue;
}
if (line.find("Binary:") == 0) {
std::string binary_str;
binary_str = line.substr(strlen("Binary: "));
// Continue reading lines that start with a space or tab
while (std::getline(stream, line)) {
if (line.empty() || (!std::isspace(static_cast<unsigned char>(line[0]))))
break;
binary_str += " " + line.substr(1);
}
// If the last read line is not a continuation, process it in the next iteration
if (!line.empty() && !std::isspace(static_cast<unsigned char>(line[0]))) {
stream.seekg(-static_cast<int>(line.length()) - 1, std::ios_base::cur);
}
currentPackage.Binary = split_string(binary_str, ", ");
continue;
}
// Extract Package
if (line.find("Package:") == 0) {
currentPackage.Package = line.substr(strlen("Package: "));
continue;
}
// Extract Provides (if any)
if (line.find("Provides:") == 0) {
std::string provides_line = line.substr(strlen("Provides: "));
// Split by commas
std::regex comma_sep_RE(R"(\s*,\s*)");
std::sregex_token_iterator provides_it(provides_line.begin(), provides_line.end(), comma_sep_RE, -1);
std::sregex_token_iterator provides_end;
for (; provides_it != provides_end; ++provides_it) {
std::string provide = provides_it->str();
// Extract the package name before any space or '('
size_t pos_space = provide.find(' ');
size_t pos_paren = provide.find('(');
size_t pos = std::string::npos;
if (pos_space != std::string::npos && pos_paren != std::string::npos) {
pos = std::min(pos_space, pos_paren);
}
else if (pos_space != std::string::npos) {
pos = pos_space;
}
else if (pos_paren != std::string::npos) {
pos = pos_paren;
}
if (pos != std::string::npos) {
provide = provide.substr(0, pos);
}
// Trim whitespace
provide.erase(provide.find_last_not_of(" \t\n\r\f\v") + 1);
provide.erase(0, provide.find_first_not_of(" \t\n\r\f\v"));
if (!provide.empty()) {
currentPackage.Provides.push_back(provide);
}
}
continue;
}
}
// Add the last package if the file doesn't end with a blank line
if (in_entry && !currentPackage.Package.empty()) {
// Finalize BuildDependsParsed
currentPackage.BuildDependsParsed = parse_relations(currentPackage.BuildDepends);
packages.push_back(currentPackage);
}
return packages;
}
// Function to download, decompress, and parse the Packages.gz data
std::optional<std::vector<PackageInfo>> fetch_and_parse_packages(const std::string& url) {
CURL* curl = curl_easy_init();
if (!curl) {
std::cerr << "Failed to initialize CURL.\n";
return std::nullopt;
}
std::vector<char> downloadedData;
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &downloadedData);
// Follow redirects if any
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
// Set a user agent
curl_easy_setopt(curl, CURLOPT_USERAGENT, "SourcesParser/1.0");
CURLcode res = curl_easy_perform(curl);
if (res != CURLE_OK) {
std::cerr << "CURL download error (Packages.gz): " << curl_easy_strerror(res) << "\n";
curl_easy_cleanup(curl);
return std::nullopt;
}
curl_easy_cleanup(curl);
// Initialize libarchive
struct archive* a = archive_read_new();
archive_read_support_filter_gzip(a);
archive_read_support_format_raw(a);
if (archive_read_open_memory(a, downloadedData.data(), downloadedData.size()) != ARCHIVE_OK) {
std::cerr << "Failed to open Packages.gz archive: " << archive_error_string(a) << "\n";
archive_read_free(a);
return std::nullopt;
}
struct archive_entry* entry;
std::string decompressedData;
// Read all entries (though there should typically be only one)
while (archive_read_next_header(a, &entry) == ARCHIVE_OK) {
const void* buff;
size_t size;
la_int64_t offset;
while (true) {
int r = archive_read_data_block(a, &buff, &size, &offset);
if (r == ARCHIVE_EOF)
break;
if (r != ARCHIVE_OK) {
std::cerr << "Error during decompression (Packages.gz): " << archive_error_string(a) << "\n";
archive_read_free(a);
return std::nullopt;
}
decompressedData.append(static_cast<const char*>(buff), size);
}
}
archive_read_free(a);
// Parse the decompressed data
std::vector<PackageInfo> packages;
std::istringstream stream(decompressedData);
std::string line;
PackageInfo currentPackage;
bool in_entry = false;
while (std::getline(stream, line)) {
if (line.empty()) {
if (in_entry && !currentPackage.Package.empty()) {
packages.push_back(currentPackage);
currentPackage = PackageInfo();
in_entry = false;
}
continue;
}
in_entry = true;
// Extract Package
if (line.find("Package:") == 0) {
currentPackage.Package = line.substr(strlen("Package: "));
continue;
}
// Extract Source
if (line.find("Source:") == 0) {
currentPackage.Source = line.substr(strlen("Source: "));
continue;
}
// Extract Provides
if (line.find("Provides:") == 0) {
std::string provides_line = line.substr(strlen("Provides: "));
// Split by commas
std::regex comma_sep_RE(R"(\s*,\s*)");
std::sregex_token_iterator provides_it(provides_line.begin(), provides_line.end(), comma_sep_RE, -1);
std::sregex_token_iterator provides_end;
for (; provides_it != provides_end; ++provides_it) {
std::string provide = provides_it->str();
// Extract the package name before any space or '('
size_t pos_space = provide.find(' ');
size_t pos_paren = provide.find('(');
size_t pos = std::string::npos;
if (pos_space != std::string::npos && pos_paren != std::string::npos) {
pos = std::min(pos_space, pos_paren);
}
else if (pos_space != std::string::npos) {
pos = pos_space;
}
else if (pos_paren != std::string::npos) {
pos = pos_paren;
}
if (pos != std::string::npos) {
provide = provide.substr(0, pos);
}
// Trim whitespace
provide.erase(provide.find_last_not_of(" \t\n\r\f\v") + 1);
provide.erase(0, provide.find_first_not_of(" \t\n\r\f\v"));
if (!provide.empty()) {
currentPackage.Provides.push_back(provide);
}
}
continue;
}
// Any other fields are ignored for now
}
// Add the last package if the file doesn't end with a blank line
if (in_entry && !currentPackage.Package.empty()) {
packages.push_back(currentPackage);
}
return packages;
}
std::set<std::pair<std::string, std::string>> build_dependency_graph(
const std::vector<PackageInfo>& sources,
const std::vector<PackageInfo>& binaries) {
// Map of virtual package to real binary package(s)
std::map<std::string, std::vector<std::string>> virtual_to_real;
// Set of all real binary package names
std::set<std::string> real_binary_packages;
// Map of binary package to its source package
std::map<std::string, std::string> binary_to_source;
// Populate binary_to_source mapping and virtual_to_real
for (const auto& source_pkg : sources) {
for (const auto& binary_pkg : source_pkg.Binary) {
binary_to_source[binary_pkg] = source_pkg.Package;
real_binary_packages.insert(binary_pkg);
}
}
for (const auto& binary_pkg : binaries) {
if (binary_pkg.Source.has_value()) {
binary_to_source[binary_pkg.Package] = binary_pkg.Source.value();
}
real_binary_packages.insert(binary_pkg.Package);
// Process Provides
for (const auto& provide : binary_pkg.Provides) {
virtual_to_real[provide].push_back(binary_pkg.Package);
}
}
// Dependency graph as a set of edges (dependency -> package)
std::set<std::pair<std::string, std::string>> graph;
for (const auto& pkg : sources) {
if (!pkg.BuildDependsParsed.has_value())
continue; // Skip if no build dependencies
for (const auto& or_deps : pkg.BuildDependsParsed.value()) {
// For each set of alternative dependencies (logical OR)
for (const auto& dep : or_deps) {
std::string dep_name = dep.name;
// If dep.archqual exists, append it with ':'
if (dep.archqual.has_value())
dep_name += ":" + dep.archqual.value();
// If dep_name is a virtual package, map it to real binary package(s)
if (virtual_to_real.find(dep_name) != virtual_to_real.end()) {
for (const auto& real_pkg : virtual_to_real[dep_name]) {
// Map binary dependency to source package
if (binary_to_source.find(real_pkg) != binary_to_source.end()) {
std::string source_dep = binary_to_source[real_pkg];
// Avoid self-dependency
if (source_dep != pkg.Package) {
graph.emplace(source_dep, pkg.Package); // Reversed edge
}
}
else {
std::cerr << "Warning: Binary package \"" << real_pkg << "\" provided by \""
<< dep_name << "\" does not map to any source package.\n";
}
}
}
else if (real_binary_packages.find(dep_name) != real_binary_packages.end()) {
// Direct binary dependency
if (binary_to_source.find(dep_name) != binary_to_source.end()) {
std::string source_dep = binary_to_source[dep_name];
// Avoid self-dependency
if (source_dep != pkg.Package) {
graph.emplace(source_dep, pkg.Package); // Reversed edge
}
}
else {
std::cerr << "Warning: Binary dependency \"" << dep_name << "\" does not map to any source package.\n";
}
}
}
}
}
// Transitive reduction: Collect edges to remove first
std::vector<std::pair<std::string, std::string>> edges_to_remove;
// Build adjacency list from the graph
std::map<std::string, std::set<std::string>> adj;
for (const auto& edge : graph) {
adj[edge.first].insert(edge.second);
}
for (const auto& [u, neighbors] : adj) {
for (const auto& v : neighbors) {
if (adj.find(v) != adj.end()) {
for (const auto& w : adj[v]) {
if (adj[u].find(w) != adj[u].end()) {
edges_to_remove.emplace_back(u, w);
}
}
}
}
}
// Now remove the collected edges
for (const auto& edge : edges_to_remove) {
graph.erase(edge);
adj[edge.first].erase(edge.second);
}
return graph;
}
QString serialize_dependency_graph_to_json(const std::set<std::pair<std::string, std::string>>& graph) {
// Check if the graph is empty
if (graph.empty()) {
std::cerr << "Warning: Dependency graph is empty." << std::endl;
return "{}"; // Return empty JSON object
}
// Build adjacency list where key is dependency and value is list of packages that depend on it
std::map<std::string, QJsonArray> adjacency;
for (const auto& edge : graph) {
if (!edge.first.empty() && !edge.second.empty()) {
adjacency[edge.first].append(QString::fromStdString(edge.second));
}
}
// Convert to QJsonObject
QJsonObject jsonObj;
for (const auto& [dep, dependents] : adjacency) {
jsonObj[QString::fromStdString(dep)] = dependents;
}
// Convert to JSON string
QJsonDocument doc(jsonObj);
return QString(doc.toJson(QJsonDocument::Compact));
}
} // namespace SourcesParser

@ -0,0 +1,79 @@
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#ifndef SOURCES_PARSER_H
#define SOURCES_PARSER_H
#include <string>
#include <vector>
#include <optional>
#include <cstring>
#include <set>
#include <QtCore/QJsonObject>
#include <QtCore/QJsonDocument>
// Structure to hold the required fields
struct PackageInfo {
std::string Package; // Package name
std::vector<std::string> Provides; // Virtual packages provided
std::string BuildDepends; // Build dependencies (for source packages)
std::optional<std::string> Source; // Source package name (for binary packages)
std::vector<std::string> Binary;
// Nested structures for parsing dependencies
struct ArchRestriction {
bool enabled;
std::string arch;
};
struct BuildRestriction {
bool enabled;
std::string condition;
};
struct ParsedRelation {
std::string name; // Dependency package name
std::optional<std::string> archqual; // Architecture qualifier
std::optional<std::pair<std::string, std::string>> version; // Version relation and version
std::optional<std::vector<ArchRestriction>> arch; // Architecture restrictions
std::optional<std::vector<std::vector<BuildRestriction>>> restrictions; // Build restrictions
};
// Parsed BuildDepends and Binary relations
std::optional<std::vector<std::vector<ParsedRelation>>> BuildDependsParsed;
};
// Namespace to encapsulate the parser functionalities
namespace SourcesParser {
// Function to download, decompress, and parse the Sources.gz data
std::optional<std::vector<PackageInfo>> fetch_and_parse_sources(const std::string& url);
// Function to download, decompress, and parse the Packages.gz data
std::optional<std::vector<PackageInfo>> fetch_and_parse_packages(const std::string& url);
// Function to parse dependency relations
std::vector<std::vector<PackageInfo::ParsedRelation>> parse_relations(const std::string& raw);
// Function to build dependency graph
std::set<std::pair<std::string, std::string>> build_dependency_graph(
const std::vector<PackageInfo>& sources,
const std::vector<PackageInfo>& binaries);
// Function to serialize dependency graph to JSON
QString serialize_dependency_graph_to_json(const std::set<std::pair<std::string, std::string>>& graph);
} // namespace SourcesParser
#endif // SOURCES_PARSER_H
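A rough end-to-end sketch of the parser API above; the archive URLs are placeholders for whichever Sources.gz and Packages.gz a caller points it at:
#include "sources_parser.h"
#include <iostream>
int graph_example() {
    auto sources  = SourcesParser::fetch_and_parse_sources(
        "https://example.org/dists/plucky/main/source/Sources.gz");
    auto binaries = SourcesParser::fetch_and_parse_packages(
        "https://example.org/dists/plucky/main/binary-amd64/Packages.gz");
    if (!sources || !binaries) return 1;
    auto graph = SourcesParser::build_dependency_graph(*sources, *binaries);
    // Edges run from a source package to the source packages that
    // build-depend on it; the JSON is {"dep": ["dependent", ...], ...}.
    std::cout << SourcesParser::serialize_dependency_graph_to_json(graph).toStdString() << "\n";
    return 0;
}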

@ -0,0 +1,217 @@
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#include "task_queue.h"
#include <iostream>
#include <QSqlError>
TaskQueue::TaskQueue(size_t max_concurrent_tasks)
: max_concurrent_tasks_(max_concurrent_tasks), stop_(false),
tasks_(),
running_tasks_() {}
TaskQueue::~TaskQueue() {
stop();
}
// FIXME: copy of CiLogic::get_thread_connection()
std::atomic<unsigned int> TaskQueue::thread_id_counter{1200};
QSqlDatabase TaskQueue::get_thread_connection() {
std::lock_guard<std::mutex> lock(connection_mutex_);
thread_local unsigned int thread_unique_id = thread_id_counter.fetch_add(1);
QString connectionName = QString("LubuntuCIConnection_%1").arg(thread_unique_id);
// Check if the connection already exists for this thread
if (QSqlDatabase::contains(connectionName)) {
QSqlDatabase db = QSqlDatabase::database(connectionName);
if (!db.isOpen()) {
if (!db.open()) {
throw std::runtime_error("Failed to open thread-specific database connection: " + db.lastError().text().toStdString());
}
}
return db;
}
QSqlDatabase threadDb = QSqlDatabase::addDatabase("QSQLITE", connectionName);
threadDb.setDatabaseName("/srv/lubuntu-ci/repos/ci-tools/lubuntu_ci.db");
if (!threadDb.open()) {
throw std::runtime_error("Failed to open new database connection for thread: " + threadDb.lastError().text().toStdString());
}
return threadDb;
}
void TaskQueue::enqueue(std::shared_ptr<JobStatus> jobstatus,
std::function<void(std::shared_ptr<Log> log)> task_func,
std::shared_ptr<PackageConf> packageconf) {
{
auto connection = get_thread_connection();
auto now = std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch())
.count();
// Create the task
std::shared_ptr<Task> task_ptr = std::make_shared<Task>(connection, jobstatus, now, packageconf);
task_ptr->func = [task_func, self_weak = std::weak_ptr<Task>(task_ptr)](std::shared_ptr<Log> log) {
std::shared_ptr<Task> task_locked = self_weak.lock();
if (task_locked) {
log->assign_task_context(task_locked);
task_func(log);
}
};
packageconf->assign_task(jobstatus, task_ptr, packageconf);
std::unique_lock<std::mutex> lock(tasks_mutex_);
tasks_.emplace(task_ptr);
}
cv_.notify_all(); // Notify worker threads
}
void TaskQueue::start() {
stop_ = false;
for (size_t i = 0; i < max_concurrent_tasks_; ++i) {
workers_.emplace_back(&TaskQueue::worker_thread, this);
}
}
void TaskQueue::stop() {
{
std::unique_lock<std::mutex> tasks_lock(tasks_mutex_);
std::unique_lock<std::mutex> pkgconfs_lock(running_pkgconfs_mutex_);
std::unique_lock<std::mutex> running_tasks_lock(running_tasks_mutex_);
stop_ = true;
}
cv_.notify_all(); // Wake up all threads
for (auto& worker : workers_) {
if (worker.joinable()) {
worker.join();
}
}
}
std::set<std::shared_ptr<Task>, Task::TaskComparator> TaskQueue::get_tasks() const {
std::lock_guard<std::mutex> lock(tasks_mutex_);
return tasks_;
}
std::set<std::shared_ptr<Task>, Task::TaskComparator> TaskQueue::get_running_tasks() const {
std::lock_guard<std::mutex> lock(running_tasks_mutex_);
return running_tasks_;
}
void TaskQueue::worker_thread() {
int worker_id = max_worker_id++;
while (true) {
std::shared_ptr<Task> task_to_execute;
{
std::lock_guard<std::mutex> tasks_lock(tasks_mutex_);
if (stop_ && tasks_.empty()) {
return; // Exit thread if stopping and no tasks left
}
auto it = tasks_.begin();
bool found_valid = false;
// Iterate through the set until a valid task is found
while (it != tasks_.end()) {
std::lock_guard<std::mutex> lock(running_pkgconfs_mutex_);
std::shared_ptr<Task> it_task = *it;
task_to_execute = it_task;
int pkgconf_id = task_to_execute->get_parent_packageconf()->id;
auto running_pkgconf_it = std::find_if(running_pkgconfs_.begin(), running_pkgconfs_.end(),
[&pkgconf_id](const std::shared_ptr<PackageConf>& pkgconf) { return pkgconf->id == pkgconf_id; });
if (running_pkgconf_it != running_pkgconfs_.end()) {
++it; // Move to the next task
continue;
}
// Task is valid to execute
found_valid = true;
it = tasks_.erase(it);
break;
}
if (!found_valid) { continue; }
}
if (!task_to_execute || !task_to_execute->func) {
continue;
} else {
std::lock_guard<std::mutex> pkgconfslock(running_pkgconfs_mutex_);
running_pkgconfs_.insert(task_to_execute->get_parent_packageconf());
std::lock_guard<std::mutex> tasks_lock(running_tasks_mutex_);
running_tasks_.insert(task_to_execute);
}
// Set the start time
{
auto now = std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch())
.count();
task_to_execute->start_time = now;
auto connection = get_thread_connection();
task_to_execute->save(connection, 0);
}
try {
task_to_execute->func(task_to_execute->log); // Execute the task
task_to_execute->successful = true;
} catch (const std::exception& e) {
task_to_execute->successful = false;
std::ostringstream oss;
oss << "Exception type: " << typeid(e).name() << "\n"
<< "What: " << e.what();
task_to_execute->log->append(oss.str());
} catch (...) {
task_to_execute->successful = false;
task_to_execute->log->append("Unknown exception occurred");
}
{
auto now = std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch())
.count();
task_to_execute->finish_time = now;
auto connection = get_thread_connection();
task_to_execute->save(connection, 0);
}
{
// Remove the task from running_tasks_
std::lock_guard<std::mutex> lock(running_tasks_mutex_);
int id = task_to_execute->id;
auto running_task_it = std::find_if(running_tasks_.begin(), running_tasks_.end(),
[&id](const std::shared_ptr<Task>& task) { return task->id == id; });
if (running_task_it != running_tasks_.end()) {
running_tasks_.erase(running_task_it);
}
}
{
// Remove packageconf from running_pkgconfs_ by id
std::lock_guard<std::mutex> lock(running_pkgconfs_mutex_);
int pkgconf_id = task_to_execute->get_parent_packageconf()->id;
auto running_pkgconf_it = std::find_if(running_pkgconfs_.begin(), running_pkgconfs_.end(),
[&pkgconf_id](const std::shared_ptr<PackageConf>& pkgconf) { return pkgconf->id == pkgconf_id; });
if (running_pkgconf_it != running_pkgconfs_.end()) {
running_pkgconfs_.erase(running_pkgconf_it);
}
}
}
}

@ -0,0 +1,64 @@
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#ifndef TASK_QUEUE_H
#define TASK_QUEUE_H
#include "ci_database_objs.h"
#include <set>
#include <vector>
#include <thread>
#include <mutex>
#include <condition_variable>
#include <functional>
#include <string>
#include <queue>
#include <QSqlDatabase>
class TaskQueue {
public:
TaskQueue(size_t max_concurrent_tasks = 10);
~TaskQueue();
void enqueue(std::shared_ptr<JobStatus> jobstatus, std::function<void(std::shared_ptr<Log> log)> task_func, std::shared_ptr<PackageConf> packageconf);
void start();
void stop();
std::set<std::shared_ptr<Task>, Task::TaskComparator> get_tasks() const;
std::set<std::shared_ptr<Task>, Task::TaskComparator> get_running_tasks() const;
private:
size_t max_concurrent_tasks_;
std::set<std::shared_ptr<Task>, Task::TaskComparator> tasks_;
std::set<std::shared_ptr<Task>, Task::TaskComparator> running_tasks_;
std::set<std::shared_ptr<PackageConf>> running_pkgconfs_;
std::queue<std::function<void()>> thread_pool_tasks_;
mutable std::mutex tasks_mutex_;
mutable std::mutex running_pkgconfs_mutex_;
mutable std::mutex running_tasks_mutex_;
std::condition_variable cv_;
bool stop_;
std::vector<std::thread> workers_;
static std::atomic<unsigned int> thread_id_counter;
mutable std::mutex connection_mutex_;
int max_worker_id = 1;
void worker_thread();
QSqlDatabase get_thread_connection();
};
#endif // TASK_QUEUE_H
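A hedged sketch of how the queue above is meant to be driven; the JobStatus and PackageConf instances are assumed to come from CiLogic and the database layer rather than being constructed here:
#include "task_queue.h"
void queue_example(std::shared_ptr<JobStatus> status,
                   std::shared_ptr<PackageConf> pkgconf) {
    TaskQueue queue(6);          // at most 6 tasks run concurrently
    queue.start();               // spawns the worker threads
    queue.enqueue(status, [](std::shared_ptr<Log> log) {
        log->append("doing some work for this packageconf...\n");
        // An exception thrown here is caught by the worker and recorded on the task.
    }, pkgconf);
    // Only one task per PackageConf runs at a time; others wait in the queue.
    queue.stop();                // finishes queued work and joins the workers
}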

@ -0,0 +1,544 @@
/*
* A minimal Jinja2-like template engine in one file, supporting:
* - {% extends "base.html" %}
* - {% block content %} ... {% endblock %}
* - {{ scalarVariable }}
* - {% if expr %} ... {% elif expr %} ... {% else %} ... {% endif %}
* - {% for item in list %} ... {% endfor %}
* - Basic expression parsing with ==, !=, >, <, >=, <=
* - Simple filter usage: {{ var|add:-1 }}
*
* Updated to support nested variable access using dot notation (e.g., repo.packaging_commit).
*
* Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
*/
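/*
 * Illustrative example (not a template shipped in this commit): a template
 * file containing
 *   {% for repo in repos %}{% if repo.dirty == "1" %}{{ repo.name }} {% endif %}{% endfor %}
 * rendered through render_jinja() with an empty scalarContext and a
 * listContext of repos = [ {name:"krita", dirty:"1"}, {name:"kcalc", dirty:"0"} ]
 * expands the loop, evaluates each condition against the prefixed keys
 * "repo.name"/"repo.dirty", and emits only the names of repos whose dirty
 * flag is "1" (here, krita).
 */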
#include "template_renderer.h"
#include <string>
#include <vector>
#include <map>
#include <regex>
#include <fstream>
#include <sstream>
#include <iostream>
#include <filesystem>
#include <stdexcept>
#include <cstdlib>
#include <algorithm>
#include <exception>
#include <shared_mutex>
#include <mutex>
namespace fs = std::filesystem;
static std::mutex file_mutex;
std::string TemplateRenderer::build_template_path(const std::string &tplName)
{
if (!tplName.empty() && tplName.front() == '/') {
return tplName;
}
return "templates/" + tplName;
}
std::string TemplateRenderer::file_get_contents(const std::string &path)
{
std::unique_lock lock(file_mutex);
try {
fs::path rel(path);
fs::path abs = fs::absolute(rel);
auto open_file = [](const fs::path& file_path) -> std::ifstream {
std::ifstream file(file_path, std::ios::in);
if (!file) {
throw std::ios_base::failure("File could not be opened: " + file_path.string());
}
return file;
};
std::ifstream file = open_file(abs);
std::ostringstream contents;
contents << file.rdbuf();
return contents.str();
} catch (const std::exception& e) {
std::cerr << "Unable to get file contents in template_renderer: " << e.what() << "\n";
return "";
} catch (...) {
std::cerr << "Unable to get file contents in template_renderer (unknown exception.)\n";
return "";
}
}
std::string TemplateRenderer::apply_filter(const std::string &value, const std::string &filterPart)
{
size_t colonPos = filterPart.find(':');
std::string filterName = (colonPos == std::string::npos)
? filterPart
: filterPart.substr(0, colonPos);
std::string filterArg = (colonPos == std::string::npos)
? ""
: filterPart.substr(colonPos + 1);
if (filterName == "add") {
try {
int original = std::stoi(value);
int increment = std::stoi(filterArg);
return std::to_string(original + increment);
} catch(...) {
return value;
}
}
// Additional filters can be added here.
return value; // Unknown filter => pass through
}
std::string TemplateRenderer::apply_all_filters(const std::string &valueWithFilters,
const std::map<std::string,std::string> &ctx)
{
// Split on '|'
std::vector<std::string> parts;
size_t start = 0;
while (true) {
size_t pos = valueWithFilters.find('|', start);
if (pos == std::string::npos) {
parts.push_back(valueWithFilters.substr(start));
break;
}
parts.push_back(valueWithFilters.substr(start, pos - start));
start = pos + 1;
}
if (parts.empty()) {
return "";
}
std::string varExpression = parts[0];
std::string value = get_variable_value(varExpression, ctx);
// Apply filters if any
for (size_t i = 1; i < parts.size(); i++) {
value = apply_filter(value, parts[i]);
}
return value;
}
bool TemplateRenderer::evaluate_condition(const std::string &expr,
const std::map<std::string,std::string> &ctx)
{
// Define helper lambdas
auto trim = [](const std::string &s) -> std::string {
size_t start = 0;
while (start < s.size() && isspace(static_cast<unsigned char>(s[start]))) start++;
size_t end = s.size();
while (end > start && isspace(static_cast<unsigned char>(s[end - 1]))) end--;
return s.substr(start, end - start);
};
auto isInteger = [&](const std::string &s) -> bool {
if (s.empty()) return false;
size_t start = (s[0] == '-') ? 1 : 0;
for (size_t i = start; i < s.size(); ++i) {
if (!isdigit(static_cast<unsigned char>(s[i]))) return false;
}
return true;
};
auto unquoteIfNeeded = [&](const std::string &tok) -> std::string {
auto t = trim(tok);
if (t.size() >= 2 &&
((t.front() == '\'' && t.back() == '\'') ||
(t.front() == '\"' && t.back() == '\"'))) {
return t.substr(1, t.size() - 2);
}
return t;
};
auto parse_token_value = [&](const std::string &rawToken) -> std::string {
auto t = trim(rawToken);
if (t.size() >= 2 && ((t.front() == '\'' && t.back() == '\'') ||
(t.front() == '\"' && t.back() == '\"'))) {
// Literal string
return unquoteIfNeeded(t);
} else {
// Apply filters
return apply_all_filters(t, ctx);
}
};
// Split the expression by 'and'
std::vector<std::string> conditions;
std::regex andRe("\\s+and\\s+");
std::sregex_token_iterator it(expr.begin(), expr.end(), andRe, -1);
std::sregex_token_iterator end;
while (it != end) {
conditions.push_back(trim(*it));
++it;
}
// Evaluate each sub-condition
for (const auto &subExpr : conditions) {
std::string e = trim(subExpr);
if (e.empty()) continue;
// Operators
static std::vector<std::string> ops = {"==", "!=", "<=", ">=", ">", "<"};
size_t opPos = std::string::npos;
std::string opFound;
for (const auto &cand : ops) {
size_t p = e.find(cand);
if (p != std::string::npos) {
if (opPos == std::string::npos || p < opPos) {
opPos = p;
opFound = cand;
}
}
}
if (opPos == std::string::npos) {
// No operator => check truthiness of var
std::string val = parse_token_value(e);
if (val.empty()) return false;
continue;
}
std::string left = trim(e.substr(0, opPos));
std::string right = trim(e.substr(opPos + opFound.size()));
// Directly handle dot notation by using the entire composite key
std::string lv = parse_token_value(left);
std::string rv = parse_token_value(right);
bool li = isInteger(lv);
bool ri = isInteger(rv);
bool result = false;
if (li && ri) {
int lnum = std::stoi(lv);
int rnum = std::stoi(rv);
if (opFound == "==") result = (lnum == rnum);
else if (opFound == "!=") result = (lnum != rnum);
else if (opFound == ">") result = (lnum > rnum);
else if (opFound == "<") result = (lnum < rnum);
else if (opFound == ">=") result = (lnum >= rnum);
else if (opFound == "<=") result = (lnum <= rnum);
} else {
// String compare
if (opFound == "==") result = (lv == rv);
else if (opFound == "!=") result = (lv != rv);
else if (opFound == ">") result = (lv > rv);
else if (opFound == "<") result = (lv < rv);
else if (opFound == ">=") result = (lv >= rv);
else if (opFound == "<=") result = (lv <= rv);
}
if (!result) return false; // Short-circuit for 'and'
}
return true; // All sub-conditions passed
}
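// Example (illustrative): with ctx = { {"repo.dirty","1"}, {"count","3"} },
//   evaluate_condition("repo.dirty == '1' and count >= 2", ctx)  -> true
//   evaluate_condition("count < 2", ctx)                         -> false
//   evaluate_condition("missing_key", ctx)                       -> false,
//     assuming get_variable_value() returns "" for unknown keys (an empty
//     value is treated as false above).
// Only 'and' chaining is supported; there is no 'or' or parenthesization.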
std::string TemplateRenderer::expand_conditionals(std::string input,
const std::map<std::string,std::string> &ctx)
{
static std::regex ifOpenRe("\\{\\%\\s*if\\s+[^\\}]+\\%\\}");
static std::regex ifCloseRe("\\{\\%\\s*endif\\s*\\%\\}");
while (true) {
// Gather all if-positions
std::vector<size_t> ifPositions;
{
size_t searchStart = 0;
while (true) {
std::smatch mOpen;
std::string sub = input.substr(searchStart);
if (!std::regex_search(sub, mOpen, ifOpenRe)) {
break;
}
size_t posAbsolute = searchStart + mOpen.position(0);
ifPositions.push_back(posAbsolute);
searchStart = posAbsolute + mOpen.length(0);
}
}
if (ifPositions.empty()) {
break;
}
// The last one is the innermost
size_t ifPos = ifPositions.back();
{
std::string sub2 = input.substr(ifPos);
std::smatch mclose;
if (!std::regex_search(sub2, mclose, ifCloseRe)) {
// No matching endif
break;
}
size_t closePosRelative = mclose.position(0);
size_t ifClosePos = ifPos + closePosRelative;
size_t blockLen = (ifClosePos - ifPos) + mclose.length(0);
// Entire block
std::string blockText = input.substr(ifPos, blockLen);
// Main regex to match the entire if-endif block
static std::regex mainRe(
"\\{\\%\\s*if\\s+([^\\}]+)\\s*\\%\\}([\\s\\S]*?)\\{\\%\\s*endif\\s*\\%\\}"
);
std::smatch blockMatch;
if (!std::regex_match(blockText, blockMatch, mainRe)) {
break;
}
std::string condition = blockMatch[1].str();
std::string innerBlock = blockMatch[2].str();
// Parse out any {% elif ... %} / {% else %}
struct ConditionBlock {
std::string cond; // Empty => else
std::string content;
};
std::vector<ConditionBlock> blocks;
blocks.emplace_back(ConditionBlock{ condition, "" });
static std::regex elifElseRe("\\{\\%\\s*elif\\s+([^\\}]+)\\s*\\%\\}|\\{\\%\\s*else\\s*\\%\\}");
size_t lastPos = 0;
auto bBegin = std::sregex_iterator(innerBlock.begin(), innerBlock.end(), elifElseRe);
auto bEnd = std::sregex_iterator();
for (auto i = bBegin; i != bEnd; ++i) {
auto m2 = *i;
size_t pos2 = m2.position(0);
// Text up to pos2 is the previous block's content
blocks.back().content.append(innerBlock.substr(lastPos, pos2 - lastPos));
if (m2[1].matched) {
// Elif
blocks.emplace_back(ConditionBlock{ m2[1].str(), "" });
} else {
// Else
blocks.emplace_back(ConditionBlock{ "", "" });
}
lastPos = pos2 + m2.length(0);
}
// Leftover
if (!blocks.empty()) {
blocks.back().content.append(innerBlock.substr(lastPos));
}
// Evaluate
std::string finalText;
bool used = false;
for (auto &b : blocks) {
if (b.cond.empty()) {
// Else
if (!used) {
finalText = b.content;
}
break;
} else {
if (evaluate_condition(b.cond, ctx)) {
finalText = b.content;
used = true;
break;
}
}
}
// Replace that block region with finalText
input.replace(ifPos, blockLen, finalText);
}
}
return input;
}
std::string TemplateRenderer::expand_loops(const std::string &input,
const std::map<std::string,std::string> &scalarContext,
const std::map<std::string,
std::vector<std::map<std::string,std::string>>> &listContext)
{
std::string result = input;
static std::regex loopRegex("\\{\\%\\s*for\\s+(\\S+)\\s+in\\s+(\\S+)\\s*\\%\\}([\\s\\S]*?)\\{\\%\\s*endfor\\s*\\%\\}");
while (true) {
std::smatch m;
if (!std::regex_search(result, m, loopRegex)) {
break;
}
std::string aliasName = m[1].str(); // e.g., 'repo'
std::string arrayName = m[2].str(); // e.g., 'repos'
std::string loopBody = m[3].str();
auto it = listContext.find(arrayName);
if (it == listContext.end()) {
// No such array => remove the block
result.replace(m.position(0), m.length(0), "");
continue;
}
std::string expanded;
for (const auto &oneItem : it->second) {
// Create a per-item scalar context with prefixed keys
std::map<std::string, std::string> perItemScalarContext = scalarContext;
for (const auto &kv : oneItem) {
perItemScalarContext[aliasName + "." + kv.first] = kv.second;
}
std::string chunk = loopBody;
// Expand conditionals with per-item scalar context
chunk = expand_conditionals(chunk, perItemScalarContext);
// Expand nested loops if any with per-item scalar context
chunk = expand_loops(chunk, perItemScalarContext, listContext);
// Final scalar expansions with per-item scalar context
chunk = replace_variables(chunk, perItemScalarContext);
// Remove excess whitespace
chunk = strip_excess_whitespace(chunk);
expanded += chunk;
}
result.replace(m.position(0), m.length(0), expanded);
}
return result;
}
std::string TemplateRenderer::replace_variables(const std::string &input,
const std::map<std::string,std::string> &context)
{
static std::regex varRe("\\{\\{\\s*(.*?)\\s*\\}\\}");
std::string output;
output.reserve(input.size());
size_t lastPos = 0;
auto begin = std::sregex_iterator(input.begin(), input.end(), varRe);
auto end = std::sregex_iterator();
for (auto it = begin; it != end; ++it) {
auto match = *it;
output.append(input, lastPos, match.position(0) - lastPos);
std::string expr = match[1].str();
// Directly apply all filters (which now handle composite keys)
std::string value = apply_all_filters(expr, context);
output.append(value);
lastPos = match.position(0) + match.length(0);
}
output.append(input, lastPos);
// Remove leftover {% ... %} if any
static std::regex leftover("\\{\\%.*?\\%\\}");
output = std::regex_replace(output, leftover, "");
return output;
}
std::string TemplateRenderer::render_jinja(
const std::string &tplPath,
const std::map<std::string,std::string> &scalarContext,
const std::map<std::string,
std::vector<std::map<std::string,std::string>>> &listContext)
{
std::string tpl = file_get_contents(tplPath);
if (tpl.empty()) {
return "<html><body><p>Template not found: " + tplPath + "</p></body></html>";
}
std::string step0 = expand_conditionals(tpl, scalarContext);
std::string step1 = expand_loops(step0, scalarContext, listContext);
std::string result = replace_variables(step1, scalarContext);
return result;
}
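// Usage sketch (illustrative only, not referenced elsewhere in this commit):
// the template path and context values below are hypothetical; they only show
// how the scalar context and the list context (with dot-notation keys such as
// repo.name inside the loop body) feed the expansion passes above.
static std::string render_jinja_example() {
    std::map<std::string, std::string> scalars{{"PAGE_TITLE", "Repositories"}};
    std::map<std::string, std::vector<std::map<std::string, std::string>>> lists{
        {"repos", {{{"name", "alpha"}}, {{"name", "beta"}}}}
    };
    // A hypothetical templates/repos.html could contain:
    //   <h1>{{ PAGE_TITLE }}</h1>
    //   <ul>{% for repo in repos %}<li>{{ repo.name }}</li>{% endfor %}</ul>
    // and would come back with one <li> per entry in lists["repos"].
    return TemplateRenderer::render_jinja("templates/repos.html", scalars, lists);
}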
std::string TemplateRenderer::render_with_inheritance(
const std::string &childTplName,
const std::map<std::string,std::string> &scalarContext,
const std::map<std::string,
std::vector<std::map<std::string,std::string>>> &listContext)
{
// Load child template
std::string childText = file_get_contents(build_template_path(childTplName));
if (childText.empty()) {
return "<html><body><h1>Missing child template:</h1>"
+ build_template_path(childTplName) + "</body></html>";
}
// Check for {% extends "base.html" %}
static std::regex extendsRe("\\{\\%\\s*extends\\s*\"([^\"]+)\"\\s*\\%\\}");
std::smatch exm;
if (!std::regex_search(childText, exm, extendsRe)) {
// No extends => just do expansions
std::string step0 = expand_conditionals(childText, scalarContext);
std::string step1 = expand_loops(step0, scalarContext, listContext);
std::string result = replace_variables(step1, scalarContext);
return result;
}
// If extends => load base
std::string baseName = exm[1].str();
std::string baseText = file_get_contents(build_template_path(baseName));
if (baseText.empty()) {
return "<html><body><h1>Missing base template:</h1>"
+ baseName + "</body></html>";
}
// Extract child block content
static std::regex blockRe("\\{\\%\\s*block\\s+content\\s*\\%\\}([\\s\\S]*?)\\{\\%\\s*endblock\\s*\\%\\}");
std::smatch blockMatch;
std::string childBlock;
if (std::regex_search(childText, blockMatch, blockRe)) {
childBlock = blockMatch[1].str();
}
// Process loops first, which handle their own conditionals with loop variables
std::string expandedChildBlock = expand_loops(childBlock, scalarContext, listContext);
// Then process any conditionals outside loops
expandedChildBlock = expand_conditionals(expandedChildBlock, scalarContext);
// Finally, replace variables in the child block
expandedChildBlock = replace_variables(expandedChildBlock, scalarContext);
// Replace {{BLOCK content}} in base with expanded child block
const std::string marker = "{{BLOCK content}}";
size_t pos = baseText.find(marker);
if (pos != std::string::npos) {
baseText.replace(pos, marker.size(), expandedChildBlock);
}
// Replace variables in the entire base template (to handle {{PAGE_TITLE}})
baseText = replace_variables(baseText, scalarContext);
// Remove any remaining {% ... %} tags
static std::regex leftover("\\{\\%.*?\\%\\}");
baseText = std::regex_replace(baseText, leftover, "");
return baseText;
}
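// Usage sketch (illustrative only): rendering a child template that extends
// base.html. The file name assumes the error template added in this commit is
// saved as "error.html" under the directory build_template_path() resolves to;
// the context values are made up.
static std::string render_error_page_example(const std::string &message) {
    std::map<std::string, std::string> scalars{
        {"PAGE_TITLE", "Error"},
        {"ERROR_MESSAGE", message}
    };
    return TemplateRenderer::render_with_inheritance("error.html", scalars, {});
}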
std::string TemplateRenderer::strip_excess_whitespace(const std::string &str) {
// Remove leading/trailing spaces and unify consecutive whitespace into single spaces
std::string result;
result.reserve(str.size());
bool prevSpace = false;
for (char c: str) {
if (isspace(static_cast<unsigned char>(c))) {
if (!prevSpace) {
result += ' ';
prevSpace = true;
}
} else {
result += c;
prevSpace = false;
}
}
// Trim leading and trailing spaces
size_t start = 0;
while (start < result.size() && isspace(static_cast<unsigned char>(result[start]))) {
start++;
}
size_t end = result.size();
while (end > start && isspace(static_cast<unsigned char>(result[end - 1]))) {
end--;
}
return result.substr(start, end - start);
}
std::string TemplateRenderer::get_variable_value(const std::string &var,
const std::map<std::string, std::string> &ctx) {
auto it = ctx.find(var);
if (it != ctx.end()) {
return it->second;
}
return "";
}

@ -0,0 +1,85 @@
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#ifndef TEMPLATE_RENDERER_H
#define TEMPLATE_RENDERER_H
#include <string>
#include <map>
#include <vector>
#include <filesystem>
/**
* This class provides two styles of rendering:
*
* 1) render_jinja(...) -- A naive Jinja-like expansion for loops/variables.
* 2) render_with_inheritance(...) -- A minimal approach to handle
* {% extends "base.html" %} and {% block content %} usage, plus
* {{VARIABLE}} expansions.
*
* The "base.html" template is expected to contain something like:
* <html>... {{BLOCK content}} ...</html>
* And the child template might do:
* {% extends "base.html" %}
* {% block content %}Hello world{% endblock %}
*/
class TemplateRenderer {
public:
static std::string render_jinja(
const std::string &tplPath,
const std::map<std::string,std::string> &scalarContext,
const std::map<std::string,
std::vector<std::map<std::string,std::string>>> &listContext
);
static std::string render_with_inheritance(
const std::string &childTplName,
const std::map<std::string,std::string> &scalarContext,
const std::map<std::string,
std::vector<std::map<std::string,std::string>>> &listContext
);
private:
static std::string build_template_path(const std::string &tplName);
static std::string file_get_contents(const std::string &path);
// Filters
static std::string apply_filter(const std::string &value, const std::string &filterPart);
static std::string apply_all_filters(const std::string &valueWithFilters,
const std::map<std::string,std::string> &ctx);
// Conditionals
static std::string expand_conditionals(std::string input,
const std::map<std::string,std::string> &ctx);
static bool evaluate_condition(const std::string &expr,
const std::map<std::string,std::string> &ctx);
// For loops
static std::string expand_loops(const std::string &input,
const std::map<std::string,std::string> &scalarContext,
const std::map<std::string,
std::vector<std::map<std::string,std::string>>> &listContext);
// Final expansions
static std::string replace_variables(const std::string &input,
const std::map<std::string,std::string> &context);
// Helper: strip extraneous whitespace from final expansions
static std::string strip_excess_whitespace(const std::string &str);
static std::string get_variable_value(const std::string &var, const std::map<std::string, std::string> &ctx);
};
#endif // TEMPLATE_RENDERER_H

@ -1,4 +1,4 @@
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@ -24,7 +24,6 @@
namespace fs = std::filesystem;
// Definitions from update-maintainer.cpp moved here
static const char* PREVIOUS_UBUNTU_MAINTAINERS[] = {
"ubuntu core developers <ubuntu-devel@lists.ubuntu.com>",
"ubuntu core developers <ubuntu-devel-discuss@lists.ubuntu.com>",
@ -32,25 +31,16 @@ static const char* PREVIOUS_UBUNTU_MAINTAINERS[] = {
};
static const char* UBUNTU_MAINTAINER = "Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>";
class MaintainerUpdateException : public std::runtime_error {
public:
using std::runtime_error::runtime_error;
};
static std::optional<fs::path> find_control_file(const fs::path &debian_dir) {
static fs::path find_control_file(const fs::path &debian_dir) {
fs::path control_in = debian_dir / "control.in";
fs::path control = debian_dir / "control";
if (fs::exists(control_in)) return control_in;
if (fs::exists(control)) return control;
return std::nullopt;
}
static fs::path find_changelog_file(const fs::path &debian_dir) {
fs::path changelog = debian_dir / "changelog";
if (!fs::exists(changelog)) {
throw MaintainerUpdateException("No changelog file found");
if (fs::exists(control_in)) {
return control_in;
}
if (fs::exists(control)) {
return control;
}
return changelog;
throw std::runtime_error("No control file found in " + debian_dir.string());
}
static bool xsbc_managed_by_rules(const fs::path &debian_dir) {
@ -66,30 +56,9 @@ static bool xsbc_managed_by_rules(const fs::path &debian_dir) {
return false;
}
static std::string get_distribution(const fs::path &changelog_file) {
// parse first line of changelog: "package (version) dist; urgency=..."
// dist is the token after ')'
std::ifstream f(changelog_file);
if(!f) throw MaintainerUpdateException("Unable to open changelog.");
std::string first_line;
std::getline(f, first_line);
size_t pos = first_line.find(')');
if(pos == std::string::npos) throw MaintainerUpdateException("Invalid changelog format");
pos++;
while(pos < first_line.size() && std::isspace((unsigned char)first_line[pos])) pos++;
size_t start = pos;
while(pos < first_line.size() && !std::isspace((unsigned char)first_line[pos]) && first_line[pos] != ';') pos++;
std::string dist = first_line.substr(start, pos - start);
size_t dashpos = dist.find('-');
if (dashpos != std::string::npos) {
dist = dist.substr(0, dashpos);
}
return dist;
}
static std::string read_file(const fs::path &p) {
std::ifstream f(p);
if(!f) throw MaintainerUpdateException("Cannot read file: " + p.string());
if(!f) throw std::runtime_error("Cannot read file: " + p.string());
std::stringstream ss;
ss << f.rdbuf();
return ss.str();
@ -97,118 +66,99 @@ static std::string read_file(const fs::path &p) {
static void write_file(const fs::path &p, const std::string &content) {
std::ofstream f(p);
if(!f) throw MaintainerUpdateException("Cannot write file: " + p.string());
if(!f) throw std::runtime_error("Cannot write file: " + p.string());
f << content;
}
static std::optional<std::string> get_field(const std::string &content, const std::string &field_regex) {
std::regex r(field_regex, std::regex_constants::multiline);
std::smatch m;
if(std::regex_search(content, m, r)) {
return m[1].str();
}
return std::nullopt;
}
static std::string set_field(const std::string &content, const std::string &field_regex, const std::string &new_line) {
std::regex r(field_regex, std::regex_constants::multiline);
return std::regex_replace(content, r, new_line);
}
static void update_maintainer_file(const fs::path &control_file, const std::string &distribution, bool verbose) {
static void update_maintainer_file(const fs::path &control_file, bool verbose) {
std::string c = read_file(control_file);
auto original_maintainer = get_field(c, "^Maintainer:\\s?(.*)$");
if(!original_maintainer) {
throw MaintainerUpdateException("No Maintainer field found");
// Helper lambda to find a field
auto find_field = [&](const std::string &field) -> std::optional<std::string> {
std::regex r("^" + field + ":\\s?(.*)$", std::regex_constants::icase | std::regex_constants::multiline);
std::smatch m;
if(std::regex_search(c, m, r)) {
return m[1].str();
}
return std::nullopt;
};
// Helper lambda to replace a field line
auto replace_field = [&](const std::string &field, const std::string &val) {
std::regex r("^" + field + ":\\s?.*$", std::regex_constants::icase | std::regex_constants::multiline);
c = std::regex_replace(c, r, field + ": " + val);
};
auto original_maint = find_field("Maintainer");
if(!original_maint) {
throw std::runtime_error("No Maintainer field found in " + control_file.string());
}
std::string om = *original_maintainer;
std::string om_lower = om;
for (auto &ch : om_lower) ch = (char)std::tolower((unsigned char)ch);
std::string om_lower = *original_maint;
for (auto &ch : om_lower) {
ch = (char)std::tolower((unsigned char)ch);
}
// Check previous ubuntu maintainers
// If the original maintainer is a known Ubuntu style, just unify
for (auto &pm : PREVIOUS_UBUNTU_MAINTAINERS) {
std::string pm_lower = pm;
for (auto &ch: pm_lower) ch=(char)std::tolower((unsigned char)ch);
if(pm_lower == om_lower) {
for (auto &ch: pm_lower) {
ch = (char)std::tolower((unsigned char)ch);
}
if (pm_lower == om_lower) {
if(verbose) {
std::cout<<"The old maintainer was: "<<om<<"\n";
std::cout<<"Resetting as: "<<UBUNTU_MAINTAINER<<"\n";
std::cout << "[update-maintainer] Old maintainer was: " << *original_maint << "\n"
<< "Resetting as: " << UBUNTU_MAINTAINER << std::endl;
}
// just set maintainer
std::regex maint_re("^Maintainer:\\s?.*$", std::regex_constants::multiline);
c = std::regex_replace(c, maint_re, "Maintainer: " + std::string(UBUNTU_MAINTAINER));
replace_field("Maintainer", UBUNTU_MAINTAINER);
write_file(control_file, c);
return;
}
}
// If ends with ubuntu.com, do nothing
// e.g. ... <someone@ubuntu.com>
if (om_lower.size() >= 11 &&
om_lower.rfind("ubuntu.com>", om_lower.size()-11) != std::string::npos)
{
std::string lower_om = om_lower;
if (lower_om.rfind("ubuntu.com>", lower_om.size()-11) != std::string::npos) {
if(verbose) {
std::cout<<"The Maintainer email is ubuntu.com address. Doing nothing.\n";
}
return;
}
}
// Debian distributions: stable, testing, unstable, experimental
if(distribution=="stable"||distribution=="testing"||distribution=="unstable"||distribution=="experimental") {
if(verbose) {
std::cout<<"The package targets Debian. Doing nothing.\n";
std::cout << "[update-maintainer] Maintainer is an @ubuntu.com address. Doing nothing.\n";
}
return;
}
// set XSBC-Original-Maintainer if needed
auto orig_field = get_field(c, "^(?:[XSBC]*-)?Original-Maintainer:\\s?(.*)$");
if(orig_field && verbose) {
std::cout<<"Overwriting original maintainer: "<< *orig_field <<"\n";
}
if(verbose) {
std::cout<<"The original maintainer is: "<< om <<"\n";
std::cout<<"Resetting as: "<<UBUNTU_MAINTAINER<<"\n";
}
// set original maint
if(orig_field) {
// pattern to replace original maint
std::regex orig_re("^(?:[XSBC]*-)?Original-Maintainer:.*$", std::regex_constants::multiline);
c = std::regex_replace(c, orig_re, "XSBC-Original-Maintainer: " + om);
// If there's no XSBC-Original, insert it after Maintainer
auto check_xsbc = find_field("XSBC-Original-Maintainer");
if(!check_xsbc) {
std::regex maint_re("^(Maintainer:.*)$",
std::regex_constants::multiline | std::regex_constants::icase);
if(std::regex_search(c, maint_re)) {
c = std::regex_replace(c, maint_re,
"$1\nXSBC-Original-Maintainer: " + *original_maint);
}
} else {
// insert after Maintainer line
std::regex maint_re("^(Maintainer:.*)$", std::regex_constants::multiline);
c = std::regex_replace(c, maint_re, "$1\nXSBC-Original-Maintainer: " + om);
if(verbose) {
std::cout << "[update-maintainer] Overwriting XSBC-Original-Maintainer with: " << *original_maint << "\n";
}
replace_field("XSBC-Original-Maintainer", *original_maint);
}
// now set maint
{
std::regex maint_re("^Maintainer:\\s?.*$", std::regex_constants::multiline);
c = std::regex_replace(c, maint_re, "Maintainer: " + std::string(UBUNTU_MAINTAINER));
if(verbose) {
std::cout << "[update-maintainer] Setting Maintainer to: " << UBUNTU_MAINTAINER << std::endl;
}
replace_field("Maintainer", UBUNTU_MAINTAINER);
write_file(control_file, c);
}
void update_maintainer(const std::string &debian_directory, bool verbose) {
fs::path debian_dir(debian_directory);
auto control_file = find_control_file(debian_dir);
if(!control_file) {
throw MaintainerUpdateException("No control file found");
}
fs::path changelog = find_changelog_file(debian_dir);
fs::path control_file = find_control_file(debian_dir);
if(xsbc_managed_by_rules(debian_dir)) {
if(verbose) {
std::cout<<"XSBC-Original is managed by rules. Doing nothing.\n";
std::cout << "[update-maintainer] XSBC is managed by debian/rules, skipping.\n";
}
return;
}
std::string distribution = get_distribution(changelog);
update_maintainer_file(*control_file, distribution, verbose);
update_maintainer_file(control_file, verbose);
}

@ -1,4 +1,4 @@
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@ -13,7 +13,16 @@
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#pragma once
#ifndef UPDATE_MAINTAINER_LIB_H
#define UPDATE_MAINTAINER_LIB_H
#include <string>
void update_maintainer(const std::string &debian_directory, bool verbose = false);
//
// Update the "Maintainer" field in debian/control (or control.in)
// to a standard Ubuntu field, preserving the original field in
// XSBC-Original-Maintainer if needed.
//
void update_maintainer(const std::string &debian_directory, bool verbose);
#endif

@ -1,3 +1,5 @@
// cpp/update-maintainer.cpp
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
@ -13,7 +15,7 @@
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#include "update-maintainer-lib.h"
#include "lubuntuci_lib.h"
#include <iostream>
int main(int argc, char** argv) {
@ -32,7 +34,7 @@ int main(int argc, char** argv) {
}
try {
update_maintainer(debian_directory, verbose);
//LubuntuCI::update_maintainer(debian_directory, verbose);
if(verbose) {
std::cout << "Maintainer updated successfully." << std::endl;
}

@ -1,4 +1,4 @@
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@ -14,6 +14,7 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#include "utilities.h"
#include "common.h"
#include <fstream>
#include <iostream>
@ -22,47 +23,62 @@
#include <zlib.h>
#include <curl/curl.h>
#include <sys/stat.h>
#include <sstream>
#include <random>
#include <queue>
#include <ranges>
#include <format> // for std::format in C++20/23
namespace fs = std::filesystem;
// Define a semaphore with a maximum of 10 concurrent jobs
static std::counting_semaphore<10> sem(10);
// Job queue and synchronization primitives
static std::queue<std::function<void()>> job_queue;
static std::mutex queue_mutex;
static std::atomic<bool> daemon_running{false};
// Function to read the entire content of a file into a string
std::string readFile(const fs::path& filePath) {
std::ifstream inFile(filePath, std::ios::binary);
if (inFile) {
return std::string((std::istreambuf_iterator<char>(inFile)),
std::string read_file(const fs::path& file_path) {
std::ifstream in_file(file_path, std::ios::binary);
if (in_file) {
return std::string((std::istreambuf_iterator<char>(in_file)),
std::istreambuf_iterator<char>());
}
return "";
}
// Function to write a string into a file
void writeFile(const fs::path& filePath, const std::string& content) {
std::ofstream outFile(filePath, std::ios::binary);
if (outFile) {
outFile << content;
void write_file(const fs::path& file_path, const std::string& content) {
std::ofstream out_file(file_path, std::ios::binary);
if (out_file) {
out_file << content;
}
}
// Function to perform in-place regex replace on a file
void regexReplaceInFile(const fs::path& filePath, const std::string& pattern, const std::string& replace) {
std::string content = readFile(filePath);
content = std::regex_replace(content, std::regex(pattern), replace);
writeFile(filePath, content);
void regex_replace_in_file(const fs::path& file_path,
const std::string& pattern,
const std::string& replacement) {
std::string content = read_file(file_path);
content = std::regex_replace(content, std::regex(pattern), replacement);
write_file(file_path, content);
}
// Function to decompress gzipped files
std::string decompressGzip(const fs::path& filePath) {
gzFile infile = gzopen(filePath.c_str(), "rb");
std::string decompress_gzip(const fs::path& file_path) {
gzFile infile = gzopen(file_path.c_str(), "rb");
if (!infile) return "";
std::string decompressedData;
std::string decompressed_data;
char buffer[8192];
int numRead = 0;
while ((numRead = gzread(infile, buffer, sizeof(buffer))) > 0) {
decompressedData.append(buffer, numRead);
int num_read = 0;
while ((num_read = gzread(infile, buffer, sizeof(buffer))) > 0) {
decompressed_data.append(buffer, num_read);
}
gzclose(infile);
return decompressedData;
return decompressed_data;
}
// Helper function for libcurl write callback
@ -72,18 +88,20 @@ size_t write_data(void* ptr, size_t size, size_t nmemb, void* stream) {
}
// Function to download a file with timestamping using libcurl
void downloadFileWithTimestamping(const std::string& url, const fs::path& outputPath,
const fs::path& logFilePath, std::mutex& logMutex) {
void download_file_with_timestamping(const std::string& url,
const fs::path& output_path,
const fs::path& log_file_path,
std::mutex& log_mutex) {
CURL* curl;
CURLcode res;
FILE* fp;
curl = curl_easy_init();
if (curl) {
fs::path tempFilePath = outputPath.string() + ".tmp";
fp = fopen(tempFilePath.c_str(), "wb");
fs::path temp_file_path = output_path.string() + ".tmp";
fp = fopen(temp_file_path.c_str(), "wb");
if (!fp) {
std::cerr << "Failed to open file: " << tempFilePath << std::endl;
std::cerr << "Failed to open file: " << temp_file_path << std::endl;
curl_easy_cleanup(curl);
return;
}
@ -95,7 +113,7 @@ void downloadFileWithTimestamping(const std::string& url, const fs::path& output
// Timestamping: set If-Modified-Since header
struct stat file_info;
if (stat(outputPath.c_str(), &file_info) == 0) {
if (stat(output_path.c_str(), &file_info) == 0) {
// Set the time condition to If-Modified-Since
curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
curl_easy_setopt(curl, CURLOPT_TIMEVALUE, file_info.st_mtime);
@ -113,20 +131,165 @@ void downloadFileWithTimestamping(const std::string& url, const fs::path& output
// Log the result and handle the downloaded file
{
std::lock_guard<std::mutex> lock(logMutex);
std::ofstream logFile(logFilePath, std::ios::app);
std::lock_guard<std::mutex> lock(log_mutex);
std::ofstream log_file(log_file_path, std::ios::app);
if (res == CURLE_OK && (response_code == 200 || response_code == 201)) {
fs::rename(tempFilePath, outputPath);
logFile << "Downloaded: " << url << std::endl;
fs::rename(temp_file_path, output_path);
log_file << "Downloaded: " << url << std::endl;
} else if (response_code == 304) {
fs::remove(tempFilePath);
logFile << "Not Modified: " << url << std::endl;
fs::remove(temp_file_path);
log_file << "Not Modified: " << url << std::endl;
} else {
fs::remove(tempFilePath);
logFile << "Failed to download: " << url << std::endl;
fs::remove(temp_file_path);
log_file << "Failed to download: " << url << std::endl;
}
}
} else {
std::cerr << "Failed to initialize CURL." << std::endl;
}
}
std::filesystem::path create_temp_directory() {
auto temp_dir = std::filesystem::temp_directory_path() / generate_random_string(32);
std::filesystem::create_directory(temp_dir);
return temp_dir;
}
// Function to copy a directory recursively
void copy_directory(const fs::path& source, const fs::path& destination) {
if (!std::filesystem::exists(source) || !std::filesystem::is_directory(source)) {
throw std::runtime_error("Source directory does not exist or is not a directory: " + source.string());
}
// Create the destination directory
std::filesystem::create_directories(destination);
// Copy files and directories recursively
for (const auto& entry : std::filesystem::recursive_directory_iterator(source)) {
auto relative_path = std::filesystem::relative(entry.path(), source);
auto target_path = destination / relative_path;
try {
if (std::filesystem::is_directory(entry)) {
std::filesystem::create_directory(target_path);
} else if (std::filesystem::is_regular_file(entry)) {
std::filesystem::copy(entry, target_path, std::filesystem::copy_options::overwrite_existing);
}
} catch (...) {
continue;
}
}
}
// Function to generate a random string of given length
std::string generate_random_string(size_t length) {
const std::string chars =
"abcdefghijklmnopqrstuvwxyz"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"0123456789";
thread_local std::mt19937 rg{std::random_device{}()};
thread_local std::uniform_int_distribution<> pick(0, chars.size() - 1);
std::string s;
s.reserve(length);
while (length--)
s += chars[pick(rg)];
return s;
}
// Function to get current UTC time formatted as per the given format string
std::string get_current_utc_time(const std::string& format) {
auto now = std::chrono::system_clock::now();
std::time_t now_time = std::chrono::system_clock::to_time_t(now);
std::tm tm_utc;
gmtime_r(&now_time, &tm_utc);
char buf[64]; // Ensure sufficient buffer size for different formats
std::strftime(buf, sizeof(buf), format.c_str(), &tm_utc);
return std::string(buf);
}
// Function to convert filesystem time to time_t
std::time_t to_time_t(const fs::file_time_type& ftime) {
using namespace std::chrono;
// Convert to system_clock time_point
auto sctp = time_point_cast<system_clock::duration>(
ftime - fs::file_time_type::clock::now() + system_clock::now()
);
return system_clock::to_time_t(sctp);
}
std::vector<std::string> split_string(const std::string& input, const std::string& delimiter) {
std::vector<std::string> result;
size_t start = 0;
size_t end = 0;
while ((end = input.find(delimiter, start)) != std::string::npos) {
result.emplace_back(input.substr(start, end - start));
start = end + delimiter.length();
}
// Add the remaining part of the string
result.emplace_back(input.substr(start));
return result;
}
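// Behavioural sketch (illustrative only, not part of this commit): empty
// segments are preserved and the text after the final delimiter is always
// appended, so a doubled delimiter produces an empty element.
static void split_string_example() {
    std::vector<std::string> parts = split_string("noble,oracular,,plucky", ",");
    // parts == {"noble", "oracular", "", "plucky"}
    (void)parts; // silence unused-variable warnings in this illustrative helper
}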
std::string remove_suffix(const std::string& input, const std::string& suffix) {
if (input.size() >= suffix.size() &&
input.compare(input.size() - suffix.size(), suffix.size(), suffix) == 0) {
return input.substr(0, input.size() - suffix.size());
}
return input; // Return the original string if the suffix doesn't exist
}
// Utility which basically does the following:
// "noble" (std::string) -> 2504 (int)
// The bool represents whether this codename is the development release
std::pair<int, bool> get_version_from_codename(const std::string& codename) {
std::ifstream file("/usr/share/distro-info/ubuntu.csv");
if (!file.is_open()) {
throw std::runtime_error("Failed to open file.");
}
std::string line;
// Skip the header line
std::getline(file, line);
std::string last_codename;
int version = 0;
while (std::getline(file, line)) {
std::istringstream iss(line);
std::string version_str, name, series;
std::getline(iss, version_str, ',');
std::getline(iss, name, ',');
std::getline(iss, series, ',');
if (series == codename) {
version_str.erase(std::remove(version_str.begin(), version_str.end(), '.'),
version_str.end());
version = std::stoi(version_str);
}
last_codename = series;
}
bool is_last = (codename == last_codename);
if (version == 0) {
throw std::runtime_error("Codename not found.");
}
return {version, is_last};
}
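// Usage sketch (illustrative only): the codename is hypothetical input; the
// returned pair carries the year+month version number and whether the
// codename is the current development release according to ubuntu.csv.
static void get_version_from_codename_example() {
    auto [version, is_devel] = get_version_from_codename("noble");
    // With a stock distro-info, version would be 2404 here, and is_devel would
    // be true only if noble were still the last row of ubuntu.csv.
    (void)version;
    (void)is_devel;
}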
void ensure_git_inited() {
static std::once_flag git_init_flag;
std::call_once(git_init_flag, []() {
git_libgit2_init();
});
}
void run_task_every(std::stop_token _stop_token, int interval_minutes, std::function<void()> task) {
while (!_stop_token.stop_requested()) {
task();
std::this_thread::sleep_for(std::chrono::minutes(interval_minutes));
}
}
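// Usage sketch (illustrative only, assumes <thread> is reachable here): the
// function above is meant to be handed to std::jthread, which supplies the
// stop_token automatically. The interval and task below are made up.
static void run_task_every_example() {
    std::jthread heartbeat(run_task_every, 5, []() {
        std::cout << "heartbeat" << std::endl;
    });
    // When heartbeat goes out of scope its destructor calls request_stop() and
    // join(); with this implementation that still waits out the current sleep
    // interval before the loop observes the stop request.
}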

@ -1,4 +1,4 @@
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@ -18,22 +18,50 @@
#include <string>
#include <filesystem>
#include <mutex>
#include <future>
#include <semaphore>
#include <functional>
#include <git2.h>
// Function to read the entire content of a file into a string
std::string readFile(const std::filesystem::path& filePath);
std::string read_file(const std::filesystem::path& filePath);
// Function to write a string into a file
void writeFile(const std::filesystem::path& filePath, const std::string& content);
void write_file(const std::filesystem::path& filePath, const std::string& content);
// Function to perform in-place regex replace on a file
void regexReplaceInFile(const std::filesystem::path& filePath, const std::string& pattern, const std::string& replace);
void regex_replace_in_file(const std::filesystem::path& filePath, const std::string& pattern, const std::string& replace);
// Function to decompress gzipped files
std::string decompressGzip(const std::filesystem::path& filePath);
std::string decompress_gzip(const std::filesystem::path& filePath);
// Function to download a file with timestamping using libcurl
void downloadFileWithTimestamping(const std::string& url, const std::filesystem::path& outputPath,
void download_file_with_timestamping(const std::string& url, const std::filesystem::path& outputPath,
const std::filesystem::path& logFilePath, std::mutex& logMutex);
// Helper function for libcurl write callback
size_t write_data(void* ptr, size_t size, size_t nmemb, void* stream);
// Function to create a temporary directory with a random name
std::filesystem::path create_temp_directory();
// Function to copy a directory recursively
void copy_directory(const std::filesystem::path& source, const std::filesystem::path& destination);
// Time utilities
std::string get_current_utc_time(const std::string& format);
std::time_t to_time_t(const std::filesystem::file_time_type& ftime);
// String utilities
std::vector<std::string> split_string(const std::string& input, const std::string& delimiter);
std::string remove_suffix(const std::string& input, const std::string& suffix);
std::string generate_random_string(size_t length);
// Get version from codename using distro-info
std::pair<int, bool> get_version_from_codename(const std::string& codename);
// Git utilities
void ensure_git_inited();
void run_task_every(std::stop_token _stop_token, int interval_minutes, std::function<void()> task);

File diff suppressed because it is too large

@ -0,0 +1,54 @@
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#ifndef WEB_SERVER_H
#define WEB_SERVER_H
#include "ci_database_objs.h"
#include "task_queue.h"
#include <QDateTime>
#include <QObject>
#include <QHttpServer>
#include <QMap>
#include <QSqlDatabase>
#include <QString>
#include <QTcpServer>
#include <string>
class WebServer : public QObject {
Q_OBJECT
public:
explicit WebServer(QObject *parent = nullptr);
bool start_server(quint16 port);
private:
[[nodiscard]] std::map<QString, QString> parse_query_parameters(const QString &query);
[[nodiscard]] bool validate_token(const QString& token);
[[nodiscard]] QHttpServerResponse verify_session_token(const QHttpServerRequest &request, const QHttpHeaders &headers);
void load_tokens(QSqlDatabase& p_db);
QHttpServer http_server_;
QTcpServer tcp_server_;
std::unique_ptr<TaskQueue> task_queue;
std::jthread expire_tokens_thread_;
std::jthread process_sources_thread_;
QMap<int, QDateTime> _in_progress_tokens;
QMap<QString, QDateTime> _active_tokens;
QMap<QString, Person> _token_person;
};
#endif // WEB_SERVER_H
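// Usage sketch (illustrative only, not part of this commit): a hypothetical
// standalone entry point showing how the class above is meant to be driven.
// The port number is an assumption; the real executable wiring lives
// elsewhere in this changeset.
#include <QCoreApplication>
#include "web_server.h"

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    WebServer server;
    if (!server.start_server(8080)) {
        return 1;
    }
    return app.exec();
}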

@ -0,0 +1,176 @@
/**
* Get an array of all currently selected repositories (checkboxes).
*/
function getSelectedRepos() {
const checkboxes = document.querySelectorAll('input[name="repoSelect"]:checked');
const repoNames = [];
checkboxes.forEach(cb => repoNames.push(cb.value));
return repoNames;
}
/**
* Show a quick status message on the console (or replace with a fancy UI element).
*/
function showStatus(msg) {
console.log('[STATUS]', msg);
}
/**
* A tiny helper to handle server responses (text) and display them.
*/
function handleServerResponse(text) {
// For simplicity, we just log it. You can also insert it into the DOM if you want.
console.log('[SERVER RESPONSE]', text);
alert(text);
}
///////////////////////////////
// Individual Action Handlers
///////////////////////////////
/**
* Pull a single repository by name, calling /pull?repo=<repoName>.
*/
function doPull(repoName, buttonElem) {
if (!repoName) {
alert('No repo specified!');
return;
}
showStatus(`Pulling repo: ${repoName}...`);
fetch('/pull?repo=' + encodeURIComponent(repoName))
.then(resp => resp.text())
.then(txt => handleServerResponse(txt))
.catch(err => console.error('[ERROR]', err));
}
/**
* Build a single repository by name, calling /build?repo=<repoName>.
*/
function doBuild(repoName, buttonElem) {
if (!repoName) {
alert('No repo specified!');
return;
}
showStatus(`Building repo: ${repoName}...`);
fetch('/build?repo=' + encodeURIComponent(repoName))
.then(resp => resp.text())
.then(txt => handleServerResponse(txt))
.catch(err => console.error('[ERROR]', err));
}
/**
* View logs for a single repository by name, calling /logs?repo=<repoName>.
* This example opens in a new tab. Alternatively, you could fetch and display in a modal.
*/
function doViewLog(repoName, buttonElem) {
if (!repoName) {
alert('No repo specified!');
return;
}
const url = '/logs?repo=' + encodeURIComponent(repoName);
window.open(url, '_blank');
// If you wanted to do a fetch instead:
// fetch(url).then(...) ...
}
/**
* Pull ALL repositories at once, calling /pull-all.
*/
function doPullAll(buttonElem) {
showStatus('Pulling ALL repositories...');
fetch('/pull-all')
.then(resp => resp.text())
.then(txt => handleServerResponse(txt))
.catch(err => console.error('[ERROR]', err));
}
/**
* Build ALL repositories at once, calling /build-all.
*/
function doBuildAll(buttonElem) {
showStatus('Building ALL repositories...');
fetch('/build-all')
.then(resp => resp.text())
.then(txt => handleServerResponse(txt))
.catch(err => console.error('[ERROR]', err));
}
/**
* Pull AND build ALL repositories at once, calling /pull-and-build-all.
*/
function doPullAndBuildAll(buttonElem) {
showStatus('Pulling & building ALL repositories...');
fetch('/pull-and-build-all')
.then(resp => resp.text())
.then(txt => handleServerResponse(txt))
.catch(err => console.error('[ERROR]', err));
}
/**
* Pull the selected repositories, calling /pull-selected?repos=<comma-separated-list>.
*/
function doPullSelected(buttonElem) {
const repos = getSelectedRepos();
if (repos.length === 0) {
alert('No repositories selected!');
return;
}
const query = '/pull-selected?repos=' + encodeURIComponent(repos.join(','));
showStatus('Pulling selected repos: ' + repos.join(', '));
fetch(query)
.then(resp => resp.text())
.then(txt => handleServerResponse(txt))
.catch(err => console.error('[ERROR]', err));
}
/**
* Build the selected repositories, calling /build-selected?repos=<comma-separated-list>.
*/
function doBuildSelected(buttonElem) {
const repos = getSelectedRepos();
if (repos.length === 0) {
alert('No repositories selected!');
return;
}
const query = '/build-selected?repos=' + encodeURIComponent(repos.join(','));
showStatus('Building selected repos: ' + repos.join(', '));
fetch(query)
.then(resp => resp.text())
.then(txt => handleServerResponse(txt))
.catch(err => console.error('[ERROR]', err));
}
/**
* Pull AND build selected repositories, calling /pull-and-build-selected?repos=...
*/
function doPullAndBuildSelected(buttonElem) {
const repos = getSelectedRepos();
if (repos.length === 0) {
alert('No repositories selected!');
return;
}
const query = '/pull-and-build-selected?repos=' + encodeURIComponent(repos.join(','));
showStatus('Pulling & building selected repos: ' + repos.join(', '));
fetch(query)
.then(resp => resp.text())
.then(txt => handleServerResponse(txt))
.catch(err => console.error('[ERROR]', err));
}
///////////////////////////////
// "Select All" checkbox logic
///////////////////////////////
window.addEventListener('DOMContentLoaded', () => {
const selectAllCb = document.getElementById('selectAll');
if (selectAllCb) {
selectAllCb.addEventListener('change', function () {
// Check or uncheck all "repoSelect" checkboxes
const allRepoCbs = document.querySelectorAll('input[name="repoSelect"]');
allRepoCbs.forEach(cb => {
cb.checked = selectAllCb.checked;
});
});
}
});

@ -0,0 +1,31 @@
<!--
Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
...
-->
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>{{ PAGE_TITLE }}</title>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css">
<link rel="stylesheet"
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.7.2/css/all.min.css">
</head>
<body class="bg-light">
<nav class="navbar navbar-expand-lg navbar-light bg-white border-bottom mb-3">
<div class="container-fluid">
<a class="navbar-brand" href="/">Lubuntu CI</a>
</div>
</nav>
<div class="mt-2 px-5">
{{BLOCK content}}
</div>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js"></script>
<script src="/static/main.js"></script>
</body>
</html>

@ -0,0 +1,10 @@
{% extends "base.html" %}
{% block content %}
<div class="alert alert-danger" role="alert">
<h4 class="alert-heading">Error</h4>
<p>{{ERROR_MESSAGE}}</p>
<hr>
<p class="mb-0">Please check your configuration or contact support.</p>
</div>
{% endblock %}

@ -0,0 +1,170 @@
{% extends "base.html" %}
{% block content %}
<script src="https://cdnjs.cloudflare.com/ajax/libs/cytoscape/3.30.4/cytoscape.min.js"></script>
<style>
/* Reset default margins and paddings */
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
/* Set body and html to take full height */
html, body {
height: 100%;
font-family: Arial, sans-serif;
}
/* Container for the entire content */
.container {
display: flex;
flex-direction: column;
align-items: center;
height: 100%;
padding: 20px;
background-color: #f9f9f9;
}
/* Style for the graph container */
#cy {
flex: 1;
width: 100%;
max-width: 1200px;
height: 600px;
border: 1px solid #ccc;
background-color: #fff;
}
/* Style for control buttons */
.controls {
margin-bottom: 10px;
}
.controls button {
padding: 10px 20px;
margin: 0 5px;
font-size: 16px;
cursor: pointer;
border: none;
background-color: #0074D9;
color: #fff;
border-radius: 4px;
transition: background-color 0.3s;
}
.controls button:hover {
background-color: #005fa3;
}
</style>
<h1>{{ PAGE_TITLE }}</h1>
<div class="container">
<!-- Control Buttons -->
<div class="controls">
<button id="zoom-in">Zoom In</button>
<button id="zoom-out">Zoom Out</button>
<button id="reset">Reset Zoom</button>
</div>
<!-- Graph Container -->
<div id="cy"></div>
</div>
<script>
// Wait for the DOM to fully load
document.addEventListener('DOMContentLoaded', function() {
// Initialize Cytoscape with elements and style
const data = {{ GRAPH_JSON }};
const elements = [];
// Iterate through each key-value pair in the JSON object
for (const [key, values] of Object.entries(data)) {
// Add a node for the key
elements.push({ data: { id: key, label: key } });
// Add nodes and edges for each value
values.forEach(value => {
elements.push({ data: { id: value, label: value } }); // Node for value
elements.push({ data: { source: value, target: key } }); // Edge from value to key
});
}
var cy = cytoscape({
container: document.getElementById('cy'), // Container to render in
elements: elements,
style: [ // Styling for nodes and edges
{
selector: 'node',
style: {
'background-color': '#0074D9',
'label': 'data(label)',
'color': '#fff',
'text-valign': 'center',
'text-halign': 'center',
'font-size': '10px',
'width': '60px',
'height': '60px'
}
},
{
selector: 'edge',
style: {
'width': 2,
'line-color': '#ccc',
'target-arrow-color': '#ccc',
'target-arrow-shape': 'triangle',
'curve-style': 'bezier'
}
},
{
selector: ':selected',
style: {
'background-color': '#FF4136',
'line-color': '#FF4136',
'target-arrow-color': '#FF4136',
'source-arrow-color': '#FF4136'
}
}
],
layout: {
name: 'breadthfirst',
directed: true,
spacingFactor: 2.75,
animate: true
}
});
// Fit the graph within the container
cy.on('layoutready', function(){
cy.fit(cy.elements(), 50);
});
// Optional: Add interactivity
cy.on('tap', 'node', function(evt){
var node = evt.target;
alert('Tapped node: ' + node.id());
});
// Zoom and Pan Controls
document.getElementById('zoom-in').addEventListener('click', function(){
cy.zoom({
level: cy.zoom() * 1.2, // Zoom in by 20%
renderedPosition: { x: cy.width() / 2, y: cy.height() / 2 }
});
});
document.getElementById('zoom-out').addEventListener('click', function(){
cy.zoom({
level: cy.zoom() / 1.2, // Zoom out by ~16.7%
renderedPosition: { x: cy.width() / 2, y: cy.height() / 2 }
});
});
document.getElementById('reset').addEventListener('click', function(){
cy.fit(cy.elements(), 50); // Fit the graph to the container with padding
});
});
</script>
{% endblock %}

@ -0,0 +1,155 @@
{% extends "base.html" %}
{% block content %}
<h1>{{PAGE_TITLE}}</h1>
<p class="lead">Below is the list of repositories we can build &amp; pull.</p>
<div class="btn-group mb-3" role="group">
<button class="btn btn-lg btn-success" onclick="doPullAll(this)">Pull All</button>
<button class="btn btn-lg btn-secondary" onclick="doBuildAll(this)">Build All</button>
<button class="btn btn-lg btn-dark" onclick="doPullAndBuildAll(this)">Pull &amp; Build All</button>
</div>
<hr/>
<div class="d-flex justify-content-between align-items-center mb-3">
<div class="fw-bold">Page {{page}} of {{total_pages}}</div>
<nav>
<ul class="pagination pagination-sm mb-0">
{% if page>1 %}<li class="page-item"><a class="page-link" href="?page={{page|add:-1}}&sort_by={{sort_by}}&sort_order={{sort_order}}" aria-label="Previous"><span aria-hidden="true">&laquo;</span></a></li>{% endif %}
{% for p in pages %}<li class="page-item{% if p==page %}active{% endif %}"><a class="page-link" href="?page={{p}}&sort_by={{sort_by}}&sort_order={{sort_order}}">{{p}}</a></li>{% endfor %}
{% if page<total_pages %}<li class="page-item"><a class="page-link" href="?page={{page|add:1}}&sort_by={{sort_by}}&sort_order={{sort_order}}" aria-label="Next"><span aria-hidden="true">&raquo;</span></a></li>{% endif %}
</ul>
</nav>
</div>
<table class="table table-striped">
<thead>
<tr>
<th><input style="width: 1.5em; height: 1.5em;" type="checkbox" id="selectAll"></th>
<th class="align-middle">
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=branch_name&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}">Branch{% if sort_by=='branch' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='branch' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
</th>
<th class="align-middle">
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=codename&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}">Branch{% if sort_by=='codename' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='codename' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
</th>
<th class="align-middle">
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=name&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}">Repository{% if sort_by=='name' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='name' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
</th>
<th class="align-middle">
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=packaging_commit&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}"><i class="fa-brands fa-git-alt"></i> Latest Packaging Commit{% if sort_by=='packaging_commit' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='packaging_commit' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
</th>
<th class="align-middle">
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=upstream_commit&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}"><i class="fa-brands fa-git-alt"></i> Latest Upstream Commit{% if sort_by=='upstream_commit' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='upstream_commit' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
</th>
<th class="align-middle">
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=build_status&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}"><i class="fa-brands fa-git-alt"></i> Build Status{% if sort_by=='build_status' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='build_status' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
</th>
<th class="align-middle">Actions</th>
</tr>
</thead>
<tbody>
{% for repo in repos %}<tr>
<td class="align-middle"><input style="width: 1.25em; height: 1.25em;" type="checkbox" name="repoSelect" value="{{repo.id}}"></td>
<td class="align-middle">{{repo.branch_name}}</td>
<td class="align-middle">{{repo.codename}}</td>
<td class="align-middle">{{repo.name}}</td>
<td class="align-middle">
{% if repo.packaging_commit != "" %}
<a href="{{repo.packaging_commit_url}}">
<i class="fa-solid fa-code-commit"></i> {{repo.packaging_commit}}
</a>
{% else %}
No commit found.
{% endif %}
</td>
<td class="align-middle">
{% if repo.upstream_commit != "" %}
<a href="{{repo.upstream_commit_url}}">
<i class="fa-solid fa-code-commit"></i> {{repo.upstream_commit}}
</a>
{% else %}
No commit found.
{% endif %}
</td>
<td>
<table class="table text-center">
<tbody>
<tr>
{% if repo.pull_class != "" %}
<td>
<div class="justify-content-center align-items-center align-middle {{repo.pull_class}} text-white">
Pull
</div>
</td>
{% endif %}
{% if repo.tarball_class != "" %}
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
<td>
<div class="justify-content-center align-items-center align-middle {{repo.tarball_class}} text-white">
Tarball
</div>
</td>
{% endif %}
{% if repo.source_build_class != "" %}
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
<td>
<div class="justify-content-center align-items-center align-middle {{repo.source_build_class}} text-white">
Source Build
</div>
</td>
{% endif %}
{% if repo.upload_class != "" %}
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
<td>
<div class="justify-content-center align-items-center align-middle {{repo.upload_class}} text-white">
Upload
</div>
</td>
{% endif %}
</tr>
<tr>
{% if repo.source_check_class != "" %}
<td>
<div class="justify-content-center align-items-center align-middle {{repo.source_check_class}} text-white">
Source Check
</div>
</td>
{% endif %}
{% if repo.build_check_class != "" %}
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
<td>
<div class="justify-content-center align-items-center align-middle {{repo.build_check_class}} text-white">
Build Check
</div>
</td>
{% endif %}
{% if repo.lintian_class != "" %}
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
<td>
<div class="justify-content-center align-items-center align-middle {{repo.lintian_class}} text-white">
Lintian
</div>
</td>
{% endif %}
{% if repo.britney_class != "" %}
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
<td>
<div class="justify-content-center align-items-center align-middle {{repo.britney_class}} text-white">
Britney
</div>
</td>
{% endif %}
</tr>
</tbody>
</table>
</td>
<td class="align-middle">
<button class="btn btn-outline-secondary" onclick="doPull('{{repo.id}}', this)">Pull</button>
<button class="btn btn-primary" onclick="doBuild('{{repo.id}}', this)">Build</button>
<button class="btn btn-secondary" onclick="doViewLog('{{repo.id}}', this)">View Log</button>
</td>
</tr>{% endfor %}
</tbody>
</table>
<div class="btn-group" role="group">
<button class="btn btn-lg btn-primary" onclick="doPullSelected(this)">Pull Selected</button>
<button class="btn btn-lg btn-success" onclick="doBuildSelected(this)">Build Selected</button>
<button class="btn btn-lg btn-info" onclick="doPullAndBuildSelected(this)">Pull &amp; Build Selected</button>
</div>
{% endblock %}

@ -0,0 +1,142 @@
{% extends "base.html" %}
{% block content %}
<h1>{{PAGE_TITLE}}</h1>
<ul class="nav nav-tabs">
<li class="nav-item">
{% if PAGE_TYPE == 'running' %}
<a class="nav-link active" aria-current="page" href="/tasks">Running</a>
{% else %}
<a class="nav-link" href="/tasks">Running</a>
{% endif %}
</li>
<li class="nav-item">
{% if PAGE_TYPE == 'queued' %}
<a class="nav-link active" aria-current="page" href="/tasks?type=queued">Queued</a>
{% else %}
<a class="nav-link" href="/tasks?type=queued">Queued</a>
{% endif %}
</li>
<li class="nav-item">
{% if PAGE_TYPE == 'complete' %}
<a class="nav-link active" aria-current="page" href="/tasks?type=complete">Complete</a>
{% else %}
<a class="nav-link" href="/tasks?type=complete">Complete</a>
{% endif %}
</li>
</ul>
<div class="table-responsive">
<table class="table table-striped table-bordered">
<thead class="table-dark">
<tr>
<th scope="col">Score</th>
<th scope="col">Queued</th>
<th scope="col">Package</th>
<th scope="col">Status</th>
{% if PAGE_TYPE != 'queued' %}
<th scope="col">Log</th>
{% endif %}
</tr>
</thead>
<tbody>
{% for task in tasks %}
<tr>
<td>{{ task.score }}</td>
<td>
{% if PAGE_TYPE == 'running' %}
Started at <span data-timestamp="{{ task.start_timestamp }}"></span><br />
(Duration: <span data-timedelta="{{ task.running_timedelta }}"></span>)
{% elif PAGE_TYPE == 'queued' %}
<span data-timestamp="{{ task.queued_timestamp }}"></span>
{% else %}
{% if task.successful == 'true' %}
<i class="fas fa-check" style="color: green;"></i> <b>Task Succeeded</b><br />
{% else %}
<i class="fas fa-times-circle" style="color: red;"></i> <b>Task Failed</b><br />
{% endif %}
Started at <span data-timestamp="{{ task.start_timestamp }}"></span><br />
Finished at <span data-timestamp="{{ task.finish_timestamp }}"></span><br />
(Duration: <span data-timedelta="{{ task.running_timedelta }}"></span>)
{% endif %}
</td>
<td>
Name: {{ task.package_name }}<br />
Release: {{ task.package_codename }}
</td>
<td>{{ task.job_status }}</td>
{% if PAGE_TYPE != 'queued' %}
<td>
<div class="bg-light font-monospace p-2 rounded overflow-scroll" style="max-height: 15em; white-space: pre-wrap;">{{ task.log }}</div>
</td>
{% endif %}
</tr>
{% endfor %}
</tbody>
</table>
</div>
<script>
function formatDuration(ms) {
if (typeof ms !== "number" || ms < 0) {
throw new Error("Input must be a non-negative number representing milliseconds.");
}
// statics
const millisecondsInOneSecond = 1000;
const millisecondsInOneMinute = 60 * millisecondsInOneSecond;
const millisecondsInOneHour = 60 * millisecondsInOneMinute;
const millisecondsInOneDay = 24 * millisecondsInOneHour;
// calculate
const days = Math.floor(ms / millisecondsInOneDay);
const hours = Math.floor((ms % millisecondsInOneDay) / millisecondsInOneHour);
const minutes = Math.floor((ms % millisecondsInOneHour) / millisecondsInOneMinute);
const seconds = Math.floor((ms % millisecondsInOneMinute) / millisecondsInOneSecond);
const milliseconds = ms % millisecondsInOneSecond;
/**
* Pads a number with leading zeros to achieve the desired length.
*
* @param {number} num - The number to pad.
* @param {number} size - The desired string length.
* @returns {string} - The padded string.
*/
const pad = (num, size) => {
let s = num.toString();
while (s.length < size) s = "0" + s;
return s;
};
// Construct the formatted string
let formatted = "";
if (days > 0) {
formatted += `${days}:`;
}
formatted += `${pad(hours, 2)}:${pad(minutes, 2)}:${pad(seconds, 2)}.${pad(milliseconds, 3)}`;
return formatted;
}
document.querySelectorAll("[data-timestamp]").forEach((el) => {
const timestamp = parseInt(el.getAttribute("data-timestamp"), 10);
if (!isNaN(timestamp)) {
const date = new Date(timestamp);
const formattedDate = date.toLocaleString(undefined, {
timeZoneName: "short"
});
el.textContent = formattedDate;
}
});
document.querySelectorAll("[data-timedelta]").forEach((el) => {
const timestamp = parseInt(el.getAttribute("data-timedelta"));
if (!isNaN(timestamp)) {
el.textContent = formatDuration(timestamp);
}
});
</script>
{% endblock %}