diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt index 73a2937..539640e 100644 --- a/cpp/CMakeLists.txt +++ b/cpp/CMakeLists.txt @@ -1,63 +1,141 @@ -cmake_minimum_required(VERSION 3.21) -project(lubuntuci CXX) +cmake_minimum_required(VERSION 3.16) +project(lubuntu_ci_all CXX) +set(CMAKE_AUTOMOC ON) set(CMAKE_CXX_STANDARD 23) set(CMAKE_CXX_STANDARD_REQUIRED ON) -set(CMAKE_CXX_EXTENSIONS OFF) +set(CMAKE_BUILD_TYPE Debug CACHE STRING "Build type" FORCE) -set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib) -set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin) +# +# Allow the user to override LAUNCHPAD_CPP_INCLUDE_DIR/LAUNCHPAD_CPP_LIBRARY +# +if (NOT DEFINED LAUNCHPAD_CPP_INCLUDE_DIR) + set(LAUNCHPAD_CPP_INCLUDE_DIR "/srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp") +endif() +if (NOT DEFINED LAUNCHPAD_CPP_LIBRARY) + set(LAUNCHPAD_CPP_LIBRARY "/srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so") +endif() -find_package(yaml-cpp REQUIRED) +find_package(Qt6 REQUIRED COMPONENTS Core HttpServer Sql) find_package(PkgConfig REQUIRED) -pkg_check_modules(LIBGIT2 REQUIRED IMPORTED_TARGET libgit2) -find_package(CURL REQUIRED) -find_library(UUID_LIB uuid) -find_package(ZLIB REQUIRED) - +find_package(yaml-cpp REQUIRED) pkg_check_modules(LIBARCHIVE REQUIRED libarchive) -include_directories(${LIBARCHIVE_INCLUDE_DIRS}) -link_directories(${LIBARCHIVE_LIBRARY_DIRS}) -add_definitions(${LIBARCHIVE_CFLAGS_OTHER}) +pkg_check_modules(LIBGIT2 REQUIRED libgit2) +find_package(ZLIB REQUIRED) +find_package(CURL REQUIRED) +set(UUID_LIB "uuid") -include_directories(/srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp) +# +# 1. 
The main library: lubuntuci_lib +# +add_library(lubuntuci_lib SHARED + common.cpp + utilities.cpp + ci_logic.cpp + ci_database_objs.cpp + lubuntuci_lib.cpp + task_queue.cpp + template_renderer.cpp + web_server.cpp + sources_parser.cpp + naive_bayes_classifier.cpp +) -add_library(lubuntuci SHARED common.cpp utilities.cpp) -target_include_directories(lubuntuci PUBLIC ${CMAKE_CURRENT_SOURCE_DIR} ${LIBARCHIVE_INCLUDE_DIRS}) -target_link_libraries(lubuntuci PUBLIC yaml-cpp::yaml-cpp PRIVATE CURL::libcurl ${LIBARCHIVE_LIBRARIES} /srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so) +target_include_directories(lubuntuci_lib PUBLIC + ${CMAKE_CURRENT_SOURCE_DIR} + "${LAUNCHPAD_CPP_INCLUDE_DIR}" +) -add_library(update_maintainer_lib STATIC update-maintainer-lib.cpp) -target_include_directories(update_maintainer_lib PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp) -target_link_libraries(update_maintainer_lib PRIVATE lubuntuci yaml-cpp::yaml-cpp CURL::libcurl) +target_link_libraries(lubuntuci_lib + Qt6::Core + Qt6::HttpServer + Qt6::Sql + yaml-cpp + ${LIBARCHIVE_LIBRARIES} + ${LIBGIT2_LIBRARIES} + "${LAUNCHPAD_CPP_LIBRARY}" + ZLIB::ZLIB + CURL::libcurl + ${UUID_LIB} +) -add_executable(update-maintainer update-maintainer.cpp) -target_link_libraries(update-maintainer PRIVATE update_maintainer_lib) +# +# 2. 
The update-maintainer-lib library +# +add_library(update_maintainer_lib STATIC + update-maintainer-lib.cpp +) +target_include_directories(update_maintainer_lib PRIVATE + "${LAUNCHPAD_CPP_INCLUDE_DIR}" +) +target_link_libraries(update_maintainer_lib + lubuntuci_lib + yaml-cpp + CURL::libcurl + ${LIBARCHIVE_LIBRARIES} + ${LIBGIT2_LIBRARIES} + ZLIB::ZLIB + "${LAUNCHPAD_CPP_LIBRARY}" + ${UUID_LIB} +) -add_executable(build-packages build-packages.cpp) -target_include_directories(build-packages PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp) -target_link_libraries(build-packages PRIVATE lubuntuci PkgConfig::LIBGIT2 update_maintainer_lib yaml-cpp::yaml-cpp) +# +# 3. Build each executable +# -add_executable(fetch-indexes fetch-indexes.cpp utilities.cpp) -target_include_directories(fetch-indexes PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp) -target_link_libraries(fetch-indexes PRIVATE lubuntuci CURL::libcurl yaml-cpp::yaml-cpp ZLIB::ZLIB /srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so) +add_executable(update-maintainer update-maintainer.cpp) +target_link_libraries(update-maintainer + lubuntuci_lib + update_maintainer_lib + yaml-cpp + ${LIBARCHIVE_LIBRARIES} + ${LIBGIT2_LIBRARIES} + ZLIB::ZLIB + CURL::libcurl + "${LAUNCHPAD_CPP_LIBRARY}" + ${UUID_LIB} +) add_executable(lintian-ppa lintian-ppa.cpp) -target_include_directories(lintian-ppa PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp) -target_link_libraries(lintian-ppa PRIVATE lubuntuci ${UUID_LIB} /srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so) - -set_target_properties(lubuntuci build-packages fetch-indexes update-maintainer lintian-ppa PROPERTIES - BUILD_WITH_INSTALL_RPATH TRUE - INSTALL_RPATH "$ORIGIN/lib" +target_link_libraries(lintian-ppa + lubuntuci_lib + yaml-cpp + ${LIBARCHIVE_LIBRARIES} + ${LIBGIT2_LIBRARIES} + "${LAUNCHPAD_CPP_LIBRARY}" + ZLIB::ZLIB + CURL::libcurl + ${UUID_LIB} ) -install(TARGETS lubuntuci - LIBRARY DESTINATION lib 
+add_executable(fetch-indexes fetch-indexes.cpp) +target_link_libraries(fetch-indexes + lubuntuci_lib + yaml-cpp + ${LIBARCHIVE_LIBRARIES} + ${LIBGIT2_LIBRARIES} + "${LAUNCHPAD_CPP_LIBRARY}" + ZLIB::ZLIB + CURL::libcurl + ${UUID_LIB} ) -install(TARGETS build-packages fetch-indexes update-maintainer lintian-ppa - RUNTIME DESTINATION . +add_executable(web_ui main.cpp) +target_link_libraries(web_ui + lubuntuci_lib + yaml-cpp + ${LIBARCHIVE_LIBRARIES} + ${LIBGIT2_LIBRARIES} + ZLIB::ZLIB + CURL::libcurl + "${LAUNCHPAD_CPP_LIBRARY}" + ${UUID_LIB} ) -install(FILES common.h update-maintainer-lib.h utilities.h - DESTINATION include/lubuntuci -) +# +# Copy templates +# +file(COPY ../templates + DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) +file(COPY ../static + DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) diff --git a/cpp/build-packages.cpp b/cpp/build-packages.cpp deleted file mode 100644 index dafafbd..0000000 --- a/cpp/build-packages.cpp +++ /dev/null @@ -1,1138 +0,0 @@ -// Copyright (C) 2024 Simon Quigley -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . 
- -#include "common.h" -#include "update-maintainer-lib.h" -#include "utilities.h" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include - -namespace fs = std::filesystem; - -// Mutex to protect access to the repo_mutexes map -static std::mutex repo_map_mutex; - -// Map to hold mutexes for each repository path -static std::unordered_map repo_mutexes; -static std::mutex& get_repo_mutex(const fs::path& repo_path); - -// Mutex to protect access to the dput_futures vector -static std::mutex dput_futures_mutex; - -// Vector to store dput futures -static std::vector> dput_futures; - -// Mutex and map to store failed packages and their reasons -static std::mutex failures_mutex; -static std::map failed_packages; - -// Struct to represent a package -struct Package { - std::string name; - std::string upload_target; - std::string upstream_url; - std::string packaging_url; - std::optional packaging_branch; - bool large; - std::vector changes_files; - std::vector devel_changes_files; -}; - -static const std::string BASE_DIR = "/srv/lubuntu-ci/repos"; -static const std::string DEBFULLNAME = "Lugito"; -static const std::string DEBEMAIL = "info@lubuntu.me"; -static const std::string OUTPUT_DIR = BASE_DIR + "/build_output"; -static const std::vector SUPPRESSED_LINTIAN_TAGS = { - "orig-tarball-missing-upstream-signature", - "package-has-long-file-name", - "adopted-extended-field" -}; -static const std::string BASE_OUTPUT_DIR = "/srv/lubuntu-ci/output"; -static const std::string LOG_DIR = BASE_OUTPUT_DIR + "/logs/source_builds"; -static std::string BASE_LINTIAN_DIR; -static const std::string REAL_LINTIAN_DIR = BASE_OUTPUT_DIR + "/lintian"; -static std::string urgency_level_override = "low"; -static int worker_count = 5; - -static bool verbose = false; -static std::ofstream 
log_file_stream; - -// Function to get the current UTC time as a formatted string -std::string get_current_utc_time() { - auto now = std::chrono::system_clock::now(); - std::time_t now_c = std::chrono::system_clock::to_time_t(now); - std::tm tm_utc; - gmtime_r(&now_c, &tm_utc); - char buffer[20]; - std::strftime(buffer, sizeof(buffer), "%Y-%m-%dT%H:%M:%SZ", &tm_utc); - return std::string(buffer); -} - -// Logging functions -static void log_all(const std::string &level, const std::string &msg, bool is_error = false) { - std::string timestamp = get_current_utc_time(); - std::string full_msg = "[" + timestamp + "] [" + level + "] " + msg + "\n"; - - if (is_error) { - std::cerr << full_msg; - } else if (level != "VERBOSE") { - std::cout << full_msg; - } - - if (log_file_stream.is_open()) { - log_file_stream << full_msg; - log_file_stream.flush(); - } -} - -static void log_info(const std::string &msg) { - log_all("INFO", msg); -} - -static void log_warning(const std::string &msg) { - log_all("WARN", msg, false); -} - -static void log_error(const std::string &msg) { - log_all("ERROR", msg, true); -} - -static void log_verbose(const std::string &msg) { - if (verbose) { - log_all("VERBOSE", msg); - } -} - -static void print_help(const std::string &prog_name) { - std::cout << "Usage: " << prog_name << " [OPTIONS] \n" - << "Options:\n" - << " --skip-dput Skip uploading changes with dput.\n" - << " --skip-cleanup Skip cleaning up the output directory after execution.\n" - << " --urgency-level=LEVEL Set the urgency level (default: low).\n" - << " --workers=N Set the number of worker threads (default: 5).\n" - << " --verbose, -v Enable verbose logging.\n" - << " --help, -h Display this help message.\n"; -} - -// Function to run a command silently and throw an exception on failure -static void run_command_silent_on_success(const std::vector &cmd, const std::optional &cwd = std::nullopt) { - semaphore_guard guard(semaphore); - - std::string command_str = 
std::accumulate(cmd.begin(), cmd.end(), std::string(), - [](const std::string &a, const std::string &b) -> std::string { return a + (a.empty() ? "" : " ") + b; }); - - log_info("Running command: " + command_str); - if(cwd) { - log_info("Executing in directory: " + cwd->string()); - } - - std::string exec_cmd = command_str; - if(cwd) exec_cmd = "cd " + cwd->string() + " && " + exec_cmd; - - FILE* pipe = popen(exec_cmd.c_str(), "r"); - if(!pipe) { - log_error("Failed to run command: " + command_str); - throw std::runtime_error("Command failed to start"); - } - std::stringstream ss; - { - char buffer[256]; - while(fgets(buffer, sizeof(buffer), pipe)) { - ss << buffer; - } - } - int ret = pclose(pipe); - if (ret != 0) { - log_error("Command failed with code " + std::to_string(ret) + ": " + command_str); - log_error("Output:\n" + ss.str()); - throw std::runtime_error("Command execution failed"); - } else { - log_verbose("Command executed successfully: " + command_str); - } -} - -static void git_init_once() { - static std::once_flag flag; - std::call_once(flag, [](){ - log_info("Initializing libgit2"); - git_libgit2_init(); - log_verbose("libgit2 initialized"); - }); -} - -static int submodule_update_callback(git_submodule *sm, const char *name, void *payload) { - // We'll just update + log - if (!name) { - log_info("Processing submodule: (unknown)"); - } else { - log_info("Processing submodule: " + std::string(name)); - } - - if (git_submodule_update(sm, /*init=*/1, /*opts=*/nullptr) != 0) { - const git_error *e = git_error_last(); - std::string err_msg = (e && e->message) ? e->message : "unknown error"; - log_error("Failed to update submodule " + std::string(name ? 
name : "unknown") + ": " + err_msg); - // Return a non-zero error so git_submodule_foreach() can short-circuit - return -1; - } - return 0; -} - -static void update_submodules_for_repo(git_repository* repo_ptr, const std::string& repo_label) { - log_info("Updating submodules for repository: " + repo_label); - if (git_submodule_foreach(repo_ptr, submodule_update_callback, nullptr) == 0) { - log_info("Submodules processed successfully."); - } else { - log_warning("One or more submodules failed to update."); - } -} - -static void git_fetch_and_checkout(const fs::path& repo_path, const std::string& repo_url, const std::optional& branch) { - log_info("Fetching and checking out repository: " + repo_url + " into " + repo_path.string()); - git_init_once(); - - auto repo_deleter = [](git_repository* r) { if (r) git_repository_free(r); }; - std::unique_ptr repo(nullptr, repo_deleter); - - bool need_clone = false; - - if (fs::exists(repo_path)) { - log_verbose("Repository path exists. Attempting to open repository."); - git_repository* raw_repo = nullptr; - int err = git_repository_open(&raw_repo, repo_path.string().c_str()); - if (err < 0) { - log_warning("Cannot open repo at " + repo_path.string() + ", recloning."); - fs::remove_all(repo_path); - need_clone = true; - } else { - repo.reset(raw_repo); - log_verbose("Repository opened successfully."); - } - } else { - log_verbose("Repository path does not exist. 
Cloning required."); - need_clone = true; - } - - if (need_clone) { - log_info("Cloning repository from " + repo_url + " to " + repo_path.string()); - git_repository* raw_newrepo = nullptr; - git_clone_options clone_opts = GIT_CLONE_OPTIONS_INIT; - git_checkout_options co_opts = GIT_CHECKOUT_OPTIONS_INIT; - co_opts.checkout_strategy = GIT_CHECKOUT_FORCE; - clone_opts.checkout_opts = co_opts; - clone_opts.fetch_opts = GIT_FETCH_OPTIONS_INIT; - - if (git_clone(&raw_newrepo, repo_url.c_str(), repo_path.string().c_str(), &clone_opts) != 0) { - const git_error* e = git_error_last(); - log_error(std::string("Git clone failed: ") + (e ? e->message : "unknown error")); - throw std::runtime_error("Git clone failed"); - } - repo.reset(raw_newrepo); - log_info("Repository cloned successfully."); - - // Checkout branch if specified - if (branch) { - std::string fullbranch = "refs/remotes/origin/" + *branch; - git_object* target = nullptr; - if (git_revparse_single(&target, repo.get(), fullbranch.c_str()) == 0) { - git_checkout_options co_opts2 = GIT_CHECKOUT_OPTIONS_INIT; - co_opts2.checkout_strategy = GIT_CHECKOUT_FORCE; - if (git_checkout_tree(repo.get(), target, &co_opts2) != 0) { - const git_error* e = git_error_last(); - log_error(std::string("Git checkout failed: ") + (e ? 
e->message : "unknown error")); - git_object_free(target); - throw std::runtime_error("Git checkout failed"); - } - git_repository_set_head(repo.get(), fullbranch.c_str()); - git_object_free(target); - log_info("Checked out branch: " + *branch); - } else { - log_warning("Branch " + *branch + " not found."); - } - } - - // Initialize and update submodules - log_info("Initializing and updating submodules for cloned repository."); - update_submodules_for_repo(repo.get(), repo_path.string()); - - } else if (repo) { - // Remote validation and fetch - log_verbose("Validating remote origin."); - auto remote_deleter = [](git_remote* r) { if (r) git_remote_free(r); }; - std::unique_ptr remote(nullptr, remote_deleter); - git_remote* raw_remote = nullptr; - - int err = git_remote_lookup(&raw_remote, repo.get(), "origin"); - if (err < 0) { - log_warning("No origin remote found. Recloning."); - fs::remove_all(repo_path); - throw std::runtime_error("No origin remote found."); - } - remote.reset(raw_remote); - const char* url = git_remote_url(remote.get()); - if (!url || repo_url != url) { - log_warning("Remote URL differs. Recloning."); - fs::remove_all(repo_path); - throw std::runtime_error("Remote URL mismatch."); - } - - // Fetch changes - log_verbose("Fetching latest changes."); - git_fetch_options fetch_opts = GIT_FETCH_OPTIONS_INIT; - if (git_remote_fetch(remote.get(), nullptr, &fetch_opts, nullptr) != 0) { - const git_error* e = git_error_last(); - log_error(std::string("Git fetch failed: ") + (e ? 
e->message : "unknown error")); - throw std::runtime_error("Git fetch failed"); - } - - log_verbose("Fetch completed."); - - // Update submodules after fetch - log_info("Updating submodules after fetching."); - update_submodules_for_repo(repo.get(), repo_path.string()); - } - log_verbose("Completed fetch and checkout for: " + repo_path.string()); -} - -static YAML::Node load_config(const fs::path &config_path) { - log_info("Loading configuration from " + config_path.string()); - YAML::Node config = YAML::LoadFile(config_path.string()); - if (!config["packages"] || !config["releases"]) { - log_error("Config file missing 'packages' or 'releases' sections."); - throw std::runtime_error("Config file must contain 'packages' and 'releases' sections."); - } - log_verbose("Configuration loaded successfully."); - return config; -} - -static void publish_lintian() { - log_info("Publishing Lintian results."); - if(!BASE_LINTIAN_DIR.empty() && fs::exists(BASE_LINTIAN_DIR)) { - for (auto &p : fs::recursive_directory_iterator(BASE_LINTIAN_DIR)) { - if (fs::is_regular_file(p)) { - fs::path rel = fs::relative(p.path(), BASE_LINTIAN_DIR); - fs::path dest = fs::path(REAL_LINTIAN_DIR) / rel; - fs::create_directories(dest.parent_path()); - std::error_code ec; - fs::copy_file(p.path(), dest, fs::copy_options::overwrite_existing, ec); - if(ec) { - log_error("Failed to copy Lintian file: " + p.path().string() + " to " + dest.string() + ". 
Error: " + ec.message()); - } else { - log_verbose("Copied Lintian file: " + p.path().string() + " to " + dest.string()); - } - } - } - fs::remove_all(BASE_LINTIAN_DIR); - log_info("Removed temporary Lintian directory: " + BASE_LINTIAN_DIR); - } else { - log_verbose("No Lintian directory to publish."); - } -} - -static std::vector get_exclusions(const fs::path &packaging) { - log_verbose("Retrieving exclusions from: " + packaging.string()); - std::vector exclusions = {".git"}; - fs::path cpr = packaging / "debian" / "copyright"; - if(!fs::exists(cpr)) { - log_verbose("No copyright file found."); - return exclusions; - } - - std::ifstream f(cpr); - if(!f) { - log_warning("Failed to open copyright file."); - return exclusions; - } - std::string line; - bool found = false; - while(std::getline(f, line)) { - if (line.find("Files-Excluded:") != std::string::npos) { - log_verbose("Found 'Files-Excluded' in copyright."); - size_t pos = line.find(':'); - if(pos != std::string::npos) { - std::string excl = line.substr(pos + 1); - std::istringstream iss(excl); - std::string token; - while(iss >> token) { - exclusions.push_back(token); - log_verbose("Exclusion added: " + token); - } - } - found = true; - break; - } - } - if(!found) { - log_verbose("'Files-Excluded' not found in copyright."); - } - return exclusions; -} - -static void run_source_lintian(const std::string &name, const fs::path &source_path) { - semaphore_guard guard(semaphore); - log_info("Running Lintian for package: " + name); - fs::path temp_file = fs::temp_directory_path() / ("lintian_suppress_" + name + ".txt"); - { - std::ofstream of(temp_file); - for (auto &tag: SUPPRESSED_LINTIAN_TAGS) { - of << tag << "\n"; - } - } - log_verbose("Created Lintian suppression file: " + temp_file.string()); - std::string cmd = "lintian -EvIL +pedantic --suppress-tags-from-file " + temp_file.string() + " " + source_path.string() + " 2>&1"; - FILE* pipe = popen(cmd.c_str(), "r"); - std::stringstream ss; - if(pipe) { - char 
buffer[256]; - while(fgets(buffer, sizeof(buffer), pipe)) { - ss << buffer; - } - int ret = pclose(pipe); - fs::remove(temp_file); - log_verbose("Lintian command exited with code: " + std::to_string(ret)); - if(ret != 0) { - log_error("Lintian reported issues for " + name + ":\n" + ss.str()); - if(!ss.str().empty()) { - fs::path pkgdir = fs::path(BASE_LINTIAN_DIR) / name; - fs::create_directories(pkgdir); - std::ofstream out(pkgdir / "source.txt", std::ios::app); - out << ss.str() << "\n"; - } - } else { - if(!ss.str().empty()) { - fs::path pkgdir = fs::path(BASE_LINTIAN_DIR) / name; - fs::create_directories(pkgdir); - std::ofstream out(pkgdir / "source.txt", std::ios::app); - out << ss.str() << "\n"; - } - } - } else { - fs::remove(temp_file); - log_error("Failed to run Lintian for package: " + name); - } - log_verbose("Completed Lintian run for package: " + name); -} - -// Function to upload changes with dput -static void dput_source(const std::string &name, const std::string &upload_target, - const std::vector &changes_files, - const std::vector &devel_changes_files) { - log_info("Uploading changes for package: " + name + " to " + upload_target); - if(!changes_files.empty()) { - std::string hr_changes; - for(auto &c: changes_files) hr_changes += c + " "; - log_verbose("Changes files: " + hr_changes); - std::vector cmd = {"dput", upload_target}; - for(auto &c: changes_files) cmd.push_back(c); - try { - run_command_silent_on_success(cmd, OUTPUT_DIR); - log_info("Successfully uploaded changes for package: " + name); - for(auto &file: devel_changes_files) { - if(!file.empty()) { - run_source_lintian(name, file); - } - } - } catch (...) { - log_warning("dput to " + upload_target + " failed. 
Trying ssh-ppa."); - std::string ssh_upload_target = "ssh-" + upload_target; - std::vector ssh_cmd = {"dput", ssh_upload_target}; - for(auto &c: changes_files) ssh_cmd.push_back(c); - try { - run_command_silent_on_success(ssh_cmd, OUTPUT_DIR); - log_info("Successfully uploaded changes for package: " + name + " using ssh-ppa."); - for(auto &file: devel_changes_files) { - if(!file.empty()) { - run_source_lintian(name, file); - } - } - } catch (...) { - log_error("Failed to upload changes for package: " + name + " with both dput commands."); - // Record the failure - std::lock_guard lock_fail(failures_mutex); - failed_packages[name] = "Failed to upload changes with dput and ssh-dput."; - } - } - } else { - log_warning("No changes files to upload for package: " + name); - } -} - -// Function to update the changelog -static void update_changelog(const fs::path &packaging_dir, const std::string &release, const std::string &version_with_epoch) { - std::string name = packaging_dir.filename().string(); - log_info("Updating changelog for " + name + " to version " + version_with_epoch + "-0ubuntu1~ppa1"); - try { - run_command_silent_on_success({"git", "checkout", "debian/changelog"}, packaging_dir); - log_verbose("Checked out debian/changelog for " + name); - } catch (const std::exception &e) { - log_error("Failed to checkout debian/changelog for " + name + ": " + e.what()); - // Record the failure - std::lock_guard lock_fail(failures_mutex); - failed_packages[name] = "Failed to checkout debian/changelog: " + std::string(e.what()); - throw; - } - std::vector cmd = { - "dch", "--distribution", release, "--package", name, "--newversion", - version_with_epoch + "-0ubuntu1~ppa1", "--urgency", urgency_level_override, "CI upload." 
- }; - try { - run_command_silent_on_success(cmd, packaging_dir); - log_info("Changelog updated for " + name); - } catch (const std::exception &e) { - log_error("Failed to update changelog for " + name + ": " + e.what()); - // Record the failure - std::lock_guard lock_fail(failures_mutex); - failed_packages[name] = "Failed to update changelog: " + std::string(e.what()); - throw; - } -} - -static std::string build_package(const fs::path &packaging_dir, - const std::map &env_vars, - bool large, const std::string &pkg_name) { - log_info("Building source package for " + pkg_name); - fs::path temp_dir; - std::error_code ec; - - // If anything fails, we still want to clean up. - auto cleanup = [&]() { - if(!temp_dir.empty()) { - fs::remove_all(temp_dir, ec); - if(ec) { - log_warning("Failed to remove temporary directory: " + temp_dir.string() + - " Error: " + ec.message()); - } else { - log_verbose("Removed temporary build directory: " + temp_dir.string()); - } - } - }; - - try { - if(large) { - temp_dir = fs::path(OUTPUT_DIR) / (".tmp_" + pkg_name + "_" + env_vars.at("VERSION")); - fs::create_directories(temp_dir); - } else { - temp_dir = fs::temp_directory_path() / ("tmp_build_" + pkg_name + "_" + env_vars.at("VERSION")); - fs::create_directories(temp_dir); - } - log_verbose("Temporary packaging directory created at: " + temp_dir.string()); - - fs::path temp_packaging_dir = temp_dir / pkg_name; - fs::create_directories(temp_packaging_dir, ec); - if(ec) { - log_error("Failed to create temporary packaging directory: " + temp_packaging_dir.string() + - " Error: " + ec.message()); - throw std::runtime_error("Temporary packaging directory creation failed"); - } - - fs::copy(packaging_dir / "debian", temp_packaging_dir / "debian", fs::copy_options::recursive, ec); - if(ec) { - log_error("Failed to copy debian directory: " + ec.message()); - throw std::runtime_error("Failed to copy debian directory"); - } - - std::string tarball_name = pkg_name + "_" + env_vars.at("VERSION") 
+ ".orig.tar.gz"; - fs::path tarball_source = fs::path(BASE_DIR) / (pkg_name + "_MAIN.orig.tar.gz"); - fs::path tarball_dest = temp_dir / tarball_name; - fs::copy_file(tarball_source, tarball_dest, fs::copy_options::overwrite_existing, ec); - if(ec) { - log_error("Failed to copy tarball: " + ec.message()); - throw std::runtime_error("Failed to copy tarball"); - } - - for (auto &e: env_vars) { - setenv(e.first.c_str(), e.second.c_str(), 1); - log_verbose("Set environment variable: " + e.first + " = " + e.second); - } - - std::vector cmd_build = {"debuild", "--no-lintian", "-S", "-d", "-sa", "-nc"}; - run_command_silent_on_success(cmd_build, temp_packaging_dir); - - run_command_silent_on_success({"git", "checkout", "debian/changelog"}, packaging_dir); - log_info("Built package for " + pkg_name); - - std::string pattern = pkg_name + "_" + env_vars.at("VERSION"); - std::string changes_file; - for(auto &entry: fs::directory_iterator(temp_dir)) { - std::string fname = entry.path().filename().string(); - if(fname.rfind(pattern, 0) == 0) { - fs::path dest = fs::path(OUTPUT_DIR) / fname; - fs::copy_file(entry.path(), dest, fs::copy_options::overwrite_existing, ec); - if(!ec) { - log_verbose("Copied built package " + fname + " to " + OUTPUT_DIR); - } - } - } - - for(auto &entry : fs::directory_iterator(OUTPUT_DIR)) { - std::string fname = entry.path().filename().string(); - // e.g. 
package_1.2.3_source.changes - if(fname.rfind(pkg_name + "_" + env_vars.at("VERSION"), 0) == 0 - && fname.size() >= 16 - && fname.substr(fname.size() - 15) == "_source.changes") { - changes_file = entry.path().string(); - log_info("Found changes file: " + changes_file); - } - } - - if(changes_file.empty()) { - log_error("No changes file found after build for package: " + pkg_name); - throw std::runtime_error("Changes file not found"); - } - - log_info("Built package successfully, changes file: " + changes_file); - - cleanup(); - return changes_file; - } catch(const std::exception &e) { - cleanup(); - // Record the failure - std::lock_guard lock_fail(failures_mutex); - failed_packages[pkg_name] = "Build failed: " + std::string(e.what()); - throw; - } -} - -static void pull_package(Package &pkg, const YAML::Node &releases) { - semaphore_guard guard(semaphore); - log_info("Pulling package: " + pkg.name); - fs::path packaging_destination = fs::path(BASE_DIR) / pkg.name; - fs::path upstream_destination = fs::path(BASE_DIR) / ("upstream-" + pkg.name); - fs::path packaging_repo = packaging_destination; - - std::mutex& upstream_mutex = get_repo_mutex(upstream_destination); - std::mutex& packaging_mutex = get_repo_mutex(packaging_repo); - - std::scoped_lock lock(upstream_mutex, packaging_mutex); - - try { - git_fetch_and_checkout(upstream_destination, pkg.upstream_url, std::nullopt); - git_fetch_and_checkout(packaging_repo, pkg.packaging_url, pkg.packaging_branch); - } catch(const std::exception &e) { - log_error("Failed to fetch and checkout repositories for package " + pkg.name + ": " + e.what()); - // Record the failure - std::lock_guard lock_fail(failures_mutex); - failed_packages[pkg.name] = "Failed to fetch/checkout repositories: " + std::string(e.what()); - return; - } - - try { - log_info("Updating maintainer for package: " + pkg.name); - update_maintainer((packaging_destination / "debian").string(), false); - log_info("Maintainer updated for package: " + pkg.name); 
- } catch(std::exception &e) { - log_warning("update_maintainer error for " + pkg.name + ": " + std::string(e.what())); - // Record the failure - std::lock_guard lock_fail(failures_mutex); - failed_packages[pkg.name] = "Failed to update maintainer: " + std::string(e.what()); - } - - auto exclusions = get_exclusions(packaging_destination); - log_info("Creating tarball for package: " + pkg.name); - try { - create_tarball(pkg.name + "_MAIN.orig.tar.gz", upstream_destination.string(), exclusions); - log_info("Tarball created for package: " + pkg.name); - } catch(const std::exception &e) { - log_error("Failed to create tarball for package " + pkg.name + ": " + e.what()); - // Record the failure - std::lock_guard lock_fail(failures_mutex); - failed_packages[pkg.name] = "Failed to create tarball: " + std::string(e.what()); - } -} - -static void build_package_stage(Package &pkg, const YAML::Node &releases) { - fs::path packaging_destination = fs::path(BASE_DIR) / pkg.name; - fs::path changelog_path = packaging_destination / "debian" / "changelog"; - std::string version = ""; - - try { - version = parse_version(changelog_path); - } catch(const std::exception &e) { - log_error("Failed to parse version for package " + pkg.name + ": " + e.what()); - // Record the failure - std::lock_guard lock_fail(failures_mutex); - failed_packages[pkg.name] = "Failed to parse version: " + std::string(e.what()); - return; - } - - bool large = pkg.large; - if(large) { - log_info("Package " + pkg.name + " is marked as large."); - } - - std::map env_map; - env_map["DEBFULLNAME"] = DEBFULLNAME; - env_map["DEBEMAIL"] = DEBEMAIL; - - std::string epoch; - std::string version_no_epoch = version; - if(auto pos = version.find(':'); pos != std::string::npos) { - epoch = version.substr(0, pos); - version_no_epoch = version.substr(pos + 1); - log_verbose("Package " + pkg.name + " has epoch: " + epoch); - } - env_map["VERSION"] = version_no_epoch; - - for(auto rel : releases) { - std::string release = 
rel.as(); - log_info("Building " + pkg.name + " for release: " + release); - - std::string release_version_no_epoch = version_no_epoch + "~" + release; - std::string version_for_dch = epoch.empty() ? release_version_no_epoch - : (epoch + ":" + release_version_no_epoch); - env_map["UPLOAD_TARGET"] = pkg.upload_target; - - try { - update_changelog(packaging_destination, release, version_for_dch); - } catch(const std::exception &e) { - log_error("Failed to update changelog for package " + pkg.name + ": " + e.what()); - continue; - } - - env_map["VERSION"] = release_version_no_epoch; - - try { - std::string changes_file = build_package(packaging_destination, env_map, large, pkg.name); - if(!changes_file.empty()) { - pkg.changes_files.push_back(changes_file); - if(rel == releases[0]) { - pkg.devel_changes_files.push_back(changes_file); - } else { - pkg.devel_changes_files.push_back(""); - } - } - } catch(std::exception &e) { - log_error("Error building package '" + pkg.name + "' for release '" + release + "': " + std::string(e.what())); - // Failure already recorded in build_package - } - } - - fs::path main_tarball = fs::path(BASE_DIR) / (pkg.name + "_MAIN.orig.tar.gz"); - fs::remove(main_tarball); - log_verbose("Removed main orig tarball for package: " + pkg.name); -} - -static void build_package_stage_wrapper(Package &pkg, const YAML::Node &releases) { - try { - build_package_stage(pkg, releases); - } catch(const std::exception &e) { - log_error(std::string("Exception in building package: ") + e.what()); - // Record the failure - std::lock_guard lock_fail(failures_mutex); - failed_packages[pkg.name] = "Exception during build: " + std::string(e.what()); - } -} - -static void upload_package_stage(Package &pkg, bool skip_dput) { - if(skip_dput) { - log_info("Skipping dput upload for package: " + pkg.name); - return; - } - - if(!pkg.changes_files.empty() && !pkg.upload_target.empty()) { - dput_source(pkg.name, pkg.upload_target, pkg.changes_files, 
pkg.devel_changes_files); - } else { - log_warning("No changes files to upload for package: " + pkg.name); - } -} - -static void run_lintian_stage(Package &pkg) { - for(const auto &changes_file : pkg.changes_files) { - run_source_lintian(pkg.name, changes_file); - } -} - -// Function to summarize and cleanup -static void summary(bool skip_cleanup) { - if(!skip_cleanup) { - log_info("Cleaning up output directory: " + OUTPUT_DIR); - try { - clean_old_logs(LOG_DIR); // Using common::clean_old_logs - fs::remove_all(OUTPUT_DIR); - log_info("Cleanup completed."); - } catch(const std::exception &e) { - log_error("Failed to clean up: " + std::string(e.what())); - } - } else { - log_info("Skipping cleanup as per flag."); - } - - // Publish Lintian results - log_info("Publishing Lintian results."); - publish_lintian(); - - // Final Cleanup of old logs - log_info("Cleaning old logs."); - try { - clean_old_logs(LOG_DIR); // Using common::clean_old_logs - } catch(const std::exception &e) { - log_error("Failed to clean old logs: " + std::string(e.what())); - } - - // Summary of failures - { - std::lock_guard lock(failures_mutex); - if(!failed_packages.empty()) { - log_error("Summary of Failures:"); - for(const auto &entry : failed_packages) { - log_error("Package: " + entry.first + " - Reason: " + entry.second); - } - std::cerr << "Some packages failed during processing. Check the log file for details.\n"; - } else { - log_info("All packages processed successfully."); - } - } - - log_info("Script completed."); -} - -// Function to process a single package -static void process_package(const YAML::Node &pkg_node, const YAML::Node &releases) { - Package pkg; - pkg.name = pkg_node["name"] ? pkg_node["name"].as() : ""; - pkg.upload_target = pkg_node["upload_target"] ? pkg_node["upload_target"].as() : "ppa:lubuntu-ci/unstable-ci-proposed"; - pkg.upstream_url = pkg_node["upstream_url"] ? 
pkg_node["upstream_url"].as() : ("https://github.com/lxqt/" + pkg.name + ".git"); - pkg.packaging_url = pkg_node["packaging_url"] ? pkg_node["packaging_url"].as() : ("https://git.lubuntu.me/Lubuntu/" + pkg.name + "-packaging.git"); - if(pkg_node["packaging_branch"] && pkg_node["packaging_branch"].IsScalar()) { - pkg.packaging_branch = pkg_node["packaging_branch"].as(); - } - pkg.large = pkg_node["large"] ? pkg_node["large"].as() : false; - - if(pkg.name.empty()) { - log_warning("Skipping package due to missing name."); - return; - } - - log_info("Processing package: " + pkg.name); - - // Stage 1: Pull repositories and create tarball - pull_package(pkg, releases); - - // Stage 2: Build package - build_package_stage(pkg, releases); - - // Stage 3: Upload package - upload_package_stage(pkg, false); - - // Stage 4: Run Lintian - run_lintian_stage(pkg); -} - -// Main function -int main(int argc, char** argv) { - std::string prog_name = fs::path(argv[0]).filename().string(); - bool skip_dput = false; - bool skip_cleanup = false; - std::string config_path; - - // Parse initial arguments for help and verbose - for(int i = 1; i < argc; i++) { - std::string arg = argv[i]; - if(arg == "--help" || arg == "-h") { - print_help(prog_name); - return 0; - } - if(arg == "--verbose" || arg == "-v") { - verbose = true; - // Remove the verbose flag from argv - for(int j = i; j < argc - 1; j++) { - argv[j] = argv[j+1]; - } - argc--; - i--; - continue; - } - } - - log_info("Script started."); - fs::create_directories(LOG_DIR); - log_info("Ensured log directory exists: " + LOG_DIR); - fs::create_directories(OUTPUT_DIR); - log_info("Ensured output directory exists: " + OUTPUT_DIR); - - auto now = std::time(nullptr); - std::tm tm_time; - gmtime_r(&now, &tm_time); - char buf_time[20]; - std::strftime(buf_time, sizeof(buf_time), "%Y%m%dT%H%M%S", &tm_time); - std::string current_time = buf_time; - - std::string uuid_part = current_time.substr(0,10); - BASE_LINTIAN_DIR = BASE_OUTPUT_DIR + 
"/.lintian.tmp." + uuid_part; - fs::create_directories(BASE_LINTIAN_DIR); - log_info("Created Lintian temporary directory: " + BASE_LINTIAN_DIR); - - fs::path log_file = fs::path(LOG_DIR) / (current_time + ".log"); - log_info("Opening log file: " + log_file.string()); - log_file_stream.open(log_file); - if(!log_file_stream.is_open()) { - std::cerr << "[ERROR] Unable to open log file.\n"; - return 1; - } - log_info("Log file opened successfully."); - - // Parse remaining arguments - for(int i = 1; i < argc; i++) { - std::string arg = argv[i]; - log_info("Processing argument: " + arg); - if(arg == "--skip-dput") { - skip_dput = true; - log_info("Flag set: --skip-dput"); - } else if(arg == "--skip-cleanup") { - skip_cleanup = true; - log_info("Flag set: --skip-cleanup"); - } else if(arg.rfind("--urgency-level=", 0) == 0) { - urgency_level_override = arg.substr(std::string("--urgency-level=").size()); - log_info("Urgency level overridden to: " + urgency_level_override); - } else if(arg.rfind("--workers=", 0) == 0) { - try { - worker_count = std::stoi(arg.substr(std::string("--workers=").size())); - if(worker_count < 1) worker_count = 1; - log_info("Worker count set to: " + std::to_string(worker_count)); - } catch(const std::exception &e) { - log_error("Invalid worker count provided. 
Using default value of 5."); - worker_count = 5; - } - } else if(config_path.empty()) { - config_path = arg; - log_info("Config path set to: " + config_path); - } - } - - if(config_path.empty()) { - log_error("No config file specified."); - print_help(prog_name); - return 1; - } - - setenv("DEBFULLNAME", DEBFULLNAME.c_str(), 1); - log_info("Set DEBFULLNAME to: " + DEBFULLNAME); - setenv("DEBEMAIL", DEBEMAIL.c_str(), 1); - log_info("Set DEBEMAIL to: " + DEBEMAIL); - - YAML::Node config; - try { - config = load_config(config_path); - } catch (std::exception &e) { - log_error(std::string("Error loading config file: ") + e.what()); - return 1; - } - - auto packages_node = config["packages"]; - auto releases = config["releases"]; - log_info("Loaded " + std::to_string(packages_node.size()) + " packages and " + std::to_string(releases.size()) + " releases from config."); - - // Populate the packages vector - std::vector packages; - for(auto pkg_node : packages_node) { - Package pkg; - pkg.name = pkg_node["name"] ? pkg_node["name"].as() : ""; - pkg.upload_target = pkg_node["upload_target"] ? pkg_node["upload_target"].as() : "ppa:lubuntu-ci/unstable-ci-proposed"; - pkg.upstream_url = pkg_node["upstream_url"] ? pkg_node["upstream_url"].as() : ("https://github.com/lxqt/" + pkg.name + ".git"); - pkg.packaging_url = pkg_node["packaging_url"] ? pkg_node["packaging_url"].as() : ("https://git.lubuntu.me/Lubuntu/" + pkg.name + "-packaging.git"); - if(pkg_node["packaging_branch"] && pkg_node["packaging_branch"].IsScalar()) { - pkg.packaging_branch = pkg_node["packaging_branch"].as(); - } - pkg.large = pkg_node["large"] ? 
pkg_node["large"].as() : false; - - if(pkg.name.empty()) { - log_warning("Skipping package due to missing name."); - continue; - } - packages.emplace_back(std::move(pkg)); - } - log_info("Prepared " + std::to_string(packages.size()) + " packages for processing."); - - fs::current_path(BASE_DIR); - log_info("Set current working directory to BASE_DIR: " + BASE_DIR); - - // Stage 1: Pull all packages in parallel - log_info("Starting Stage 1: Pulling all packages."); - std::vector> pull_futures; - for(auto &pkg : packages) { - pull_futures.emplace_back(std::async(std::launch::async, pull_package, std::ref(pkg), releases)); - } - - for(auto &fut : pull_futures) { - try { - fut.get(); - log_info("Package pulled successfully."); - } catch(std::exception &e) { - log_error(std::string("Pull task generated an exception: ") + e.what()); - // Failure already recorded inside pull_package - } - } - log_info("Completed Stage 1: All packages pulled."); - - // Check for failures after Stage 1 - bool has_failures = false; - { - std::lock_guard lock(failures_mutex); - if(!failed_packages.empty()) { - log_error("Failures detected after Stage 1: Pulling packages."); - has_failures = true; - } - } - - // Stage 2: Build all packages in parallel - log_info("Starting Stage 2: Building all packages."); - std::vector> build_futures; - for(auto &pkg : packages) { - build_futures.emplace_back(std::async(std::launch::async, build_package_stage_wrapper, std::ref(pkg), releases)); - } - - for(auto &fut : build_futures) { - try { - fut.get(); - log_info("Package built successfully."); - } catch(std::exception &e) { - log_error(std::string("Build task generated an exception: ") + e.what()); - // Failure already recorded inside build_package_stage_wrapper - } - } - log_info("Completed Stage 2: All packages built."); - - // Check for failures after Stage 2 - { - std::lock_guard lock(failures_mutex); - if(!failed_packages.empty()) { - log_error("Failures detected after Stage 2: Building packages."); - 
has_failures = true; - } - } - - // Stage 3: Dput upload all packages in parallel - log_info("Starting Stage 3: Uploading all packages with dput."); - std::vector> upload_futures; - for(auto &pkg : packages) { - upload_futures.emplace_back(std::async(std::launch::async, upload_package_stage, std::ref(pkg), skip_dput)); - } - - for(auto &fut : upload_futures) { - try { - fut.get(); - log_info("Package uploaded successfully."); - } catch(std::exception &e) { - log_error(std::string("Upload task generated an exception: ") + e.what()); - // Failure already recorded inside upload_package_stage - } - } - log_info("Completed Stage 3: All packages uploaded."); - - // Check for failures after Stage 3 - { - std::lock_guard lock(failures_mutex); - if(!failed_packages.empty()) { - log_error("Failures detected after Stage 3: Uploading packages."); - has_failures = true; - } - } - - // Stage 4: Run Lintian on all packages in parallel - log_info("Starting Stage 4: Running Lintian on all packages."); - std::vector> lintian_futures; - for(auto &pkg : packages) { - lintian_futures.emplace_back(std::async(std::launch::async, run_lintian_stage, std::ref(pkg))); - } - - for(auto &fut : lintian_futures) { - try { - fut.get(); - log_info("Lintian run successfully."); - } catch(std::exception &e) { - log_error(std::string("Lintian task generated an exception: ") + e.what()); - // Record the failure - std::lock_guard lock_fail(failures_mutex); - failed_packages["Lintian"] = "Exception during Lintian run: " + std::string(e.what()); - } - } - log_info("Completed Stage 4: All Lintian runs completed."); - - // Proceed to summary and cleanup - summary(skip_cleanup); - - // Final Exit Status - { - std::lock_guard lock(failures_mutex); - if(!failed_packages.empty()) { - return 1; - } - } - - return 0; -} - -static std::optional run_lintian(const fs::path& source_path) { - std::stringstream issues; - fs::path temp_file = fs::temp_directory_path() / "lintian_suppress.txt"; - { - std::ofstream 
ofs(temp_file); - for(const auto &tag : SUPPRESSED_LINTIAN_TAGS) { - ofs << tag << "\n"; - } - } - - std::string cmd = "lintian -EvIL +pedantic --suppress-tags-from-file " + temp_file.string() + " " + source_path.string() + " 2>&1"; - FILE* pipe = popen(cmd.c_str(), "r"); - if(!pipe) { - log_error("Failed to run Lintian command: " + cmd); - fs::remove(temp_file); - return std::nullopt; - } - - char buffer[256]; - while(fgets(buffer, sizeof(buffer), pipe)) { - issues << buffer; - } - - int ret = pclose(pipe); - fs::remove(temp_file); - - if(ret != 0) { - return issues.str(); - } else { - return std::nullopt; - } -} - -static std::mutex& get_repo_mutex(const fs::path& repo_path) { - std::lock_guard lock(repo_map_mutex); - return repo_mutexes[repo_path]; -} diff --git a/cpp/ci_database_objs.cpp b/cpp/ci_database_objs.cpp new file mode 100644 index 0000000..fe37f39 --- /dev/null +++ b/cpp/ci_database_objs.cpp @@ -0,0 +1,1287 @@ +// Copyright (C) 2023-2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +#include "ci_database_objs.h" +#include "utilities.h" + +#include +#include +#include + +#include +#include +#include + +// Person +// Minimal representation of a Launchpad Person +Person::Person(int id, const std::string username, const std::string logo_url) + : id(id), username(username), logo_url(logo_url) {} + +// End of Person +// Release +// +// We do not define any setter or getter functions here. It is assumed that the Release +// values will be created in batch, in a separate function, likely from the database +Release::Release(int id, int version, const std::string& codename, bool isDefault) + : id(id), version(version), codename(codename), isDefault(isDefault) {} + +std::vector Release::get_releases(QSqlDatabase& p_db) { + std::vector result; + QString query_str = "SELECT id, version, codename, isDefault FROM release;"; + QSqlQuery query(query_str, p_db); + while (query.next()) { + Release current_release(query.value("id").toInt(), query.value("version").toInt(), + query.value("codename").toString().toStdString(), + query.value("isDefault").toBool()); + result.emplace_back(current_release); + } + return result; +} + +Release Release::get_release_by_id(QSqlDatabase& p_db, int id) { + QSqlQuery query(p_db); + + query.prepare("SELECT id, version, codename, isDefault FROM release WHERE id = ? 
LIMIT 1"); + query.bindValue(0, id); + if (!query.exec()) { + qDebug() << "Error executing query:" << query.lastError().text(); + return Release(); + } + if (query.next()) { + int release_id = query.value(0).toInt(); + int version = query.value(1).toInt(); + QString codename = query.value(2).toString(); + bool isDefault = query.value(3).toBool(); + + // Create and return the Release object + return Release(release_id, version, codename.toStdString(), isDefault); + } else { + std::cout << "No release found for ID: " << id << "\n"; + } + + // Return an empty Release object if no match is found + return Release(); +} + +bool Release::set_releases(QSqlDatabase& p_db, YAML::Node& releases) { + std::vector current_releases = get_releases(p_db); + + // Use set subtraction to determine which releases need to be added and removed + // The first operation is releases - current_releases which shows all *additions* + // The second operation is current_releases - releases which shows all *deletions* + std::vector additions, deletions; + + // Get all of the release codenames from current_releases + std::set current_codenames; + for (const auto& release : current_releases) { + current_codenames.insert(release.codename); + } + // Convert the YAML node to a proper set + std::set releases_set; + for (const auto& release : releases) { + releases_set.insert(release.as()); + } + + // Set subtractions + std::ranges::set_difference(releases_set, current_codenames, std::back_inserter(additions)); + std::ranges::set_difference(current_codenames, releases_set, std::back_inserter(deletions)); + + // Insert the additions + for (const auto& release : additions) { + auto [version, is_last] = get_version_from_codename(release); + QSqlQuery query(p_db); + query.prepare("INSERT INTO release (version, codename, isDefault) VALUES (?, ?, ?)"); + query.bindValue(0, version); + query.bindValue(1, QString::fromStdString(release)); + query.bindValue(2, is_last); + if (!query.exec()) { return false; } + } 
+ + // Remove the deletions + for (const auto& release : deletions) { + QSqlQuery query(p_db); + query.prepare("DELETE FROM release WHERE codename = ?"); + query.bindValue(0, QString::fromStdString(release)); + if (!query.exec()) { return false; } + } + + return true; +} +// End of Release + +// Package +// +// We do not define any setter or getter functions here. It is assumed that the Package +// values will be created in batch, in a separate function, likely from the database +Package::Package(int id, const std::string& name, bool large, const std::string& upstream_url, const std::string& packaging_branch, const std::string& packaging_url) + : id(id), name(name), large(large), upstream_url(upstream_url), packaging_branch(packaging_branch), packaging_url(packaging_url) { + upstream_browser = transform_url(upstream_url); + packaging_browser = transform_url(packaging_url); +} + +std::vector Package::get_packages(QSqlDatabase& p_db) { + std::vector result; + QString query_str = "SELECT id, name, large, upstream_url, packaging_branch, packaging_url FROM package"; + QSqlQuery query(query_str, p_db); + while (query.next()) { + Package current_package(query.value("id").toInt(), query.value("name").toString().toStdString(), + query.value("large").toBool(), + query.value("upstream_url").toString().toStdString(), + query.value("packaging_branch").toString().toStdString(), + query.value("packaging_url").toString().toStdString()); + result.emplace_back(current_package); + } + return result; +} + +Package Package::get_package_by_id(QSqlDatabase& p_db, int id) { + QSqlQuery query(p_db); + query.prepare("SELECT id, name, large, upstream_url, packaging_branch, packaging_url FROM package WHERE id = ? 
LIMIT 1"); + query.bindValue(0, id); + if (!query.exec()) { + qDebug() << "Error executing query:" << query.lastError().text(); + return Package(); + } + if (query.next()) { + Package current_package(query.value("id").toInt(), query.value("name").toString().toStdString(), + query.value("large").toBool(), + query.value("upstream_url").toString().toStdString(), + query.value("packaging_branch").toString().toStdString(), + query.value("packaging_url").toString().toStdString()); + return current_package; + } + return Package(); +} + +bool Package::set_packages(QSqlDatabase& p_db, YAML::Node& packages) { + std::vector current_packages = get_packages(p_db); + std::unordered_map packages_map; + for (const auto& package : packages) { + if (package["name"]) { + packages_map[package["name"].as()] = YAML::Node(package); + } + } + + // Use set subtraction to determine which releases need to be added and removed + // The first operation is releases - current_releases which shows all *additions* + // The second operation is current_releases - releases which shows all *deletions* + std::vector additions, deletions; + + // Get all of the release codenames from current_releases + std::set current_pkgs; + for (const auto& package : current_packages) { + current_pkgs.insert(package.name); + } + // Convert the YAML node to a proper set + std::set packages_set; + for (const auto& package : packages) { + packages_set.insert(package["name"].as()); + } + + // Set subtractions + std::ranges::set_difference(packages_set, current_pkgs, std::back_inserter(additions)); + std::ranges::set_difference(current_pkgs, packages_set, std::back_inserter(deletions)); + + // Insert the additions + for (const auto& package : additions) { + auto package_yaml = packages_map.find(package); + if (package_yaml == packages_map.end()) { continue; } + const YAML::Node& package_node = package_yaml->second; + + bool large; + QString name, upstream_url, packaging_branch, packaging_url; + name = 
QString::fromStdString(package); + large = package_node["large"] ? package_node["large"].as() : false; + upstream_url = package_node["upstream_url"] ? + QString::fromStdString(package_node["upstream_url"].as()) : + QString::fromStdString("https://github.com/lxqt/" + name.toStdString() + ".git"); + packaging_url = package_node["packaging_url"] ? + QString::fromStdString(package_node["packaging_url"].as()) : + QString::fromStdString("https://git.lubuntu.me/Lubuntu/" + name.toStdString() + "-packaging.git"); + packaging_branch = package_node["packaging_branch"] + ? QString::fromStdString(package_node["packaging_branch"].as()) + : QString(""); + + QSqlQuery query(p_db); + query.prepare("INSERT INTO package (name, large, upstream_url, packaging_branch, packaging_url) VALUES (?, ?, ?, ?, ?)"); + query.bindValue(0, name); + query.bindValue(1, large); + query.bindValue(2, upstream_url); + query.bindValue(3, packaging_branch); + query.bindValue(4, packaging_url); + if (!query.exec()) { return false; } + } + + // Remove the deletions + for (const auto& package : deletions) { + QSqlQuery query(p_db); + query.prepare("DELETE FROM package WHERE name = ?"); + query.bindValue(0, QString::fromStdString(package)); + if (!query.exec()) { return false; } + } + + return true; +} + +std::string Package::transform_url(const std::string& url) { + // Precompiled regex patterns and their replacements + static const std::vector> patterns = { + // git.launchpad.net: Append "/commit/?id=" + { std::regex(R"(^(https://git\.launchpad\.net/.*)$)"), "$1/commit/?id=" }, + + // code.qt.io: Replace "/qt/" with "/cgit/qt/" and append "/commit/?id=" + { std::regex(R"(^https://code\.qt\.io/qt/([^/]+\.git)$)"), "https://code.qt.io/cgit/qt/$1/commit/?id=" }, + + // invent.kde.org: Replace ".git" with "/-/commit/" + { std::regex(R"(^https://invent\.kde\.org/([^/]+/[^/]+)\.git$)"), "https://invent.kde.org/$1/-/commit/" }, + + // git.lubuntu.me: Replace ".git" with "/commit/" + { 
std::regex(R"(^https://git\.lubuntu\.me/([^/]+/[^/]+)\.git$)"), "https://git.lubuntu.me/$1/commit/" }, + + // gitlab.kitware.com: Replace ".git" with "/-/commit/" + { std::regex(R"(^https://gitlab\.kitware\.com/([^/]+/[^/]+)\.git$)"), "https://gitlab.kitware.com/$1/-/commit/" }, + }; + + // Iterate through patterns and apply the first matching transformation + for (const auto& [pattern, replacement] : patterns) { + if (std::regex_match(url, pattern)) { + return std::regex_replace(url, pattern, replacement); + } + } + + // Return the original URL if no patterns match + return url; +} +// End of Package + +// Branch +// +// We do not define any setter or getter functions here. It is assumed that the Branch +// values will be created in batch, in a separate function, likely from the database +Branch::Branch(int id, const std::string& name, const std::string& upload_target, const std::string& upload_target_ssh) + : id(id), name(name), upload_target(upload_target), upload_target_ssh(upload_target_ssh) {} + +std::vector Branch::get_branches(QSqlDatabase& p_db) { + std::vector result; + QString query_str = "SELECT id, name, upload_target, upload_target_ssh FROM branch"; + QSqlQuery query(query_str, p_db); + while (query.next()) { + Branch current_branch(query.value("id").toInt(), query.value("name").toString().toStdString(), + query.value("upload_target").toString().toStdString(), + query.value("upload_target_ssh").toString().toStdString()); + result.emplace_back(current_branch); + } + return result; +} + +Branch Branch::get_branch_by_id(QSqlDatabase& p_db, int id) { + QSqlQuery query(p_db); + query.prepare("SELECT id, name, upload_target, upload_target_ssh FROM branch WHERE id = ? 
LIMIT 1"); + query.bindValue(0, id); + if (!query.exec()) { + qDebug() << "Error executing query:" << query.lastError().text(); + return Branch(); + } + if (query.next()) { + Branch current_branch(query.value("id").toInt(), query.value("name").toString().toStdString(), + query.value("upload_target").toString().toStdString(), + query.value("upload_target_ssh").toString().toStdString()); + return current_branch; + } + return Branch(); +} +// End of Branch +// PackageConf +// +// This is the main class which will be iterated on by the CI +// It includes pointers to Package, Release, and Branch, plus some basic commit info +PackageConf::PackageConf(int id, std::shared_ptr package, std::shared_ptr release, std::shared_ptr branch, + std::shared_ptr packaging_commit, std::shared_ptr upstream_commit) + : id(id), package(package), release(release), branch(branch), packaging_commit(packaging_commit), upstream_commit(upstream_commit) {} + +std::vector> PackageConf::get_package_confs(QSqlDatabase& p_db, std::map> jobstatus_map) { + Branch _tmp_brch = Branch(); + Package _tmp_pkg = Package(); + Release _tmp_rel = Release(); + std::vector> result; + + // Get the default release for setting the packaging branch + std::string default_release; + for (const Release& release : _tmp_rel.get_releases(p_db)) { + if (release.isDefault) { + default_release = release.codename; + break; + } + } + + for (const Branch& branch : _tmp_brch.get_branches(p_db)) { + int branch_id = branch.id; + std::shared_ptr shared_branch = std::make_shared(branch); + + for (const Release& release : _tmp_rel.get_releases(p_db)) { + int release_id = release.id; + std::shared_ptr shared_release = std::make_shared(release); + + for (const Package& package : _tmp_pkg.get_packages(p_db)) { + int package_id = package.id; + + Package new_package = package; + if (package.packaging_branch.empty()) { + new_package.packaging_branch = "ubuntu/" + default_release; + } + std::shared_ptr shared_package = 
std::make_shared(new_package); + + QSqlQuery query_local(p_db); + query_local.prepare(R"( + SELECT id, upstream_version, ppa_revision, package_id, release_id, branch_id, packaging_commit_id, upstream_commit_id + FROM packageconf + WHERE package_id = ? AND release_id = ? AND branch_id = ? + LIMIT 1)"); + query_local.bindValue(0, package_id); + query_local.bindValue(1, release_id); + query_local.bindValue(2, branch_id); + if (!query_local.exec()) { + qDebug() << "Failed to get packageconf:" << query_local.lastError().text() + << package_id << release_id << branch_id; + } + + GitCommit _tmp_commit; + + if (query_local.next()) { + QVariant pkg_commit_variant = query_local.value("packaging_commit_id"); + QVariant ups_commit_variant = query_local.value("upstream_commit_id"); + + std::shared_ptr packaging_commit_ptr; + std::shared_ptr upstream_commit_ptr; + + if (!pkg_commit_variant.isNull()) { + int pkg_commit_id = pkg_commit_variant.toInt(); + GitCommit tmp_pkg_commit = _tmp_commit.get_commit_by_id(p_db, pkg_commit_id); + packaging_commit_ptr = std::make_shared(tmp_pkg_commit); + } + + if (!ups_commit_variant.isNull()) { + int ups_commit_id = ups_commit_variant.toInt(); + GitCommit tmp_ups_commit = _tmp_commit.get_commit_by_id(p_db, ups_commit_id); + upstream_commit_ptr = std::make_shared(tmp_ups_commit); + } + + std::shared_ptr package_conf = std::make_shared( + query_local.value("id").toInt(), + shared_package, + shared_release, + shared_branch, + packaging_commit_ptr, // can be nullptr if the column was NULL + upstream_commit_ptr // can be nullptr if the column was NULL + ); + package_conf->upstream_version = query_local.value("upstream_version").toString().toStdString(); + package_conf->ppa_revision = query_local.value("ppa_revision").toInt(); + + result.emplace_back(package_conf); + } + } + } + } + + { + // 1. 
Query all rows from `task` + QSqlQuery query(p_db); + query.prepare(R"( + SELECT + t.id AS id, + pjs.packageconf_id AS packageconf_id, + t.jobstatus_id AS jobstatus_id, + t.queue_time AS queue_time, + t.start_time AS start_time, + t.finish_time AS finish_time, + t.successful AS successful, + t.log AS log + FROM + task t + INNER JOIN + packageconf_jobstatus_id pjs + ON + t.id = pjs.task_id + )"); + if (!query.exec()) { + qDebug() << "Failed to load tasks:" << query.lastError().text(); + } + + // 2. For each row in `task`, attach it to the correct PackageConf + std::map, std::shared_ptr> pull_tasks; + std::map, std::shared_ptr> tarball_tasks; + while (query.next()) { + int tid = query.value("id").toInt(); + int pcid = query.value("packageconf_id").toInt(); + int jsid = query.value("jobstatus_id").toInt(); + + // Find the matching PackageConf in "result" + auto it = std::find_if( + result.begin(), result.end(), + [pcid](const std::shared_ptr& pc) { + return (pc->id == pcid); + } + ); + if (it == result.end()) { + // No matching PackageConf found; skip + continue; + } + std::shared_ptr pc = *it; + + // Find the matching JobStatus + std::shared_ptr jobstatus_ptr; + for (const auto &kv : jobstatus_map) { + if (kv.second && kv.second->id == jsid) { + jobstatus_ptr = kv.second; + break; + } + } + if (!jobstatus_ptr) { + // No match for this jobstatus_id, skip + continue; + } + + // If the jobstatus matches pull or tarball, grab the existing Task if it exists + if (jobstatus_ptr->name == "pull") { + if (auto it = pull_tasks.find(pc->package); it != pull_tasks.end()) { + pc->assign_task(jobstatus_ptr, it->second, pc); + continue; + } + } else if (jobstatus_ptr->name == "tarball") { + if (auto it = tarball_tasks.find(pc->package); it != tarball_tasks.end()) { + pc->assign_task(jobstatus_ptr, it->second, pc); + continue; + } + } + + // Build a Task + auto task_ptr = std::make_shared(); + task_ptr->id = tid; + task_ptr->jobstatus = jobstatus_ptr; + task_ptr->queue_time = 
query.value("queue_time").toLongLong(); + task_ptr->start_time = query.value("start_time").toLongLong(); + task_ptr->finish_time = query.value("finish_time").toLongLong(); + task_ptr->successful = (query.value("successful").toInt() == 1); + + // Attach the log + task_ptr->log = std::make_shared(); + task_ptr->log->set_log(query.value("log").toString().toStdString()); + + // Point the Task back to its parent + task_ptr->parent_packageconf = pc; + + // Link the Task to the PackageConf + pc->assign_task(jobstatus_ptr, task_ptr, pc); + + if (jobstatus_ptr->name == "pull") { + pull_tasks[pc->package] = task_ptr; + } else if (jobstatus_ptr->name == "tarball") { + tarball_tasks[pc->package] = task_ptr; + } + } + } + + return result; +} + +std::vector> PackageConf::get_package_confs_by_package_name(QSqlDatabase& p_db, std::vector> packageconfs, const std::string& package_name) { + Branch _tmp_brch = Branch(); + Package _tmp_pkg = Package(); + PackageConf _tmp_pkg_conf = PackageConf(); + Release _tmp_rel = Release(); + std::vector> result; + + // Process the existing packageconf entries; if we find this package, just return that instead + for (auto pkgconf : packageconfs) { + if (pkgconf->package->name == package_name) { + result.emplace_back(pkgconf); + } + } + if (!result.empty()) { return result; } + + // Get the default release for setting the packaging branch + std::string default_release; + for (const Release& release : _tmp_rel.get_releases(p_db)) { + if (release.isDefault) { + default_release = release.codename; + break; + } + } + + for (const Branch& branch : _tmp_brch.get_branches(p_db)) { + int branch_id = branch.id; + std::shared_ptr shared_branch = std::make_shared(branch); + + for (const Release& release : _tmp_rel.get_releases(p_db)) { + int release_id = release.id; + std::shared_ptr shared_release = std::make_shared(release); + for (const Package& package : _tmp_pkg.get_packages(p_db)) { + int package_id = package.id; + + Package new_package = package; + if 
(package.packaging_branch.empty()) { + new_package.packaging_branch = "ubuntu/" + default_release; + } + std::shared_ptr shared_package = std::make_shared(new_package); + + QSqlQuery query_local(p_db); + query_local.prepare(R"( + SELECT id, package_id, release_id, branch_id, packaging_commit_id, upstream_commit_id + FROM packageconf + WHERE package_id = ? AND release_id = ? AND branch_id = ? + LIMIT 1)"); + query_local.bindValue(0, package_id); + query_local.bindValue(1, release_id); + query_local.bindValue(2, branch_id); + if (!query_local.exec()) { + qDebug() << "Failed to get packageconf:" << query_local.lastError().text() + << package_id << release_id << branch_id; + } + + GitCommit _tmp_commit; + + if (query_local.next()) { + QVariant pkg_commit_variant = query_local.value("packaging_commit_id"); + QVariant ups_commit_variant = query_local.value("upstream_commit_id"); + + std::shared_ptr packaging_commit_ptr; + std::shared_ptr upstream_commit_ptr; + + if (!pkg_commit_variant.isNull()) { + int pkg_commit_id = pkg_commit_variant.toInt(); + GitCommit tmp_pkg_commit = _tmp_commit.get_commit_by_id(p_db, pkg_commit_id); + packaging_commit_ptr = std::make_shared(tmp_pkg_commit); + } + + if (!ups_commit_variant.isNull()) { + int ups_commit_id = ups_commit_variant.toInt(); + GitCommit tmp_ups_commit = _tmp_commit.get_commit_by_id(p_db, ups_commit_id); + upstream_commit_ptr = std::make_shared(tmp_ups_commit); + } + + std::shared_ptr package_conf = std::make_shared(PackageConf( + query_local.value("id").toInt(), + shared_package, + shared_release, + shared_branch, + packaging_commit_ptr, // can be nullptr if the column was NULL + upstream_commit_ptr // can be nullptr if the column was NULL + )); + + result.emplace_back(package_conf); + } + } + } + } + + { + // 1. 
Query all rows from `task` + QSqlQuery query(p_db); + query.prepare(R"( + SELECT id, packageconf_id, jobstatus_id, queue_time, start_time, + finish_time, successful, log + FROM task + )"); + if (!query.exec()) { + qDebug() << "Failed to load tasks:" << query.lastError().text(); + } + + // 2. Build a small map of jobstatus_id -> JobStatus object + // so we can quickly look up a JobStatus by its ID: + std::map> all_jobstatuses; + { + QSqlQuery q2(p_db); + q2.prepare("SELECT id FROM jobstatus"); + if (!q2.exec()) { + qDebug() << "Failed to load jobstatus list:" << q2.lastError().text(); + } + while (q2.next()) { + int js_id = q2.value(0).toInt(); + auto js_ptr = std::make_shared(JobStatus(p_db, js_id)); + all_jobstatuses[js_id] = js_ptr; + } + } + + // 3. For each row in `task`, attach it to the correct PackageConf + while (query.next()) { + int tid = query.value("id").toInt(); + int pcid = query.value("packageconf_id").toInt(); + int jsid = query.value("jobstatus_id").toInt(); + + // Find the matching PackageConf in "result" + auto it = std::find_if( + result.begin(), result.end(), + [pcid](const std::shared_ptr& pc) { + return (pc->id == pcid); + } + ); + if (it == result.end()) { + // No matching PackageConf found; skip + continue; + } + std::shared_ptr pc = *it; + + // Find the matching JobStatus + auto jsit = all_jobstatuses.find(jsid); + if (jsit == all_jobstatuses.end()) { + // No matching JobStatus found; skip + continue; + } + std::shared_ptr jobstatus_ptr = jsit->second; + + // Build a Task + auto task_ptr = std::make_shared(); + task_ptr->id = tid; + task_ptr->jobstatus = jobstatus_ptr; + task_ptr->queue_time = query.value("queue_time").toLongLong(); + task_ptr->start_time = query.value("start_time").toLongLong(); + task_ptr->finish_time = query.value("finish_time").toLongLong(); + task_ptr->successful = (query.value("successful").toInt() == 1); + + // Attach the log + task_ptr->log = std::make_shared(); + 
task_ptr->log->set_log(query.value("log").toString().toStdString()); + + // Point the Task back to its parent + task_ptr->parent_packageconf = pc; + + // Finally, link the Task to the PackageConf + pc->assign_task(jobstatus_ptr, task_ptr, pc); + } + } + + return result; +} + +int PackageConf::successful_task_count() { + std::lock_guard lock(*task_mutex_); + + int successful_count = 0; + for (const auto& [job_status, task] : jobstatus_task_map_) { + if (task && task->successful && task->finish_time > 0) { + ++successful_count; + } + } + return successful_count; +} + +int PackageConf::total_task_count() { + std::lock_guard lock(*task_mutex_); + + int successful_count = 0; + for (const auto& [job_status, task] : jobstatus_task_map_) if (task) ++successful_count; + return successful_count; +} + +std::shared_ptr PackageConf::get_task_by_jobstatus(std::shared_ptr jobstatus) { + if (!jobstatus) { + throw std::invalid_argument("jobstatus is null"); + } + + std::lock_guard lock(*task_mutex_); + + // Search for the JobStatus in the map + auto it = jobstatus_task_map_.find(jobstatus); + if (it != jobstatus_task_map_.end()) { + return it->second; + } + + return nullptr; +} + +void PackageConf::assign_task(std::shared_ptr jobstatus, std::shared_ptr task_ptr, std::weak_ptr packageconf_ptr) { + if (!jobstatus || !task_ptr) { + throw std::invalid_argument("jobstatus or task_ptr is null"); + } + + std::lock_guard lock(*task_mutex_); + task_ptr->parent_packageconf = task_ptr->parent_packageconf.lock() ? 
task_ptr->parent_packageconf : packageconf_ptr; + jobstatus_task_map_[jobstatus] = task_ptr; +} + + +bool PackageConf::set_package_confs(QSqlDatabase& p_db) { + // Fetch current PackageConf entries from the database + QSqlQuery query(p_db); + query.prepare("SELECT package_id, release_id, branch_id FROM packageconf"); + if (!query.exec()) { + qDebug() << "Failed to fetch existing packageconfs:" << query.lastError().text(); + return false; + } + + std::set database_confs; + while (query.next()) { + PackageConfPlain conf_plain{ + query.value("package_id").toInt(), + query.value("release_id").toInt(), + query.value("branch_id").toInt() + }; + database_confs.insert(conf_plain); + } + + // Fetch all package, release, and branch IDs + QSqlQuery pkg_query("SELECT id FROM package", p_db); + std::set package_ids; + while (pkg_query.next()) { package_ids.insert(pkg_query.value(0).toInt()); } + + QSqlQuery rel_query("SELECT id FROM release", p_db); + std::set release_ids; + while (rel_query.next()) { release_ids.insert(rel_query.value(0).toInt()); } + + QSqlQuery br_query("SELECT id FROM branch", p_db); + std::set branch_ids; + while (br_query.next()) { branch_ids.insert(br_query.value(0).toInt()); } + + + // Generate desired PackageConf entries (cross-product) + std::set desired_confs; + for (int pkg_id : package_ids) { + for (int rel_id : release_ids) { + for (int br_id : branch_ids) { + desired_confs.insert(PackageConfPlain{pkg_id, rel_id, br_id}); + } + } + } + + // Determine additions (desired_confs - database_confs) + std::vector additions; + std::ranges::set_difference( + desired_confs, + database_confs, + std::back_inserter(additions), + [](auto const &a, auto const &b){ return a < b; }); + + // Determine deletions (database_confs - desired_confs) + std::vector deletions; + std::ranges::set_difference( + database_confs, + desired_confs, + std::back_inserter(deletions), + [](auto const &a, auto const &b){ return a < b; }); + + // Insert additions, now including 
packaging_commit_id/upstream_commit_id as NULL + for (const auto& conf : additions) { + QSqlQuery insert_query(p_db); + insert_query.prepare(R"( + INSERT INTO packageconf ( + package_id, + release_id, + branch_id, + packaging_commit_id, + upstream_commit_id + ) VALUES (?, ?, ?, NULL, NULL) + )"); + insert_query.addBindValue(conf.package_id); + insert_query.addBindValue(conf.release_id); + insert_query.addBindValue(conf.branch_id); + + if (!insert_query.exec()) { + log_error("Failed to insert PackageConf: " + + insert_query.lastError().text().toStdString() + + " Package ID " + std::to_string(conf.package_id) + + ", Release ID " + std::to_string(conf.release_id) + + ", Branch ID " + std::to_string(conf.branch_id)); + return false; + } + } + + // Remove deletions + for (const auto& conf : deletions) { + QSqlQuery delete_query(p_db); + delete_query.prepare(R"( + DELETE FROM packageconf + WHERE package_id = ? + AND release_id = ? + AND branch_id = ? + )"); + delete_query.addBindValue(conf.package_id); + delete_query.addBindValue(conf.release_id); + delete_query.addBindValue(conf.branch_id); + + if (!delete_query.exec()) { + qDebug() << "Failed to delete packageconf:" << delete_query.lastError().text(); + return false; + } + log_info("Deleted PackageConf: Package ID " + std::to_string(conf.package_id) + + ", Release ID " + std::to_string(conf.release_id) + + ", Branch ID " + std::to_string(conf.branch_id)); + } + + return true; +} + +void PackageConf::sync(QSqlDatabase& p_db) { + bool oneshot = true; + while (oneshot) { + oneshot = false; + try { + QSqlQuery query(p_db); + + if ((!packaging_commit || !upstream_commit) || ((!packaging_commit || packaging_commit->id == 0) && (!upstream_commit || upstream_commit->id == 0))) break; + else if ((packaging_commit && packaging_commit->id == 0) && (!upstream_commit || upstream_commit->id != 0)) { + query.prepare("UPDATE packageconf SET upstream_commit_id = ?, upstream_version = ?, ppa_revision = ? WHERE package_id = ? 
AND branch_id = ? AND release_id = ?"); + query.addBindValue(upstream_commit ? upstream_commit->id : 0); + } + else if ((!packaging_commit || (packaging_commit->id != 0)) && (upstream_commit && upstream_commit->id == 0)) { + query.prepare("UPDATE packageconf SET packaging_commit_id = ?, upstream_version = ?, ppa_revision = ? WHERE package_id = ? AND branch_id = ? AND release_id = ?"); + query.addBindValue(packaging_commit ? packaging_commit->id : 0); + } + else { + query.prepare("UPDATE packageconf SET packaging_commit_id = ?, upstream_commit_id = ?, upstream_version = ?, ppa_revision = ? WHERE package_id = ? AND branch_id = ? AND release_id = ?"); + query.addBindValue(packaging_commit->id); + query.addBindValue(upstream_commit->id); + } + + query.addBindValue(QString::fromStdString(upstream_version)); + query.addBindValue(ppa_revision); + query.addBindValue(package->id); + query.addBindValue(branch->id); + query.addBindValue(release->id); + + if (!query.exec()) { + qDebug() << "Failed to sync PackageConf:" << query.lastError().text(); + } + } catch (...) 
{} + } + + // Also sync all of the child tasks + { + std::lock_guard lock(*task_mutex_); + for (auto [job_status, task] : jobstatus_task_map_) { + if (task) { + auto sync_func = [this, task, p_db]() mutable { + task->save(p_db, id); + }; + sync_func(); + } + } + } +} + +bool PackageConf::can_check_source_upload() { + int _successful_task_count = successful_task_count(); + if (_successful_task_count == 0) return false; + + std::int64_t upload_timestamp = 0; + std::int64_t source_check_timestamp = 0; + std::set valid_successful_statuses = {"pull", "tarball", "source_build", "upload"}; + for (auto &kv : jobstatus_task_map_) { + auto &jobstatus = kv.first; + auto &task_ptr = kv.second; + + if (valid_successful_statuses.contains(jobstatus->name)) _successful_task_count--; + + if (jobstatus->name == "upload" && task_ptr && task_ptr->successful) { + upload_timestamp = task_ptr->finish_time; + continue; + } + + if (jobstatus->name == "source_check" && task_ptr && !task_ptr->successful) { + source_check_timestamp = task_ptr->finish_time; + continue; + } + } + bool all_req_tasks_present = _successful_task_count == 0; + if (!all_req_tasks_present || (upload_timestamp == 0 && source_check_timestamp == 0)) { + return false; + } else if (all_req_tasks_present && upload_timestamp != 0 && source_check_timestamp == 0) { + return true; + } else if (all_req_tasks_present) { + return source_check_timestamp <= upload_timestamp; + } + return false; +} + +bool PackageConf::can_check_builds() { + std::lock_guard lock(*task_mutex_); + + if (!(jobstatus_task_map_.size() == 5)) { return false; } + + static const std::array statuses = { "pull", "tarball", "source_build", "upload", "source_check" }; + int cur_status = 0; + std::int64_t cur_timestamp = 0; + bool return_status = false; + for (auto &kv : jobstatus_task_map_) { + auto &jobstatus = kv.first; + auto &task_ptr = kv.second; + + if (jobstatus->name == statuses[cur_status] && task_ptr) { + if (task_ptr->finish_time >= cur_timestamp && 
task_ptr->successful) { + return_status = true; + cur_timestamp = task_ptr->finish_time; + cur_status++; + } else { + return_status = false; + break; + } + } + } + return return_status && cur_status == 5; +} +// End of PackageConf +// Start of GitCommit +// Constructor which also adds it to the database +GitCommit::GitCommit( + QSqlDatabase& p_db, + const std::string& commit_hash, + const std::string& commit_summary, + const std::string& commit_message, + const std::chrono::zoned_time& commit_datetime, + const std::string& commit_author, + const std::string& commit_committer) + : commit_hash(commit_hash), + commit_summary(commit_summary), + commit_message(commit_message), + commit_datetime(commit_datetime), + commit_author(commit_author), + commit_committer(commit_committer) { + // Insert the entry into the database right away + QSqlQuery insert_query(p_db); + + // Convert commit_datetime to a string in ISO 8601 format + auto sys_time = commit_datetime.get_sys_time(); + auto time_t = std::chrono::system_clock::to_time_t(sys_time); + char datetime_buf[20]; // "YYYY-MM-DD HH:MM:SS" -> 19 + 1 for null terminator + std::strftime(datetime_buf, sizeof(datetime_buf), "%Y-%m-%d %H:%M:%S", std::gmtime(&time_t)); + + insert_query.prepare("INSERT INTO git_commit (commit_hash, commit_summary, commit_message, commit_datetime, commit_author, commit_committer) VALUES (?, ?, ?, ?, ?, ?)"); + insert_query.addBindValue(QString::fromStdString(commit_hash)); // Text + insert_query.addBindValue(QString::fromStdString(commit_summary)); // Text + insert_query.addBindValue(QString::fromStdString(commit_message)); // Text + insert_query.addBindValue(QString(datetime_buf)); // ISO 8601 Text + insert_query.addBindValue(QString::fromStdString(commit_author)); // Text + insert_query.addBindValue(QString::fromStdString(commit_committer)); // Text + + if (!insert_query.exec()) { + // Log error with relevant details + log_error("Failed to insert GitCommit: " + 
insert_query.lastError().text().toStdString()); + return; + } + QVariant last_id = insert_query.lastInsertId(); + if (last_id.isValid()) { + id = last_id.toInt(); + } +} + +// ID-based constructor +GitCommit::GitCommit( + const int id, + const std::string& commit_hash, + const std::string& commit_summary, + const std::string& commit_message, + const std::chrono::zoned_time& commit_datetime, + const std::string& commit_author, + const std::string& commit_committer) + : id(id), + commit_hash(commit_hash), + commit_summary(commit_summary), + commit_message(commit_message), + commit_datetime(commit_datetime), + commit_author(commit_author), + commit_committer(commit_committer) {} + +std::chrono::zoned_time GitCommit::convert_timestr_to_zonedtime(const std::string& datetime_str) { + std::tm tm_utc{}; + std::sscanf(datetime_str.c_str(), "%d-%d-%d %d:%d:%d", + &tm_utc.tm_year, &tm_utc.tm_mon, &tm_utc.tm_mday, + &tm_utc.tm_hour, &tm_utc.tm_min, &tm_utc.tm_sec); + tm_utc.tm_year -= 1900; // Years since 1900 + tm_utc.tm_mon -= 1; // Months since January + + // Convert to time_t (UTC) + std::time_t time_t_value = timegm(&tm_utc); + auto sys_time = std::chrono::system_clock::from_time_t(time_t_value); + + // Construct zoned_time with std::chrono::seconds + std::chrono::zoned_time db_commit_datetime( + std::chrono::current_zone(), + std::chrono::time_point_cast(sys_time) + ); + + return db_commit_datetime; +} + +GitCommit GitCommit::get_commit_by_id(QSqlDatabase& p_db, int id) { + QSqlQuery query(p_db); + query.prepare( + "SELECT id, commit_hash, commit_summary, commit_message, commit_datetime, " + " commit_author, commit_committer " + "FROM git_commit WHERE id = ? 
LIMIT 1" + ); + query.bindValue(0, id); + + if (!query.exec()) { + qDebug() << "Error executing query:" << query.lastError().text(); + return GitCommit(); + } + + if (query.next()) { + try { + int db_id = query.value("id").toInt(); + std::string db_commit_hash = query.value("commit_hash").toString().toStdString(); + std::string db_commit_summary = query.value("commit_summary").toString().toStdString(); + std::string db_commit_message = query.value("commit_message").toString().toStdString(); + std::string db_commit_datetime_str = query.value("commit_datetime").toString().toStdString(); + std::string db_commit_author = query.value("commit_author").toString().toStdString(); + std::string db_commit_committer = query.value("commit_committer").toString().toStdString(); + + // Convert datetime string to std::chrono::zoned_time + if (db_commit_datetime_str.size() >= 19) { // "YYYY-MM-DD HH:MM:SS" + auto db_commit_datetime = convert_timestr_to_zonedtime(db_commit_datetime_str); + + return GitCommit(db_id, + db_commit_hash, + db_commit_summary, + db_commit_message, + db_commit_datetime, + db_commit_author, + db_commit_committer); + } + } catch (const std::exception& e) { + qDebug() << "Error parsing commit_datetime:" << e.what(); + } + } + + return GitCommit(); +} + +std::optional GitCommit::get_commit_by_hash(QSqlDatabase& p_db, const std::string commit_hash) { + QSqlQuery query(p_db); + query.prepare( + "SELECT id, commit_hash, commit_summary, commit_message, commit_datetime, " + " commit_author, commit_committer " + "FROM git_commit WHERE commit_hash = ? 
LIMIT 1" + ); + query.bindValue(0, QString::fromStdString(commit_hash)); + + if (!query.exec()) { + qDebug() << "Error executing query:" << query.lastError().text(); + return GitCommit(); + } + + if (query.next()) { + try { + int db_id = query.value("id").toInt(); + std::string db_commit_hash = query.value("commit_hash").toString().toStdString(); + std::string db_commit_summary = query.value("commit_summary").toString().toStdString(); + std::string db_commit_message = query.value("commit_message").toString().toStdString(); + std::string db_commit_datetime_str = query.value("commit_datetime").toString().toStdString(); + std::string db_commit_author = query.value("commit_author").toString().toStdString(); + std::string db_commit_committer = query.value("commit_committer").toString().toStdString(); + + // Convert datetime string to std::chrono::zoned_time + if (db_commit_datetime_str.size() >= 19) { // "YYYY-MM-DD HH:MM:SS" + auto db_commit_datetime = convert_timestr_to_zonedtime(db_commit_datetime_str); + + return GitCommit(db_id, + db_commit_hash, + db_commit_summary, + db_commit_message, + db_commit_datetime, + db_commit_author, + db_commit_committer); + } + } catch (const std::exception& e) { + qDebug() << "Error parsing commit_datetime:" << e.what(); + } + } + + return GitCommit(); +} +// End of GitCommit +// Start of JobStatus +JobStatus::JobStatus(QSqlDatabase& p_db, int id) : id(id) { + QSqlQuery query(p_db); + query.prepare( + "SELECT id, build_score, name, display_name " + "FROM jobstatus WHERE id = ? 
LIMIT 1" + ); + query.bindValue(0, id); + + if (!query.exec()) { + qDebug() << "Error executing query:" << query.lastError().text(); + } else if (query.next()) { + id = query.value("id").toInt(); + build_score = query.value("build_score").toInt(); + name = query.value("name").toString().toStdString(); + display_name = query.value("display_name").toString().toStdString(); + } +} +// End of JobStatus +// Start of Task +Task::Task(QSqlDatabase& p_db, std::shared_ptr jobstatus, std::int64_t time, std::shared_ptr packageconf) + : jobstatus(jobstatus), queue_time(time), is_running(false), log(std::make_shared()), parent_packageconf(packageconf) +{ + assert(log != nullptr && "Log pointer should never be null"); + QSqlQuery insert_query(p_db); + insert_query.prepare("INSERT INTO task (packageconf_id, jobstatus_id, queue_time) VALUES (?, ?, ?)"); + insert_query.addBindValue(packageconf->id); + insert_query.addBindValue(jobstatus->id); + insert_query.addBindValue(QVariant::fromValue(static_cast(time))); + + build_score = jobstatus->build_score; + + if (!insert_query.exec()) { + // Log error with relevant details + log_error("Failed to insert Task: " + insert_query.lastError().text().toStdString()); + return; + } + QVariant last_id = insert_query.lastInsertId(); + if (last_id.isValid()) { + id = last_id.toInt(); + } +} +Task::Task() {} + + +bool Task::compare(const std::shared_ptr& lhs, const std::shared_ptr& rhs) { + if (!lhs && !rhs) return false; + if (!lhs) return true; // nullptr is considered less than any valid pointer + if (!rhs) return false; // Any valid pointer is greater than nullptr + if (lhs.get() == rhs.get()) return false; // They are considered to be the same + + if (lhs->build_score != rhs->build_score) { + return lhs->build_score > rhs->build_score; // Higher build_score first + } + if (lhs->start_time != rhs->start_time) { + return lhs->start_time < rhs->start_time; // Earlier start_time first + } + if (lhs->finish_time != rhs->finish_time) { + return 
lhs->finish_time < rhs->finish_time; // Earlier finish_time first + } + if (lhs->queue_time != rhs->queue_time) { + return lhs->queue_time < rhs->queue_time; // Earlier queue_time first + } + if (lhs->get_parent_packageconf()->id != rhs->get_parent_packageconf()->id) { + return lhs->get_parent_packageconf()->id < rhs->get_parent_packageconf()->id; + } + if (lhs->get_parent_packageconf()->release->id != rhs->get_parent_packageconf()->release->id) { + return lhs->get_parent_packageconf()->release->id < rhs->get_parent_packageconf()->release->id; + } + if (lhs->get_parent_packageconf()->package->id != rhs->get_parent_packageconf()->package->id) { + return lhs->get_parent_packageconf()->package->id < rhs->get_parent_packageconf()->package->id; + } + if (lhs->get_parent_packageconf()->branch->id != rhs->get_parent_packageconf()->branch->id) { + return lhs->get_parent_packageconf()->branch->id < rhs->get_parent_packageconf()->branch->id; + } + if (lhs->jobstatus->id != rhs->jobstatus->id) { + return lhs->jobstatus->id < rhs->jobstatus->id; + } + return lhs->id < rhs->id; // Earlier id first +} + +std::set> Task::get_completed_tasks(QSqlDatabase& p_db, std::vector> packageconfs, std::map> job_statuses, int page, int per_page) { + std::set> result; + + if (per_page < 1) { per_page = 1; } + + QSqlQuery query(p_db); + query.prepare( + "SELECT id, packageconf_id, jobstatus_id, start_time, finish_time, successful, log " + "FROM task WHERE start_time != 0 AND finish_time != 0 ORDER BY finish_time DESC LIMIT ? OFFSET ?" 
+ ); + query.bindValue(0, per_page); + query.bindValue(1, page); + + if (!query.exec()) { + qDebug() << "Error getting completed tasks:" << query.lastError().text(); + } while (query.next()) { + std::shared_ptr log = std::make_shared(); + Task this_task; + + this_task.id = query.value("id").toInt(); + for (auto pkgconf : packageconfs) { + if (pkgconf->id == query.value("packageconf_id").toInt()) { + this_task.parent_packageconf = pkgconf; + break; + } + } + for (auto status : job_statuses) { + if (status.second->id == query.value("jobstatus_id").toInt()) { + this_task.jobstatus = status.second; + break; + } + } + this_task.start_time = static_cast(query.value("start_time").toLongLong()); + this_task.finish_time = static_cast(query.value("finish_time").toLongLong()); + this_task.successful = query.value("successful").toInt() == 1; + log->set_log(query.value("log").toString().toStdString()); + this_task.log = log; + + result.insert(std::make_shared(this_task)); + } + + return result; +} + +void Task::save(QSqlDatabase& p_db, int _packageconf_id) { + QSqlQuery query(p_db); + query.prepare("UPDATE task SET jobstatus_id = ?, queue_time = ?, start_time = ?, finish_time = ?, successful = ?, log = ? WHERE id = ?"); + query.addBindValue(jobstatus->id); + query.addBindValue(QVariant::fromValue(static_cast(queue_time))); + query.addBindValue(QVariant::fromValue(static_cast(start_time))); + query.addBindValue(QVariant::fromValue(static_cast(finish_time))); + query.addBindValue(successful); + query.addBindValue(QString::fromStdString(std::regex_replace(log->get(), std::regex(R"(^\s+)"), ""))); + query.addBindValue(id); + if (!query.exec()) { + qDebug() << "Failed to save task to database:" << query.lastError().text(); + return; + } + + QSqlQuery link_query(p_db); + + int packageconf_id; + // Max length of int, or default + if (_packageconf_id == 0 || _packageconf_id == 32767) { + auto pkgconf = get_parent_packageconf(); + packageconf_id = pkgconf ? 
pkgconf->id : 0; + } else { + packageconf_id = _packageconf_id; + } + + // Step 1: Update if the record exists + link_query.prepare(R"( + UPDATE packageconf_jobstatus_id + SET task_id = :task_id + WHERE packageconf_id = :packageconf_id AND jobstatus_id = :jobstatus_id + )"); + link_query.bindValue(":task_id", id); + link_query.bindValue(":packageconf_id", packageconf_id); + link_query.bindValue(":jobstatus_id", jobstatus->id); + + if (!link_query.exec()) { + qDebug() << "Failed to update packageconf_jobstatus_id for task" << id << ":" + << link_query.lastError().text(); + qDebug() << "packageconf_id:" << packageconf_id << "jobstatus_id:" << jobstatus->id + << "task_id:" << id; + } else if (link_query.numRowsAffected() == 0) { + // Step 2: Insert if no rows were updated + link_query.prepare(R"( + INSERT INTO packageconf_jobstatus_id (packageconf_id, jobstatus_id, task_id) + VALUES (:packageconf_id, :jobstatus_id, :task_id) + )"); + link_query.bindValue(":packageconf_id", packageconf_id); + link_query.bindValue(":jobstatus_id", jobstatus->id); + link_query.bindValue(":task_id", id); + + if (!link_query.exec()) { + qDebug() << "Failed to insert into packageconf_jobstatus_id for task" << id << ":" + << link_query.lastError().text(); + qDebug() << "packageconf_id:" << packageconf_id << "jobstatus_id:" << jobstatus->id + << "task_id:" << id; + } + } +} diff --git a/cpp/ci_database_objs.h b/cpp/ci_database_objs.h new file mode 100644 index 0000000..7ed56c9 --- /dev/null +++ b/cpp/ci_database_objs.h @@ -0,0 +1,269 @@ +// Copyright (C) 2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +#ifndef CI_DATABASE_OBJS_H +#define CI_DATABASE_OBJS_H + +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include "common.h" + +class Person { +public: + int id; + std::string username; + std::string logo_url; + + Person(int id = 0, const std::string username = "", const std::string logo_url = ""); +}; + +class Release { +public: + int id; + int version; + std::string codename; + bool isDefault; + + Release(int id = 0, int version = 0, const std::string& codename = "", bool isDefault = false); + std::vector get_releases(QSqlDatabase& p_db); + Release get_release_by_id(QSqlDatabase& p_db, int id); + bool set_releases(QSqlDatabase& p_db, YAML::Node& releases); +}; + +class Package { +public: + int id; + std::string name; + bool large; + std::string upstream_browser; + std::string packaging_browser; + std::string upstream_url; + std::string packaging_branch; + std::string packaging_url; + + Package(int id = 0, const std::string& name = "", bool large = false, const std::string& upstream_url = "", const std::string& packaging_branch = "", const std::string& packaging_url = ""); + std::vector get_packages(QSqlDatabase& p_db); + Package get_package_by_id(QSqlDatabase& p_db, int id); + bool set_packages(QSqlDatabase& p_db, YAML::Node& packages); + +private: + std::string transform_url(const std::string& url); +}; + +class Branch { +public: + int id; + std::string name; + std::string upload_target; + std::string upload_target_ssh; + + Branch(int id = 0, const std::string& name = "", const std::string& upload_target = "", const std::string& upload_target_ssh = 
""); + std::vector get_branches(QSqlDatabase& p_db); + Branch get_branch_by_id(QSqlDatabase& p_db, int id); +}; + +class GitCommit { +public: + int id = 0; + std::string commit_hash; + std::string commit_summary; + std::string commit_message; + std::chrono::zoned_time commit_datetime; + std::string commit_author; + std::string commit_committer; + + GitCommit( + QSqlDatabase& p_db, + const std::string& commit_hash = "", + const std::string& commit_summary = "", + const std::string& commit_message = "", + const std::chrono::zoned_time& commit_datetime = std::chrono::zoned_time(), + const std::string& commit_author = "", + const std::string& commit_committer = "" + ); + GitCommit( + const int id = 0, + const std::string& commit_hash = "", + const std::string& commit_summary = "", + const std::string& commit_message = "", + const std::chrono::zoned_time& commit_datetime = std::chrono::zoned_time(), + const std::string& commit_author = "", + const std::string& commit_committer = "" + ); + + GitCommit get_commit_by_id(QSqlDatabase& p_db, int id); + std::optional get_commit_by_hash(QSqlDatabase& p_db, const std::string commit_hash); + +private: + std::chrono::zoned_time convert_timestr_to_zonedtime(const std::string& datetime_str); +}; + +class JobStatus { +public: + int id; + int build_score; + std::string name; + std::string display_name; + + JobStatus(QSqlDatabase& p_db, int id); +}; + +class PackageConf { +public: + int id = 0; + std::shared_ptr package; + std::shared_ptr release; + std::shared_ptr branch; + std::shared_ptr packaging_commit = std::make_shared(); + std::shared_ptr upstream_commit = std::make_shared(); + std::string upstream_version; + int ppa_revision = 1; + + bool operator<(const PackageConf& other) const { + if (package->id != other.package->id) + return package->id < other.package->id; + if (release->id != other.release->id) + return release->id < other.release->id; + if (branch->id != other.branch->id) + return branch->id < other.branch->id; + 
return id < other.id; + } + bool operator==(const PackageConf& other) const { + // Intentionally leave out our ID + return package->id == other.package->id && + release->id == other.release->id && + branch->id == other.branch->id; + } + + PackageConf(int id = 0, std::shared_ptr package = NULL, std::shared_ptr release = NULL, std::shared_ptr branch = NULL, + std::shared_ptr packaging_commit = NULL, std::shared_ptr upstream_commit = NULL); + std::vector> get_package_confs(QSqlDatabase& p_db, std::map> jobstatus_map); + std::vector> get_package_confs_by_package_name(QSqlDatabase& p_db, + std::vector> packageconfs, + const std::string& package_name); + void assign_task(std::shared_ptr jobstatus, std::shared_ptr task_ptr, std::weak_ptr packageconf_ptr); + int successful_task_count(); + int total_task_count(); + std::shared_ptr get_task_by_jobstatus(std::shared_ptr jobstatus); + bool set_package_confs(QSqlDatabase& p_db); + bool set_commit_id(const std::string& _commit_id = ""); + bool set_commit_time(const std::chrono::zoned_time& _commit_time = std::chrono::zoned_time{}); + void sync(QSqlDatabase& p_db); + bool can_check_source_upload(); + bool can_check_builds(); + + struct PackageConfPlain { + int package_id; + int release_id; + int branch_id; + bool operator<(const PackageConf::PackageConfPlain& other) const { + if (package_id != other.package_id) + return package_id < other.package_id; + if (release_id != other.release_id) + return release_id < other.release_id; + return branch_id < other.branch_id; + } + + bool operator==(const PackageConf::PackageConfPlain& other) const { + return package_id == other.package_id && + release_id == other.release_id && + branch_id == other.branch_id; + } + }; + +private: + std::unordered_map, std::shared_ptr> jobstatus_task_map_; + std::unique_ptr task_mutex_ = std::make_unique(); +}; + +class Task { +public: + int id; + int build_score = 0; + bool successful; + std::int64_t queue_time = 0; + std::int64_t start_time = 0; + 
std::int64_t finish_time = 0; + std::function log)> func; + std::shared_ptr log; + std::shared_ptr jobstatus; + std::weak_ptr parent_packageconf; + bool is_running; + + Task(QSqlDatabase& p_db, std::shared_ptr jobstatus, std::int64_t time, std::shared_ptr packageconf); + Task(); + + std::set> get_completed_tasks(QSqlDatabase& p_db, std::vector> packageconfs, std::map> job_statuses, int page, int per_page); + void save(QSqlDatabase& p_db, int _packageconf_id = 0); + + std::shared_ptr get_parent_packageconf() const { + return parent_packageconf.lock(); + } + + struct TaskComparator { + bool operator()(const std::shared_ptr& lhs, const std::shared_ptr& rhs) const { + return Task::compare(lhs, rhs); + } + }; + + // Custom comparator for task ordering + bool operator<(const Task& other) const { + if (build_score != other.build_score) { + return build_score < other.build_score; + } else if (queue_time != other.queue_time) { + return queue_time < other.queue_time; + } else if (start_time != other.start_time) { + return start_time < other.start_time; + } else if (finish_time != other.finish_time) { + return finish_time < other.finish_time; + } + return true; + } + + bool operator<(const std::shared_ptr& other) const { + if (build_score != other->build_score) { + return build_score < other->build_score; + } else if (queue_time != other->queue_time) { + return queue_time < other->queue_time; + } else if (start_time != other->start_time) { + return start_time < other->start_time; + } else if (finish_time != other->finish_time) { + return finish_time < other->finish_time; + } + return true; + } + + static bool compare(const std::shared_ptr& lhs, const std::shared_ptr& rhs); +}; + +inline size_t qHash(const PackageConf::PackageConfPlain& key, size_t seed = 0) { + size_t res = 0; + res ^= std::hash()(key.package_id) + 0x9e3779b9 + (res << 6) + (res >> 2); + res ^= std::hash()(key.release_id) + 0x9e3779b9 + (res << 6) + (res >> 2); + res ^= std::hash()(key.branch_id) + 0x9e3779b9 
+ (res << 6) + (res >> 2); + return res; +} + +#endif // CI_DATABASE_OBJS_H + diff --git a/cpp/ci_logic.cpp b/cpp/ci_logic.cpp new file mode 100644 index 0000000..9e16700 --- /dev/null +++ b/cpp/ci_logic.cpp @@ -0,0 +1,1426 @@ +// Copyright (C) 2024-2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +#include "task_queue.h" +#include "ci_logic.h" +#include "lubuntuci_lib.h" +#include "common.h" +#include "utilities.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace fs = std::filesystem; + +static std::mutex g_cfg_mutex; +static std::atomic thread_id_counter{1}; + +/** + * Merge "packages" and "releases" from partial into master. 
+ */ +static void merge_yaml_nodes(YAML::Node &master, const YAML::Node &partial) { + // Merge packages: + if (partial["packages"]) { + if (!master["packages"]) { + master["packages"] = YAML::Node(YAML::NodeType::Sequence); + } + for (auto pkg : partial["packages"]) { + master["packages"].push_back(pkg); + } + } + // Merge releases: + if (partial["releases"]) { + if (!master["releases"]) { + master["releases"] = YAML::Node(YAML::NodeType::Sequence); + } + for (auto rel : partial["releases"]) { + master["releases"].push_back(rel); + } + } +} + +QSqlDatabase CiLogic::get_thread_connection() { + std::lock_guard lock(connection_mutex_); + thread_local unsigned int thread_unique_id = thread_id_counter.fetch_add(1); + QString connectionName = QString("LubuntuCIConnection_%1").arg(thread_unique_id); + + // Check if the connection already exists for this thread + if (QSqlDatabase::contains(connectionName)) { + QSqlDatabase db = QSqlDatabase::database(connectionName); + if (!db.isOpen()) { + if (!db.open()) { + throw std::runtime_error("Failed to open thread-specific database connection: " + db.lastError().text().toStdString()); + } + } + return db; + } + + QSqlDatabase threadDb = QSqlDatabase::addDatabase("QSQLITE", connectionName); + threadDb.setDatabaseName("/srv/lubuntu-ci/repos/ci-tools/lubuntu_ci.db"); + + if (!threadDb.open()) { + throw std::runtime_error("Failed to open new database connection for thread: " + threadDb.lastError().text().toStdString()); + } + + return threadDb; +} + +// This returns the following information about a commit: +// 1) commit_hash +// 2) commit_summary +// 3) commit_message +// 4) commit_datetime +// 5) commit_author +// 6) commit_committer +GitCommit get_commit_from_pkg_repo(QSqlDatabase& p_db, const std::string& repo_name, std::shared_ptr log) { + // Ensure libgit2 is initialized + ensure_git_inited(); + + // Define the repository path + std::filesystem::path repo_dir = repo_name; + + git_repository* repo = nullptr; + git_revwalk* 
walker = nullptr; + git_commit* commit = nullptr; + + static const std::vector COMMIT_SUMMARY_EXCLUSIONS = { + "GIT_SILENT", + "SVN_SILENT", + "Qt Submodule Update Bot", + "CMake Nightly Date Stamp", + "https://translate.lxqt-project.org/" + }; + + GitCommit _tmp_commit; + std::string commit_hash; + std::string commit_summary; + std::string commit_message; + std::chrono::zoned_time commit_datetime{ + std::chrono::locate_zone("UTC"), + std::chrono::floor(std::chrono::system_clock::time_point{}) + }; + std::string commit_author; + std::string commit_committer; + + // Attempt to open the repository + int error = git_repository_open(&repo, repo_dir.c_str()); + if (error != 0) { + const git_error* e = git_error_last(); + std::string msg = (e && e->message) ? e->message : "unknown error"; + log->append("Failed to open repository: " + msg); + return GitCommit(); + } + + // Initialize the revwalk + error = git_revwalk_new(&walker, repo); + if (error != 0) { + const git_error* e = git_error_last(); + log->append("Failed to create revwalk: " + std::string(e && e->message ? e->message : "unknown error")); + git_repository_free(repo); + return GitCommit(); + } + + // Push HEAD to the walker + error = git_revwalk_push_head(walker); + if (error != 0) { + const git_error* e = git_error_last(); + log->append("Failed to push HEAD to revwalk: " + std::string(e && e->message ? e->message : "unknown error")); + git_revwalk_free(walker); + git_repository_free(repo); + return GitCommit(); + } + + // Optional: Sort commits in topological order and by time + git_revwalk_sorting(walker, GIT_SORT_TIME | GIT_SORT_TOPOLOGICAL); + + bool found_valid_commit = false; + + // Iterate through commits + git_oid oid; + while ((error = git_revwalk_next(&oid, walker)) == 0) { + // Lookup the commit object using the oid + error = git_commit_lookup(&commit, repo, &oid); + if (error != 0) { + const git_error* e = git_error_last(); + log->append("Failed to lookup commit: " + std::string(e && e->message ? 
e->message : "unknown error")); + continue; // Skip to next commit + } + + // Extract commit summary + const char* summary_cstr = git_commit_summary(commit); + if (!summary_cstr) { + git_commit_free(commit); + continue; // No summary, skip + } + std::string current_summary = summary_cstr; + + // Check if the commit summary contains any exclusion strings + bool is_excluded = false; + for (const auto& excl : COMMIT_SUMMARY_EXCLUSIONS) { + if (current_summary.find(excl) != std::string::npos) { + is_excluded = true; + char hash_str[GIT_OID_HEXSZ + 1]; + git_oid_tostr(hash_str, sizeof(hash_str), &oid); + log->append("Skipping commit " + std::string(hash_str) + + " due to exclusion string: \"" + excl + "\""); + break; + } + } + + if (is_excluded) { + git_commit_free(commit); + continue; // Skip this commit and move to the next one + } + + // 1) Extract commit hash + char hash_str[GIT_OID_HEXSZ + 1]; + git_oid_tostr(hash_str, sizeof(hash_str), &oid); + commit_hash = hash_str; + + // 2) Extract commit message + const char* message = git_commit_message(commit); + if (message) { + commit_message = message; + } + + // 3) Extract commit datetime and convert to UTC + git_time_t c_time = git_commit_time(commit); + int c_time_offset = git_commit_time_offset(commit); // Offset in minutes from UTC + std::chrono::system_clock::time_point commit_tp = + std::chrono::system_clock::from_time_t(static_cast(c_time)); + std::chrono::minutes offset_minutes(c_time_offset); + auto utc_time_tp = commit_tp - std::chrono::duration_cast(offset_minutes); + commit_datetime = std::chrono::zoned_time{ + std::chrono::locate_zone("UTC"), + std::chrono::floor(utc_time_tp) + }; + + // 4) Extract commit author + git_signature* author = nullptr; + error = git_commit_author_with_mailmap(&author, commit, nullptr); + if (error == 0 && author) { + commit_author = std::format("{} <{}>", author->name, author->email); + git_signature_free(author); + } + + // 5) Extract commit committer + git_signature* 
committer_sig = nullptr; + error = git_commit_committer_with_mailmap(&committer_sig, commit, nullptr); + if (error == 0 && committer_sig) { + commit_committer = std::format("{} <{}>", committer_sig->name, committer_sig->email); + git_signature_free(committer_sig); + } + + // Cleanup the commit object + git_commit_free(commit); + commit = nullptr; + + // Construct and return the GitCommit object with collected data + GitCommit git_commit_instance( + p_db, + commit_hash, + current_summary, // Use the current commit summary + commit_message, + commit_datetime, + commit_author, + commit_committer + ); + + // Check if the commit already exists in the DB + auto existing_commit = _tmp_commit.get_commit_by_hash(p_db, commit_hash); + if (existing_commit) { + found_valid_commit = true; + // Cleanup revwalk and repository before returning + git_revwalk_free(walker); + git_repository_free(repo); + return *existing_commit; + } else { + // Insert the new commit into the DB + found_valid_commit = true; + // Cleanup revwalk and repository before returning + git_revwalk_free(walker); + git_repository_free(repo); + return git_commit_instance; + } + } + + if (error != GIT_ITEROVER && error != 0) { + const git_error* e = git_error_last(); + log->append("Error during revwalk: " + std::string(e && e->message ? e->message : "unknown error")); + } + + // Cleanup + git_revwalk_free(walker); + git_repository_free(repo); + + if (!found_valid_commit) { + log->append("No valid commit found without exclusions in repository: " + repo_name); + return GitCommit(); + } + + // This point should not be reached if a valid commit is found + return GitCommit(); +} + +/** + * Load a YAML file from a given path. 
 */
YAML::Node CiLogic::load_yaml_config(const fs::path &config_path) {
    // Fail loudly rather than returning an empty node for a missing file.
    if (!fs::exists(config_path)) {
        throw std::runtime_error("Config file does not exist: " + config_path.string());
    }
    return YAML::LoadFile(config_path.string());
}

/**
 * init_database():
 * If the DB connection name is known, reuse it. Otherwise, create it.
 *
 * Opens (re-creating if already registered) the named SQLite connection,
 * applies WAL/synchronous/foreign-key PRAGMAs, then executes the schema
 * statements below one-by-one. Returns false after logging on any failure.
 */
bool CiLogic::init_database(const QString& connectionName, const QString& databasePath) {
    // Initialize the base connection in the main thread.
    // An existing registration is removed first so the path below applies.
    if (QSqlDatabase::contains(connectionName)) {
        QSqlDatabase::removeDatabase(connectionName);
    }

    QSqlDatabase baseDb = QSqlDatabase::addDatabase("QSQLITE", connectionName);
    baseDb.setDatabaseName(databasePath);

    if (!baseDb.open()) {
        log_error("Cannot open database: " + baseDb.lastError().text().toStdString());
        return false;
    }

    // Apply PRAGMAs (WAL for concurrent readers, FULL sync for durability).
    QSqlQuery pragmaQuery(baseDb);
    pragmaQuery.exec("PRAGMA journal_mode = WAL;");
    pragmaQuery.exec("PRAGMA synchronous = FULL;");
    pragmaQuery.exec("PRAGMA foreign_keys = ON;");

    // Run the schema creation (or migration) statements.
    // The raw string is split on ';' and each statement executed separately.
    QStringList sqlStatements = QString(R"(
        PRAGMA foreign_keys = ON;

        CREATE TABLE IF NOT EXISTS person (
            id INTEGER PRIMARY KEY,
            username TEXT NOT NULL,
            logo_url TEXT
        );

        CREATE TABLE IF NOT EXISTS person_token (
            id INTEGER PRIMARY KEY,
            person_id INTEGER NOT NULL,
            token TEXT NOT NULL,
            expiry_date TEXT NOT NULL,
            FOREIGN KEY (person_id) REFERENCES person(id) ON DELETE CASCADE
        );

        CREATE TABLE IF NOT EXISTS package (
            id INTEGER PRIMARY KEY,
            name TEXT NOT NULL,
            large INTEGER NOT NULL DEFAULT 0,
            upstream_url TEXT NOT NULL,
            packaging_branch TEXT NOT NULL,
            packaging_url TEXT NOT NULL
        );

        CREATE TABLE IF NOT EXISTS release (
            id INTEGER PRIMARY KEY,
            version INTEGER NOT NULL UNIQUE,
            codename TEXT NOT NULL UNIQUE,
            isDefault INTEGER NOT NULL DEFAULT 0
        );

        CREATE TABLE IF NOT EXISTS branch (
            id INTEGER PRIMARY KEY,
            name TEXT NOT NULL,
            upload_target TEXT NOT NULL,
            upload_target_ssh TEXT NOT NULL
        );

        INSERT INTO branch (name, upload_target, upload_target_ssh)
        SELECT 'unstable', 'ppa:lubuntu-ci/unstable-ci-proposed', 'ssh-ppa:lubuntu-ci/unstable-ci-proposed'
        WHERE NOT EXISTS (SELECT 1 FROM branch WHERE name='unstable');

        CREATE TABLE IF NOT EXISTS git_commit (
            id INTEGER PRIMARY KEY,
            commit_hash TEXT NOT NULL,
            commit_summary TEXT NOT NULL,
            commit_message TEXT NOT NULL,
            commit_datetime DATETIME NOT NULL,
            commit_author TEXT NOT NULL,
            commit_committer TEXT NOT NULL
        );

        CREATE TABLE IF NOT EXISTS packageconf (
            id INTEGER PRIMARY KEY,
            upstream_version TEXT,
            ppa_revision INTEGER,
            package_id INTEGER NOT NULL,
            release_id INTEGER NOT NULL,
            branch_id INTEGER NOT NULL,
            packaging_commit_id INTEGER,
            upstream_commit_id INTEGER,
            FOREIGN KEY (package_id) REFERENCES package(id) ON DELETE CASCADE,
            FOREIGN KEY (release_id) REFERENCES release(id) ON DELETE CASCADE,
            FOREIGN KEY (branch_id) REFERENCES branch(id) ON DELETE CASCADE,
            FOREIGN KEY (packaging_commit_id) REFERENCES git_commit(id) ON DELETE CASCADE,
            FOREIGN KEY (upstream_commit_id) REFERENCES git_commit(id) ON DELETE CASCADE
        );

        CREATE TABLE IF NOT EXISTS jobstatus (
            id INTEGER PRIMARY KEY,
            build_score INTEGER NOT NULL,
            name TEXT NOT NULL UNIQUE,
            display_name TEXT NOT NULL
        );

        INSERT OR IGNORE INTO jobstatus (build_score, name, display_name)
        VALUES
            (80, 'pull', 'Pull'),
            (70, 'tarball', 'Create Tarball'),
            (60, 'source_build', 'Source Build'),
            (50, 'upload', 'Upload'),
            (40, 'source_check', 'Source Check'),
            (30, 'build_check', 'Build Check'),
            (20, 'lintian', 'Lintian'),
            (10, 'britney', 'Britney');

        CREATE TABLE IF NOT EXISTS task (
            id INTEGER PRIMARY KEY,
            packageconf_id INTEGER NOT NULL,
            jobstatus_id INTEGER NOT NULL,
            queue_time INTEGER DEFAULT 0,
            start_time INTEGER DEFAULT 0,
            finish_time INTEGER DEFAULT 0,
            successful INTEGER,
            log TEXT,
            FOREIGN KEY (packageconf_id) REFERENCES packageconf(id),
            FOREIGN KEY (jobstatus_id) REFERENCES jobstatus(id)
        );

        CREATE TABLE IF NOT EXISTS packageconf_jobstatus_id (
            id INTEGER PRIMARY KEY,
            packageconf_id INTEGER NOT NULL,
            jobstatus_id INTEGER NOT NULL,
            task_id INTEGER NOT NULL,
            FOREIGN KEY (packageconf_id) REFERENCES packageconf(id),
            FOREIGN KEY (jobstatus_id) REFERENCES jobstatus(id),
            FOREIGN KEY (task_id) REFERENCES task(id)
        );

    )").split(';', Qt::SkipEmptyParts);

    {
        QSqlQuery query(baseDb);
        for (const QString &statement : sqlStatements) {
            QString trimmed = statement.trimmed();
            if (!trimmed.isEmpty() && !query.exec(trimmed)) {
                std::cout << "Failed to execute SQL: " << trimmed.toStdString()
                          << "\nError: " << query.lastError().text().toStdString() << "\n";
                return false;
            }
        }
    }

    return true;
}

/**
 * init_global():
 * 1. Reads all *.yaml in /srv/lubuntu-ci/repos/ci-tools/configs/
 * 2. Merges them into g_config.
 * 3. Ensures libgit2 is initialized once.
+ */ +void CiLogic::init_global() { + std::lock_guard lk(g_cfg_mutex); + Branch _tmp_brnch; + Package _tmp_pkg; + PackageConf _tmp_pkg_conf; + Release _tmp_rel; + + ensure_git_inited(); + if (!init_database()) return; + + if (branches.empty() || packages.empty() || releases.empty() || packageconfs.empty()) { + YAML::Node g_config; + fs::path config_dir = "/srv/lubuntu-ci/repos/ci-tools/configs"; + if (!fs::exists(config_dir) || !fs::is_directory(config_dir)) { + std::cerr << "[WARNING] Config directory not found: " << config_dir << "\n"; + std::cerr << "[WARNING] Continuing with empty config.\n"; + return; + } + + YAML::Node merged; + bool found_any_yaml = false; + + for (auto &entry : fs::directory_iterator(config_dir)) { + if (entry.is_regular_file()) { + auto p = entry.path(); + if (p.extension() == ".yaml") { + found_any_yaml = true; + try { + YAML::Node partial = YAML::LoadFile(p.string()); + merge_yaml_nodes(merged, partial); + } catch (std::exception &ex) { + std::cerr << "[WARNING] Could not parse YAML in " << p.string() + << ": " << ex.what() << "\n"; + } + } + } + } + + if (!found_any_yaml) { + std::cerr << "[WARNING] No .yaml files found in " << config_dir << "\n"; + std::cerr << "[WARNING] Continuing with empty config.\n"; + } + + g_config = merged; + + if (g_config["packages"]) { + log_info("Merged config has " + + std::to_string(g_config["packages"].size()) + + " 'packages' items total."); + } else { + log_error("No 'packages' found in the final merged YAML config!"); + } + if (g_config["releases"]) { + log_info("Merged config has " + + std::to_string(g_config["releases"].size()) + + " 'releases' items total."); + } + + // Set the packages in the DB + YAML::Node yaml_packages = g_config["packages"]; + auto connection = get_thread_connection(); + if (!_tmp_pkg.set_packages(connection, yaml_packages)) { + log_error("Failed to set packages."); + } + packages = _tmp_pkg.get_packages(connection); + + // Set the releases in the DB + YAML::Node yaml_releases 
= g_config["releases"]; + connection = get_thread_connection(); + if (!_tmp_rel.set_releases(connection, yaml_releases)) { + log_error("Failed to set releases."); + } + connection = get_thread_connection(); + releases = _tmp_rel.get_releases(connection); + + // Add missing packageconf entries + connection = get_thread_connection(); + if (!_tmp_pkg_conf.set_package_confs(connection)) { + log_error("Failed to set package configurations."); + } + set_packageconfs(_tmp_pkg_conf.get_package_confs(connection, get_job_statuses())); + + // Finally, store the branches + connection = get_thread_connection(); + if (branches.empty()) { + branches = _tmp_brnch.get_branches(connection); + } + } + } + +/** + * Convert a YAML node to CiProject + */ +CiProject CiLogic::yaml_to_project(const YAML::Node &pkg_node) { + CiProject project; + project.name = pkg_node["name"].as(); + project.upload_target = pkg_node["upload_target"] + ? pkg_node["upload_target"].as() + : "ppa:lubuntu-ci/unstable-ci-proposed"; + project.upstream_url = pkg_node["upstream_url"] + ? pkg_node["upstream_url"].as() + : ("https://github.com/lxqt/" + project.name + ".git"); + project.packaging_url = pkg_node["packaging_url"] + ? pkg_node["packaging_url"].as() + : ("https://git.lubuntu.me/Lubuntu/" + project.name + "-packaging.git"); + project.packaging_branch = + pkg_node["packaging_branch"] + ? std::optional(pkg_node["packaging_branch"].as()) + : std::nullopt; + project.large = pkg_node["large"] ? 
pkg_node["large"].as() : false; + return project; +} + +// Trampoline function to bridge C callback to C++ lambda +static int submodule_trampoline(git_submodule* sm, const char* name, void* payload) { + // Cast payload back to the C++ lambda + auto* callback = static_cast*>(payload); + return (*callback)(sm, name, payload); +} + +/** + * clone_or_fetch: clone if needed, else fetch + */ +void CiLogic::clone_or_fetch(const std::filesystem::path &repo_dir, + const std::string &repo_url, + const std::optional &branch, + std::shared_ptr log) +{ + ensure_git_inited(); + + git_repository* repo = nullptr; + int error = git_repository_open(&repo, repo_dir.c_str()); + if (error == GIT_ENOTFOUND) { + log->append("Cloning: " + repo_url + " => " + repo_dir.string()); + git_clone_options opts = GIT_CLONE_OPTIONS_INIT; + if (branch.has_value()) { + opts.checkout_branch = branch->c_str(); + } + + opts.checkout_opts.checkout_strategy |= GIT_CHECKOUT_UPDATE_SUBMODULES; + + error = git_clone(&repo, repo_url.c_str(), repo_dir.c_str(), &opts); + if (error != 0) { + const git_error *e = git_error_last(); + throw std::runtime_error("Failed to clone: " + + std::string(e && e->message ? e->message : "unknown")); + } + log->append("Repo cloned OK."); + } + else if (error == 0) { + git_remote *remote = nullptr; + if (git_remote_lookup(&remote, repo, "origin") != 0) { + const git_error *e = git_error_last(); + git_repository_free(repo); + throw std::runtime_error("No remote origin: " + + std::string(e && e->message ? e->message : "unknown")); + } + + if (git_remote_fetch(remote, nullptr, nullptr, nullptr) < 0) { + const git_error *e = git_error_last(); + git_remote_free(remote); + git_repository_free(repo); + throw std::runtime_error("Fetch failed: " + + std::string(e && e->message ? 
e->message : "unknown")); + } + + std::string detected_branch = "master"; + git_reference* head_ref = nullptr; + error = git_reference_lookup(&head_ref, repo, "refs/remotes/origin/HEAD"); + if (error == 0 && head_ref != nullptr) { + if (git_reference_type(head_ref) & GIT_REFERENCE_SYMBOLIC) { + const char* symref = git_reference_symbolic_target(head_ref); + if (symref) { + std::string s = symref; + std::string prefix = "refs/remotes/origin/"; + if (s.find(prefix) == 0) { + detected_branch = s.substr(prefix.size()); + } + } + } + git_reference_free(head_ref); + } + + std::string b = branch.value_or(detected_branch); + log->append("Using branch: " + b); + + bool successPull = false; + do { + std::string localRef = "refs/heads/" + b; + std::string remoteRef = "refs/remotes/origin/" + b; + + git_reference* localBranch = nullptr; + if (git_reference_lookup(&localBranch, repo, localRef.c_str()) == GIT_ENOTFOUND) { + git_object* remObj = nullptr; + if (git_revparse_single(&remObj, repo, remoteRef.c_str()) == 0) { + git_reference* newB = nullptr; + git_branch_create(&newB, repo, b.c_str(), (const git_commit*)remObj, 0); + if (newB) git_reference_free(newB); + git_object_free(remObj); + git_reference_lookup(&localBranch, repo, localRef.c_str()); + } + } + if (!localBranch) break; + + git_object* remoteObj = nullptr; + if (git_revparse_single(&remoteObj, repo, remoteRef.c_str()) < 0) { + git_reference_free(localBranch); + break; + } + git_oid remoteOid = *git_object_id(remoteObj); + + git_reference* updated = nullptr; + int ffErr = git_reference_set_target(&updated, localBranch, &remoteOid, "Fast-forward"); + git_reference_free(localBranch); + git_object_free(remoteObj); + if (ffErr < 0) { + if (updated) git_reference_free(updated); + break; + } + { + git_object* obj = nullptr; + if (git_revparse_single(&obj, repo, localRef.c_str()) == 0) { + git_checkout_options co = GIT_CHECKOUT_OPTIONS_INIT; + // Use a more forceful checkout strategy + co.checkout_strategy = 
GIT_CHECKOUT_FORCE | GIT_CHECKOUT_UPDATE_SUBMODULES; + if (git_checkout_tree(repo, obj, &co) == 0) { + if (git_repository_set_head(repo, localRef.c_str()) == 0) { + // Perform a hard reset to ensure working directory and index match HEAD + error = git_reset(repo, obj, GIT_RESET_HARD, nullptr); + if (error != 0) { + const git_error* e = git_error_last(); + log->append("Failed to reset repository: " + std::string(e && e->message ? e->message : "unknown error")); + git_repository_free(repo); + throw std::runtime_error("Failed to reset repository after checkout."); + } + successPull = true; + } + } + git_object_free(obj); + } + } + if (updated) git_reference_free(updated); + } while(false); + + if (!successPull) { + std::string bRem = "refs/remotes/origin/" + b; + git_object* origObj = nullptr; + if (git_revparse_single(&origObj, repo, bRem.c_str()) == 0) { + git_reset(repo, origObj, GIT_RESET_HARD, nullptr); + git_object_free(origObj); + + git_oid newOid; + if (git_revparse_single(&origObj, repo, bRem.c_str()) == 0) { + newOid = *git_object_id(origObj); + git_object_free(origObj); + std::string lRef = "refs/heads/" + b; + git_reference* fRef = nullptr; + git_reference_create(&fRef, repo, lRef.c_str(), &newOid, 1, + "Forced local update"); + if (fRef) git_reference_free(fRef); + git_object* co = nullptr; + if (git_revparse_single(&co, repo, lRef.c_str()) == 0) { + git_checkout_options o = GIT_CHECKOUT_OPTIONS_INIT; + o.checkout_strategy = GIT_CHECKOUT_FORCE; + if (!git_checkout_tree(repo, co, &o)) + git_repository_set_head(repo, lRef.c_str()); + git_object_free(co); + } + } + } + } + + std::function submodule_callback; + submodule_callback = [&](git_submodule* sm, const char* name, void* payload) -> int { + // Initialize submodule + if (git_submodule_init(sm, 1) != 0) { + log->append("Failed to initialize submodule " + std::string(name) + "\n"); + return 0; // Continue with other submodules + } + + // Set up update options + git_submodule_update_options opts = 
GIT_SUBMODULE_UPDATE_OPTIONS_INIT; + opts.version = GIT_SUBMODULE_UPDATE_OPTIONS_VERSION; + opts.fetch_opts = GIT_FETCH_OPTIONS_INIT; + opts.fetch_opts.version = GIT_FETCH_OPTIONS_VERSION; + opts.checkout_opts = GIT_CHECKOUT_OPTIONS_INIT; + opts.checkout_opts.checkout_strategy = GIT_CHECKOUT_SAFE; + + // Update submodule + if (git_submodule_update(sm, 1, &opts) != 0) { + const git_error* e = git_error_last(); + log->append("Failed to update submodule " + std::string(name) + ": " + + (e && e->message ? e->message : "unknown") + "\n"); + } else { + log->append("Updated submodule: " + std::string(name) + "\n"); + } + + // Open the submodule repository + git_repository* subrepo = nullptr; + if (git_submodule_open(&subrepo, sm) != 0) { + log->append("Failed to open submodule repository: " + std::string(name) + "\n"); + return 0; // Continue with other submodules + } + + // Recurse into nested submodules + // Pass the same lambda as the callback by casting it to std::function + if (git_submodule_foreach(subrepo, submodule_trampoline, &submodule_callback) != 0) { + const git_error* e = git_error_last(); + log->append("Failed to iterate nested submodules in " + std::string(name) + ": " + + (e && e->message ? e->message : "unknown") + "\n"); + } + + git_repository_free(subrepo); + return 0; + }; + + // Start processing submodules with the top-level repository + if (git_submodule_foreach(repo, submodule_trampoline, &submodule_callback) != 0) { + const git_error* e = git_error_last(); + log->append("Failed to iterate over submodules: " + + std::string(e && e->message ? e->message : "unknown") + "\n"); + } + + git_remote_free(remote); + git_repository_free(repo); + } +} + +/** + * parse_version(...) 
from debian/changelog + */ +std::string parse_version(const fs::path &changelog_path) { + if (!fs::exists(changelog_path)) { + throw std::runtime_error("Changelog not found: " + changelog_path.string()); + } + std::ifstream infile(changelog_path); + if (!infile.is_open()) { + throw std::runtime_error("Cannot open changelog: " + changelog_path.string()); + } + std::string line; + std::regex version_regex("^\\S+ \\(([^)]+)\\) .+"); + while (std::getline(infile, line)) { + std::smatch match; + if (std::regex_match(line, match, version_regex)) { + if (match.size() >= 2) { + std::string full_version = match[1].str(); + auto dash_pos = full_version.find('-'); + if (dash_pos != std::string::npos) { + return full_version.substr(0, dash_pos); + } else { + return full_version; + } + } + } + } + throw std::runtime_error("parse_version: can't parse debian/changelog"); +} + +/** + * update_changelog with dch ... + */ +void update_changelog(const fs::path &packaging_dir, + const std::string &release, + const std::string &new_version, + const std::string &ppa_suffix, + std::shared_ptr log) +{ + std::vector dch_cmd { + "dch", "--distribution", release, + "--newversion", new_version + "-0ubuntu0~ppa" + ppa_suffix, + "--urgency", "low", + "CI upload." + }; + if (run_command(dch_cmd, packaging_dir, false, log)) { + log->append("dch: updated changelog for " + release); + } else { + log->append("dch: failed for release " + release); + } +} + +/** + * debuild_package ... 
+ */ +void CiLogic::debuild_package(const fs::path &packaging_dir, std::shared_ptr log) { + std::vector cmd { + "debuild", + "--no-lintian", + "-S", + "-d", + "-sa" + }; + + if (run_command(cmd, packaging_dir, false, log)) { + log->append("debuild OK in " + packaging_dir.string() + "\n"); + } else { + cmd.emplace_back("-nc"); + log->append("debuild failed in " + packaging_dir.string() + + " - trying again without cleaning\n"); + if (run_command(cmd, packaging_dir, false, log)) { + log->append("debuild without cleaning OK in " + packaging_dir.string() + "\n"); + } else { + log->append("debuild failed in " + packaging_dir.string() + "\n"); + } + } +} + +/** + * collect_changes_files from build_output + */ +std::vector collect_changes_files(const std::string &repo_name, + const std::string &version) +{ + fs::path outdir = "/srv/lubuntu-ci/repos/build_output"; + fs::create_directories(outdir); + std::vector results; + + std::string prefix = repo_name + "_" + version; + for (auto &entry : fs::directory_iterator(outdir)) { + std::string filename = entry.path().filename().string(); + if (filename.rfind(prefix, 0) == 0 + && filename.size() >= 16 + && filename.substr(filename.size() - 15) == "_source.changes") + { + results.push_back(entry.path().string()); + } + } + if (results.empty()) { + throw std::runtime_error("No .changes found for " + repo_name); + } + return results; +} + +/** + * reset_changelog to HEAD content + */ +static void reset_changelog(const fs::path &repo_dir, const fs::path &changelog_path) { + git_repository *repo = nullptr; + if (git_repository_open(&repo, repo_dir.c_str()) != 0) { + const git_error *e = git_error_last(); + throw std::runtime_error(std::string("reset_changelog: open failed: ") + + (e && e->message ? 
e->message : "???")); + } + git_reference *head_ref = nullptr; + if (git_repository_head(&head_ref, repo) != 0) { + const git_error *e = git_error_last(); + git_repository_free(repo); + throw std::runtime_error(std::string("reset_changelog: repository_head: ") + + (e && e->message ? e->message : "???")); + } + git_commit *commit = nullptr; + if (git_reference_peel((git_object**)&commit, head_ref, GIT_OBJECT_COMMIT) != 0) { + const git_error *e = git_error_last(); + git_reference_free(head_ref); + git_repository_free(repo); + throw std::runtime_error(std::string("reset_changelog: peel HEAD: ") + + (e && e->message ? e->message : "???")); + } + git_tree *tree = nullptr; + if (git_commit_tree(&tree, commit) != 0) { + const git_error *e = git_error_last(); + git_commit_free(commit); + git_reference_free(head_ref); + git_repository_free(repo); + throw std::runtime_error(std::string("reset_changelog: commit_tree: ") + + (e && e->message ? e->message : "???")); + } + std::error_code ec; + auto rel_path = fs::relative(changelog_path, repo_dir, ec); + if (ec) { + git_tree_free(tree); + git_commit_free(commit); + git_reference_free(head_ref); + git_repository_free(repo); + throw std::runtime_error("reset_changelog: relative path error: " + ec.message()); + } + git_tree_entry *entry = nullptr; + if (git_tree_entry_bypath(&entry, tree, rel_path.string().c_str()) != 0) { + git_tree_free(tree); + git_commit_free(commit); + git_reference_free(head_ref); + git_repository_free(repo); + throw std::runtime_error("reset_changelog: cannot find debian/changelog in HEAD"); + } + git_blob *blob = nullptr; + if (git_tree_entry_to_object((git_object**)&blob, repo, entry) != 0) { + git_tree_entry_free(entry); + git_tree_free(tree); + git_commit_free(commit); + git_reference_free(head_ref); + git_repository_free(repo); + const git_error *e = git_error_last(); + throw std::runtime_error(std::string("reset_changelog: cannot get blob: ") + + (e && e->message ? 
e->message : "???")); + } + const char *content = (const char*)git_blob_rawcontent(blob); + size_t sz = git_blob_rawsize(blob); + { + std::ofstream out(changelog_path, std::ios::binary | std::ios::trunc); + if (!out.is_open()) { + git_blob_free(blob); + git_tree_entry_free(entry); + git_tree_free(tree); + git_commit_free(commit); + git_reference_free(head_ref); + git_repository_free(repo); + throw std::runtime_error("reset_changelog: cannot open " + changelog_path.string()); + } + out.write(content, sz); + } + git_blob_free(blob); + git_tree_entry_free(entry); + git_tree_free(tree); + git_commit_free(commit); + git_reference_free(head_ref); + git_repository_free(repo); +} + +/** + * pull_project: + * 1. clone/fetch repos + * 2. read HEAD commits + * 3. sync + */ +bool CiLogic::pull_project(std::shared_ptr &proj, std::shared_ptr log) { + ensure_git_inited(); + + log->append("Git initialized. Setting variables...\n"); + fs::path base_dir = "/srv/lubuntu-ci/repos"; + fs::path packaging_dir = base_dir / proj->package->name; + fs::path upstream_dir = base_dir / ("upstream-" + proj->package->name); + + // First do the actual pulls/fetches + try { + log->append("Cloning or fetching the upstream directory...\n"); + clone_or_fetch(upstream_dir, proj->package->upstream_url, std::nullopt, log); + log->append("Cloning or fetching the packaging directory...\n"); + clone_or_fetch(packaging_dir, proj->package->packaging_url, proj->package->packaging_branch, log); + } catch (...) { + return false; + } + + // Now read the HEAD commits and store them + log->append("Fetching complete. 
Storing Git commit data...\n"); + auto connection = get_thread_connection(); + *proj->packaging_commit = get_commit_from_pkg_repo(connection, packaging_dir.string(), log); + connection = get_thread_connection(); + *proj->upstream_commit = get_commit_from_pkg_repo(connection, upstream_dir.string(), log); + connection = get_thread_connection(); + proj->sync(connection); + + log->append("Done!"); + return true; +} + +/** + * create_project_tarball + */ +bool CiLogic::create_project_tarball(std::shared_ptr &proj, std::shared_ptr log) { + log->append("Getting metadata for orig tarball...\n"); + fs::path base_dir = "/srv/lubuntu-ci/repos"; + fs::path packaging_dir = base_dir / proj->package->name; + fs::path upstream_dir = base_dir / ("upstream-" + proj->package->name); + fs::path main_tarball = base_dir / (proj->package->name + "_MAIN.orig.tar.gz"); + fs::path copyright = packaging_dir / "debian" / "copyright"; + + std::vector excludes; + try { + excludes = extract_files_excluded(copyright.string()); + } catch(...) 
{} + excludes.emplace_back(".git/"); + log->append("Creating " + main_tarball.string() + " with the following exclusions:\n"); + for (auto exclude : excludes) { log->append(" - " + exclude + "\n"); } + + create_tarball(main_tarball.string(), upstream_dir.string(), excludes, log); + + log->append("Done!"); + return true; +} + +/** + * build_project + */ +std::tuple> CiLogic::build_project(std::shared_ptr proj, std::shared_ptr log) { + log->append("Building: " + proj->package->name + ", initializing...\n"); + std::set changes_files; + try { + fs::path base_dir = "/srv/lubuntu-ci/repos"; + fs::path packaging_dir = base_dir / proj->package->name; + fs::path changelog = packaging_dir / "debian" / "changelog"; + std::string base_ver = parse_version(changelog); + std::string current_time = get_current_utc_time("%Y%m%d%H%M"); + std::string base_git_ver = base_ver + "+git" + current_time; + + fs::path working_dir; + if (proj->package->large) { + working_dir = "/srv/lubuntu-ci/repos/build_output/.tmp.build." 
+ + proj->package->name + "_" + base_git_ver; + } else { + working_dir = create_temp_directory(); + } + + log->append(" => " + proj->package->name + " for " + proj->release->codename); + proj->upstream_version = base_git_ver + "~" + proj->release->codename; + sync(proj); + + // Update changelog for this release + update_changelog(packaging_dir, proj->release->codename, proj->upstream_version, std::to_string(proj->ppa_revision), log); + log->append("Changelog updated, copying the packaging..."); + + // Now copy entire packaging into a subfolder + fs::path dest_dir = working_dir / (proj->package->name + "-" + proj->upstream_version); + copy_directory(packaging_dir, dest_dir); + log->append("Copied packaging to " + dest_dir.string() + ", copying tarball..."); + + // Reset changelog after dchd$ (so local changes aren't committed) + reset_changelog(packaging_dir.parent_path() / proj->package->name, changelog); + log->append("Reset debian/changelog to HEAD..."); + + setenv("DEBFULLNAME", "Lugito", 1); + setenv("DEBEMAIL", "info@lubuntu.me", 1); + + // Copy main tarball in place + fs::path main_tarball = base_dir / (proj->package->name + "_MAIN.orig.tar.gz"); + size_t epoch_pos = proj->upstream_version.find(':'); + std::string tar_version = (epoch_pos != std::string::npos) + ? proj->upstream_version.substr(epoch_pos + 1) + : proj->upstream_version; + fs::path tar_name = proj->package->name + "_" + tar_version + ".orig.tar.gz"; + fs::path tar_dest = working_dir / tar_name; + fs::copy(main_tarball, tar_dest, fs::copy_options::overwrite_existing); + log->append("Copied tarball to " + tar_dest.string() + ", building..."); + + // Build + debuild_package(dest_dir, log); + + log->append("Source package built! 
Moving build artifacts..."); + + // Move build products to build_output + fs::path build_out = "/srv/lubuntu-ci/repos/build_output"; + fs::create_directories(build_out); + for (auto &entry : fs::directory_iterator(working_dir)) { + if (fs::is_regular_file(entry)) { + try { + fs::rename(entry.path(), build_out / entry.path().filename()); + } catch(const fs::filesystem_error &fe) { + if (fe.code() == std::errc::cross_device_link) { + fs::copy_file( + entry.path(), + build_out / entry.path().filename(), + fs::copy_options::overwrite_existing + ); + fs::remove(entry.path()); + } else { + throw; + } + } + } + } + + // Collect the changes files for this release + auto changes = collect_changes_files(proj->package->name, tar_version); + for (auto &c : changes) { + changes_files.insert(c); + } + log->append("Build done for " + proj->release->codename + "\n"); + + fs::remove_all(working_dir); + } catch(std::exception &ex) { + log->append("Build fail for " + proj->package->name + ": " + ex.what() + "\n"); + throw; + } + std::tuple> result = {true, changes_files}; + return result; +} + +/** + * upload_and_lint + */ +bool CiLogic::upload_and_lint(std::shared_ptr &proj, + const std::set changes_files, + bool skip_dput, + std::shared_ptr log) { + if (skip_dput) { + log->append("Skipping dput as requested.\n"); + return true; + } + if (changes_files.empty()) { + log->append("No changes to upload for " + proj->package->name + "\n"); + return false; + } + std::string base_target = proj->branch->upload_target; + for (auto &chfile : changes_files) { + bool uploaded = false; + std::string t = base_target; + for (int attempt = 1; attempt <= 5 && !uploaded; attempt++) { + log->append("dput attempt " + std::to_string(attempt) + + ": " + chfile + " => " + t + "\n"); + std::vector cmd {"dput", t, chfile}; + try { + if (!run_command(cmd, std::nullopt, false, log)) { + log->append("dput to " + t + " returned error!\n"); + } else { + log->append("Uploaded " + chfile + " => " + t + "\n"); + 
uploaded = true; + } + } catch(std::exception &ex) { + log->append("Upload error: " + std::string(ex.what()) + "\n"); + } + if (!uploaded) { + // If failed, try SSH variant + t = proj->branch->upload_target_ssh; + } + } + } + return true; +} + +/** + * do_summary + */ +void CiLogic::do_summary(bool skip_cleanup) { + log_info("Summary/cleanup stage"); + if (!skip_cleanup) { + fs::path outdir = "/srv/lubuntu-ci/repos/build_output"; + fs::remove_all(outdir); + log_info("Cleaned build output in " + outdir.string()); + } else { + log_info("skip_cleanup => leaving build_output alone."); + } +} + +/** + * Orchestrate entire pipeline + */ +void CiLogic::process_entire_pipeline(std::shared_ptr &proj, + bool skip_dput, + bool skip_cleanup) +{ + try { + bool pull_success = pull_project(proj); + bool tarball_success = create_project_tarball(proj); + const auto [build_success, changes_files] = build_project(proj); + upload_and_lint(proj, changes_files, skip_dput); + do_summary(skip_cleanup); + log_info("Pipeline done for " + proj->package->name); + } catch(std::exception &ex) { + log_error("Pipeline fail for " + proj->package->name + ": " + ex.what()); + } +} + +/** + * get_config + */ +std::vector> CiLogic::get_config(const std::string &repo_name, + int page, + int per_page, + const std::string &sort_by, + const std::string &sort_order) { + // If we have page/per_page/sort_by/sort_order, do a sort & pagination + if (page != 0 && per_page != 0 && (!sort_by.empty()) && (!sort_order.empty())) { + auto getComparator = [](const std::string& sort_by, const std::string& order) { + return [sort_by, order](const std::shared_ptr& a, const std::shared_ptr& b) { + if (sort_by == "name") { + return (order == "asc") + ? (a->package->name < b->package->name) + : (a->package->name > b->package->name); + } else if (sort_by == "branch_name") { + return (order == "asc") + ? 
(a->branch->name < b->branch->name) + : (a->branch->name > b->branch->name); + } else if (sort_by == "packaging_commit") { + if (a->packaging_commit && b->packaging_commit) { + auto time_a = a->packaging_commit->commit_datetime.get_sys_time(); + auto time_b = b->packaging_commit->commit_datetime.get_sys_time(); + return (order == "asc") ? (time_a < time_b) : (time_a > time_b); + } else { + // fallback comparison + return (order == "asc") + ? (a->package->name < b->package->name) + : (a->package->name > b->package->name); + } + } else if (sort_by == "upstream_commit") { + if (a->upstream_commit && b->upstream_commit) { + auto time_a = a->upstream_commit->commit_datetime.get_sys_time(); + auto time_b = b->upstream_commit->commit_datetime.get_sys_time(); + return (order == "asc") ? (time_a < time_b) : (time_a > time_b); + } else { + // fallback comparison + return (order == "asc") + ? (a->package->name < b->package->name) + : (a->package->name > b->package->name); + } + } else if (sort_by == "build_status") { + int a_successful_task_count = a->successful_task_count(); + int b_successful_task_count = b->successful_task_count(); + if (a_successful_task_count != b_successful_task_count) { + return (order == "asc") + ? (a_successful_task_count < b_successful_task_count) + : (a_successful_task_count > b_successful_task_count); + } else { + return (order == "asc") + ? 
(a->total_task_count() < b->total_task_count()) + : (a->total_task_count() > b->total_task_count()); + } + } + // if invalid sort_by + return false; + }; + }; + auto paginate = [getComparator](std::vector>& items, + int page, int per_page, + const std::string& sort_by, + const std::string& sort_order) { + std::sort(items.begin(), items.end(), getComparator(sort_by, sort_order)); + int startIdx = (page - 1) * per_page; + int endIdx = std::min(startIdx + per_page, static_cast(items.size())); + if (startIdx >= (int)items.size()) { + return std::vector>(); + } + return std::vector>(items.begin() + startIdx, items.begin() + endIdx); + }; + + auto copy_confs = get_packageconfs(); + return paginate(copy_confs, page, per_page, sort_by, sort_order); + } + // If just repo_name is provided, filter by that. If empty, return all + else if (!repo_name.empty()) { + std::vector> filtered; + for (const auto &pc : get_packageconfs()) { + if (pc->package->name == repo_name) { + filtered.push_back(pc); + } + } + return filtered; + } + + // Otherwise return everything + return get_packageconfs(); +} + +std::string CiLogic::queue_pull_tarball(std::vector> repos, + std::unique_ptr& task_queue, + const std::map> job_statuses) { + std::string msg; + std::map> encountered_items; + std::mutex task_assignment_mutex; + + try { + for (auto r : repos) { + bool is_ghost_pull = false; + + // Attempt to find if we've seen this package->name before + auto found_it = encountered_items.find(r->package->name); + std::shared_ptr new_item = std::make_shared(); + if (found_it != encountered_items.end()) { + std::lock_guard lock(task_assignment_mutex); + is_ghost_pull = true; + + r->assign_task(job_statuses.at("pull"), found_it->second->first_pull_task, r); + r->assign_task(job_statuses.at("tarball"), found_it->second->first_tarball_task, r); + r->packaging_commit = found_it->second->packaging_commit; + r->upstream_commit = found_it->second->upstream_commit; + sync(r); + + continue; + } + + 
task_queue->enqueue( + job_statuses.at("pull"), + [this](std::shared_ptr log) mutable { + std::shared_ptr pkgconf = log->get_task_context()->get_parent_packageconf(); + bool pull_ok = pull_project(pkgconf, log); + }, + r + ); + + new_item->first_pull_task = r->get_task_by_jobstatus(job_statuses.at("pull")); + + task_queue->enqueue( + job_statuses.at("tarball"), + [this](std::shared_ptr log) mutable { + std::shared_ptr pkgconf = log->get_task_context()->get_parent_packageconf(); + bool tarball_ok = create_project_tarball(pkgconf, log); + }, + r + ); + + new_item->first_tarball_task = r->get_task_by_jobstatus(job_statuses.at("tarball")); + new_item->first_pkgconf = r; + + new_item->packaging_commit = r->packaging_commit; + new_item->upstream_commit = r->upstream_commit; + encountered_items[r->package->name] = new_item; + } + msg = "Succeeded"; + } catch (...) { + msg = "Failed"; + } + + return msg; +} + +std::map> CiLogic::get_job_statuses() { + if (!_cached_job_statuses.empty()) { return _cached_job_statuses; } + + auto connection = get_thread_connection(); + static const std::map> statuses = { + {"pull", std::make_shared(JobStatus(connection, 1))}, + {"tarball", std::make_shared(JobStatus(connection, 2))}, + {"source_build", std::make_shared(JobStatus(connection, 3))}, + {"upload", std::make_shared(JobStatus(connection, 4))}, + {"source_check", std::make_shared(JobStatus(connection, 5))}, + {"build_check", std::make_shared(JobStatus(connection, 6))}, + {"lintian", std::make_shared(JobStatus(connection, 7))}, + {"britney", std::make_shared(JobStatus(connection, 8))} + }; + _cached_job_statuses = statuses; + return statuses; +} + +std::vector> CiLogic::get_packageconfs() { + std::lock_guard lock(packageconfs_mutex_); + return packageconfs; +} + +std::shared_ptr CiLogic::get_packageconf_by_id(int id) { + std::lock_guard lock(packageconfs_mutex_); + auto it = std::ranges::find_if(packageconfs, [id](auto pkgconf) { + return pkgconf->id == id; + }); + + if (it != 
packageconfs.end()) { + return *it; + } + throw std::runtime_error("PackageConf not found"); +} + +std::vector> CiLogic::get_packageconfs_by_ids(std::set ids) { + std::lock_guard lock(packageconfs_mutex_); + + auto filtered_view = packageconfs + | std::views::filter([&](auto pkgconf) { + return ids.contains(pkgconf->id); + }); + + return std::vector>(filtered_view.begin(), filtered_view.end()); +} + +void CiLogic::set_packageconfs(std::vector> _pkgconfs) { + std::lock_guard lock(packageconfs_mutex_); + packageconfs = _pkgconfs; +} + +void CiLogic::sync(std::shared_ptr pkgconf) { + std::lock_guard lock(packageconfs_mutex_); + auto connection = get_thread_connection(); + pkgconf->sync(connection); +} + +/** + * Stub logs + */ +std::string CiLogic::get_logs_for_repo_conf(int package_conf_id) { + return "Not implemented"; +} diff --git a/cpp/ci_logic.h b/cpp/ci_logic.h new file mode 100644 index 0000000..35aeae2 --- /dev/null +++ b/cpp/ci_logic.h @@ -0,0 +1,133 @@ +// Copyright (C) 2024 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +// cpp/ci_logic.h +// [License Header as in original] + +#ifndef CI_LOGIC_H +#define CI_LOGIC_H + +#include "ci_database_objs.h" +#include "task_queue.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +struct CiProject; + +/** + * Data describing one package to pull/build/etc. 
+ */ +struct CiProject { + std::string name; + std::string version; + std::string time; + std::string upload_target; + std::string upstream_url; + std::string packaging_url; + std::optional packaging_branch; + std::filesystem::path main_tarball; + bool large = false; + + // These get populated during build + std::vector changes_files; + std::vector devel_changes_files; +}; + +class CiLogic { + public: + // Initialize global configurations + void init_global(); + + // Load YAML configuration from a given path + YAML::Node load_yaml_config(const std::filesystem::path &config_path); + + // Convert a YAML node to a CiProject structure + CiProject yaml_to_project(const YAML::Node &pkg_node); + + // Clone or fetch a git repository + void clone_or_fetch(const std::filesystem::path &repo_dir, const std::string &repo_url, const std::optional &branch, std::shared_ptr log = NULL); + + bool pull_project(std::shared_ptr &proj, std::shared_ptr log = NULL); + bool create_project_tarball(std::shared_ptr &proj, std::shared_ptr log = NULL); + std::tuple> build_project(std::shared_ptr proj, std::shared_ptr log = NULL); + bool upload_and_lint(std::shared_ptr &proj, const std::set changes_files, bool skip_dput, std::shared_ptr log = NULL); + + // Perform cleanup and summarize the build process + void do_summary(bool skip_cleanup); + + // Process the entire pipeline for a given PackageConf ID + void process_entire_pipeline(std::shared_ptr &proj, bool skip_dput, bool skip_cleanup); + + // Retrieve all PackageConf entries from the database + std::vector> get_config(const std::string &repo_name = "", int page = 0, int per_page = 0, const std::string& sort_by = "", const std::string& sort_order = ""); + + // Function to enqueue tasks + void enqueue(std::function task); + + // Fetch logs for a specific PackageConf ID + std::string get_logs_for_repo_conf(int package_conf_id); + + std::map> get_job_statuses(); + std::vector> get_packageconfs(); + std::shared_ptr get_packageconf_by_id(int id); 
+ std::vector> get_packageconfs_by_ids(std::set ids); + void set_packageconfs(std::vector> _pkgconfs); + void sync(std::shared_ptr pkgconf); + + QSqlDatabase get_thread_connection(); + + std::string queue_pull_tarball(std::vector> repos, + std::unique_ptr& task_queue, + const std::map> job_statuses); + + std::vector releases; + std::vector packages; + std::vector branches; + + private: + // Initialize the database + bool init_database(const QString& connectionName = "LubuntuCIConnection", + const QString& databasePath = "/srv/lubuntu-ci/repos/ci-tools/lubuntu_ci.db"); + + void debuild_package(const fs::path &packaging_dir, std::shared_ptr log); + + QSqlDatabase p_db; + + mutable std::mutex connection_mutex_; + mutable std::mutex packageconfs_mutex_; + std::vector> packageconfs; + std::map> _cached_job_statuses; + + struct package_conf_item { + std::shared_ptr first_pkgconf; + std::shared_ptr first_pull_task = std::make_shared(); + std::shared_ptr first_tarball_task = std::make_shared(); + std::shared_ptr packaging_commit = std::make_shared(); + std::shared_ptr upstream_commit = std::make_shared(); + }; +}; + +#endif // CI_LOGIC_H diff --git a/cpp/common.cpp b/cpp/common.cpp index 0c5ef8e..bdea803 100644 --- a/cpp/common.cpp +++ b/cpp/common.cpp @@ -1,4 +1,4 @@ -// Copyright (C) 2024 Simon Quigley +// Copyright (C) 2024-2025 Simon Quigley // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by @@ -14,110 +14,146 @@ // along with this program. If not, see . 
#include "common.h" +#include "utilities.h" #include "/usr/include/archive.h" -#include "/usr/include/archive_entry.h" -#include +#include #include #include #include #include #include #include -#include -#include #include +#include +#include +#include +#include +#include -namespace fs = std::filesystem; +// Define the global 'verbose' variable +bool verbose = false; -static void log_info(const std::string &msg) { +// Logger function implementations +void log_info(const std::string &msg) { std::cout << "[INFO] " << msg << "\n"; } -static void log_error(const std::string &msg) { + +void log_warning(const std::string &msg) { + std::cerr << "[WARNING] " << msg << "\n"; +} + +void log_error(const std::string &msg) { std::cerr << "[ERROR] " << msg << "\n"; } -std::string parse_version(const fs::path &changelog_path) { - if (!fs::exists(changelog_path)) { - throw std::runtime_error("Changelog not found: " + changelog_path.string()); - } - std::ifstream f(changelog_path); - if (!f) throw std::runtime_error("Unable to open changelog"); - std::string first_line; - std::getline(f, first_line); - f.close(); - - size_t start = first_line.find('('); - size_t end = first_line.find(')'); - if (start == std::string::npos || end == std::string::npos) { - throw std::runtime_error("Invalid changelog format"); +void log_verbose(const std::string &msg) { + if (verbose) { + std::cout << "[VERBOSE] " << msg << "\n"; } - std::string version_match = first_line.substr(start+1, end - (start+1)); +} - std::string epoch; - std::string upstream_version = version_match; - if (auto pos = version_match.find(':'); pos != std::string::npos) { - epoch = version_match.substr(0, pos); - upstream_version = version_match.substr(pos+1); - } - if (auto pos = upstream_version.find('-'); pos != std::string::npos) { - upstream_version = upstream_version.substr(0, pos); - } +namespace fs = std::filesystem; - std::regex git_regex("(\\+git[0-9]+)?(~[a-z]+)?$"); - upstream_version = 
std::regex_replace(upstream_version, git_regex, ""); +bool run_command(const std::vector &cmd, + const std::optional &cwd, + bool show_output, + std::shared_ptr log) { + if (cmd.empty()) { + throw std::runtime_error("Command is empty"); + } - auto t = std::time(nullptr); - std::tm tm = *std::gmtime(&t); - char buf[32]; - std::strftime(buf, sizeof(buf), "%Y%m%d%H%M", &tm); - std::string current_date = buf; + QProcess process; - std::string version; - if (!epoch.empty()) { - version = epoch + ":" + upstream_version + "+git" + current_date; - } else { - version = upstream_version + "+git" + current_date; + // Set the working directory if provided + if (cwd) { + process.setWorkingDirectory(QString::fromStdString(cwd->string())); } - return version; -} + // Set up the environment (if needed) + QProcessEnvironment env = QProcessEnvironment::systemEnvironment(); + process.setProcessEnvironment(env); -void run_command(const std::vector &cmd, const std::optional &cwd, bool show_output) { - std::string full_cmd; - for (const auto &c : cmd) { - full_cmd += c + " "; + // Extract executable and arguments + QString program = QString::fromStdString(cmd[0]); + QStringList arguments; + for (size_t i = 1; i < cmd.size(); ++i) { + arguments << QString::fromStdString(cmd[i]); } - if (cwd) { - full_cmd = "cd " + cwd->string() + " && " + full_cmd; + + // Start the command + process.start(program, arguments); + if (!process.waitForStarted()) { + throw std::runtime_error("Failed to start the command: " + program.toStdString()); } - log_info("Executing: " + full_cmd); - int ret = std::system(full_cmd.c_str()); - if (ret != 0) { - log_error("Command failed: " + full_cmd); - throw std::runtime_error("Command failed"); + + // Stream output while the process is running + while (process.state() == QProcess::Running) { + if (process.waitForReadyRead()) { + QByteArray output = process.readAllStandardOutput(); + QByteArray error = process.readAllStandardError(); + + if (log) { + 
log->append(output.toStdString()); + log->append(error.toStdString()); + } + + if (show_output) { + std::cout << output.toStdString(); + std::cerr << error.toStdString(); + } + } } - if (show_output) { - std::cout << "[INFO] Command succeeded: " + full_cmd << "\n"; + + // Wait for the process to finish + process.waitForFinished(); + + // Capture return code and errors + if (process.exitStatus() != QProcess::NormalExit || process.exitCode() != 0) { + QByteArray error_output = process.readAllStandardError(); + std::string error_message = "Command failed with exit code: " + std::to_string(process.exitCode()); + if (!error_output.isEmpty()) { + error_message += "\nError Output: " + error_output.toStdString(); + } + throw std::runtime_error(error_message); } + + return true; } -void clean_old_logs(const fs::path &log_dir, int max_age_seconds) { - auto now = std::chrono::system_clock::now(); - for (auto &entry : fs::directory_iterator(log_dir)) { - if (fs::is_regular_file(entry)) { - auto ftime = fs::last_write_time(entry); - auto sctp = decltype(ftime)::clock::to_sys(ftime); - auto age = std::chrono::duration_cast(now - sctp).count(); - if (age > max_age_seconds) { - fs::remove(entry); +// Function to extract excluded files from a copyright file +std::vector extract_files_excluded(const std::string& filepath) { + std::ifstream file(filepath); + if (!file.is_open()) { + throw std::runtime_error("Failed to open file: " + filepath); + } + + std::vector files_excluded; + std::string line; + std::regex files_excluded_pattern(R"(Files-Excluded:\s*(.*))"); + bool in_files_excluded = false; + + while (std::getline(file, line)) { + if (std::regex_match(line, files_excluded_pattern)) { + in_files_excluded = true; + std::smatch match; + if (std::regex_search(line, match, files_excluded_pattern) && match.size() > 1) { + files_excluded.emplace_back(match[1]); + } + } else if (in_files_excluded) { + if (!line.empty() && (line[0] == ' ' || line[0] == '\t')) { + 
files_excluded.emplace_back(line.substr(1)); + } else { + break; // End of Files-Excluded block } } } -} + return files_excluded; +} -void create_tarball(const std::string& tarballPath, const std::string& directory, const std::vector& exclusions) { - std::cout << "[INFO] Creating tarball: " << tarballPath << std::endl; +// Function to create a tarball +void create_tarball(const std::string& tarballPath, const std::string& directory, const std::vector& exclusions, std::shared_ptr log) { + log->append("Creating tarball: " + tarballPath); struct archive* a = archive_write_new(); if (!a) { @@ -145,43 +181,69 @@ void create_tarball(const std::string& tarballPath, const std::string& directory throw std::runtime_error(err); } - for (auto it = fs::recursive_directory_iterator(directory, fs::directory_options::follow_directory_symlink | fs::directory_options::skip_permission_denied); + // Initialize a set to track added relative paths to prevent duplication + std::unordered_set added_paths; + + // Iterate through the directory recursively without following symlinks + for (auto it = fs::recursive_directory_iterator( + directory, + fs::directory_options::skip_permission_denied); it != fs::recursive_directory_iterator(); ++it) { const auto& path = it->path(); std::error_code ec; - fs::path relativePath = fs::relative(path, directory, ec); + fs::path relative_path = fs::relative(path, directory, ec); if (ec) { - log_error("Failed to compute relative path for: " + path.string() + " Error: " + ec.message()); + log->append("Failed to compute relative path for: " + path.string() + " Error: " + ec.message()); continue; } - bool excluded = std::any_of(exclusions.begin(), exclusions.end(), [&relativePath](const std::string& exclusion) { - return relativePath.string().find(exclusion) != std::string::npos; + // Normalize the relative path to avoid discrepancies + fs::path normalized_relative_path = relative_path.lexically_normal(); + std::string relative_path_str = 
normalized_relative_path.string(); + + // Check if this path has already been added + if (!added_paths.insert(relative_path_str).second) { + log->append("Duplicate path detected and skipped: " + relative_path_str); + continue; // Skip adding this duplicate path + } + + // Exclusion logic (if any exclusions are provided) + bool excluded = std::any_of(exclusions.begin(), exclusions.end(), [&relative_path_str](const std::string& exclusion) { + return relative_path_str.find(exclusion) != std::string::npos; }); if (excluded) { continue; } fs::file_status fstatus = it->symlink_status(ec); if (ec) { - log_error("Failed to get file status for: " + path.string() + " Error: " + ec.message()); + log->append("Failed to get file status for: " + path.string() + " Error: " + ec.message()); continue; } struct archive_entry* entry = archive_entry_new(); if (!entry) { - log_error("Failed to create archive entry for: " + path.string()); + log->append("Failed to create archive entry for: " + path.string()); archive_write_free(a); throw std::runtime_error("Failed to create archive entry."); } - archive_entry_set_pathname(entry, relativePath.c_str()); + std::string entry_path = relative_path_str; + if (fs::is_directory(fstatus)) { + // Ensure the directory pathname ends with '/' + if (!entry_path.empty() && entry_path.back() != '/') { + entry_path += '/'; + } + archive_entry_set_pathname(entry, entry_path.c_str()); + } else { + archive_entry_set_pathname(entry, entry_path.c_str()); + } // Set file type, permissions, and size if (fs::is_regular_file(fstatus)) { // Regular file uintmax_t filesize = fs::file_size(path, ec); if (ec) { - log_error("Cannot get file size for: " + path.string() + " Error: " + ec.message()); + log->append("Cannot get file size for: " + path.string() + " Error: " + ec.message()); archive_entry_free(entry); continue; } @@ -192,7 +254,7 @@ void create_tarball(const std::string& tarballPath, const std::string& directory else if (fs::is_symlink(fstatus)) { fs::path 
target = fs::read_symlink(path, ec); if (ec) { - log_error("Cannot read symlink for: " + path.string() + " Error: " + ec.message()); + log->append("Cannot read symlink for: " + path.string() + " Error: " + ec.message()); archive_entry_free(entry); continue; } @@ -206,7 +268,7 @@ void create_tarball(const std::string& tarballPath, const std::string& directory archive_entry_set_perm(entry, static_cast(fstatus.permissions())); } else { - log_error("Unsupported file type for: " + path.string()); + log->append("Unsupported file type for: " + path.string()); archive_entry_free(entry); continue; } @@ -215,18 +277,18 @@ void create_tarball(const std::string& tarballPath, const std::string& directory fs::file_time_type ftime = fs::last_write_time(path, ec); std::time_t mtime; if (ec) { - log_error("Failed to get last write time for: " + path.string() + " Error: " + ec.message()); + log->append("Failed to get last write time for: " + path.string() + " Error: " + ec.message()); // Obtain current UTC time as fallback auto now = std::chrono::system_clock::now(); mtime = std::chrono::system_clock::to_time_t(now); - log_info("Setting default mtime (current UTC time) for: " + path.string()); + log->append("Setting default mtime (current UTC time) for: " + path.string()); } else { mtime = to_time_t(ftime); } archive_entry_set_mtime(entry, mtime, 0); if (archive_write_header(a, entry) != ARCHIVE_OK) { - log_error("Failed to write header for: " + path.string() + " Error: " + archive_error_string(a)); + log->append("Failed to write header for: " + path.string() + " Error: " + archive_error_string(a)); archive_entry_free(entry); continue; } @@ -234,7 +296,7 @@ void create_tarball(const std::string& tarballPath, const std::string& directory if (fs::is_regular_file(fstatus)) { std::ifstream fileStream(path, std::ios::binary); if (!fileStream) { - log_error("Failed to open file for reading: " + path.string()); + log->append("Failed to open file for reading: " + path.string()); 
archive_entry_free(entry); continue; } @@ -246,14 +308,14 @@ void create_tarball(const std::string& tarballPath, const std::string& directory std::streamsize bytesRead = fileStream.gcount(); if (bytesRead > 0) { if (archive_write_data(a, buffer, static_cast(bytesRead)) < 0) { - log_error("Failed to write data for: " + path.string() + " Error: " + archive_error_string(a)); + log->append("Failed to write data for: " + path.string() + " Error: " + archive_error_string(a)); break; } } } if (fileStream.bad()) { - log_error("Error reading file: " + path.string()); + log->append("Error reading file: " + path.string()); } } @@ -273,23 +335,5 @@ void create_tarball(const std::string& tarballPath, const std::string& directory throw std::runtime_error(err); } - std::cout << "[INFO] Tarball created and compressed: " << tarballPath << std::endl; -} - -std::string get_current_utc_time() { - auto now = std::chrono::system_clock::now(); - std::time_t now_time = std::chrono::system_clock::to_time_t(now); - std::tm tm_utc; - gmtime_r(&now_time, &tm_utc); - char buf[20]; - std::strftime(buf, sizeof(buf), "%Y-%m-%dT%H:%M:%S", &tm_utc); - return std::string(buf); -} - -std::time_t to_time_t(const fs::file_time_type& ftime) { - using namespace std::chrono; - // Convert to system_clock time_point - auto sctp = time_point_cast(ftime - fs::file_time_type::clock::now() - + system_clock::now()); - return system_clock::to_time_t(sctp); + log->append("Tarball created and compressed: " + tarballPath); } diff --git a/cpp/common.h b/cpp/common.h index 5afebad..649db87 100644 --- a/cpp/common.h +++ b/cpp/common.h @@ -13,24 +13,73 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-#pragma once +#ifndef COMMON_H +#define COMMON_H + +#include "utilities.h" #include -#include -#include #include -#include - -std::string parse_version(const std::filesystem::path &changelog_path); -void run_command(const std::vector &cmd, const std::optional &cwd = std::nullopt, bool show_output=false); -void clean_old_logs(const std::filesystem::path &log_dir, int max_age_seconds=86400); -void create_tarball(const std::string& tarballPath, const std::string& directory, const std::vector& exclusions); -std::string get_current_utc_time(); -std::time_t to_time_t(const std::filesystem::file_time_type& ftime); - -static std::counting_semaphore<5> semaphore(5); -struct semaphore_guard { - std::counting_semaphore<5> &sem; - semaphore_guard(std::counting_semaphore<5> &s) : sem(s) { sem.acquire(); } - ~semaphore_guard() { sem.release(); } +#include +#include +#include +#include +#include + +namespace fs = std::filesystem; +class Task; + +class Log { +private: + std::string data = ""; + mutable std::shared_mutex lock_; + std::weak_ptr task_context_; + +public: + void append(const std::string& str) { + std::unique_lock lock(lock_); + if (str.empty()) { return; } + data += std::format("[{}] {}", get_current_utc_time("%Y-%m-%dT%H:%M:%SZ"), str.ends_with('\n') ? 
str : str + '\n'); + } + + void set_log(const std::string& str) { + std::unique_lock lock(lock_); + data = str; + } + + std::string get() const { + std::shared_lock lock(lock_); + return std::regex_replace(data, std::regex(R"(^\s+)"), ""); + } + + void assign_task_context(std::shared_ptr task) { + task_context_ = task; + } + + std::shared_ptr get_task_context() const { + return task_context_.lock(); + } }; +// Logger functions +extern bool verbose; +void log_info(const std::string &msg); +void log_warning(const std::string &msg); +void log_error(const std::string &msg); +void log_verbose(const std::string &msg); + +// Function to run a command with optional working directory and show output +bool run_command(const std::vector &cmd, + const std::optional &cwd = std::nullopt, + bool show_output = false, + std::shared_ptr log = nullptr); + +// Function to extract excluded files from a copyright file +std::vector extract_files_excluded(const std::string& filepath); + +// Function to create a tarball +void create_tarball(const std::string& tarballPath, + const std::string& directory, + const std::vector& exclusions, + std::shared_ptr log = nullptr); + +#endif // COMMON_H diff --git a/cpp/fetch-indexes.cpp b/cpp/fetch-indexes.cpp index cb4ca00..07ed9e8 100644 --- a/cpp/fetch-indexes.cpp +++ b/cpp/fetch-indexes.cpp @@ -54,7 +54,6 @@ void processRelease(const std::string& release, const YAML::Node& config); void refresh(const std::string& url, const std::string& pocket, const std::string& britneyCache, std::mutex& logMutex); int executeAndLog(const std::string& command); -// Change global_lp_opt to match login() return type static std::optional> global_lp_opt; static launchpad* global_lp = nullptr; @@ -486,7 +485,7 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) { std::string DEST = BRITNEY_DATADIR + RELEASE + "-proposed"; fs::create_directories(DEST); fs::create_directories(fs::path(BRITNEY_DATADIR) / (RELEASE + "-proposed") / "state"); - 
writeFile(fs::path(BRITNEY_DATADIR) / (RELEASE + "-proposed") / "state" / "age-policy-dates", ""); + write_file(fs::path(BRITNEY_DATADIR) / (RELEASE + "-proposed") / "state" / "age-policy-dates", ""); fs::remove(fs::path(DEST) / "Hints"); fs::create_symlink(BRITNEY_HINTDIR, fs::path(DEST) / "Hints"); @@ -495,39 +494,39 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) { std::string sourcesContent; for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) { if (p.path().filename() == "Sources.gz") { - sourcesContent += decompressGzip(p.path()); + sourcesContent += decompress_gzip(p.path()); } } - writeFile(fs::path(DEST) / "Sources", sourcesContent); + write_file(fs::path(DEST) / "Sources", sourcesContent); for (const auto& arch : ARCHES) { std::string packagesContent; for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) { if (p.path().filename() == "Packages.gz" && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) { - packagesContent += decompressGzip(p.path()); + packagesContent += decompress_gzip(p.path()); } } - writeFile(fs::path(DEST) / ("Packages_" + arch), packagesContent); + write_file(fs::path(DEST) / ("Packages_" + arch), packagesContent); } for (const auto& arch : PORTS_ARCHES) { std::string packagesContent; for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) { if (p.path().filename() == "Packages.gz" && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) { - packagesContent += decompressGzip(p.path()); + packagesContent += decompress_gzip(p.path()); } } - writeFile(fs::path(DEST) / ("Packages_" + arch), packagesContent); + write_file(fs::path(DEST) / ("Packages_" + arch), packagesContent); } - writeFile(fs::path(DEST) / "Blocks", ""); - writeFile(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", ""); + write_file(fs::path(DEST) / "Blocks", ""); 
+ write_file(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", ""); } { DEST = BRITNEY_DATADIR + RELEASE; fs::create_directories(DEST); fs::create_directories(fs::path(BRITNEY_DATADIR) / RELEASE / "state"); - writeFile(fs::path(BRITNEY_DATADIR) / RELEASE / "state" / "age-policy-dates", ""); + write_file(fs::path(BRITNEY_DATADIR) / RELEASE / "state" / "age-policy-dates", ""); fs::remove(fs::path(DEST) / "Hints"); fs::create_symlink(BRITNEY_HINTDIR, fs::path(DEST) / "Hints"); @@ -536,45 +535,45 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) { std::string sourcesContent; for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) { if (p.path().filename() == "Sources.gz" && p.path().string().find(RELEASE) != std::string::npos) { - sourcesContent += decompressGzip(p.path()); + sourcesContent += decompress_gzip(p.path()); } } - writeFile(fs::path(DEST) / "Sources", sourcesContent); - regexReplaceInFile(fs::path(DEST) / "Sources", "Section: universe/", "Section: "); + write_file(fs::path(DEST) / "Sources", sourcesContent); + regex_replace_in_file(fs::path(DEST) / "Sources", "Section: universe/", "Section: "); } for (const auto& arch : ARCHES) { std::string packagesContent; for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) { if (p.path().filename() == "Packages.gz" && p.path().string().find(RELEASE) != std::string::npos && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) { - packagesContent += decompressGzip(p.path()); + packagesContent += decompress_gzip(p.path()); } } fs::path packagesFilePath = fs::path(DEST) / ("Packages_" + arch); - writeFile(packagesFilePath, packagesContent); - regexReplaceInFile(packagesFilePath, "Section: universe/", "Section: "); + write_file(packagesFilePath, packagesContent); + regex_replace_in_file(packagesFilePath, "Section: universe/", "Section: "); } for (const auto& arch : PORTS_ARCHES) { std::string packagesContent; for (auto& p : 
fs::recursive_directory_iterator(BRITNEY_CACHE)) { if (p.path().filename() == "Packages.gz" && p.path().string().find(RELEASE) != std::string::npos && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) { - packagesContent += decompressGzip(p.path()); + packagesContent += decompress_gzip(p.path()); } } fs::path packagesFilePath = fs::path(DEST) / ("Packages_" + arch); - writeFile(packagesFilePath, packagesContent); - regexReplaceInFile(packagesFilePath, "Section: universe/", "Section: "); + write_file(packagesFilePath, packagesContent); + regex_replace_in_file(packagesFilePath, "Section: universe/", "Section: "); } - writeFile(fs::path(DEST) / "Blocks", ""); - writeFile(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", ""); + write_file(fs::path(DEST) / "Blocks", ""); + write_file(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", ""); } { - std::string configContent = readFile(BRITNEY_CONF); + std::string configContent = read_file(BRITNEY_CONF); configContent = std::regex_replace(configContent, std::regex("%\\{SERIES\\}"), RELEASE); - writeFile("britney.conf", configContent); + write_file("britney.conf", configContent); } std::cout << "Running britney..." << std::endl; @@ -769,5 +768,5 @@ void refresh(const std::string& url, const std::string& pocket, const std::strin fs::path outputPath = dir / urlPath.filename(); - downloadFileWithTimestamping(url, outputPath, logFilePath, logMutex); + download_file_with_timestamping(url, outputPath, logFilePath, logMutex); } diff --git a/cpp/lintian-ppa.cpp b/cpp/lintian-ppa.cpp index 8e913b0..6374b1e 100644 --- a/cpp/lintian-ppa.cpp +++ b/cpp/lintian-ppa.cpp @@ -1,481 +1,30 @@ -// Copyright (C) 2024 Simon Quigley -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
-// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - #include "common.h" -#include "utilities.h" - -#include "launchpad.h" -#include "archive.h" -#include "distribution.h" -#include "distro_series.h" -#include "person.h" - +#include "ci_logic.h" +#include +#include #include -#include -#include -#include #include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace fs = std::filesystem; - -// Global variables for logging -std::mutex logMutex; -std::ofstream globalLogFile; - -// Function to log informational messages -void log_info_custom(const std::string &msg) { - std::lock_guard lock(logMutex); - if (globalLogFile.is_open()) { - auto now = std::chrono::system_clock::now(); - std::time_t now_c = std::chrono::system_clock::to_time_t(now); - char timebuf[20]; - std::strftime(timebuf, sizeof(timebuf), "%Y-%m-%d %H:%M:%S", std::gmtime(&now_c)); - globalLogFile << timebuf << " - INFO - " << msg << "\n"; - globalLogFile.flush(); - } -} - -// Function to log error messages -void log_error_custom(const std::string &msg) { - std::lock_guard lock(logMutex); - if (globalLogFile.is_open()) { - auto now = std::chrono::system_clock::now(); - std::time_t now_c = std::chrono::system_clock::to_time_t(now); - char timebuf[20]; - std::strftime(timebuf, sizeof(timebuf), "%Y-%m-%d %H:%M:%S", std::gmtime(&now_c)); - globalLogFile << timebuf << " - ERROR - " << msg << "\n"; - globalLogFile.flush(); - } -} - -// Function to parse command-line arguments -struct Arguments { - std::string user; - std::string ppa; - std::optional ppa2; - std::optional override_output; -}; - -Arguments 
parseArguments(int argc, char* argv[]) { - Arguments args; - int opt; - bool showHelp = false; - - static struct option long_options[] = { - {"user", required_argument, 0, 'u'}, - {"ppa", required_argument, 0, 'p'}, - {"ppa2", required_argument, 0, '2'}, - {"override-output", required_argument, 0, 'o'}, - {"help", no_argument, 0, 'h'}, - {0, 0, 0, 0} - }; - - while ((opt = getopt_long(argc, argv, "u:p:2:o:h", long_options, nullptr)) != -1) { - switch (opt) { - case 'u': - args.user = optarg; - break; - case 'p': - args.ppa = optarg; - break; - case '2': - args.ppa2 = optarg; - break; - case 'o': - args.override_output = optarg; - break; - case 'h': - default: - std::cout << "Usage: " << argv[0] << " --user --ppa [--ppa2 ] [--override-output ]\n"; - exit(0); - } - } - - if (args.user.empty() || args.ppa.empty()) { - std::cerr << "Error: --user and --ppa are required arguments.\n"; - std::cout << "Usage: " << argv[0] << " --user --ppa [--ppa2 ] [--override-output ]\n"; - exit(1); - } - - return args; -} - -// Function to parse the Changes file and extract Source and Architecture -struct ChangesInfo { - std::string source; - std::string architecture; -}; - -std::optional parse_changes_file(const fs::path& changesPath) { - if (!fs::exists(changesPath)) { - log_error_custom("Changelog not found: " + changesPath.string()); - return std::nullopt; - } - - std::ifstream infile(changesPath); - if (!infile.is_open()) { - log_error_custom("Unable to open changelog: " + changesPath.string()); - return std::nullopt; - } - - ChangesInfo info; - std::string line; - while (std::getline(infile, line)) { - if (line.empty()) - break; // End of headers - if (line.find("Source:") == 0) { - info.source = line.substr(7); - // Trim whitespace - info.source.erase(0, info.source.find_first_not_of(" \t")); - } - if (line.find("Architecture:") == 0) { - info.architecture = line.substr(13); - // Trim whitespace - info.architecture.erase(0, info.architecture.find_first_not_of(" \t")); - } - } - 
- infile.close(); +#include - if (info.source.empty() || info.architecture.empty()) { - log_error_custom("Invalid changelog format in: " + changesPath.string()); - return std::nullopt; +int main(int argc, char** argv) { + if (argc<2) { + std::cerr << "Usage: lintian-ppa [--verbose]\n"; + return 1; } - - return info; -} - -// Function to run lintian and capture its output -std::optional run_lintian(const fs::path& changesPath) { - std::vector lintianCmd = {"lintian", "-EvIL", "+pedantic", changesPath.filename().string()}; - try { - // Redirect stdout and stderr to capture output - std::string command = "lintian -EvIL +pedantic \"" + changesPath.string() + "\""; - std::array buffer; - std::string result; - std::unique_ptr pipe(popen(command.c_str(), "r"), pclose); - if (!pipe) { - log_error_custom("Failed to run lintian command."); - return std::nullopt; - } - while (fgets(buffer.data(), buffer.size(), pipe.get()) != nullptr) { - result += buffer.data(); + for (int i=1; i in the temporary directory - std::vector dgetCmd = {"dget", "-u", url}; - try { - run_command(dgetCmd, tmpdir); - } catch (const std::exception& e) { - log_error_custom("dget command failed for URL: " + url); - fs::remove_all(tmpdir); - return; - } - - // Parse the Changes file - fs::path changesPath = fs::path(tmpdir) / changes_file; - auto changesInfoOpt = parse_changes_file(changesPath); - if (!changesInfoOpt.has_value()) { - fs::remove_all(tmpdir); - return; - } - - ChangesInfo changesInfo = changesInfoOpt.value(); - - // Handle Architecture field - std::string arch = changesInfo.architecture; - arch = std::regex_replace(arch, std::regex("all"), ""); - arch = std::regex_replace(arch, std::regex("_translations"), ""); - std::istringstream iss(arch); - std::string arch_clean; - iss >> arch_clean; - if (arch_clean.empty()) { - fs::remove_all(tmpdir); - return; - } - - log_info_custom("Running Lintian for " + changesInfo.source + " on " + arch_clean); - - // Run lintian and capture output - auto 
lintianOutputOpt = run_lintian(changesPath); - if (!lintianOutputOpt.has_value()) { - fs::remove_all(tmpdir); - return; - } - std::string lintianOutput = lintianOutputOpt.value(); - - // Write lintian output to lintian_tmp/source/.txt - fs::path outputPath = lintianTmpDir / changesInfo.source; - fs::create_directories(outputPath); - fs::path archOutputFile = outputPath / (arch_clean + ".txt"); - try { - writeFile(archOutputFile, lintianOutput); - } catch (const std::exception& e) { - log_error_custom("Failed to write lintian output for " + changesInfo.source + " on " + arch_clean); } + std::string changes_path = argv[1]; - // Remove temporary directory - fs::remove_all(tmpdir); -} - -// Function to perform rsync-like copy -void rsync_copy(const fs::path& source, const fs::path& destination) { try { - if (!fs::exists(destination)) { - fs::create_directories(destination); - } - for (const auto& entry : fs::recursive_directory_iterator(source)) { - const auto& path = entry.path(); - auto relativePath = fs::relative(path, source); - fs::path destPath = destination / relativePath; - - if (fs::is_symlink(path)) { - if (fs::exists(destPath) || fs::is_symlink(destPath)) { - fs::remove(destPath); - } - auto target = fs::read_symlink(path); - fs::create_symlink(target, destPath); - } else if (fs::is_directory(path)) { - fs::create_directories(destPath); - } else if (fs::is_regular_file(path)) { - fs::copy_file(path, destPath, fs::copy_options::overwrite_existing); - } - } - } catch (const std::exception& e) { - log_error_custom("rsync_copy failed from " + source.string() + " to " + destination.string() + ": " + e.what()); - } -} - -int main(int argc, char* argv[]) { - // Parse command-line arguments - Arguments args = parseArguments(argc, argv); - - // Set BASE_OUTPUT_DIR - std::string BASE_OUTPUT_DIR = "/srv/lubuntu-ci/output/"; - if (args.override_output.has_value()) { - BASE_OUTPUT_DIR = args.override_output.value(); - } - - // Set LOG_DIR - fs::path LOG_DIR = 
fs::path(BASE_OUTPUT_DIR) / "logs" / "lintian"; - fs::create_directories(LOG_DIR); - - // Create log file with current UTC timestamp - auto now = std::chrono::system_clock::now(); - std::time_t now_c = std::chrono::system_clock::to_time_t(now); - char timestamp[20]; - std::strftime(timestamp, sizeof(timestamp), "%Y%m%dT%H%M%S", std::gmtime(&now_c)); - fs::path logFilePath = LOG_DIR / (std::string(timestamp) + ".log"); - - // Open global log file - globalLogFile.open(logFilePath, std::ios::app); - if (!globalLogFile.is_open()) { - std::cerr << "Error: Unable to open log file: " << logFilePath << std::endl; - return 1; - } - - log_info_custom("Starting lintian-ppa."); - - // Authenticate with Launchpad - log_info_custom("Logging into Launchpad..."); - auto lp_opt = launchpad::login(); - if (!lp_opt.has_value()) { - std::cerr << "Failed to authenticate with Launchpad.\n"; - return 1; - } - auto lp = lp_opt.value().get(); - - auto ubuntu_opt = lp->distributions["ubuntu"]; - distribution ubuntu = ubuntu_opt.value(); - // FIXME - //auto ds_opt = ubuntu.current_series; - auto ds_opt = ubuntu.getSeries("plucky"); - if (!ds_opt) { - std::cerr << "Failed to get current_series.\n"; - return 1; - } - auto current_series = ds_opt; - - // Retrieve user and PPA - auto user_opt = lp->people[args.user]; - person user = user_opt.value(); - - auto ppa_opt = user.getPPAByName(ubuntu, args.ppa); - if (!ppa_opt.has_value()) { - log_error_custom("Failed to retrieve PPA: " + args.ppa); - return 1; - } - archive ppa = ppa_opt.value(); - log_info_custom("Retrieved PPA: " + args.ppa); - - std::optional ppa2_opt; - if (args.ppa2.has_value()) { - auto ppa2_found = user.getPPAByName(ubuntu, args.ppa2.value()); - if (!ppa2_found.has_value()) { - log_error_custom("Failed to retrieve PPA2: " + args.ppa2.value()); + if (!run_command({"lintian", "-EvIL", "+pedantic", changes_path}, std::nullopt, false)) { return 1; } - ppa2_opt = ppa2_found.value(); - log_info_custom("Retrieved PPA2: " + 
args.ppa2.value()); - } - - // Set up lintian directories - fs::path lintianDir = fs::path(BASE_OUTPUT_DIR) / "lintian"; - fs::path lintianTmpDir; - { - std::string uuid_str; - uuid_t uuid_bytes; - uuid_generate(uuid_bytes); - char uuid_cstr[37]; - uuid_unparse(uuid_bytes, uuid_cstr); - uuid_str = std::string(uuid_cstr); - // Truncate UUID to first 8 characters - uuid_str = uuid_str.substr(0, 8); - lintianTmpDir = fs::path(BASE_OUTPUT_DIR) / ("lintian_tmp_" + uuid_str); - } - fs::create_directories(lintianDir); - fs::create_directories(lintianTmpDir); - - // Initialize a vector to hold all threads - std::vector threads; - - // Mutex for managing the published sources iterator - std::mutex sourcesMutex; - - // Function to iterate over published sources and enqueue tasks - auto main_source_iter = [&](std::vector& threadsRef) { - // Path to .LAST_RUN file - fs::path lastRunFile = lintianDir / ".LAST_RUN"; - std::chrono::system_clock::time_point lastRunTime = std::chrono::system_clock::now() - std::chrono::hours(24*365); - - if (fs::exists(lastRunFile)) { - std::ifstream infile(lastRunFile); - if (infile.is_open()) { - std::string lastRunStr; - std::getline(infile, lastRunStr); - infile.close(); - std::tm tm = {}; - std::istringstream ss(lastRunStr); - ss >> std::get_time(&tm, "%Y-%m-%dT%H:%M:%S"); - if (!ss.fail()) { - lastRunTime = std::chrono::system_clock::from_time_t(timegm(&tm)); - log_info_custom("Last run time: " + lastRunStr); - } else { - log_error_custom("Invalid format in .LAST_RUN file."); - } - } - } else { - log_info_custom(".LAST_RUN file does not exist. 
Using default last run time."); - } - - // Update .LAST_RUN with current time - { - std::ofstream outfile(lastRunFile, std::ios::trunc); - if (outfile.is_open()) { - auto currentTime = std::chrono::system_clock::now(); - std::time_t currentTime_c = std::chrono::system_clock::to_time_t(currentTime); - char timebuf[20]; - std::strftime(timebuf, sizeof(timebuf), "%Y-%m-%dT%H:%M:%S", std::gmtime(¤tTime_c)); - outfile << timebuf; - outfile.close(); - log_info_custom("Updated .LAST_RUN with current time: " + std::string(timebuf)); - } else { - log_error_custom("Failed to update .LAST_RUN file."); - } - } - - // Iterate over published sources - for (const auto& source : ppa.getPublishedSources("", "", current_series, false, true, "", "", "Published", "")) { - for (const auto& build : source.getBuilds()) { - if (build.buildstate == "Successfully built") { - // Assuming build.datebuilt is a std::chrono::system_clock::time_point - if (build.datebuilt >= lastRunTime) { - // Enqueue the process_sources task using semaphore and threads - threadsRef.emplace_back([=]() { - semaphore_guard guard(semaphore); - process_sources(build.changesfile_url, fs::path(BASE_OUTPUT_DIR), lintianTmpDir); - }); - } - } - } - } - }; - - // Start the main_source_iter and enqueue tasks - main_source_iter(threads); - - // Wait for all threads to complete - for(auto &t : threads) { - if(t.joinable()) { - t.join(); - } - } - - log_info_custom("All lintian tasks completed. Syncing temporary lintian data to final directory."); - rsync_copy(lintianTmpDir, lintianDir); - - // Remove temporary lintian directory - fs::remove_all(lintianTmpDir); - - // Clean old logs - clean_old_logs(LOG_DIR, 86400); // 1 day in seconds, adjust as needed - - log_info_custom("Lintian-ppa processing completed successfully."); - - // Close the global log file - if (globalLogFile.is_open()) { - globalLogFile.close(); + } catch(...) 
{ + log_error("Lintian reported some issues with " + changes_path); } return 0; diff --git a/cpp/lubuntuci_lib.cpp b/cpp/lubuntuci_lib.cpp new file mode 100644 index 0000000..db64836 --- /dev/null +++ b/cpp/lubuntuci_lib.cpp @@ -0,0 +1,92 @@ +// Copyright (C) 2024-2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +#include "lubuntuci_lib.h" +#include "ci_logic.h" +#include "common.h" +#include +#include +#include +#include +#include +#include +#include + +namespace fs = std::filesystem; + +/** + * list_known_repos(): + * Make sure we call CiLogic::init_global() before reading + * the config, otherwise the config node will be empty. + */ +std::vector> LubuntuCI::list_known_repos(int page, int per_page, const std::string& sort_by, const std::string& sort_order) +{ + cilogic.init_global(); + if (page == 0 || per_page == 0 || sort_by.empty() || sort_order.empty()) { return cilogic.get_config(); } + return cilogic.get_config("", page, per_page, sort_by, sort_order); +} + +/** + * pull_repo(): + * - We do not call init_global() here because list_known_repos() + * or build_repo() might do it. But calling it again is safe. + */ +bool LubuntuCI::pull_repo(const std::string &repo_name, std::shared_ptr log) +{ + log->append("Ensuring the global config is initialized...\n"); + cilogic.init_global(); + log->append("Global config is initialized. 
Getting the configs for the package name...\n"); + auto pkgconfs = cilogic.get_config(repo_name); + log->append("Configs retrieved. Performing the pull...\n"); + return cilogic.pull_project(pkgconfs.at(0), log); +} + +/** + * create_project_tarball + */ +bool LubuntuCI::create_project_tarball(const std::string &repo_name, std::shared_ptr log) +{ + cilogic.init_global(); + log->append("Global config is initialized. Getting the configs for the package name...\n"); + auto pkgconfs = cilogic.get_config(repo_name); + log->append("Configs retrieved. Performing the tarball creation...\n"); + return cilogic.create_project_tarball(pkgconfs.at(0), log); +} + +/** + * build_repo(): + * - Also safely calls init_global(). + * - Reads skip_dput from config if present (default = false). + */ +bool LubuntuCI::build_repo(const std::string &repo_name, std::shared_ptr log) +{ + cilogic.init_global(); + bool success = true; + for (auto pkgconf : cilogic.get_config(repo_name)) { + const auto [build_success, changes_files] = cilogic.build_project(pkgconf, log); + success = success && build_success && cilogic.upload_and_lint(pkgconf, changes_files, false); + } + return success; +} + +/** + * get_repo_log(): + * - Directly opens the repo in /srv/lubuntu-ci/repos/ + * - Reads HEAD commit message + */ +std::string LubuntuCI::get_repo_log(const std::string &repo_name) +{ + // FIXME: unused +} diff --git a/cpp/lubuntuci_lib.h b/cpp/lubuntuci_lib.h new file mode 100644 index 0000000..fe5eb0b --- /dev/null +++ b/cpp/lubuntuci_lib.h @@ -0,0 +1,53 @@ +// Copyright (C) 2024-2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +#ifndef LUBUNTUCI_LIB_H +#define LUBUNTUCI_LIB_H + +#include +#include +#include "ci_logic.h" + +class LubuntuCI { +public: + /** + * List all known repositories from the merged config. + */ + std::vector> list_known_repos(int page = 0, + int per_page = 0, + const std::string& sort_by = "", + const std::string& sort_order = ""); + + /** + * Pull a specific repository by name (returns true on success). + */ + bool pull_repo(const std::string &repo_name, std::shared_ptr log = NULL); + + bool create_project_tarball(const std::string &repo_name, std::shared_ptr log); + + /** + * Build a specific repository by name (returns true on success). + */ + bool build_repo(const std::string &repo_name, std::shared_ptr log = NULL); + + /** + * Retrieve the most recent commit log from a named repo. + */ + std::string get_repo_log(const std::string &repo_name); + + CiLogic cilogic = CiLogic(); +}; + +#endif // LUBUNTUCI_LIB_H diff --git a/cpp/update-maintainer.h b/cpp/main.cpp similarity index 62% rename from cpp/update-maintainer.h rename to cpp/main.cpp index 8fcb7b6..2ad48c7 100644 --- a/cpp/update-maintainer.h +++ b/cpp/main.cpp @@ -13,7 +13,21 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-#pragma once -#include +#include +#include +#include "web_server.h" -void update_maintainer(const std::string &debian_directory, bool verbose); +int main(int argc, char *argv[]) +{ + QCoreApplication app(argc, argv); + + WebServer server; + // You can pick 80 if running as root or with CAP_NET_BIND_SERVICE + // or 8080 if unprivileged + if (!server.start_server(8080)) { + std::cerr << "[ERROR] Failed to start server on port 8080\n"; + return 1; + } + + return app.exec(); +} diff --git a/cpp/naive_bayes_classifier.cpp b/cpp/naive_bayes_classifier.cpp new file mode 100644 index 0000000..7374de1 --- /dev/null +++ b/cpp/naive_bayes_classifier.cpp @@ -0,0 +1,314 @@ +// Copyright (C) 2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +#include "naive_bayes_classifier.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include // for std::memset + +/****************************************************************************** + * Constructor / Destructor + *****************************************************************************/ +naive_bayes_classifier::naive_bayes_classifier() = default; +naive_bayes_classifier::~naive_bayes_classifier() = default; + +/****************************************************************************** + * reset + *****************************************************************************/ +void naive_bayes_classifier::reset() { + word_freqs_.clear(); + category_freqs_.clear(); + vocabulary_.clear(); + token_categories_map_.clear(); + total_samples_ = 0.0; +} + +/****************************************************************************** + * train_from_url + *****************************************************************************/ +bool naive_bayes_classifier::train_from_url(const std::string &url, const std::string &category) { + streaming_context ctx; + ctx.classifier = this; + ctx.is_prediction_mode = false; + ctx.category = category; + + bool ok = fetch_and_inflate_gz(url, &naive_bayes_classifier::train_write_cb, &ctx); + if (!ok) { + std::cerr << "Error: train_from_url failed for " << url << std::endl; + return false; + } + category_freqs_[category]++; + total_samples_++; + return true; +} + +/****************************************************************************** + * predict_from_url + *****************************************************************************/ +std::optional naive_bayes_classifier::predict_from_url(const std::string &url) const { + streaming_context ctx; + ctx.classifier = const_cast(this); + ctx.is_prediction_mode = true; + + bool ok = fetch_and_inflate_gz(url, &naive_bayes_classifier::predict_write_cb, &ctx); + if (!ok) { + return std::nullopt; + } + std::string best_cat = 
compute_best_category(ctx.prediction_tokens); + return best_cat; +} + +/****************************************************************************** + * prune_common_tokens + *****************************************************************************/ +void naive_bayes_classifier::prune_common_tokens() { + if (category_freqs_.empty()) { + return; + } + size_t category_count = category_freqs_.size(); + + std::vector tokens_to_remove_vec; + tokens_to_remove_vec.reserve(vocabulary_.size()); + + for (const auto &[token, cats_set] : token_categories_map_) { + if (cats_set.size() == category_count) { + tokens_to_remove_vec.push_back(token); + } + } + + for (const auto &tk : tokens_to_remove_vec) { + vocabulary_.erase(tk); + for (auto &cat_map : word_freqs_) { + cat_map.second.erase(tk); + } + token_categories_map_.erase(tk); + } + + std::cout << "Pruned " << tokens_to_remove_vec.size() + << " common tokens that appeared in all categories.\n"; +} + +/****************************************************************************** + * train_token + *****************************************************************************/ +void naive_bayes_classifier::train_token(const std::string &category, const std::string &token) { + if (token.empty()) return; + word_freqs_[category][token]++; + vocabulary_[token] = true; + token_categories_map_[token].insert(category); +} + +/****************************************************************************** + * compute_best_category + *****************************************************************************/ +std::string naive_bayes_classifier::compute_best_category(const token_counts_t &tokens) const { + if (category_freqs_.empty() || total_samples_ <= 0.0) { + return "Unknown"; + } + + double best_score = -1e308; + std::string best_cat = "Unknown"; + + for (const auto &[cat, cat_count] : category_freqs_) { + double prior_log = std::log(cat_count / total_samples_); + + double total_cat_words = 0.0; + auto cat_iter = 
word_freqs_.find(cat); + if (cat_iter != word_freqs_.end()) { + total_cat_words = std::accumulate( + cat_iter->second.begin(), + cat_iter->second.end(), + 0.0, + [](double sum, const auto &p){ return sum + p.second; } + ); + } + + double score = prior_log; + for (const auto &[tk, freq] : tokens) { + double word_count = 0.0; + if (cat_iter != word_freqs_.end()) { + auto w_it = cat_iter->second.find(tk); + if (w_it != cat_iter->second.end()) { + word_count = w_it->second; + } + } + double smoothed = (word_count + 1.0) / (total_cat_words + vocabulary_.size()); + score += freq * std::log(smoothed); + } + + if (score > best_score) { + best_score = score; + best_cat = cat; + } + } + + return best_cat; +} + +/****************************************************************************** + * chunk_to_tokens + *****************************************************************************/ +std::generator naive_bayes_classifier::chunk_to_tokens( + const std::string &chunk, std::string &partial_token) +{ + for (char c : chunk) { + if (std::isalpha(static_cast(c))) { + partial_token.push_back(static_cast(std::tolower(static_cast(c)))); + } else { + if (!partial_token.empty()) { + co_yield partial_token; + partial_token.clear(); + } + } + } + // leftover partial_token remains if chunk ends mid-token +} + +/****************************************************************************** + * train_write_cb + *****************************************************************************/ +size_t naive_bayes_classifier::train_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata) { + auto ctx = static_cast(userdata); + if (!ctx || !ctx->classifier || ctx->is_prediction_mode) { + return 0; + } + size_t bytes = size * nmemb; + std::string chunk(ptr, bytes); + + for (auto &&tk : chunk_to_tokens(chunk, ctx->partial_token)) { + ctx->classifier->train_token(ctx->category, tk); + } + return bytes; +} + +/****************************************************************************** + 
* predict_write_cb + *****************************************************************************/ +size_t naive_bayes_classifier::predict_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata) { + auto ctx = static_cast(userdata); + if (!ctx || !ctx->classifier || !ctx->is_prediction_mode) { + return 0; + } + size_t bytes = size * nmemb; + std::string chunk(ptr, bytes); + + for (auto &&tk : chunk_to_tokens(chunk, ctx->partial_token)) { + ctx->prediction_tokens[tk]++; + } + return bytes; +} + +/****************************************************************************** + * fetch_and_inflate_gz + *****************************************************************************/ +struct inflating_context { + naive_bayes_classifier::streaming_context *user_ctx; + size_t (*callback)(char*, size_t, size_t, void*); + z_stream strm; + std::string decompress_buffer; + + inflating_context() { + std::memset(&strm, 0, sizeof(strm)); + strm.zalloc = Z_NULL; + strm.zfree = Z_NULL; + strm.opaque = Z_NULL; + inflateInit2(&strm, 16 + MAX_WBITS); + decompress_buffer.resize(64 * 1024); + } + + ~inflating_context() { + inflateEnd(&strm); + } +}; + +static size_t curl_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata) { + auto *inf_ctx = static_cast(userdata); + size_t total_in = size * nmemb; + + inf_ctx->strm.avail_in = static_cast(total_in); + inf_ctx->strm.next_in = reinterpret_cast(ptr); + + while (inf_ctx->strm.avail_in > 0) { + inf_ctx->strm.avail_out = static_cast(inf_ctx->decompress_buffer.size()); + inf_ctx->strm.next_out = reinterpret_cast(&inf_ctx->decompress_buffer[0]); + + int ret = inflate(&inf_ctx->strm, Z_NO_FLUSH); + if (ret == Z_STREAM_ERROR || ret == Z_MEM_ERROR || ret == Z_DATA_ERROR) { + std::cerr << "zlib inflate error: " << inf_ctx->strm.msg << std::endl; + return 0; + } + + size_t have = inf_ctx->decompress_buffer.size() - inf_ctx->strm.avail_out; + if (have > 0) { + size_t written = inf_ctx->callback( + &inf_ctx->decompress_buffer[0], + 1, 
+ have, + inf_ctx->user_ctx + ); + if (written < have) { + return 0; + } + } + } + return total_in; +} + +bool naive_bayes_classifier::fetch_and_inflate_gz( + const std::string &url, + size_t (*callback)(char*, size_t, size_t, void*), + void *user_context) +{ + CURL *curl = curl_easy_init(); + if (!curl) { + std::cerr << "Error: curl_easy_init failed.\n"; + return false; + } + + inflating_context inf_ctx; + inf_ctx.callback = callback; + inf_ctx.user_ctx = static_cast(user_context); + + curl_easy_setopt(curl, CURLOPT_URL, url.c_str()); + curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, curl_write_cb); + curl_easy_setopt(curl, CURLOPT_WRITEDATA, &inf_ctx); + + CURLcode res = curl_easy_perform(curl); + if (res != CURLE_OK) { + std::cerr << "cURL error fetching " << url << ": " + << curl_easy_strerror(res) << std::endl; + curl_easy_cleanup(curl); + return false; + } + curl_easy_cleanup(curl); + + auto *ctx = static_cast(user_context); + if (!ctx->partial_token.empty()) { + if (!ctx->is_prediction_mode) { + ctx->classifier->train_token(ctx->category, ctx->partial_token); + } else { + ctx->prediction_tokens[ctx->partial_token]++; + } + ctx->partial_token.clear(); + } + return true; +} diff --git a/cpp/naive_bayes_classifier.h b/cpp/naive_bayes_classifier.h new file mode 100644 index 0000000..87f79d8 --- /dev/null +++ b/cpp/naive_bayes_classifier.h @@ -0,0 +1,124 @@ +// Copyright (C) 2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +#ifndef NAIVE_BAYES_CLASSIFIER_H +#define NAIVE_BAYES_CLASSIFIER_H + +#include +#include +#include +#include +#include // C++23 std::generator +#include + +/****************************************************************************** + * Type aliases + *****************************************************************************/ +using token_counts_t = std::unordered_map; +using category_counts_t = std::unordered_map; + +/****************************************************************************** + * naive_bayes_classifier + * + * A streaming-only Naive Bayes text classifier. It fetches .gz logs via cURL, + * decompresses them chunk by chunk, tokenizes, and trains or predicts + * incrementally without storing entire logs in memory. + *****************************************************************************/ +class naive_bayes_classifier { +public: + naive_bayes_classifier(); + ~naive_bayes_classifier(); + + /************************************************************************** + * train_from_url + * + * Streams the .gz log from 'url', decompresses chunk by chunk, extracts + * tokens, and updates frequency counts for 'category'. + **************************************************************************/ + bool train_from_url(const std::string &url, const std::string &category); + + /************************************************************************** + * predict_from_url + * + * Streams the .gz log from 'url', decompresses, extracts tokens, and + * returns the most likely category. Returns std::nullopt if there's an error. 
+ **************************************************************************/ + std::optional predict_from_url(const std::string &url) const; + + /************************************************************************** + * prune_common_tokens + * + * Removes tokens that appear in *all* categories from the vocabulary_ + * and per-category frequencies, reducing noise from universal tokens. + **************************************************************************/ + void prune_common_tokens(); + + /************************************************************************** + * reset + * + * Clears all training data (word_freqs_, category_freqs_, etc.). + **************************************************************************/ + void reset(); + + double total_samples() const { return total_samples_; } + size_t vocabulary_size() const { return vocabulary_.size(); } + +public: + /************************************************************************** + * streaming_context + * + * Declared *public* so that external structures (like inflating_context) + * can refer to it. Tracks the current partial token, mode, etc. 
+ **************************************************************************/ + struct streaming_context { + naive_bayes_classifier *classifier = nullptr; + bool is_prediction_mode = false; + std::string category; // used if training + token_counts_t prediction_tokens; + std::string partial_token; + }; + +private: + /************************************************************************** + * Data + **************************************************************************/ + std::unordered_map word_freqs_; // cat->(word->freq) + category_counts_t category_freqs_; // cat->count of logs + std::unordered_map vocabulary_; // global set of words + double total_samples_ = 0.0; + + // For pruning, track which categories each token has appeared in + std::unordered_map> token_categories_map_; + + /************************************************************************** + * Internal methods + **************************************************************************/ + void train_token(const std::string &category, const std::string &token); + std::string compute_best_category(const token_counts_t &tokens) const; + + static std::generator chunk_to_tokens(const std::string &chunk, + std::string &partial_token); + + // Callback for training vs. 
predicting + static size_t train_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata); + static size_t predict_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata); + + // cURL + zlib-based streaming + static bool fetch_and_inflate_gz(const std::string &url, + size_t (*callback)(char*, size_t, size_t, void*), + void *user_context); +}; + +#endif // NAIVE_BAYES_CLASSIFIER_H diff --git a/cpp/sources_parser.cpp b/cpp/sources_parser.cpp new file mode 100644 index 0000000..c6236ef --- /dev/null +++ b/cpp/sources_parser.cpp @@ -0,0 +1,581 @@ +// Copyright (C) 2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +#include "sources_parser.h" +#include "utilities.h" + +#include "/usr/include/archive.h" +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include // Added to resolve ofstream errors +#include +#include + +#include +#include +#include + + + +namespace SourcesParser { + +// Function to write data fetched by libcurl into a std::vector +size_t WriteCallback(void* contents, size_t size, size_t nmemb, void* userp) { + size_t totalSize = size * nmemb; + auto* buffer = static_cast*>(userp); + buffer->insert(buffer->end(), static_cast(contents), static_cast(contents) + totalSize); + return totalSize; +} + +// Function to parse dependency relations +std::vector> parse_relations(const std::string& raw) { + std::vector> result; + + // Split by comma to get top-level dependencies + std::regex comma_sep_RE(R"(\s*,\s*)"); + std::sregex_token_iterator comma_it(raw.begin(), raw.end(), comma_sep_RE, -1); + std::sregex_token_iterator comma_end; + + for (; comma_it != comma_end; ++comma_it) { + std::string top_dep = comma_it->str(); + // Split by pipe to get alternative dependencies + std::regex pipe_sep_RE(R"(\s*\|\s*)"); + std::sregex_token_iterator pipe_it(top_dep.begin(), top_dep.end(), pipe_sep_RE, -1); + std::sregex_token_iterator pipe_end; + + std::vector alternatives; + + for (; pipe_it != pipe_end; ++pipe_it) { + std::string dep = pipe_it->str(); + // Remove any version constraints or architecture qualifiers + size_t pos_space = dep.find(' '); + size_t pos_paren = dep.find('('); + size_t pos = std::string::npos; + if (pos_space != std::string::npos && pos_paren != std::string::npos) { + pos = std::min(pos_space, pos_paren); + } + else if (pos_space != std::string::npos) { + pos = pos_space; + } + else if (pos_paren != std::string::npos) { + pos = pos_paren; + } + + if (pos != std::string::npos) { + dep = dep.substr(0, pos); + } + + // Trim whitespace + dep.erase(dep.find_last_not_of(" \t\n\r\f\v") + 
1); + dep.erase(0, dep.find_first_not_of(" \t\n\r\f\v")); + + // Handle architecture qualifiers (e.g., "libc6 (>= 2.27)") + std::regex arch_RE(R"(^([a-zA-Z0-9+\-\.]+)(?:\s*\(\s*([a-zA-Z]+)\s*([<>=]+)\s*([0-9a-zA-Z:\-+~.]+)\s*\))?$)"); + std::smatch match; + if (std::regex_match(dep, match, arch_RE)) { + PackageInfo::ParsedRelation pr; + pr.name = match[1]; + if (match[2].matched && match[3].matched && match[4].matched) { + // If architecture qualifier exists, store it + pr.archqual = match[2].str() + match[3].str() + match[4].str(); + } + if (match[3].matched && match[4].matched) { + // Store version constraints + pr.version = std::make_pair(match[3].str(), match[4].str()); + } + alternatives.push_back(pr); + } + else { + // If regex does not match, include raw dependency without qualifiers + dep = remove_suffix(dep, ":any"); + dep = remove_suffix(dep, ":native"); + PackageInfo::ParsedRelation pr; + pr.name = dep; + alternatives.push_back(pr); + std::cerr << "Warning: Cannot parse dependency relation \"" << dep << "\", returning it raw.\n"; + } + } + + if (!alternatives.empty()) { + result.push_back(alternatives); + } + } + + return result; +} + +// Function to download, decompress, and parse the Sources.gz data +std::optional> fetch_and_parse_sources(const std::string& url) { + CURL* curl = curl_easy_init(); + if (!curl) { + std::cerr << "Failed to initialize CURL.\n"; + return std::nullopt; + } + + std::vector downloadedData; + + curl_easy_setopt(curl, CURLOPT_URL, url.c_str()); + curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback); + curl_easy_setopt(curl, CURLOPT_WRITEDATA, &downloadedData); + // Follow redirects if any + curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L); + // Set a user agent + curl_easy_setopt(curl, CURLOPT_USERAGENT, "SourcesParser/1.0"); + + CURLcode res = curl_easy_perform(curl); + if (res != CURLE_OK) { + std::cerr << "CURL download error (Sources.gz): " << curl_easy_strerror(res) << "\n"; + curl_easy_cleanup(curl); + return 
std::nullopt; + } + + curl_easy_cleanup(curl); + + // Initialize libarchive + struct archive* a = archive_read_new(); + archive_read_support_filter_gzip(a); + archive_read_support_format_raw(a); + + if (archive_read_open_memory(a, downloadedData.data(), downloadedData.size()) != ARCHIVE_OK) { + std::cerr << "Failed to open Sources.gz archive: " << archive_error_string(a) << "\n"; + archive_read_free(a); + return std::nullopt; + } + + struct archive_entry* entry; + std::string decompressedData; + + // Read all entries (though there should typically be only one) + while (archive_read_next_header(a, &entry) == ARCHIVE_OK) { + const void* buff; + size_t size; + la_int64_t offset; + + while (true) { + int r = archive_read_data_block(a, &buff, &size, &offset); + if (r == ARCHIVE_EOF) + break; + if (r != ARCHIVE_OK) { + std::cerr << "Error during decompression (Sources.gz): " << archive_error_string(a) << "\n"; + archive_read_free(a); + return std::nullopt; + } + decompressedData.append(static_cast(buff), size); + } + } + + archive_read_free(a); + + // Parse the decompressed data + std::vector packages; + std::istringstream stream(decompressedData); + std::string line; + PackageInfo currentPackage; + bool in_entry = false; + + while (std::getline(stream, line)) { + if (line.empty()) { + if (in_entry && !currentPackage.Package.empty()) { + // Finalize BuildDependsParsed + currentPackage.BuildDependsParsed = parse_relations(currentPackage.BuildDepends); + packages.push_back(currentPackage); + currentPackage = PackageInfo(); + in_entry = false; + } + continue; + } + + in_entry = true; + + if (line.find("Build-Depends:") == 0) { + currentPackage.BuildDepends = line.substr(strlen("Build-Depends: ")); + // Continue reading lines that start with a space or tab + while (std::getline(stream, line)) { + if (line.empty() || (!std::isspace(static_cast(line[0])))) + break; + currentPackage.BuildDepends += " " + line.substr(1); + } + // If the last read line is not a continuation, 
process it in the next iteration + if (!line.empty() && !std::isspace(static_cast(line[0]))) { + stream.seekg(-static_cast(line.length()) - 1, std::ios_base::cur); + } + continue; + } + + if (line.find("Binary:") == 0) { + std::string binary_str; + binary_str = line.substr(strlen("Binary: ")); + // Continue reading lines that start with a space or tab + while (std::getline(stream, line)) { + if (line.empty() || (!std::isspace(static_cast(line[0])))) + break; + binary_str += " " + line.substr(1); + } + // If the last read line is not a continuation, process it in the next iteration + if (!line.empty() && !std::isspace(static_cast(line[0]))) { + stream.seekg(-static_cast(line.length()) - 1, std::ios_base::cur); + } + currentPackage.Binary = split_string(binary_str, ", "); + continue; + } + + // Extract Package + if (line.find("Package:") == 0) { + currentPackage.Package = line.substr(strlen("Package: ")); + continue; + } + + // Extract Provides (if any) + if (line.find("Provides:") == 0) { + std::string provides_line = line.substr(strlen("Provides: ")); + // Split by commas + std::regex comma_sep_RE(R"(\s*,\s*)"); + std::sregex_token_iterator provides_it(provides_line.begin(), provides_line.end(), comma_sep_RE, -1); + std::sregex_token_iterator provides_end; + + for (; provides_it != provides_end; ++provides_it) { + std::string provide = provides_it->str(); + // Extract the package name before any space or '(' + size_t pos_space = provide.find(' '); + size_t pos_paren = provide.find('('); + size_t pos = std::string::npos; + if (pos_space != std::string::npos && pos_paren != std::string::npos) { + pos = std::min(pos_space, pos_paren); + } + else if (pos_space != std::string::npos) { + pos = pos_space; + } + else if (pos_paren != std::string::npos) { + pos = pos_paren; + } + + if (pos != std::string::npos) { + provide = provide.substr(0, pos); + } + + // Trim whitespace + provide.erase(provide.find_last_not_of(" \t\n\r\f\v") + 1); + provide.erase(0, 
provide.find_first_not_of(" \t\n\r\f\v")); + + if (!provide.empty()) { + currentPackage.Provides.push_back(provide); + } + } + + continue; + } + } + + // Add the last package if the file doesn't end with a blank line + if (in_entry && !currentPackage.Package.empty()) { + // Finalize BuildDependsParsed + currentPackage.BuildDependsParsed = parse_relations(currentPackage.BuildDepends); + packages.push_back(currentPackage); + } + + return packages; +} + +// Function to download, decompress, and parse the Packages.gz data +std::optional> fetch_and_parse_packages(const std::string& url) { + CURL* curl = curl_easy_init(); + if (!curl) { + std::cerr << "Failed to initialize CURL.\n"; + return std::nullopt; + } + + std::vector downloadedData; + + curl_easy_setopt(curl, CURLOPT_URL, url.c_str()); + curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback); + curl_easy_setopt(curl, CURLOPT_WRITEDATA, &downloadedData); + // Follow redirects if any + curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L); + // Set a user agent + curl_easy_setopt(curl, CURLOPT_USERAGENT, "SourcesParser/1.0"); + + CURLcode res = curl_easy_perform(curl); + if (res != CURLE_OK) { + std::cerr << "CURL download error (Packages.gz): " << curl_easy_strerror(res) << "\n"; + curl_easy_cleanup(curl); + return std::nullopt; + } + + curl_easy_cleanup(curl); + + // Initialize libarchive + struct archive* a = archive_read_new(); + archive_read_support_filter_gzip(a); + archive_read_support_format_raw(a); + + if (archive_read_open_memory(a, downloadedData.data(), downloadedData.size()) != ARCHIVE_OK) { + std::cerr << "Failed to open Packages.gz archive: " << archive_error_string(a) << "\n"; + archive_read_free(a); + return std::nullopt; + } + + struct archive_entry* entry; + std::string decompressedData; + + // Read all entries (though there should typically be only one) + while (archive_read_next_header(a, &entry) == ARCHIVE_OK) { + const void* buff; + size_t size; + la_int64_t offset; + + while (true) { + 
int r = archive_read_data_block(a, &buff, &size, &offset); + if (r == ARCHIVE_EOF) + break; + if (r != ARCHIVE_OK) { + std::cerr << "Error during decompression (Packages.gz): " << archive_error_string(a) << "\n"; + archive_read_free(a); + return std::nullopt; + } + decompressedData.append(static_cast(buff), size); + } + } + + archive_read_free(a); + + // Parse the decompressed data + std::vector packages; + std::istringstream stream(decompressedData); + std::string line; + PackageInfo currentPackage; + bool in_entry = false; + + while (std::getline(stream, line)) { + if (line.empty()) { + if (in_entry && !currentPackage.Package.empty()) { + packages.push_back(currentPackage); + currentPackage = PackageInfo(); + in_entry = false; + } + continue; + } + + in_entry = true; + + // Extract Package + if (line.find("Package:") == 0) { + currentPackage.Package = line.substr(strlen("Package: ")); + continue; + } + + // Extract Source + if (line.find("Source:") == 0) { + currentPackage.Source = line.substr(strlen("Source: ")); + continue; + } + + // Extract Provides + if (line.find("Provides:") == 0) { + std::string provides_line = line.substr(strlen("Provides: ")); + // Split by commas + std::regex comma_sep_RE(R"(\s*,\s*)"); + std::sregex_token_iterator provides_it(provides_line.begin(), provides_line.end(), comma_sep_RE, -1); + std::sregex_token_iterator provides_end; + + for (; provides_it != provides_end; ++provides_it) { + std::string provide = provides_it->str(); + // Extract the package name before any space or '(' + size_t pos_space = provide.find(' '); + size_t pos_paren = provide.find('('); + size_t pos = std::string::npos; + if (pos_space != std::string::npos && pos_paren != std::string::npos) { + pos = std::min(pos_space, pos_paren); + } + else if (pos_space != std::string::npos) { + pos = pos_space; + } + else if (pos_paren != std::string::npos) { + pos = pos_paren; + } + + if (pos != std::string::npos) { + provide = provide.substr(0, pos); + } + + // Trim 
whitespace + provide.erase(provide.find_last_not_of(" \t\n\r\f\v") + 1); + provide.erase(0, provide.find_first_not_of(" \t\n\r\f\v")); + + if (!provide.empty()) { + currentPackage.Provides.push_back(provide); + } + } + + continue; + } + + // Any other fields are ignored for now + } + + // Add the last package if the file doesn't end with a blank line + if (in_entry && !currentPackage.Package.empty()) { + packages.push_back(currentPackage); + } + + return packages; +} + +std::set> build_dependency_graph( + const std::vector& sources, + const std::vector& binaries) { + + // Map of virtual package to real binary package(s) + std::map> virtual_to_real; + // Set of all real binary package names + std::set real_binary_packages; + // Map of binary package to its source package + std::map binary_to_source; + + // Populate binary_to_source mapping and virtual_to_real + for (const auto& source_pkg : sources) { + for (const auto& binary_pkg : source_pkg.Binary) { + binary_to_source[binary_pkg] = source_pkg.Package; + real_binary_packages.insert(binary_pkg); + } + } + for (const auto& binary_pkg : binaries) { + if (binary_pkg.Source.has_value()) { + binary_to_source[binary_pkg.Package] = binary_pkg.Source.value(); + } + real_binary_packages.insert(binary_pkg.Package); + + // Process Provides + for (const auto& provide : binary_pkg.Provides) { + virtual_to_real[provide].push_back(binary_pkg.Package); + } + } + + // Dependency graph as a set of edges (dependency -> package) + std::set> graph; + + for (const auto& pkg : sources) { + if (!pkg.BuildDependsParsed.has_value()) + continue; // Skip if no build dependencies + + for (const auto& or_deps : pkg.BuildDependsParsed.value()) { + // For each set of alternative dependencies (logical OR) + for (const auto& dep : or_deps) { + std::string dep_name = dep.name; + // If dep.archqual exists, append it with ':' + if (dep.archqual.has_value()) + dep_name += ":" + dep.archqual.value(); + + // If dep_name is a virtual package, map it to 
real binary package(s) + if (virtual_to_real.find(dep_name) != virtual_to_real.end()) { + for (const auto& real_pkg : virtual_to_real[dep_name]) { + // Map binary dependency to source package + if (binary_to_source.find(real_pkg) != binary_to_source.end()) { + std::string source_dep = binary_to_source[real_pkg]; + // Avoid self-dependency + if (source_dep != pkg.Package) { + graph.emplace(source_dep, pkg.Package); // Reversed edge + } + } + else { + std::cerr << "Warning: Binary package \"" << real_pkg << "\" provided by \"" + << dep_name << "\" does not map to any source package.\n"; + } + } + } + else if (real_binary_packages.find(dep_name) != real_binary_packages.end()) { + // Direct binary dependency + if (binary_to_source.find(dep_name) != binary_to_source.end()) { + std::string source_dep = binary_to_source[dep_name]; + // Avoid self-dependency + if (source_dep != pkg.Package) { + graph.emplace(source_dep, pkg.Package); // Reversed edge + } + } + else { + std::cerr << "Warning: Binary dependency \"" << dep_name << "\" does not map to any source package.\n"; + } + } + } + } + } + + // Transitive reduction: Collect edges to remove first + std::vector> edges_to_remove; + + // Build adjacency list from the graph + std::map> adj; + for (const auto& edge : graph) { + adj[edge.first].insert(edge.second); + } + + for (const auto& [u, neighbors] : adj) { + for (const auto& v : neighbors) { + if (adj.find(v) != adj.end()) { + for (const auto& w : adj[v]) { + if (adj[u].find(w) != adj[u].end()) { + edges_to_remove.emplace_back(u, w); + } + } + } + } + } + + // Now remove the collected edges + for (const auto& edge : edges_to_remove) { + graph.erase(edge); + adj[edge.first].erase(edge.second); + } + + return graph; +} + +QString serialize_dependency_graph_to_json(const std::set>& graph) { + // Check if the graph is empty + if (graph.empty()) { + std::cerr << "Warning: Dependency graph is empty." 
<< std::endl; + return "{}"; // Return empty JSON object + } + + // Build adjacency list where key is dependency and value is list of packages that depend on it + std::map adjacency; + for (const auto& edge : graph) { + if (!edge.first.empty() && !edge.second.empty()) { + adjacency[edge.first].append(QString::fromStdString(edge.second)); + } + } + + // Convert to QJsonObject + QJsonObject jsonObj; + for (const auto& [dep, dependents] : adjacency) { + jsonObj[QString::fromStdString(dep)] = dependents; + } + + // Convert to JSON string + QJsonDocument doc(jsonObj); + return QString(doc.toJson(QJsonDocument::Compact)); +} + +} // namespace SourcesParser diff --git a/cpp/sources_parser.h b/cpp/sources_parser.h new file mode 100644 index 0000000..eb743d0 --- /dev/null +++ b/cpp/sources_parser.h @@ -0,0 +1,79 @@ +// Copyright (C) 2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +#ifndef SOURCES_PARSER_H +#define SOURCES_PARSER_H + +#include +#include +#include +#include +#include + +#include +#include + +// Structure to hold the required fields +struct PackageInfo { + std::string Package; // Package name + std::vector Provides; // Virtual packages provided + std::string BuildDepends; // Build dependencies (for source packages) + std::optional Source; // Source package name (for binary packages) + std::vector Binary; + + // Nested structures for parsing dependencies + struct ArchRestriction { + bool enabled; + std::string arch; + }; + + struct BuildRestriction { + bool enabled; + std::string condition; + }; + + struct ParsedRelation { + std::string name; // Dependency package name + std::optional archqual; // Architecture qualifier + std::optional> version; // Version relation and version + std::optional> arch; // Architecture restrictions + std::optional>> restrictions; // Build restrictions + }; + + // Parsed BuildDepends and Binary relations + std::optional>> BuildDependsParsed; +}; + +// Namespace to encapsulate the parser functionalities +namespace SourcesParser { + // Function to download, decompress, and parse the Sources.gz data + std::optional> fetch_and_parse_sources(const std::string& url); + + // Function to download, decompress, and parse the Packages.gz data + std::optional> fetch_and_parse_packages(const std::string& url); + + // Function to parse dependency relations + std::vector> parse_relations(const std::string& raw); + + // Function to build dependency graph + std::set> build_dependency_graph( + const std::vector& sources, + const std::vector& binaries); + + // Function to serialize dependency graph to JSON + QString serialize_dependency_graph_to_json(const std::set>& graph); +} // namespace SourcesParser + +#endif // SOURCES_PARSER_H diff --git a/cpp/task_queue.cpp b/cpp/task_queue.cpp new file mode 100644 index 0000000..ff6b78a --- /dev/null +++ b/cpp/task_queue.cpp @@ -0,0 +1,217 @@ +// Copyright (C) 2024-2025 
Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +#include "task_queue.h" +#include +#include + +TaskQueue::TaskQueue(size_t max_concurrent_tasks) + : max_concurrent_tasks_(max_concurrent_tasks), stop_(false), + tasks_(), + running_tasks_() {} + +TaskQueue::~TaskQueue() { + stop(); +} + +// FIXME: copy of CiLogic::get_thread_connection() +std::atomic TaskQueue::thread_id_counter{1200}; +QSqlDatabase TaskQueue::get_thread_connection() { + std::lock_guard lock(connection_mutex_); + thread_local unsigned int thread_unique_id = thread_id_counter.fetch_add(1); + QString connectionName = QString("LubuntuCIConnection_%1").arg(thread_unique_id); + + // Check if the connection already exists for this thread + if (QSqlDatabase::contains(connectionName)) { + QSqlDatabase db = QSqlDatabase::database(connectionName); + if (!db.isOpen()) { + if (!db.open()) { + throw std::runtime_error("Failed to open thread-specific database connection: " + db.lastError().text().toStdString()); + } + } + return db; + } + + QSqlDatabase threadDb = QSqlDatabase::addDatabase("QSQLITE", connectionName); + threadDb.setDatabaseName("/srv/lubuntu-ci/repos/ci-tools/lubuntu_ci.db"); + + if (!threadDb.open()) { + throw std::runtime_error("Failed to open new database connection for thread: " + threadDb.lastError().text().toStdString()); + } + + return threadDb; +} + +void TaskQueue::enqueue(std::shared_ptr jobstatus, + 
std::function log)> task_func, + std::shared_ptr packageconf) { + { + auto connection = get_thread_connection(); + auto now = std::chrono::duration_cast( + std::chrono::system_clock::now().time_since_epoch()) + .count(); + + // Create the task + std::shared_ptr task_ptr = std::make_shared(connection, jobstatus, now, packageconf); + task_ptr->func = [task_func, self_weak = std::weak_ptr(task_ptr)](std::shared_ptr log) { + std::shared_ptr task_locked = self_weak.lock(); + if (task_locked) { + log->assign_task_context(task_locked); + task_func(log); + } + }; + packageconf->assign_task(jobstatus, task_ptr, packageconf); + + std::unique_lock lock(tasks_mutex_); + tasks_.emplace(task_ptr); + } + cv_.notify_all(); // Notify worker threads +} + +void TaskQueue::start() { + stop_ = false; + for (size_t i = 0; i < max_concurrent_tasks_; ++i) { + workers_.emplace_back(&TaskQueue::worker_thread, this); + } +} + +void TaskQueue::stop() { + { + std::unique_lock tasks_lock(tasks_mutex_); + std::unique_lock pkgconfs_lock(running_pkgconfs_mutex_); + std::unique_lock running_tasks_lock(running_tasks_mutex_); + stop_ = true; + } + cv_.notify_all(); // Wake up all threads + for (auto& worker : workers_) { + if (worker.joinable()) { + worker.join(); + } + } +} + +std::set, Task::TaskComparator> TaskQueue::get_tasks() const { + std::lock_guard lock(tasks_mutex_); + return tasks_; +} + +std::set, Task::TaskComparator> TaskQueue::get_running_tasks() const { + std::lock_guard lock(running_tasks_mutex_); + return running_tasks_; +} + +void TaskQueue::worker_thread() { + int worker_id = max_worker_id++; + while (true) { + std::shared_ptr task_to_execute; + { + std::lock_guard tasks_lock(tasks_mutex_); + + if (stop_ && tasks_.empty()) { + return; // Exit thread if stopping and no tasks left + } + + auto it = tasks_.begin(); + bool found_valid = false; + // Iterate through the set until a valid task is found + while (it != tasks_.end()) { + std::lock_guard lock(running_pkgconfs_mutex_); + 
std::shared_ptr it_task = *it; + task_to_execute = it_task; + + int pkgconf_id = task_to_execute->get_parent_packageconf()->id; + auto running_pkgconf_it = std::find_if(running_pkgconfs_.begin(), running_pkgconfs_.end(), + [&pkgconf_id](const std::shared_ptr& pkgconf) { return pkgconf->id == pkgconf_id; }); + + if (running_pkgconf_it != running_pkgconfs_.end()) { + ++it; // Move to the next task + continue; + } + + // Task is valid to execute + found_valid = true; + it = tasks_.erase(it); + break; + } + if (!found_valid) { continue; } + } + + if (!task_to_execute || !task_to_execute->func) { + continue; + } else { + std::lock_guard pkgconfslock(running_pkgconfs_mutex_); + running_pkgconfs_.insert(task_to_execute->get_parent_packageconf()); + std::lock_guard tasks_lock(running_tasks_mutex_); + running_tasks_.insert(task_to_execute); + } + + // Set the start time + { + auto now = std::chrono::duration_cast( + std::chrono::system_clock::now().time_since_epoch()) + .count(); + task_to_execute->start_time = now; + auto connection = get_thread_connection(); + task_to_execute->save(connection, 0); + } + + try { + task_to_execute->func(task_to_execute->log); // Execute the task + task_to_execute->successful = true; + } catch (const std::exception& e) { + task_to_execute->successful = false; + std::ostringstream oss; + oss << "Exception type: " << typeid(e).name() << "\n" + << "What: " << e.what(); + task_to_execute->log->append(oss.str()); + } catch (...) 
{ + task_to_execute->successful = false; + task_to_execute->log->append("Unknown exception occurred"); + } + + { + auto now = std::chrono::duration_cast( + std::chrono::system_clock::now().time_since_epoch()) + .count(); + task_to_execute->finish_time = now; + auto connection = get_thread_connection(); + task_to_execute->save(connection, 0); + } + + { + // Remove the task from running_tasks_ + std::lock_guard lock(running_tasks_mutex_); + int id = task_to_execute->id; + auto running_task_it = std::find_if(running_tasks_.begin(), running_tasks_.end(), + [&id](const std::shared_ptr& task) { return task->id == id; }); + + if (running_task_it != running_tasks_.end()) { + running_tasks_.erase(running_task_it); + } + } + + { + // Remove packageconf from running_pkgconfs_ by id + std::lock_guard lock(running_pkgconfs_mutex_); + int pkgconf_id = task_to_execute->get_parent_packageconf()->id; + auto running_pkgconf_it = std::find_if(running_pkgconfs_.begin(), running_pkgconfs_.end(), + [&pkgconf_id](const std::shared_ptr& pkgconf) { return pkgconf->id == pkgconf_id; }); + + if (running_pkgconf_it != running_pkgconfs_.end()) { + running_pkgconfs_.erase(running_pkgconf_it); + } + } + } +} diff --git a/cpp/task_queue.h b/cpp/task_queue.h new file mode 100644 index 0000000..c246042 --- /dev/null +++ b/cpp/task_queue.h @@ -0,0 +1,64 @@ +// Copyright (C) 2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. 
If not, see . + +#ifndef TASK_QUEUE_H +#define TASK_QUEUE_H + +#include "ci_database_objs.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +class TaskQueue { +public: + TaskQueue(size_t max_concurrent_tasks = 10); + ~TaskQueue(); + + void enqueue(std::shared_ptr jobstatus, std::function log)> task_func, std::shared_ptr packageconf); + void start(); + void stop(); + + std::set, Task::TaskComparator> get_tasks() const; + std::set, Task::TaskComparator> get_running_tasks() const; + +private: + size_t max_concurrent_tasks_; + std::set, Task::TaskComparator> tasks_; + std::set, Task::TaskComparator> running_tasks_; + std::set> running_pkgconfs_; + std::queue> thread_pool_tasks_; + mutable std::mutex tasks_mutex_; + mutable std::mutex running_pkgconfs_mutex_; + mutable std::mutex running_tasks_mutex_; + std::condition_variable cv_; + bool stop_; + std::vector workers_; + static std::atomic thread_id_counter; + mutable std::mutex connection_mutex_; + int max_worker_id = 1; + + void worker_thread(); + QSqlDatabase get_thread_connection(); +}; + +#endif // TASK_QUEUE_H diff --git a/cpp/template_renderer.cpp b/cpp/template_renderer.cpp new file mode 100644 index 0000000..aa07e03 --- /dev/null +++ b/cpp/template_renderer.cpp @@ -0,0 +1,544 @@ +/* + * A minimal Jinja2-like template engine in one file, supporting: + * - {% extends "base.html" %} + * - {% block content %} ... {% endblock %} + * - {{ scalarVariable }} + * - {% if expr %} ... {% elif expr %} ... {% else %} ... {% endif %} + * - {% for item in list %} ... {% endfor %} + * - Basic expression parsing with ==, !=, >, <, >=, <= + * - Simple filter usage: {{ var|add:-1 }} + * + * Updated to support nested variable access using dot notation (e.g., repo.packaging_commit). 
+ * + * Copyright (C) 2024-2025 Simon Quigley + */ + +#include "template_renderer.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace fs = std::filesystem; +static std::mutex file_mutex; + +std::string TemplateRenderer::build_template_path(const std::string &tplName) +{ + if (!tplName.empty() && tplName.front() == '/') { + return tplName; + } + return "templates/" + tplName; +} + +std::string TemplateRenderer::file_get_contents(const std::string &path) +{ + std::unique_lock lock(file_mutex); + try { + fs::path rel(path); + fs::path abs = fs::absolute(rel); + auto open_file = [](const fs::path& file_path) -> std::ifstream { + std::ifstream file(file_path, std::ios::in); + if (!file) { + throw std::ios_base::failure("File could not be opened: " + file_path.string()); + } + return file; + }; + + std::ifstream file = open_file(abs); + + std::ostringstream contents; + contents << file.rdbuf(); + return contents.str(); + } catch (const std::exception& e) { + std::cerr << "Unable to get file contents in template_renderer: " << e.what() << "\n"; + return ""; + } catch (...) { + std::cerr << "Unable to get file contents in template_renderer (unknown exception.)\n"; + return ""; + } +} + +std::string TemplateRenderer::apply_filter(const std::string &value, const std::string &filterPart) +{ + size_t colonPos = filterPart.find(':'); + std::string filterName = (colonPos == std::string::npos) + ? filterPart + : filterPart.substr(0, colonPos); + std::string filterArg = (colonPos == std::string::npos) + ? "" + : filterPart.substr(colonPos + 1); + + if (filterName == "add") { + try { + int original = std::stoi(value); + int increment = std::stoi(filterArg); + return std::to_string(original + increment); + } catch(...) { + return value; + } + } + // Additional filters can be added here. 
+ return value; // Unknown filter => pass through +} + +std::string TemplateRenderer::apply_all_filters(const std::string &valueWithFilters, + const std::map &ctx) +{ + // Split on '|' + std::vector parts; + size_t start = 0; + while (true) { + size_t pos = valueWithFilters.find('|', start); + if (pos == std::string::npos) { + parts.push_back(valueWithFilters.substr(start)); + break; + } + parts.push_back(valueWithFilters.substr(start, pos - start)); + start = pos + 1; + } + if (parts.empty()) { + return ""; + } + std::string varExpression = parts[0]; + std::string value = get_variable_value(varExpression, ctx); + + // Apply filters if any + for (size_t i = 1; i < parts.size(); i++) { + value = apply_filter(value, parts[i]); + } + return value; +} + +bool TemplateRenderer::evaluate_condition(const std::string &expr, + const std::map &ctx) +{ + // Define helper lambdas + auto trim = [](const std::string &s) -> std::string { + size_t start = 0; + while (start < s.size() && isspace(static_cast(s[start]))) start++; + size_t end = s.size(); + while (end > start && isspace(static_cast(s[end - 1]))) end--; + return s.substr(start, end - start); + }; + + auto isInteger = [&](const std::string &s) -> bool { + if (s.empty()) return false; + size_t start = (s[0] == '-') ? 
1 : 0; + for (size_t i = start; i < s.size(); ++i) { + if (!isdigit(static_cast(s[i]))) return false; + } + return true; + }; + + auto unquoteIfNeeded = [&](const std::string &tok) -> std::string { + auto t = trim(tok); + if (t.size() >= 2 && + ((t.front() == '\'' && t.back() == '\'') || + (t.front() == '\"' && t.back() == '\"'))) { + return t.substr(1, t.size() - 2); + } + return t; + }; + + auto parse_token_value = [&](const std::string &rawToken) -> std::string { + auto t = trim(rawToken); + if (t.size() >= 2 && ((t.front() == '\'' && t.back() == '\'') || + (t.front() == '\"' && t.back() == '\"'))) { + // Literal string + return unquoteIfNeeded(t); + } else { + // Apply filters + return apply_all_filters(t, ctx); + } + }; + + // Split the expression by 'and' + std::vector conditions; + std::regex andRe("\\s+and\\s+"); + std::sregex_token_iterator it(expr.begin(), expr.end(), andRe, -1); + std::sregex_token_iterator end; + while (it != end) { + conditions.push_back(trim(*it)); + ++it; + } + + // Evaluate each sub-condition + for (const auto &subExpr : conditions) { + std::string e = trim(subExpr); + if (e.empty()) continue; + + // Operators + static std::vector ops = {"==", "!=", "<=", ">=", ">", "<"}; + size_t opPos = std::string::npos; + std::string opFound; + for (const auto &cand : ops) { + size_t p = e.find(cand); + if (p != std::string::npos) { + if (opPos == std::string::npos || p < opPos) { + opPos = p; + opFound = cand; + } + } + } + + if (opPos == std::string::npos) { + // No operator => check truthiness of var + std::string val = parse_token_value(e); + if (val.empty()) return false; + continue; + } + + std::string left = trim(e.substr(0, opPos)); + std::string right = trim(e.substr(opPos + opFound.size())); + + // Directly handle dot notation by using the entire composite key + std::string lv = parse_token_value(left); + std::string rv = parse_token_value(right); + + bool li = isInteger(lv); + bool ri = isInteger(rv); + bool result = false; + + if (li 
&& ri) { + int lnum = std::stoi(lv); + int rnum = std::stoi(rv); + if (opFound == "==") result = (lnum == rnum); + else if (opFound == "!=") result = (lnum != rnum); + else if (opFound == ">") result = (lnum > rnum); + else if (opFound == "<") result = (lnum < rnum); + else if (opFound == ">=") result = (lnum >= rnum); + else if (opFound == "<=") result = (lnum <= rnum); + } else { + // String compare + if (opFound == "==") result = (lv == rv); + else if (opFound == "!=") result = (lv != rv); + else if (opFound == ">") result = (lv > rv); + else if (opFound == "<") result = (lv < rv); + else if (opFound == ">=") result = (lv >= rv); + else if (opFound == "<=") result = (lv <= rv); + } + + if (!result) return false; // Short-circuit for 'and' + } + + return true; // All sub-conditions passed +} + +std::string TemplateRenderer::expand_conditionals(std::string input, + const std::map &ctx) +{ + static std::regex ifOpenRe("\\{\\%\\s*if\\s+[^\\}]+\\%\\}"); + static std::regex ifCloseRe("\\{\\%\\s*endif\\s*\\%\\}"); + + while (true) { + // Gather all if-positions + std::vector ifPositions; + { + size_t searchStart = 0; + while (true) { + std::smatch mOpen; + std::string sub = input.substr(searchStart); + if (!std::regex_search(sub, mOpen, ifOpenRe)) { + break; + } + size_t posAbsolute = searchStart + mOpen.position(0); + ifPositions.push_back(posAbsolute); + searchStart = posAbsolute + mOpen.length(0); + } + } + if (ifPositions.empty()) { + break; + } + + // The last one is the innermost + size_t ifPos = ifPositions.back(); + + { + std::string sub2 = input.substr(ifPos); + std::smatch mclose; + if (!std::regex_search(sub2, mclose, ifCloseRe)) { + // No matching endif + break; + } + + size_t closePosRelative = mclose.position(0); + size_t ifClosePos = ifPos + closePosRelative; + size_t blockLen = (ifClosePos - ifPos) + mclose.length(0); + + // Entire block + std::string blockText = input.substr(ifPos, blockLen); + + // Main regex to match the entire if-endif block + 
static std::regex mainRe( + "\\{\\%\\s*if\\s+([^\\}]+)\\s*\\%\\}([\\s\\S]*?)\\{\\%\\s*endif\\s*\\%\\}" + ); + std::smatch blockMatch; + if (!std::regex_match(blockText, blockMatch, mainRe)) { + break; + } + + std::string condition = blockMatch[1].str(); + std::string innerBlock = blockMatch[2].str(); + + // Parse out any {% elif ... %} / {% else %} + struct ConditionBlock { + std::string cond; // Empty => else + std::string content; + }; + std::vector blocks; + blocks.emplace_back(ConditionBlock{ condition, "" }); + + static std::regex elifElseRe("\\{\\%\\s*elif\\s+([^\\}]+)\\s*\\%\\}|\\{\\%\\s*else\\s*\\%\\}"); + size_t lastPos = 0; + auto bBegin = std::sregex_iterator(innerBlock.begin(), innerBlock.end(), elifElseRe); + auto bEnd = std::sregex_iterator(); + for (auto i = bBegin; i != bEnd; ++i) { + auto m2 = *i; + size_t pos2 = m2.position(0); + // Text up to pos2 is the previous block's content + blocks.back().content.append(innerBlock.substr(lastPos, pos2 - lastPos)); + if (m2[1].matched) { + // Elif + blocks.emplace_back(ConditionBlock{ m2[1].str(), "" }); + } else { + // Else + blocks.emplace_back(ConditionBlock{ "", "" }); + } + lastPos = pos2 + m2.length(0); + } + // Leftover + if (!blocks.empty()) { + blocks.back().content.append(innerBlock.substr(lastPos)); + } + + // Evaluate + std::string finalText; + bool used = false; + for (auto &b : blocks) { + if (b.cond.empty()) { + // Else + if (!used) { + finalText = b.content; + } + break; + } else { + if (evaluate_condition(b.cond, ctx)) { + finalText = b.content; + used = true; + break; + } + } + } + + // Replace that block region with finalText + input.replace(ifPos, blockLen, finalText); + } + } + + return input; +} + +std::string TemplateRenderer::expand_loops(const std::string &input, + const std::map &scalarContext, + const std::map>> &listContext) +{ + std::string result = input; + static std::regex loopRegex("\\{\\%\\s*for\\s+(\\S+)\\s+in\\s+(\\S+)\\s*\\%\\}([\\s\\S]*?)\\{\\%\\s*endfor\\s*\\%\\}"); + 
while (true) { + std::smatch m; + if (!std::regex_search(result, m, loopRegex)) { + break; + } + std::string aliasName = m[1].str(); // e.g., 'repo' + std::string arrayName = m[2].str(); // e.g., 'repos' + std::string loopBody = m[3].str(); + auto it = listContext.find(arrayName); + if (it == listContext.end()) { + // No such array => remove the block + result.replace(m.position(0), m.length(0), ""); + continue; + } + std::string expanded; + for (const auto &oneItem : it->second) { + // Create a per-item scalar context with prefixed keys + std::map perItemScalarContext = scalarContext; + for (const auto &kv : oneItem) { + perItemScalarContext[aliasName + "." + kv.first] = kv.second; + } + + std::string chunk = loopBody; + + // Expand conditionals with per-item scalar context + chunk = expand_conditionals(chunk, perItemScalarContext); + + // Expand nested loops if any with per-item scalar context + chunk = expand_loops(chunk, perItemScalarContext, listContext); + + // Final scalar expansions with per-item scalar context + chunk = replace_variables(chunk, perItemScalarContext); + + // Remove excess whitespace + chunk = strip_excess_whitespace(chunk); + + expanded += chunk; + } + result.replace(m.position(0), m.length(0), expanded); + } + return result; +} + +std::string TemplateRenderer::replace_variables(const std::string &input, + const std::map &context) +{ + static std::regex varRe("\\{\\{\\s*(.*?)\\s*\\}\\}"); + std::string output; + output.reserve(input.size()); + size_t lastPos = 0; + auto begin = std::sregex_iterator(input.begin(), input.end(), varRe); + auto end = std::sregex_iterator(); + for (auto it = begin; it != end; ++it) { + auto match = *it; + output.append(input, lastPos, match.position(0) - lastPos); + std::string expr = match[1].str(); + + // Directly apply all filters (which now handle composite keys) + std::string value = apply_all_filters(expr, context); + + output.append(value); + lastPos = match.position(0) + match.length(0); + } + 
output.append(input, lastPos); + + // Remove leftover {% ... %} if any + static std::regex leftover("\\{\\%.*?\\%\\}"); + output = std::regex_replace(output, leftover, ""); + return output; +} + +std::string TemplateRenderer::render_jinja( + const std::string &tplPath, + const std::map &scalarContext, + const std::map>> &listContext) +{ + std::string tpl = file_get_contents(tplPath); + if (tpl.empty()) { + return "

Template not found: " + tplPath + "

"; + } + std::string step0 = expand_conditionals(tpl, scalarContext); + std::string step1 = expand_loops(step0, scalarContext, listContext); + std::string result = replace_variables(step1, scalarContext); + return result; +} + +std::string TemplateRenderer::render_with_inheritance( + const std::string &childTplName, + const std::map &scalarContext, + const std::map>> &listContext) +{ + // Load child template + std::string childText = file_get_contents(build_template_path(childTplName)); + if (childText.empty()) { + return "

Missing child template:

" + + build_template_path(childTplName) + ""; + } + + // Check for {% extends "base.html" %} + static std::regex extendsRe("\\{\\%\\s*extends\\s*\"([^\"]+)\"\\s*\\%\\}"); + std::smatch exm; + if (!std::regex_search(childText, exm, extendsRe)) { + // No extends => just do expansions + std::string step0 = expand_conditionals(childText, scalarContext); + std::string step1 = expand_loops(step0, scalarContext, listContext); + std::string result = replace_variables(step1, scalarContext); + return result; + } + + // If extends => load base + std::string baseName = exm[1].str(); + std::string baseText = file_get_contents(build_template_path(baseName)); + if (baseText.empty()) { + return "

Missing base template:

" + + baseName + ""; + } + + // Extract child block content + static std::regex blockRe("\\{\\%\\s*block\\s+content\\s*\\%\\}([\\s\\S]*?)\\{\\%\\s*endblock\\s*\\%\\}"); + std::smatch blockMatch; + std::string childBlock; + if (std::regex_search(childText, blockMatch, blockRe)) { + childBlock = blockMatch[1].str(); + } + + // Process loops first, which handle their own conditionals with loop variables + std::string expandedChildBlock = expand_loops(childBlock, scalarContext, listContext); + // Then process any conditionals outside loops + expandedChildBlock = expand_conditionals(expandedChildBlock, scalarContext); + // Finally, replace variables in the child block + expandedChildBlock = replace_variables(expandedChildBlock, scalarContext); + + // Replace {{BLOCK content}} in base with expanded child block + const std::string marker = "{{BLOCK content}}"; + size_t pos = baseText.find(marker); + if (pos != std::string::npos) { + baseText.replace(pos, marker.size(), expandedChildBlock); + } + + // Replace variables in the entire base template (to handle {{PAGE_TITLE}}) + baseText = replace_variables(baseText, scalarContext); + + // Remove any remaining {% ... 
%} tags + static std::regex leftover("\\{\\%.*?\\%\\}"); + baseText = std::regex_replace(baseText, leftover, ""); + + return baseText; +} + +std::string TemplateRenderer::strip_excess_whitespace(const std::string &str) { + // Remove leading/trailing spaces and unify consecutive whitespace into single spaces + std::string result; + result.reserve(str.size()); + bool prevSpace = false; + for (char c: str) { + if (isspace(static_cast(c))) { + if (!prevSpace) { + result += ' '; + prevSpace = true; + } + } else { + result += c; + prevSpace = false; + } + } + // Trim leading and trailing spaces + size_t start = 0; + while (start < result.size() && isspace(static_cast(result[start]))) { + start++; + } + size_t end = result.size(); + while (end > start && isspace(static_cast(result[end - 1]))) { + end--; + } + return result.substr(start, end - start); +} + +std::string TemplateRenderer::get_variable_value(const std::string &var, + const std::map &ctx) { + auto it = ctx.find(var); + if (it != ctx.end()) { + return it->second; + } + return ""; +} diff --git a/cpp/template_renderer.h b/cpp/template_renderer.h new file mode 100644 index 0000000..80cef55 --- /dev/null +++ b/cpp/template_renderer.h @@ -0,0 +1,85 @@ +// Copyright (C) 2024-2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
#ifndef TEMPLATE_RENDERER_H
#define TEMPLATE_RENDERER_H

// NOTE(review): the original #include names were lost in transit;
// reconstructed from the declarations below — confirm against VCS history.
#include <map>
#include <string>
#include <vector>

/**
 * This class provides two styles of rendering:
 *
 * 1) render_jinja(...)            -- A naive Jinja-like expansion for
 *    loops/variables.
 * 2) render_with_inheritance(...) -- A minimal approach to handle
 *    {% extends "base.html" %} and {% block content %} usage, plus
 *    {{VARIABLE}} expansions.
 *
 * The "base.html" template is expected to contain something like:
 *   ... {{BLOCK content}} ...
 * And the child template might do:
 *   {% extends "base.html" %}
 *   {% block content %}Hello world{% endblock %}
 */
class TemplateRenderer {
public:
    static std::string render_jinja(
        const std::string &tplPath,
        const std::map<std::string, std::string> &scalarContext,
        const std::map<std::string, std::vector<std::map<std::string, std::string>>> &listContext
    );

    static std::string render_with_inheritance(
        const std::string &childTplName,
        const std::map<std::string, std::string> &scalarContext,
        const std::map<std::string, std::vector<std::map<std::string, std::string>>> &listContext
    );

private:
    static std::string build_template_path(const std::string &tplName);
    static std::string file_get_contents(const std::string &path);

    // Filters ({{var|filter}} handling)
    static std::string apply_filter(const std::string &value, const std::string &filterPart);
    static std::string apply_all_filters(const std::string &valueWithFilters,
                                         const std::map<std::string, std::string> &ctx);

    // Conditionals ({% if %} ... {% endif %})
    static std::string expand_conditionals(std::string input,
                                           const std::map<std::string, std::string> &ctx);
    static bool evaluate_condition(const std::string &expr,
                                   const std::map<std::string, std::string> &ctx);

    // For loops ({% for %} ... {% endfor %})
    static std::string expand_loops(const std::string &input,
                                    const std::map<std::string, std::string> &scalarContext,
                                    const std::map<std::string, std::vector<std::map<std::string, std::string>>> &listContext);

    // Final {{VARIABLE}} expansions
    static std::string replace_variables(const std::string &input,
                                         const std::map<std::string, std::string> &context);

    // Helper: strip extraneous whitespace from final expansions
    static std::string strip_excess_whitespace(const std::string &str);

    static std::string get_variable_value(const std::string &var,
                                          const std::map<std::string, std::string> &ctx);
};

#endif //
TEMPLATE_RENDERER_H diff --git a/cpp/update-maintainer-lib.cpp b/cpp/update-maintainer-lib.cpp index 0c5477a..617d35d 100644 --- a/cpp/update-maintainer-lib.cpp +++ b/cpp/update-maintainer-lib.cpp @@ -1,4 +1,4 @@ -// Copyright (C) 2024 Simon Quigley +// Copyright (C) 2024-2025 Simon Quigley // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by @@ -24,7 +24,6 @@ namespace fs = std::filesystem; -// Definitions from update-maintainer.cpp moved here static const char* PREVIOUS_UBUNTU_MAINTAINERS[] = { "ubuntu core developers ", "ubuntu core developers ", @@ -32,25 +31,16 @@ static const char* PREVIOUS_UBUNTU_MAINTAINERS[] = { }; static const char* UBUNTU_MAINTAINER = "Ubuntu Developers "; -class MaintainerUpdateException : public std::runtime_error { -public: - using std::runtime_error::runtime_error; -}; - -static std::optional find_control_file(const fs::path &debian_dir) { +static fs::path find_control_file(const fs::path &debian_dir) { fs::path control_in = debian_dir / "control.in"; fs::path control = debian_dir / "control"; - if (fs::exists(control_in)) return control_in; - if (fs::exists(control)) return control; - return std::nullopt; -} - -static fs::path find_changelog_file(const fs::path &debian_dir) { - fs::path changelog = debian_dir / "changelog"; - if (!fs::exists(changelog)) { - throw MaintainerUpdateException("No changelog file found"); + if (fs::exists(control_in)) { + return control_in; + } + if (fs::exists(control)) { + return control; } - return changelog; + throw std::runtime_error("No control file found in " + debian_dir.string()); } static bool xsbc_managed_by_rules(const fs::path &debian_dir) { @@ -66,30 +56,9 @@ static bool xsbc_managed_by_rules(const fs::path &debian_dir) { return false; } -static std::string get_distribution(const fs::path &changelog_file) { - // parse first line of changelog: "package (version) dist; urgency=..." 
- // dist is the token after ')' - std::ifstream f(changelog_file); - if(!f) throw MaintainerUpdateException("Unable to open changelog."); - std::string first_line; - std::getline(f, first_line); - size_t pos = first_line.find(')'); - if(pos == std::string::npos) throw MaintainerUpdateException("Invalid changelog format"); - pos++; - while(pos < first_line.size() && std::isspace((unsigned char)first_line[pos])) pos++; - size_t start = pos; - while(pos < first_line.size() && !std::isspace((unsigned char)first_line[pos]) && first_line[pos] != ';') pos++; - std::string dist = first_line.substr(start, pos - start); - size_t dashpos = dist.find('-'); - if (dashpos != std::string::npos) { - dist = dist.substr(0, dashpos); - } - return dist; -} - static std::string read_file(const fs::path &p) { std::ifstream f(p); - if(!f) throw MaintainerUpdateException("Cannot read file: " + p.string()); + if(!f) throw std::runtime_error("Cannot read file: " + p.string()); std::stringstream ss; ss << f.rdbuf(); return ss.str(); @@ -97,118 +66,99 @@ static std::string read_file(const fs::path &p) { static void write_file(const fs::path &p, const std::string &content) { std::ofstream f(p); - if(!f) throw MaintainerUpdateException("Cannot write file: " + p.string()); + if(!f) throw std::runtime_error("Cannot write file: " + p.string()); f << content; } -static std::optional get_field(const std::string &content, const std::string &field_regex) { - std::regex r(field_regex, std::regex_constants::multiline); - std::smatch m; - if(std::regex_search(content, m, r)) { - return m[1].str(); - } - return std::nullopt; -} - -static std::string set_field(const std::string &content, const std::string &field_regex, const std::string &new_line) { - std::regex r(field_regex, std::regex_constants::multiline); - return std::regex_replace(content, r, new_line); -} - -static void update_maintainer_file(const fs::path &control_file, const std::string &distribution, bool verbose) { +static void 
update_maintainer_file(const fs::path &control_file, bool verbose) { std::string c = read_file(control_file); - auto original_maintainer = get_field(c, "^Maintainer:\\s?(.*)$"); - if(!original_maintainer) { - throw MaintainerUpdateException("No Maintainer field found"); + // Helper lambda to find a field + auto find_field = [&](const std::string &field) -> std::optional { + std::regex r("^" + field + ":\\s?(.*)$", std::regex_constants::icase | std::regex_constants::multiline); + std::smatch m; + if(std::regex_search(c, m, r)) { + return m[1].str(); + } + return std::nullopt; + }; + + // Helper lambda to replace a field line + auto replace_field = [&](const std::string &field, const std::string &val) { + std::regex r("^" + field + ":\\s?.*$", std::regex_constants::icase | std::regex_constants::multiline); + c = std::regex_replace(c, r, field + ": " + val); + }; + + auto original_maint = find_field("Maintainer"); + if(!original_maint) { + throw std::runtime_error("No Maintainer field found in " + control_file.string()); } - std::string om = *original_maintainer; - std::string om_lower = om; - for (auto &ch : om_lower) ch = (char)std::tolower((unsigned char)ch); + std::string om_lower = *original_maint; + for (auto &ch : om_lower) { + ch = (char)std::tolower((unsigned char)ch); + } - // Check previous ubuntu maintainers + // If the original maintainer is a known Ubuntu style, just unify for (auto &pm : PREVIOUS_UBUNTU_MAINTAINERS) { std::string pm_lower = pm; - for (auto &ch: pm_lower) ch=(char)std::tolower((unsigned char)ch); - if(pm_lower == om_lower) { + for (auto &ch: pm_lower) { + ch = (char)std::tolower((unsigned char)ch); + } + if (pm_lower == om_lower) { if(verbose) { - std::cout<<"The old maintainer was: "< + if (om_lower.size() >= 11 && + om_lower.rfind("ubuntu.com>", om_lower.size()-11) != std::string::npos) { - std::string lower_om = om_lower; - if (lower_om.rfind("ubuntu.com>", lower_om.size()-11) != std::string::npos) { - if(verbose) { - std::cout<<"The 
Maintainer email is ubuntu.com address. Doing nothing.\n"; - } - return; - } - } - - // Debian distributions: stable, testing, unstable, experimental - if(distribution=="stable"||distribution=="testing"||distribution=="unstable"||distribution=="experimental") { if(verbose) { - std::cout<<"The package targets Debian. Doing nothing.\n"; + std::cout << "[update-maintainer] Maintainer is an @ubuntu.com address. Doing nothing.\n"; } return; } - // set XSBC-Original-Maintainer if needed - auto orig_field = get_field(c, "^(?:[XSBC]*-)?Original-Maintainer:\\s?(.*)$"); - if(orig_field && verbose) { - std::cout<<"Overwriting original maintainer: "<< *orig_field <<"\n"; - } - - if(verbose) { - std::cout<<"The original maintainer is: "<< om <<"\n"; - std::cout<<"Resetting as: "< +// Copyright (C) 2024-2025 Simon Quigley // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by @@ -13,7 +13,16 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -#pragma once +#ifndef UPDATE_MAINTAINER_LIB_H +#define UPDATE_MAINTAINER_LIB_H + #include -void update_maintainer(const std::string &debian_directory, bool verbose = false); +// +// Update the "Maintainer" field in debian/control (or control.in) +// to a standard Ubuntu field, preserving the original field in +// XSBC-Original-Maintainer if needed. +// +void update_maintainer(const std::string &debian_directory, bool verbose); + +#endif diff --git a/cpp/update-maintainer.cpp b/cpp/update-maintainer.cpp index 554194f..ea919e7 100644 --- a/cpp/update-maintainer.cpp +++ b/cpp/update-maintainer.cpp @@ -1,3 +1,5 @@ +// cpp/update-maintainer.cpp + // Copyright (C) 2024 Simon Quigley // // This program is free software: you can redistribute it and/or modify @@ -13,7 +15,7 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-#include "update-maintainer-lib.h" +#include "lubuntuci_lib.h" #include int main(int argc, char** argv) { @@ -32,7 +34,7 @@ int main(int argc, char** argv) { } try { - update_maintainer(debian_directory, verbose); + //LubuntuCI::update_maintainer(debian_directory, verbose); if(verbose) { std::cout << "Maintainer updated successfully." << std::endl; } diff --git a/cpp/utilities.cpp b/cpp/utilities.cpp index e605cd8..0d1581d 100644 --- a/cpp/utilities.cpp +++ b/cpp/utilities.cpp @@ -1,4 +1,4 @@ -// Copyright (C) 2024 Simon Quigley +// Copyright (C) 2024-2025 Simon Quigley // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by @@ -14,6 +14,7 @@ // along with this program. If not, see . #include "utilities.h" +#include "common.h" #include #include @@ -22,47 +23,62 @@ #include #include #include +#include +#include +#include +#include +#include // for std::format in C++20/23 namespace fs = std::filesystem; +// Define a semaphore with a maximum of 10 concurrent jobs +static std::counting_semaphore<10> sem(10); + +// Job queue and synchronization primitives +static std::queue> job_queue; +static std::mutex queue_mutex; +static std::atomic daemon_running{false}; + // Function to read the entire content of a file into a string -std::string readFile(const fs::path& filePath) { - std::ifstream inFile(filePath, std::ios::binary); - if (inFile) { - return std::string((std::istreambuf_iterator(inFile)), +std::string read_file(const fs::path& file_path) { + std::ifstream in_file(file_path, std::ios::binary); + if (in_file) { + return std::string((std::istreambuf_iterator(in_file)), std::istreambuf_iterator()); } return ""; } // Function to write a string into a file -void writeFile(const fs::path& filePath, const std::string& content) { - std::ofstream outFile(filePath, std::ios::binary); - if (outFile) { - outFile << content; +void write_file(const fs::path& file_path, const std::string& 
content) { + std::ofstream out_file(file_path, std::ios::binary); + if (out_file) { + out_file << content; } } // Function to perform in-place regex replace on a file -void regexReplaceInFile(const fs::path& filePath, const std::string& pattern, const std::string& replace) { - std::string content = readFile(filePath); - content = std::regex_replace(content, std::regex(pattern), replace); - writeFile(filePath, content); +void regex_replace_in_file(const fs::path& file_path, + const std::string& pattern, + const std::string& replacement) { + std::string content = read_file(file_path); + content = std::regex_replace(content, std::regex(pattern), replacement); + write_file(file_path, content); } // Function to decompress gzipped files -std::string decompressGzip(const fs::path& filePath) { - gzFile infile = gzopen(filePath.c_str(), "rb"); +std::string decompress_gzip(const fs::path& file_path) { + gzFile infile = gzopen(file_path.c_str(), "rb"); if (!infile) return ""; - std::string decompressedData; + std::string decompressed_data; char buffer[8192]; - int numRead = 0; - while ((numRead = gzread(infile, buffer, sizeof(buffer))) > 0) { - decompressedData.append(buffer, numRead); + int num_read = 0; + while ((num_read = gzread(infile, buffer, sizeof(buffer))) > 0) { + decompressed_data.append(buffer, num_read); } gzclose(infile); - return decompressedData; + return decompressed_data; } // Helper function for libcurl write callback @@ -72,18 +88,20 @@ size_t write_data(void* ptr, size_t size, size_t nmemb, void* stream) { } // Function to download a file with timestamping using libcurl -void downloadFileWithTimestamping(const std::string& url, const fs::path& outputPath, - const fs::path& logFilePath, std::mutex& logMutex) { +void download_file_with_timestamping(const std::string& url, + const fs::path& output_path, + const fs::path& log_file_path, + std::mutex& log_mutex) { CURL* curl; CURLcode res; FILE* fp; curl = curl_easy_init(); if (curl) { - fs::path tempFilePath 
= outputPath.string() + ".tmp"; - fp = fopen(tempFilePath.c_str(), "wb"); + fs::path temp_file_path = output_path.string() + ".tmp"; + fp = fopen(temp_file_path.c_str(), "wb"); if (!fp) { - std::cerr << "Failed to open file: " << tempFilePath << std::endl; + std::cerr << "Failed to open file: " << temp_file_path << std::endl; curl_easy_cleanup(curl); return; } @@ -95,7 +113,7 @@ void downloadFileWithTimestamping(const std::string& url, const fs::path& output // Timestamping: set If-Modified-Since header struct stat file_info; - if (stat(outputPath.c_str(), &file_info) == 0) { + if (stat(output_path.c_str(), &file_info) == 0) { // Set the time condition to If-Modified-Since curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE); curl_easy_setopt(curl, CURLOPT_TIMEVALUE, file_info.st_mtime); @@ -113,20 +131,165 @@ void downloadFileWithTimestamping(const std::string& url, const fs::path& output // Log the result and handle the downloaded file { - std::lock_guard lock(logMutex); - std::ofstream logFile(logFilePath, std::ios::app); + std::lock_guard lock(log_mutex); + std::ofstream log_file(log_file_path, std::ios::app); if (res == CURLE_OK && (response_code == 200 || response_code == 201)) { - fs::rename(tempFilePath, outputPath); - logFile << "Downloaded: " << url << std::endl; + fs::rename(temp_file_path, output_path); + log_file << "Downloaded: " << url << std::endl; } else if (response_code == 304) { - fs::remove(tempFilePath); - logFile << "Not Modified: " << url << std::endl; + fs::remove(temp_file_path); + log_file << "Not Modified: " << url << std::endl; } else { - fs::remove(tempFilePath); - logFile << "Failed to download: " << url << std::endl; + fs::remove(temp_file_path); + log_file << "Failed to download: " << url << std::endl; } } } else { std::cerr << "Failed to initialize CURL." 
<< std::endl; } } + +std::filesystem::path create_temp_directory() { + auto temp_dir = std::filesystem::temp_directory_path() / generate_random_string(32); + std::filesystem::create_directory(temp_dir); + return temp_dir; +} + +// Function to copy a directory recursively +void copy_directory(const fs::path& source, const fs::path& destination) { + if (!std::filesystem::exists(source) || !std::filesystem::is_directory(source)) { + throw std::runtime_error("Source directory does not exist or is not a directory: " + source.string()); + } + + // Create the destination directory + std::filesystem::create_directories(destination); + + // Copy files and directories recursively + for (const auto& entry : std::filesystem::recursive_directory_iterator(source)) { + auto relative_path = std::filesystem::relative(entry.path(), source); + auto target_path = destination / relative_path; + + try { + if (std::filesystem::is_directory(entry)) { + std::filesystem::create_directory(target_path); + } else if (std::filesystem::is_regular_file(entry)) { + std::filesystem::copy(entry, target_path, std::filesystem::copy_options::overwrite_existing); + } + } catch (...) 
{ + continue; + } + } +} + +// Function to generate a random string of given length +std::string generate_random_string(size_t length) { + const std::string chars = + "abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "0123456789"; + thread_local std::mt19937 rg{std::random_device{}()}; + thread_local std::uniform_int_distribution<> pick(0, chars.size() - 1); + std::string s; + s.reserve(length); + while (length--) + s += chars[pick(rg)]; + return s; +} + +// Function to get current UTC time formatted as per the given format string +std::string get_current_utc_time(const std::string& format) { + auto now = std::chrono::system_clock::now(); + std::time_t now_time = std::chrono::system_clock::to_time_t(now); + std::tm tm_utc; + gmtime_r(&now_time, &tm_utc); + char buf[64]; // Ensure sufficient buffer size for different formats + std::strftime(buf, sizeof(buf), format.c_str(), &tm_utc); + return std::string(buf); +} + +// Function to convert filesystem time to time_t +std::time_t to_time_t(const fs::file_time_type& ftime) { + using namespace std::chrono; + // Convert to system_clock time_point + auto sctp = time_point_cast( + ftime - fs::file_time_type::clock::now() + system_clock::now() + ); + return system_clock::to_time_t(sctp); +} + +std::vector split_string(const std::string& input, const std::string& delimiter) { + std::vector result; + size_t start = 0; + size_t end = 0; + + while ((end = input.find(delimiter, start)) != std::string::npos) { + result.emplace_back(input.substr(start, end - start)); + start = end + delimiter.length(); + } + + // Add the remaining part of the string + result.emplace_back(input.substr(start)); + return result; +} + +std::string remove_suffix(const std::string& input, const std::string& suffix) { + if (input.size() >= suffix.size() && + input.compare(input.size() - suffix.size(), suffix.size(), suffix) == 0) { + return input.substr(0, input.size() - suffix.size()); + } + return input; // Return the original string if the 
suffix doesn't exist +} + +// Utility which basically does the following: +// "noble" (std::string) -> 2504 (int) +// The bool represents whether this codename is the development release +std::pair get_version_from_codename(const std::string& codename) { + std::ifstream file("/usr/share/distro-info/ubuntu.csv"); + if (!file.is_open()) { + throw std::runtime_error("Failed to open file."); + } + + std::string line; + // Skip the header line + std::getline(file, line); + + std::string last_codename; + int version = 0; + + while (std::getline(file, line)) { + std::istringstream iss(line); + std::string version_str, name, series; + std::getline(iss, version_str, ','); + std::getline(iss, name, ','); + std::getline(iss, series, ','); + + if (series == codename) { + version_str.erase(std::remove(version_str.begin(), version_str.end(), '.'), + version_str.end()); + version = std::stoi(version_str); + } + last_codename = series; + } + + bool is_last = (codename == last_codename); + + if (version == 0) { + throw std::runtime_error("Codename not found."); + } + + return {version, is_last}; +} + +void ensure_git_inited() { + static std::once_flag git_init_flag; + std::call_once(git_init_flag, []() { + git_libgit2_init(); + }); +} + +void run_task_every(std::stop_token _stop_token, int interval_minutes, std::function task) { + while (!_stop_token.stop_requested()) { + task(); + std::this_thread::sleep_for(std::chrono::minutes(interval_minutes)); + } +} diff --git a/cpp/utilities.h b/cpp/utilities.h index 9bac7ce..5ff535c 100644 --- a/cpp/utilities.h +++ b/cpp/utilities.h @@ -1,4 +1,4 @@ -// Copyright (C) 2024 Simon Quigley +// Copyright (C) 2024-2025 Simon Quigley // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by @@ -18,22 +18,50 @@ #include #include #include +#include +#include +#include + +#include // Function to read the entire content of a file into a string -std::string 
readFile(const std::filesystem::path& filePath); +std::string read_file(const std::filesystem::path& filePath); // Function to write a string into a file -void writeFile(const std::filesystem::path& filePath, const std::string& content); +void write_file(const std::filesystem::path& filePath, const std::string& content); // Function to perform in-place regex replace on a file -void regexReplaceInFile(const std::filesystem::path& filePath, const std::string& pattern, const std::string& replace); +void regex_replace_in_file(const std::filesystem::path& filePath, const std::string& pattern, const std::string& replace); // Function to decompress gzipped files -std::string decompressGzip(const std::filesystem::path& filePath); +std::string decompress_gzip(const std::filesystem::path& filePath); // Function to download a file with timestamping using libcurl -void downloadFileWithTimestamping(const std::string& url, const std::filesystem::path& outputPath, +void download_file_with_timestamping(const std::string& url, const std::filesystem::path& outputPath, const std::filesystem::path& logFilePath, std::mutex& logMutex); // Helper function for libcurl write callback size_t write_data(void* ptr, size_t size, size_t nmemb, void* stream); + +// Function to create a temporary directory with a random name +std::filesystem::path create_temp_directory(); + +// Function to copy a directory recursively +void copy_directory(const std::filesystem::path& source, const std::filesystem::path& destination); + +// Time utilities +std::string get_current_utc_time(const std::string& format); +std::time_t to_time_t(const std::filesystem::file_time_type& ftime); + +// String utilities +std::vector split_string(const std::string& input, const std::string& delimiter); +std::string remove_suffix(const std::string& input, const std::string& suffix); +std::string generate_random_string(size_t length); + +// Get version from codename using distro-info +std::pair get_version_from_codename(const 
std::string& codename); + +// Git utilities +void ensure_git_inited(); + +void run_task_every(std::stop_token _stop_token, int interval_minutes, std::function task); diff --git a/cpp/web_server.cpp b/cpp/web_server.cpp new file mode 100644 index 0000000..b833bae --- /dev/null +++ b/cpp/web_server.cpp @@ -0,0 +1,1190 @@ +// Copyright (C) 2024-2025 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +#include "web_server.h" +#include "utilities.h" +#include "sources_parser.h" +#include "naive_bayes_classifier.h" + +// Qt includes +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// C++ includes +#include +#include +#include +#include +#include +#include +#include +#include +#include // C++20/23 for std::format + +// Launchpad includes +#include "launchpad.h" +#include "archive.h" +#include "person.h" +#include "distribution.h" +#include "distro_series.h" +#include "source_package_publishing_history.h" +#include "build.h" +#include "binary_package_publishing_history.h" + +// Local includes +#include "lubuntuci_lib.h" +#include "template_renderer.h" + +constexpr QHttpServerResponder::StatusCode StatusCodeFound = QHttpServerResponder::StatusCode::Found; + +static std::string timestamp_now() +{ + return QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss.zzz").toStdString(); +} + 
+WebServer::WebServer(QObject *parent) : QObject(parent) {} + +[[nodiscard]] std::map WebServer::parse_query_parameters(const QString &query) { + return query + .split('&') // Split by '&' into key-value pairs + | std::views::filter([](const QString ¶m) { return param.contains('='); }) // Only valid pairs + | std::views::transform([](const QString ¶m) { + const auto keyValue = param.split('='); + return std::pair{keyValue[0], keyValue[1]}; // Return a key-value pair + }) + | std::ranges::to>(); // Collect the pairs into a map +} + +[[nodiscard]] bool WebServer::validate_token(const QString& token) { + // Always 64 characters + if (token.size() != 64) return false; + // Can't validate the active token if there aren't any + if (_active_tokens.isEmpty()) return false; + // Always present in active_tokens, and not expired + auto it = _active_tokens.find(token); + if (it != _active_tokens.end() && it.value() >= QDateTime::currentDateTime()) return true; + else { + _active_tokens.erase(it); + + auto person_it = _token_person.find(token); + if (person_it != _token_person.end()) _token_person.erase(person_it); + + return false; + } +} + +[[nodiscard]] QHttpServerResponse WebServer::verify_session_token(const QHttpServerRequest &request, const QHttpHeaders &headers) { + const QByteArray cookie_header = headers.value(QHttpHeaders::WellKnownHeader::Cookie).toByteArray(); + const QUrl request_url = request.url(); + const QString base_url = request_url.scheme() + "://" + request_url.host() + + (request_url.port() == -1 ? 
"" : ':' + QString::number(request_url.port())); + const QString current_path = request_url.path(); + + for (const auto &cookie : cookie_header.split(';') + | std::views::transform([](const QByteArray &cookie) { return cookie.trimmed(); }) + | std::views::filter([](const QByteArray &cookie) { return cookie.startsWith("auth_token="); })) { + if (!validate_token(QString::fromUtf8(cookie.mid(sizeof("auth_token=") - 1)))) break; + return QHttpServerResponse(QHttpServerResponder::StatusCode::Ok); + } + + QHttpServerResponse bad_response(StatusCodeFound); + QHttpHeaders bad_response_headers; + bad_response_headers.replaceOrAppend(QHttpHeaders::WellKnownHeader::Location, "/unauthorized?base_url=" + base_url + "&redirect_to=" + current_path); + bad_response.setHeaders(bad_response_headers); + + return bad_response; +} + +bool WebServer::start_server(quint16 port) { + std::optional> global_lp_opt; + launchpad* global_lp = nullptr; + auto lp_opt = launchpad::login(); + if (!lp_opt.has_value()) { + std::cerr << "Failed to authenticate with Launchpad.\n"; + return false; + } + auto lp = lp_opt.value().get(); + auto ubuntu_opt = lp->distributions["ubuntu"]; + if (!ubuntu_opt.has_value()) { + std::cerr << "Failed to retrieve ubuntu.\n"; + return false; + } + distribution ubuntu = ubuntu_opt.value(); + + auto lubuntu_ci_opt = lp->people["lubuntu-ci"]; + if (!lubuntu_ci_opt.has_value()) { + std::cerr << "Failed to retrieve lubuntu-ci.\n"; + return false; + } + person lubuntu_ci = lubuntu_ci_opt.value(); + + auto regular_opt = lubuntu_ci.getPPAByName(ubuntu, "unstable-ci"); + if (!regular_opt.has_value()) { + std::cerr << "Failed to retrieve regular PPA.\n"; + return false; + } + archive regular = regular_opt.value(); + + auto proposed_opt = lubuntu_ci.getPPAByName(ubuntu, "unstable-ci-proposed"); + if (!proposed_opt.has_value()) { + std::cerr << "Failed to retrieve proposed PPA.\n"; + return false; + } + archive proposed = proposed_opt.value(); + + std::shared_ptr _tmp_pkg_conf = 
std::make_shared(); + std::shared_ptr lubuntuci = std::make_shared(); + std::vector> all_repos = lubuntuci->list_known_repos(); + task_queue = std::make_unique(10); + static const std::map> job_statuses = lubuntuci->cilogic.get_job_statuses(); + task_queue->start(); + + // Load initial tokens + { + QSqlQuery load_tokens(lubuntuci->cilogic.get_thread_connection()); + load_tokens.prepare("SELECT person.id, person.username, person.logo_url, person_token.token, person_token.expiry_date FROM person INNER JOIN person_token ON person.id = person_token.person_id"); + load_tokens.exec(); + while (load_tokens.next()) { + int person_id = load_tokens.value(0).toInt(); + QString username = load_tokens.value(1).toString(); + QString logo_url = load_tokens.value(2).toString(); + QString token = load_tokens.value(3).toString(); + QDateTime expiry_date = QDateTime::fromString(load_tokens.value(4).toString(), Qt::ISODate); + + Person person(person_id, username.toStdString(), logo_url.toStdString()); + _active_tokens[token] = expiry_date; + _token_person[token] = person; + } + } + + expire_tokens_thread_ = std::jthread(run_task_every, 60, [this, lubuntuci] { + QSqlQuery expired_tokens(lubuntuci->cilogic.get_thread_connection()); + QString current_time = QDateTime::currentDateTime().toString(Qt::ISODate); + + expired_tokens.prepare("DELETE FROM person_token WHERE expiry_date < :current_time"); + expired_tokens.bindValue(":current_time", QDateTime::currentDateTime().toString(Qt::ISODate)); + expired_tokens.exec(); + for (auto it = _active_tokens.begin(); it != _active_tokens.end();) { + if (it.value() <= QDateTime::currentDateTime()) it = _active_tokens.erase(it); + else ++it; + } + for (auto it = _token_person.begin(); it != _token_person.end();) { + if (!_active_tokens.contains(it.key())) it = _token_person.erase(it); + else ++it; + } + }); + + process_sources_thread_ = std::jthread(run_task_every, 10, [this, all_repos, proposed, lubuntuci] { + for (auto pkgconf : all_repos) { + if 
(!pkgconf->can_check_source_upload()) { continue; } + + task_queue->enqueue( + job_statuses.at("source_check"), + [this, proposed](std::shared_ptr log) mutable { + std::shared_ptr pkgconf = log->get_task_context()->get_parent_packageconf(); + std::string package_version = pkgconf->upstream_version + "-0ubuntu0~ppa" + std::to_string(pkgconf->ppa_revision); + bool found_in_ppa = false; + for (auto spph : proposed.getPublishedSources("", "", std::nullopt, true, true, "", pkgconf->package->name, "", package_version)) { + found_in_ppa = true; + break; + } + + if (!found_in_ppa) { + throw std::runtime_error("Not found in the PPA."); + } + }, + pkgconf + ); + + lubuntuci->cilogic.sync(pkgconf); + } + }); + + //////////////////////////////////////////////////////////////// + // /unauthorized?base_url=&redirect_to= + //////////////////////////////////////////////////////////////// + http_server_.route("/unauthorized", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + // Extract data up front + auto query = req.query(); + QString base_url = query.queryItemValue("base_url"); + QString redirect_to = query.hasQueryItem("redirect_to") ? query.queryItemValue("redirect_to") : ""; + + std::mt19937 generator(std::random_device{}()); + std::uniform_int_distribution distribution(100, 999); + + return QtConcurrent::run([this, base_url, redirect_to, gen = std::move(generator), dist = std::move(distribution)]() mutable -> QHttpServerResponse { + int auth_identifier; + do { + auth_identifier = dist(gen); + } while (_in_progress_tokens.contains(auth_identifier)); + _in_progress_tokens[auth_identifier] = QDateTime::currentDateTime().addSecs(60 * 60); + + QString form_data = QString(R"( + + + OpenID Redirect + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + )").arg(base_url).arg(auth_identifier).arg(redirect_to); + + return QHttpServerResponse("text/html", QByteArray(form_data.toUtf8())); + }); + }); + + ///////////////// + // /authcallback + ///////////////// + http_server_.route("/authcallback", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + // Extract data up front + auto query = req.query(); + QString base_url = query.queryItemValue("base_url"); + QString redirect_to = query.hasQueryItem("redirect_to") ? query.queryItemValue("redirect_to") : ""; + std::map params = parse_query_parameters(req.query().toString()); + + return QtConcurrent::run([=, this]() { + std::set only_care_about = {"auth_identifier", "openid.ax.value.name_ao.1", + "openid.lp.is_member", "openid.mode"}; + + bool has_correct_params = true; + int found_only_care_about = 0; + std::string username; + for (auto [key, value] : params) { + if (!only_care_about.contains(key.toStdString())) continue; + + if (key == "auth_identifier") { + found_only_care_about++; + if (value.size() != 3) has_correct_params = false; + else if (!_in_progress_tokens.contains(value.toInt())) has_correct_params = false; + else if (_in_progress_tokens[value.toInt()] <= QDateTime::currentDateTime()) { + _in_progress_tokens.remove(value.toInt()); + has_correct_params = false; + } else { + _in_progress_tokens.remove(value.toInt()); + } + } else if (key == "openid.ax.value.name_ao.1") { + found_only_care_about++; + username = value.toStdString(); + } else if (key == "openid.lp.is_member") { + found_only_care_about++; + if (value.isEmpty()) has_correct_params = false; + else if (!value.contains("ubuntu-qt-code")) has_correct_params = false; + } else if (key == "openid.mode") { + found_only_care_about++; + if (value != "id_res") has_correct_params = false; + } + } + + if (!has_correct_params || (found_only_care_about != only_care_about.size())) { + std::map scalar_context; + std::map>> list_context; + std::string failed_auth_html = 
TemplateRenderer::render_with_inheritance( + "ope.html", + scalar_context, + list_context + ); + + return QHttpServerResponse("text/html", QByteArray(failed_auth_html.c_str(), (int)failed_auth_html.size())); + } + + // Create the new token + QString token; + { + std::mt19937 generator(std::random_device{}()); + std::uniform_int_distribution distribution(0, 255); + std::ostringstream tok; + for (size_t i = 0; i < 32; ++i) tok << std::hex << std::setw(2) << std::setfill('0') << distribution(generator); + token = QString::fromStdString(tok.str()); + } + + // Find the existing Person object if there is one + Person person; + bool found_key_bool = false; + QString found_key; + for (auto it = _token_person.begin(); it != _token_person.end(); ++it) { + if (it.value().username == username) { + person = it.value(); + found_key = it.key(); + found_key_bool = true; + break; + } + } + + if (found_key_bool) { + _token_person.remove(found_key); + } else { + QSqlQuery get_person(lubuntuci->cilogic.get_thread_connection()); + get_person.prepare("SELECT id, username, logo_url FROM person WHERE username = ?"); + get_person.bindValue(0, QString::fromStdString(username)); + if (!get_person.exec()) { qDebug() << "Error executing SELECT query for person:" << get_person.lastError(); } + + if (get_person.next()) { + person = Person(get_person.value(0).toInt(), get_person.value(1).toString().toStdString(), + get_person.value(2).toString().toStdString()); + } else { + QSqlQuery insert_person(lubuntuci->cilogic.get_thread_connection()); + insert_person.prepare("INSERT INTO person (username, logo_url) VALUES (?, ?)"); + insert_person.bindValue(0, QString::fromStdString(username)); + insert_person.bindValue(1, QString::fromStdString("https://api.launchpad.net/devel/~" + username + "/logo")); + if (!insert_person.exec()) { qDebug() << "Error executing INSERT query for person:" << insert_person.lastError(); } + + QVariant last_id = insert_person.lastInsertId(); + if (last_id.isValid()) { + 
person = Person(last_id.toInt(), username, "https://api.launchpad.net/devel/~" + username + "/logo"); + } + } + } + + // Insert the token into the sets and database + QDateTime one_day = QDateTime::currentDateTime().addSecs(24 * 60 * 60); + _token_person.insert(token, person); + _active_tokens.insert(token, one_day); + + { + QSqlQuery insert_token(lubuntuci->cilogic.get_thread_connection()); + insert_token.prepare("INSERT INTO person_token (person_id, token, expiry_date) VALUES (?, ?, ?)"); + insert_token.bindValue(0, person.id); + insert_token.bindValue(1, token); + insert_token.bindValue(2, one_day.toString(Qt::ISODate)); + if (!insert_token.exec()) { qDebug() << "Error executing INSERT query for token:" << insert_token.lastError(); } + } + + QString final_html = QString(R"( + + + + + Redirecting... + + + +

Success!

+

Redirecting... If you are not redirected automatically, click here.

+ + + )").arg(redirect_to); + QHttpServerResponse good_redirect("text/html", final_html.toUtf8()); + QHttpHeaders good_redirect_headers; + QString url_safe_token = QUrl::toPercentEncoding(token); + good_redirect_headers.replaceOrAppend(QHttpHeaders::WellKnownHeader::SetCookie, + "auth_token=" + url_safe_token + "; HttpOnly; SameSite=Strict"); + good_redirect.setHeaders(good_redirect_headers); + return good_redirect; + }); + }); + + ////////////////////////////////////////// + // Route "/" + ////////////////////////////////////////// + http_server_.route("/", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = verify_session_token(req, req.headers()); + if (session_response.statusCode() == StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + auto query = req.query(); + int page = query.queryItemValue("page").isEmpty() ? 1 : query.queryItemValue("page").toInt(); + int per_page = query.queryItemValue("per_page").isEmpty() ? 30 : query.queryItemValue("per_page").toInt(); + std::string sort_by = query.queryItemValue("sort_by").isEmpty() + ? "id" + : query.queryItemValue("sort_by").toStdString(); + std::string sort_order = query.queryItemValue("sort_order").isEmpty() + ? "asc" + : query.queryItemValue("sort_order").toStdString(); + + return QtConcurrent::run([=, this]() { + auto all_repos = lubuntuci->list_known_repos(); + int total_size = static_cast(all_repos.size()); + int total_pages = (per_page > 0) + ? (total_size + per_page - 1) / per_page + : 1; + + auto repos = lubuntuci->list_known_repos(page, per_page, sort_by, sort_order); + if (repos.empty() && total_size == 0) { + std::string err_html = R"( + +No repos + +

ERROR: No repositories found!

+ + +)"; + return QHttpServerResponse("text/html", QByteArray(err_html.c_str(), (int)err_html.size())); + } + + std::map scalar_context = { + {"PAGE_TITLE", "Lubuntu CI Home"}, + {"page", std::to_string(page)}, + {"sort_by", sort_by}, + {"sort_order", sort_order}, + {"total_pages", std::to_string(total_pages)} + }; + std::map>> list_context; + + std::vector> reposVec; + for (const auto &r : repos) { + std::map item; + std::string packaging_commit_str; + std::string upstream_commit_str; + + if (r->packaging_commit) { + std::string commit_summary = r->packaging_commit->commit_summary; + if (commit_summary.size() > 40) { + commit_summary = commit_summary.substr(0, 37) + "..."; + } + packaging_commit_str = r->packaging_commit->commit_hash.substr(0, 7) + + std::format(" ({:%Y-%m-%d %H:%M:%S %Z})
", r->packaging_commit->commit_datetime) + + commit_summary; + } + if (r->upstream_commit) { + std::string commit_summary = r->upstream_commit->commit_summary; + if (commit_summary.size() > 40) { + commit_summary = commit_summary.substr(0, 37) + "..."; + } + upstream_commit_str = r->upstream_commit->commit_hash.substr(0, 7) + + std::format(" ({:%Y-%m-%d %H:%M:%S %Z})
", r->upstream_commit->commit_datetime) + + commit_summary; + } + + std::string packaging_commit_url_str = (r->package ? r->package->packaging_browser : "") + + (r->packaging_commit ? r->packaging_commit->commit_hash : ""); + std::string upstream_commit_url_str = (r->package ? r->package->upstream_browser : "") + + (r->upstream_commit ? r->upstream_commit->commit_hash : ""); + + item["id"] = std::to_string(r->id); + item["name"] = r->package->name; + item["branch_name"] = r->branch->name; + item["codename"] = r->release->codename; + item["packaging_commit"] = packaging_commit_str; + item["packaging_commit_url"] = packaging_commit_url_str; + item["upstream_commit"] = upstream_commit_str; + item["upstream_commit_url"] = upstream_commit_url_str; + + // For each job in the map, fetch the real task and set a CSS class accordingly. + for (auto const & [job_name, job_ptr] : job_statuses) { + auto t = r->get_task_by_jobstatus(job_ptr); + if (t) { + std::string css_class = "bg-secondary"; // default + + if (t->finish_time > 0) { + css_class = t->successful ? 
"bg-success" : "bg-danger"; + } else if (t->start_time > 0) { + css_class = "bg-warning"; // started but not finished + } else { + css_class = "bg-info"; // queued but not started + } + + item[job_name + "_class"] = css_class; + } else { + item[job_name + "_class"] = ""; + } + } + + reposVec.push_back(item); + } + list_context["repos"] = reposVec; + + std::string final_html = TemplateRenderer::render_with_inheritance( + "home.html", + scalar_context, + list_context + ); + + return QHttpServerResponse("text/html", QByteArray(final_html.c_str(), (int)final_html.size())); + }); + }); + + ////////////////////////////////////////// + // /pull?repo= + ////////////////////////////////////////// + http_server_.route("/pull", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = verify_session_token(req, req.headers()); + if (session_response.statusCode() == StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + // Extract data up front + auto query = req.query(); + QString repo_string = query.queryItemValue("repo"); + // We'll store them in normal copyable types + std::string repoStr = repo_string.toStdString(); + + // Return the concurrency + return QtConcurrent::run([=, this]() { + if (repo_string.isEmpty() || !repo_string.toInt(nullptr, 10)) { + std::string msg = "No valid repo specified."; + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + } + int repo = std::stoi(repoStr); + + std::string msg = lubuntuci->cilogic.queue_pull_tarball({ lubuntuci->cilogic.get_packageconf_by_id(repo) }, task_queue, job_statuses); + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + }); + }); + + ////////////////////////////////////////// + // /build?repo= + ////////////////////////////////////////// + http_server_.route("/build", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { 
+ auto query = req.query(); + QString repo_string = query.queryItemValue("repo"); + std::string repoStr = repo_string.toStdString(); + + return QtConcurrent::run([=, this]() { + if (repo_string.isEmpty() || !repo_string.toInt(nullptr, 10)) { + std::string msg = "No valid repo specified."; + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + } + int repo = std::stoi(repoStr); + + std::shared_ptr pkgconf = lubuntuci->cilogic.get_packageconf_by_id(repo); + static const std::map> job_statuses = lubuntuci->cilogic.get_job_statuses(); + + task_queue->enqueue( + job_statuses.at("source_build"), + [this, lubuntuci](std::shared_ptr log) mutable { + std::shared_ptr pkgconf = log->get_task_context()->get_parent_packageconf(); + auto [build_ok, changes_files] = lubuntuci->cilogic.build_project(pkgconf, log); + if (build_ok) { + task_queue->enqueue( + job_statuses.at("upload"), + [lubuntuci, changes_files](std::shared_ptr log2) mutable { + std::shared_ptr pkgconf2 = log2->get_task_context()->get_parent_packageconf(); + bool upload_ok = lubuntuci->cilogic.upload_and_lint(pkgconf2, changes_files, false, log2); + (void)upload_ok; + }, + pkgconf + ); + } + }, + pkgconf + ); + std::string msg = "Build queued"; + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + }); + }); + + ////////////////////////////////////////// + // /logs?repo=foo + ////////////////////////////////////////// + http_server_.route("/logs", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = verify_session_token(req, req.headers()); + if (session_response.statusCode() == StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + auto query = req.query(); + std::string repo = query.queryItemValue("repo").toStdString(); + + return QtConcurrent::run([=, this]() { + if (repo.empty()) { + std::string msg = "No repo 
specified."; + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + } + std::string log_content = lubuntuci->get_repo_log(repo); + + std::map>> list_context; + std::map context; + context["title"] = "Logs for " + repo; + + std::string body; + body += "

Logs: " + repo + "

"; + body += "
" + log_content + "
"; + + context["BODY_CONTENT"] = body; + + std::string final_html = TemplateRenderer::render_with_inheritance( + "base.html", + context, + list_context + ); + if (final_html.empty()) { + final_html = "

Log Output

"
+                             + log_content + "
"; + } + return QHttpServerResponse("text/html", QByteArray(final_html.c_str(), (int)final_html.size())); + }); + }); + + ////////////////////////////////////////// + // /pull-selected?repos= + ////////////////////////////////////////// + http_server_.route("/pull-selected", [this, lubuntuci, all_repos](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = verify_session_token(req, req.headers()); + if (session_response.statusCode() == StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + auto query = req.query(); + std::string repos_str = query.queryItemValue("repos").toStdString(); + + return QtConcurrent::run([=, this]() { + if (repos_str.empty()) { + std::string msg = "
No repositories specified for pull.
"; + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + } + + std::set repos = std::ranges::to>( + split_string(repos_str, "%2C") + | std::views::filter([](const std::string& s) { + return !s.empty() && std::ranges::all_of(s, ::isdigit); + }) + | std::views::transform([](const std::string& s) { + return std::stoi(s); + }) + ); + + std::string msg = lubuntuci->cilogic.queue_pull_tarball(lubuntuci->cilogic.get_packageconfs_by_ids(repos), task_queue, job_statuses); + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + }); + }); + + ////////////////////////////////////////// + // /build-selected?repos=foo,bar,baz + ////////////////////////////////////////// + http_server_.route("/build-selected", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = verify_session_token(req, req.headers()); + if (session_response.statusCode() == StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + auto query = req.query(); + std::string repos_str = query.queryItemValue("repos").toStdString(); + + return QtConcurrent::run([=, this]() { + if (repos_str.empty()) { + std::string msg = "
No repositories specified for build.
"; + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + } + + std::set repos = std::ranges::to>( + split_string(repos_str, "%2C") + | std::views::filter([](const std::string& s) { + return !s.empty() && std::ranges::all_of(s, ::isdigit); + }) + | std::views::transform([](const std::string& s) { + return std::stoi(s); + }) + ); + std::vector> pkgconfs = lubuntuci->cilogic.get_packageconfs_by_ids(repos); + + if (repos.empty()) { + std::string msg = "No valid repositories specified for build: " + repos_str; + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + } + + std::string msg; + static const std::map> job_statuses = lubuntuci->cilogic.get_job_statuses(); + + for (auto pkgconf : pkgconfs) { + task_queue->enqueue( + job_statuses.at("source_build"), + [this, lubuntuci](std::shared_ptr log) { + std::shared_ptr pkgconf = log->get_task_context()->get_parent_packageconf(); + auto [build_ok, changes_files] = lubuntuci->cilogic.build_project(pkgconf, log); + if (build_ok) { + static const std::map> job_statuses2 = lubuntuci->cilogic.get_job_statuses(); + task_queue->enqueue( + job_statuses2.at("upload"), + [lubuntuci, changes_files](std::shared_ptr log2) mutable { + std::shared_ptr pkgconf2 = log2->get_task_context()->get_parent_packageconf(); + bool upload_ok = lubuntuci->cilogic.upload_and_lint(pkgconf2, changes_files, false, log2); + (void)upload_ok; + }, + pkgconf + ); + } + }, + pkgconf + ); + msg += "Build queued\n"; + } + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + }); + }); + + ////////////////////////////////////////// + // /pull-and-build-selected?repos=foo,bar,baz + ////////////////////////////////////////// + http_server_.route("/pull-and-build-selected", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = verify_session_token(req, req.headers()); + if (session_response.statusCode() == 
StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + auto query = req.query(); + std::string repos_str = query.queryItemValue("repos").toStdString(); + + return QtConcurrent::run([=, this]() { + if (repos_str.empty()) { + std::string msg = "
No repositories specified for build and pull.
"; + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + } + + std::set repos = std::ranges::to>( + split_string(repos_str, "%2C") + | std::views::filter([](const std::string& s) { + return !s.empty() && std::ranges::all_of(s, ::isdigit); + }) + | std::views::transform([](const std::string& s) { + return std::stoi(s); + }) + ); + std::vector> pkgconfs = lubuntuci->cilogic.get_packageconfs_by_ids(repos); + + if (repos.empty()) { + std::string msg = "
No valid repositories specified for build and pull.
"; + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + } + + std::string msg; + static const std::map> job_statuses = lubuntuci->cilogic.get_job_statuses(); + std::set>> encountered; + for (auto pkgconf : pkgconfs) { + bool is_ghost_pull = true; + std::shared_ptr first_pkgconf; + auto it = std::find_if(encountered.begin(), encountered.end(), + [pkgconf](const std::pair>& elem) { + return elem.first == pkgconf->package->name; + }); + if (it == encountered.end()) { + is_ghost_pull = false; + encountered.insert({pkgconf->package->name, pkgconf}); + } else { + first_pkgconf = it->second; + } + + task_queue->enqueue( + job_statuses.at("pull"), + [this, lubuntuci, first_pkgconf, is_ghost_pull](std::shared_ptr log) { + std::shared_ptr pkgconf = log->get_task_context()->get_parent_packageconf(); + bool pull_ok; + if (is_ghost_pull) { + pull_ok = true; + pkgconf->packaging_commit = first_pkgconf->packaging_commit; + pkgconf->upstream_commit = first_pkgconf->upstream_commit; + lubuntuci->cilogic.sync(pkgconf); + } else { + pull_ok = lubuntuci->cilogic.pull_project(pkgconf, log); + } + if (pull_ok) { + static const std::map> job_statuses2 = lubuntuci->cilogic.get_job_statuses(); + task_queue->enqueue( + job_statuses2.at("tarball"), + [this, lubuntuci, is_ghost_pull](std::shared_ptr log2) { + std::shared_ptr pkgconf2 = log2->get_task_context()->get_parent_packageconf(); + bool tarball_ok = is_ghost_pull ? 
true : lubuntuci->cilogic.create_project_tarball(pkgconf2, log2); + if (tarball_ok) { + static const std::map> job_statuses3 = lubuntuci->cilogic.get_job_statuses(); + task_queue->enqueue( + job_statuses3.at("source_build"), + [this, lubuntuci](std::shared_ptr log3) { + std::shared_ptr pkgconf3 = log3->get_task_context()->get_parent_packageconf(); + auto [build_ok, changes_files] = lubuntuci->cilogic.build_project(pkgconf3, log3); + if (build_ok) { + static const std::map> job_statuses4 = lubuntuci->cilogic.get_job_statuses(); + task_queue->enqueue( + job_statuses4.at("upload"), + [lubuntuci, changes_files](std::shared_ptr log4) mutable { + std::shared_ptr pkgconf4 = log4->get_task_context()->get_parent_packageconf(); + bool upload_ok = lubuntuci->cilogic.upload_and_lint(pkgconf4, changes_files, false, log4); + (void)upload_ok; + }, + pkgconf3 + ); + } + }, + pkgconf2 + ); + } + }, + pkgconf + ); + } + }, + pkgconf + ); + } + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + }); + }); + + ////////////////////////////////////////// + // /pull-all + ////////////////////////////////////////// + http_server_.route("/pull-all", [this, lubuntuci, all_repos](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = verify_session_token(req, req.headers()); + if (session_response.statusCode() == StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + return QtConcurrent::run([=, this]() { + std::string msg = lubuntuci->cilogic.queue_pull_tarball(all_repos, task_queue, job_statuses); + + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + }); + }); + + ////////////////////////////////////////// + // /build-all + ////////////////////////////////////////// + http_server_.route("/build-all", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = 
verify_session_token(req, req.headers()); + if (session_response.statusCode() == StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + return QtConcurrent::run([=, this]() { + auto repos = lubuntuci->list_known_repos(); + std::string msg; + static const std::map> job_statuses = lubuntuci->cilogic.get_job_statuses(); + + for (const auto& r : repos) { + task_queue->enqueue( + job_statuses.at("source_build"), + [this, lubuntuci](std::shared_ptr log) { + std::shared_ptr pkgconf = log->get_task_context()->get_parent_packageconf(); + auto [build_ok, changes_files] = lubuntuci->cilogic.build_project(pkgconf, log); + if (build_ok) { + static const std::map> job_statuses2 = lubuntuci->cilogic.get_job_statuses(); + task_queue->enqueue( + job_statuses2.at("upload"), + [lubuntuci, changes_files](std::shared_ptr log2) { + std::shared_ptr pkgconf2 = log2->get_task_context()->get_parent_packageconf(); + bool upload_ok = lubuntuci->cilogic.upload_and_lint(pkgconf2, changes_files, false, log2); + (void)upload_ok; + }, + pkgconf + ); + } + }, + r + ); + msg += "Build for " + r->package->name + "queued\n"; + } + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + }); + }); + + ////////////////////////////////////////// + // /pull-and-build-all + ////////////////////////////////////////// + http_server_.route("/pull-and-build-all", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = verify_session_token(req, req.headers()); + if (session_response.statusCode() == StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + return QtConcurrent::run([=, this]() { + auto repos = lubuntuci->list_known_repos(); + std::string msg; + static const std::map> job_statuses = lubuntuci->cilogic.get_job_statuses(); + + std::set>> encountered; + for (auto repo : 
repos) { + bool is_ghost_pull = true; + std::shared_ptr first_pkgconf; + auto it = std::find_if(encountered.begin(), encountered.end(), + [repo](const std::pair>& elem) { + return elem.first == repo->package->name; + } + ); + if (it == encountered.end()) { + is_ghost_pull = false; + encountered.insert({repo->package->name, repo}); + } else { + first_pkgconf = it->second; + } + + task_queue->enqueue( + job_statuses.at("pull"), + [this, repo, lubuntuci, first_pkgconf, is_ghost_pull](std::shared_ptr log) { + std::shared_ptr pkgconf = log->get_task_context()->get_parent_packageconf(); + bool pull_ok; + if (is_ghost_pull) { + pull_ok = true; + pkgconf->packaging_commit = first_pkgconf->packaging_commit; + pkgconf->upstream_commit = first_pkgconf->upstream_commit; + lubuntuci->cilogic.sync(pkgconf); + } else { + auto packaging_commit = pkgconf->packaging_commit; + auto upstream_commit = pkgconf->upstream_commit; + bool _pull_ok = lubuntuci->cilogic.pull_project(pkgconf, log); + if ((packaging_commit != pkgconf->packaging_commit) || + (upstream_commit != pkgconf->upstream_commit)) { + pull_ok = true; + } else { + pull_ok = false; + } + } + + if (pull_ok) { + static const std::map> job_statuses2 = lubuntuci->cilogic.get_job_statuses(); + task_queue->enqueue( + job_statuses2.at("tarball"), + [this, repo, lubuntuci, is_ghost_pull](std::shared_ptr log2) { + std::shared_ptr pkgconf2 = log2->get_task_context()->get_parent_packageconf(); + bool tarball_ok = is_ghost_pull ? 
true : lubuntuci->cilogic.create_project_tarball(pkgconf2, log2); + if (tarball_ok) { + static const std::map> job_statuses3 = lubuntuci->cilogic.get_job_statuses(); + task_queue->enqueue( + job_statuses3.at("source_build"), + [this, repo, lubuntuci](std::shared_ptr log3) { + std::shared_ptr pkgconf3 = log3->get_task_context()->get_parent_packageconf(); + auto [build_ok, changes_files] = lubuntuci->cilogic.build_project(pkgconf3, log3); + if (build_ok) { + static const std::map> job_statuses4 = lubuntuci->cilogic.get_job_statuses(); + task_queue->enqueue( + job_statuses4.at("upload"), + [lubuntuci, changes_files](std::shared_ptr log4) { + std::shared_ptr pkgconf4 = log4->get_task_context()->get_parent_packageconf(); + bool upload_ok = lubuntuci->cilogic.upload_and_lint(pkgconf4, changes_files, false, log4); + (void)upload_ok; + }, + pkgconf3 + ); + } + }, + pkgconf2 + ); + } + }, + pkgconf + ); + } + }, + repo + ); + } + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + }); + }); + + ////////////////////////////////////////// + // Serve static files from /static/ + ////////////////////////////////////////// + http_server_.route("/static/", [this, lubuntuci](const QString filename) -> QHttpServerResponse { + QString sanitized_filename = filename; + if (filename.contains("..") || filename.contains("../")) { + return QHttpServerResponse(QHttpServerResponder::StatusCode::BadRequest); + } else if (filename.startsWith('/')) { + sanitized_filename = sanitized_filename.remove(0, 1); + } + + QString staticDir = QDir::currentPath() + "/static"; + QDir dir(staticDir); + QString fullPath = dir.absoluteFilePath(sanitized_filename); + + QString relativeToStatic = QDir(staticDir).relativeFilePath(fullPath); + if (relativeToStatic.startsWith("../")) { + return QHttpServerResponse(QHttpServerResponder::StatusCode::Forbidden); + } + + QFile file(fullPath); + if (!file.exists() || !file.open(QIODevice::ReadOnly)) { + return 
QHttpServerResponse(QHttpServerResponder::StatusCode::NotFound); + } + QByteArray data = file.readAll(); + file.close(); + + if (filename.endsWith(".js", Qt::CaseInsensitive)) { + return QHttpServerResponse("application/javascript", data); + } else if (filename.endsWith(".css", Qt::CaseInsensitive)) { + return QHttpServerResponse("text/css", data); + } else if (filename.endsWith(".html", Qt::CaseInsensitive) + || filename.endsWith(".htm", Qt::CaseInsensitive)) { + return QHttpServerResponse("text/html", data); + } + return QHttpServerResponse("application/octet-stream", data); + }); + + ////////////////////////////////////////// + // /graph + ////////////////////////////////////////// + http_server_.route("/graph", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = verify_session_token(req, req.headers()); + if (session_response.statusCode() == StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + return QtConcurrent::run([=, this]() { + std::map scalar_context; + std::map>> list_context; + scalar_context["PAGE_TITLE"] = "Graph - Lubuntu CI"; + + const std::string sources_url = "https://ppa.launchpadcontent.net/lubuntu-ci/unstable-ci-proposed/ubuntu/dists/plucky/main/source/Sources.gz"; + const std::string packages_url = "https://ppa.launchpadcontent.net/lubuntu-ci/unstable-ci-proposed/ubuntu/dists/plucky/main/binary-amd64/Packages.gz"; + + std::cout << "Downloading and processing Sources.gz...\n"; + auto sourcesOpt = SourcesParser::fetch_and_parse_sources(sources_url); + if (!sourcesOpt) { + std::cerr << "Failed to fetch and parse Sources.gz.\n"; + } + auto sources = *sourcesOpt; + + std::cout << "Downloaded and parsed " << sources.size() << " source packages.\n"; + + std::cout << "Downloading and processing Packages.gz (amd64)...\n"; + auto packagesOpt = SourcesParser::fetch_and_parse_packages(packages_url); + if (!packagesOpt) { 
+ std::cerr << "Failed to fetch and parse Packages.gz.\n"; + } + + std::cout << "Downloaded and parsed " << packagesOpt->size() << " binary packages.\n"; + + auto dependency_graph = SourcesParser::build_dependency_graph(sources, *packagesOpt); + QString json_output = SourcesParser::serialize_dependency_graph_to_json(dependency_graph); + + scalar_context["GRAPH_JSON"] = json_output.toStdString(); + + std::string final_html = TemplateRenderer::render_with_inheritance( + "graph.html", + scalar_context, + list_context + ); + + return QHttpServerResponse("text/html", QByteArray(final_html.c_str(), (int)final_html.size())); + }); + }); + + ////////////////////////////////////////// + // /tasks + ////////////////////////////////////////// + http_server_.route("/tasks", [this, lubuntuci](const QHttpServerRequest &req) -> QFuture { + { + QHttpServerResponse session_response = verify_session_token(req, req.headers()); + if (session_response.statusCode() == StatusCodeFound) return QtConcurrent::run([response = std::move(session_response)]() mutable { return std::move(response); }); + } + // Gather query data + auto query = req.query(); + std::string type = query.queryItemValue("type").toStdString(); + int page = query.queryItemValue("page").isEmpty() ? 1 : query.queryItemValue("page").toInt(); + int per_page = query.queryItemValue("per_page").isEmpty() ? 
30 : query.queryItemValue("per_page").toInt(); + + // Return concurrency + return QtConcurrent::run([=, this]() { + auto now = std::chrono::duration_cast( + std::chrono::system_clock::now().time_since_epoch()) + .count(); + + if (!(type.empty() || type == "queued" || type == "complete")) { + std::string msg = "Invalid type specified."; + return QHttpServerResponse("text/html", QByteArray(msg.c_str(), (int)msg.size())); + } + + std::set, Task::TaskComparator> final_tasks; + std::string title_prefix; + + static const std::map> job_statuses = lubuntuci->cilogic.get_job_statuses(); + + if (type.empty()) { + // default to 'running' + title_prefix = "Running"; + final_tasks = task_queue->get_running_tasks(); + } else if (type == "queued") { + title_prefix = "Queued"; + final_tasks = task_queue->get_tasks(); + } else if (type == "complete") { + title_prefix = "Completed"; + // gather tasks that have start_time > 0 and finish_time > 0 + std::vector> tasks_vector; + auto pkgconfs = lubuntuci->cilogic.get_packageconfs(); + for (auto &pkgconf : pkgconfs) { + for (auto &j : job_statuses) { + if (!j.second) { + continue; + } + auto t = pkgconf->get_task_by_jobstatus(j.second); + if (t && t->start_time > 0 && t->finish_time > 0) { + tasks_vector.push_back(t); + } + } + } + std::set, Task::TaskComparator> tasks( + tasks_vector.begin(), + tasks_vector.end() + ); + final_tasks = tasks; + } + + std::map scalar_context = { + {"PAGE_TITLE", title_prefix + " Tasks"}, + {"PAGE_TYPE", (type.empty() ? 
"running" : type)} + }; + std::map>> list_context; + + std::vector> tasksVec; + for (auto task : final_tasks) { + std::map item; + item["id"] = std::to_string(task->id); + item["queued_timestamp"] = std::to_string(task->queue_time); + item["start_timestamp"] = std::to_string(task->start_time); + item["finish_timestamp"] = std::to_string(task->finish_time); + item["running_timedelta"] = std::to_string(now - task->start_time); + item["score"] = std::to_string(task->jobstatus->build_score); + item["package_name"] = task->get_parent_packageconf()->package->name; + item["package_codename"] = task->get_parent_packageconf()->release->codename; + item["job_status"] = task->jobstatus->display_name; + item["successful"] = task->successful ? "true" : "false"; + std::string replaced_log = std::regex_replace(task->log->get(), std::regex("\n"), "
"); + item["log"] = replaced_log; + tasksVec.push_back(item); + } + list_context["tasks"] = tasksVec; + + std::string final_html = TemplateRenderer::render_with_inheritance( + "tasks.html", + scalar_context, + list_context + ); + return QHttpServerResponse("text/html", QByteArray(final_html.c_str(), (int)final_html.size())); + }); + }); + + // Attempt to listen on `port` + if (!tcp_server_.listen(QHostAddress::Any, port) || !http_server_.bind(&tcp_server_)) { + std::cerr << timestamp_now() << " [ERROR] Could not bind to port " << port << std::endl; + return false; + } + + std::cout << timestamp_now() << " [INFO] Web server running on port " + << tcp_server_.serverPort() << std::endl; + return true; +} diff --git a/cpp/web_server.h b/cpp/web_server.h new file mode 100644 index 0000000..7eff9e2 --- /dev/null +++ b/cpp/web_server.h @@ -0,0 +1,54 @@ +// Copyright (C) 2024 Simon Quigley +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +#ifndef WEB_SERVER_H +#define WEB_SERVER_H + +#include "ci_database_objs.h" +#include "task_queue.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +class WebServer : public QObject { + Q_OBJECT +public: + explicit WebServer(QObject *parent = nullptr); + bool start_server(quint16 port); + +private: + [[nodiscard]] std::map parse_query_parameters(const QString &query); + [[nodiscard]] bool validate_token(const QString& token); + [[nodiscard]] QHttpServerResponse verify_session_token(const QHttpServerRequest &request, const QHttpHeaders &headers); + void load_tokens(QSqlDatabase& p_db); + + QHttpServer http_server_; + QTcpServer tcp_server_; + std::unique_ptr task_queue; + std::jthread expire_tokens_thread_; + std::jthread process_sources_thread_; + + QMap _in_progress_tokens; + QMap _active_tokens; + QMap _token_person; +}; + +#endif // WEB_SERVER_H diff --git a/static/main.js b/static/main.js new file mode 100644 index 0000000..e99f4a8 --- /dev/null +++ b/static/main.js @@ -0,0 +1,176 @@ +/** + * Get an array of all currently selected repositories (checkboxes). + */ +function getSelectedRepos() { + const checkboxes = document.querySelectorAll('input[name="repoSelect"]:checked'); + const repoNames = []; + checkboxes.forEach(cb => repoNames.push(cb.value)); + return repoNames; +} + +/** + * Show a quick status message on the console (or replace with a fancy UI element). + */ +function showStatus(msg) { + console.log('[STATUS]', msg); +} + +/** + * A tiny helper to handle server responses (text) and display them. + */ +function handleServerResponse(text) { + // For simplicity, we just log it. You can also insert it into the DOM if you want. + console.log('[SERVER RESPONSE]', text); + alert(text); +} + + +/////////////////////////////// +// Individual Action Handlers +/////////////////////////////// + +/** + * Pull a single repository by name, calling /pull?repo=. 
+ */ +function doPull(repoName, buttonElem) { + if (!repoName) { + alert('No repo specified!'); + return; + } + showStatus(`Pulling repo: ${repoName}...`); + fetch('/pull?repo=' + encodeURIComponent(repoName)) + .then(resp => resp.text()) + .then(txt => handleServerResponse(txt)) + .catch(err => console.error('[ERROR]', err)); +} + +/** + * Build a single repository by name, calling /build?repo=. + */ +function doBuild(repoName, buttonElem) { + if (!repoName) { + alert('No repo specified!'); + return; + } + showStatus(`Building repo: ${repoName}...`); + fetch('/build?repo=' + encodeURIComponent(repoName)) + .then(resp => resp.text()) + .then(txt => handleServerResponse(txt)) + .catch(err => console.error('[ERROR]', err)); +} + +/** + * View logs for a single repository by name, calling /logs?repo=. + * This example opens in a new tab. Alternatively, you could fetch and display in a modal. + */ +function doViewLog(repoName, buttonElem) { + if (!repoName) { + alert('No repo specified!'); + return; + } + const url = '/logs?repo=' + encodeURIComponent(repoName); + window.open(url, '_blank'); + // If you wanted to do a fetch instead: + // fetch(url).then(...) ... +} + +/** + * Pull ALL repositories at once, calling /pull-all. + */ +function doPullAll(buttonElem) { + showStatus('Pulling ALL repositories...'); + fetch('/pull-all') + .then(resp => resp.text()) + .then(txt => handleServerResponse(txt)) + .catch(err => console.error('[ERROR]', err)); +} + +/** + * Build ALL repositories at once, calling /build-all. + */ +function doBuildAll(buttonElem) { + showStatus('Building ALL repositories...'); + fetch('/build-all') + .then(resp => resp.text()) + .then(txt => handleServerResponse(txt)) + .catch(err => console.error('[ERROR]', err)); +} + +/** + * Pull AND build ALL repositories at once, calling /pull-and-build-all. 
+ */ +function doPullAndBuildAll(buttonElem) { + showStatus('Pulling & building ALL repositories...'); + fetch('/pull-and-build-all') + .then(resp => resp.text()) + .then(txt => handleServerResponse(txt)) + .catch(err => console.error('[ERROR]', err)); +} + +/** + * Pull the selected repositories, calling /pull-selected?repos=. + */ +function doPullSelected(buttonElem) { + const repos = getSelectedRepos(); + if (repos.length === 0) { + alert('No repositories selected!'); + return; + } + const query = '/pull-selected?repos=' + encodeURIComponent(repos.join(',')); + showStatus('Pulling selected repos: ' + repos.join(', ')); + fetch(query) + .then(resp => resp.text()) + .then(txt => handleServerResponse(txt)) + .catch(err => console.error('[ERROR]', err)); +} + +/** + * Build the selected repositories, calling /build-selected?repos=. + */ +function doBuildSelected(buttonElem) { + const repos = getSelectedRepos(); + if (repos.length === 0) { + alert('No repositories selected!'); + return; + } + const query = '/build-selected?repos=' + encodeURIComponent(repos.join(',')); + showStatus('Building selected repos: ' + repos.join(', ')); + fetch(query) + .then(resp => resp.text()) + .then(txt => handleServerResponse(txt)) + .catch(err => console.error('[ERROR]', err)); +} + +/** + * Pull AND build selected repositories, calling /pull-and-build-selected?repos=... 
+ */ +function doPullAndBuildSelected(buttonElem) { + const repos = getSelectedRepos(); + if (repos.length === 0) { + alert('No repositories selected!'); + return; + } + const query = '/pull-and-build-selected?repos=' + encodeURIComponent(repos.join(',')); + showStatus('Pulling & building selected repos: ' + repos.join(', ')); + fetch(query) + .then(resp => resp.text()) + .then(txt => handleServerResponse(txt)) + .catch(err => console.error('[ERROR]', err)); +} + + +/////////////////////////////// +// "Select All" checkbox logic +/////////////////////////////// +window.addEventListener('DOMContentLoaded', () => { + const selectAllCb = document.getElementById('selectAll'); + if (selectAllCb) { + selectAllCb.addEventListener('change', function () { + // Check or uncheck all "repoSelect" checkboxes + const allRepoCbs = document.querySelectorAll('input[name="repoSelect"]'); + allRepoCbs.forEach(cb => { + cb.checked = selectAllCb.checked; + }); + }); + } +}); diff --git a/templates/base.html b/templates/base.html new file mode 100644 index 0000000..1388427 --- /dev/null +++ b/templates/base.html @@ -0,0 +1,31 @@ + + + + + + {{ PAGE_TITLE }} + + + + + +
+ {{BLOCK content}} +
+ + + + diff --git a/templates/error.html b/templates/error.html new file mode 100644 index 0000000..d7be249 --- /dev/null +++ b/templates/error.html @@ -0,0 +1,10 @@ +{% extends "base.html" %} + +{% block content %} + +{% endblock %} diff --git a/templates/graph.html b/templates/graph.html new file mode 100644 index 0000000..29fe658 --- /dev/null +++ b/templates/graph.html @@ -0,0 +1,170 @@ +{% extends "base.html" %} +{% block content %} + + + +

{{ PAGE_TITLE }}

+ +
+ +
+ + + +
+ + +
+
+ + + +{% endblock %} diff --git a/templates/home.html b/templates/home.html new file mode 100644 index 0000000..c11e5d1 --- /dev/null +++ b/templates/home.html @@ -0,0 +1,155 @@ +{% extends "base.html" %} +{% block content %} +

{{PAGE_TITLE}}

+

Below is the list of repositories we can build & pull.

+
+ + + +
+
+
+
Page {{page}} of {{total_pages}}
+ +
+ + + + + + + + + + + + + + + {% for repo in repos %} + + + + + + + + + {% endfor %} + +
+ Branch{% if sort_by=='branch' and sort_order=='asc' %}{% elif sort_by=='branch' and sort_order=='desc' %}{% endif %}
+
+ Codename{% if sort_by=='codename' and sort_order=='asc' %}{% elif sort_by=='codename' and sort_order=='desc' %}{% endif %}
+
+ Repository{% if sort_by=='name' and sort_order=='asc' %}{% elif sort_by=='name' and sort_order=='desc' %}{% endif %}
+
+ Latest Packaging Commit{% if sort_by=='packaging_commit' and sort_order=='asc' %}{% elif sort_by=='packaging_commit' and sort_order=='desc' %}{% endif %}
+
+ Latest Upstream Commit{% if sort_by=='upstream_commit' and sort_order=='asc' %}{% elif sort_by=='upstream_commit' and sort_order=='desc' %}{% endif %}
+
+ Build Status{% if sort_by=='build_status' and sort_order=='asc' %}{% elif sort_by=='build_status' and sort_order=='desc' %}{% endif %}
+ Actions
{{repo.branch_name}}{{repo.codename}}{{repo.name}} + {% if repo.packaging_commit != "" %} + + {{repo.packaging_commit}} + + {% else %} + No commit found. + {% endif %} + + {% if repo.upstream_commit != "" %} + + {{repo.upstream_commit}} + + {% else %} + No commit found. + {% endif %} + + + + + {% if repo.pull_class != "" %} + + {% endif %} + {% if repo.tarball_class != "" %} + + + {% endif %} + {% if repo.source_build_class != "" %} + + + {% endif %} + {% if repo.upload_class != "" %} + + + {% endif %} + + + {% if repo.source_check_class != "" %} + + {% endif %} + {% if repo.build_check_class != "" %} + + + {% endif %} + {% if repo.lintian_class != "" %} + + + {% endif %} + {% if repo.britney_class != "" %} + + + {% endif %} + + +
+
+ Pull +
+
+
+ Tarball +
+
+
+ Source Build +
+
+
+ Upload +
+
+
+ Source Check +
+
+
+ Build Check +
+
+
+ Lintian +
+
+
+ Britney +
+
+
+ + + +
+
+ + + +
+{% endblock %} diff --git a/templates/tasks.html b/templates/tasks.html new file mode 100644 index 0000000..b9b8143 --- /dev/null +++ b/templates/tasks.html @@ -0,0 +1,142 @@ +{% extends "base.html" %} +{% block content %} +

{{PAGE_TITLE}}

+ +
+ + + + + + + + {% if PAGE_TYPE != 'queued' %} + + {% endif %} + + + + {% for task in tasks %} + + + + + + {% if PAGE_TYPE != 'queued' %} + + {% endif %} + + {% endfor %} + +
ScoreQueuedPackageStatusLog
{{ task.score }} + {% if PAGE_TYPE == 'running' %} + Started at
+ (Duration: ) + {% elif PAGE_TYPE == 'queued' %} + + {% else %} + {% if task.successful == 'true' %} + Task Succeeded
+ {% else %} + Task Failed
+ {% endif %} + Started at
+ Finished at
+ (Duration: ) + {% endif %} +
+ Name: {{ task.package_name }}
+ Release: {{ task.package_codename }} +
{{ task.job_status }} +
{{ task.log }}
+
+
+ + +{% endblock %}