parent
d63f3b8d79
commit
9ff2bd6fcd
@ -1,63 +1,141 @@
|
||||
cmake_minimum_required(VERSION 3.21)
|
||||
project(lubuntuci CXX)
|
||||
cmake_minimum_required(VERSION 3.16)
|
||||
project(lubuntu_ci_all CXX)
|
||||
|
||||
set(CMAKE_AUTOMOC ON)
|
||||
set(CMAKE_CXX_STANDARD 23)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
set(CMAKE_BUILD_TYPE Debug CACHE STRING "Build type" FORCE)
|
||||
|
||||
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
|
||||
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
|
||||
#
|
||||
# Allow the user to override LAUNCHPAD_CPP_INCLUDE_DIR/LAUNCHPAD_CPP_LIBRARY
|
||||
#
|
||||
if (NOT DEFINED LAUNCHPAD_CPP_INCLUDE_DIR)
|
||||
set(LAUNCHPAD_CPP_INCLUDE_DIR "/srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp")
|
||||
endif()
|
||||
if (NOT DEFINED LAUNCHPAD_CPP_LIBRARY)
|
||||
set(LAUNCHPAD_CPP_LIBRARY "/srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so")
|
||||
endif()
|
||||
|
||||
find_package(yaml-cpp REQUIRED)
|
||||
find_package(Qt6 REQUIRED COMPONENTS Core HttpServer Sql)
|
||||
find_package(PkgConfig REQUIRED)
|
||||
pkg_check_modules(LIBGIT2 REQUIRED IMPORTED_TARGET libgit2)
|
||||
find_package(CURL REQUIRED)
|
||||
find_library(UUID_LIB uuid)
|
||||
find_package(ZLIB REQUIRED)
|
||||
|
||||
find_package(yaml-cpp REQUIRED)
|
||||
pkg_check_modules(LIBARCHIVE REQUIRED libarchive)
|
||||
include_directories(${LIBARCHIVE_INCLUDE_DIRS})
|
||||
link_directories(${LIBARCHIVE_LIBRARY_DIRS})
|
||||
add_definitions(${LIBARCHIVE_CFLAGS_OTHER})
|
||||
pkg_check_modules(LIBGIT2 REQUIRED libgit2)
|
||||
find_package(ZLIB REQUIRED)
|
||||
find_package(CURL REQUIRED)
|
||||
set(UUID_LIB "uuid")
|
||||
|
||||
include_directories(/srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp)
|
||||
#
|
||||
# 1. The main library: lubuntuci_lib
|
||||
#
|
||||
add_library(lubuntuci_lib SHARED
|
||||
common.cpp
|
||||
utilities.cpp
|
||||
ci_logic.cpp
|
||||
ci_database_objs.cpp
|
||||
lubuntuci_lib.cpp
|
||||
task_queue.cpp
|
||||
template_renderer.cpp
|
||||
web_server.cpp
|
||||
sources_parser.cpp
|
||||
naive_bayes_classifier.cpp
|
||||
)
|
||||
|
||||
add_library(lubuntuci SHARED common.cpp utilities.cpp)
|
||||
target_include_directories(lubuntuci PUBLIC ${CMAKE_CURRENT_SOURCE_DIR} ${LIBARCHIVE_INCLUDE_DIRS})
|
||||
target_link_libraries(lubuntuci PUBLIC yaml-cpp::yaml-cpp PRIVATE CURL::libcurl ${LIBARCHIVE_LIBRARIES} /srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so)
|
||||
target_include_directories(lubuntuci_lib PUBLIC
|
||||
${CMAKE_CURRENT_SOURCE_DIR}
|
||||
"${LAUNCHPAD_CPP_INCLUDE_DIR}"
|
||||
)
|
||||
|
||||
add_library(update_maintainer_lib STATIC update-maintainer-lib.cpp)
|
||||
target_include_directories(update_maintainer_lib PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp)
|
||||
target_link_libraries(update_maintainer_lib PRIVATE lubuntuci yaml-cpp::yaml-cpp CURL::libcurl)
|
||||
target_link_libraries(lubuntuci_lib
|
||||
Qt6::Core
|
||||
Qt6::HttpServer
|
||||
Qt6::Sql
|
||||
yaml-cpp
|
||||
${LIBARCHIVE_LIBRARIES}
|
||||
${LIBGIT2_LIBRARIES}
|
||||
"${LAUNCHPAD_CPP_LIBRARY}"
|
||||
ZLIB::ZLIB
|
||||
CURL::libcurl
|
||||
${UUID_LIB}
|
||||
)
|
||||
|
||||
add_executable(update-maintainer update-maintainer.cpp)
|
||||
target_link_libraries(update-maintainer PRIVATE update_maintainer_lib)
|
||||
#
|
||||
# 2. The update-maintainer-lib library
|
||||
#
|
||||
add_library(update_maintainer_lib STATIC
|
||||
update-maintainer-lib.cpp
|
||||
)
|
||||
target_include_directories(update_maintainer_lib PRIVATE
|
||||
"${LAUNCHPAD_CPP_INCLUDE_DIR}"
|
||||
)
|
||||
target_link_libraries(update_maintainer_lib
|
||||
lubuntuci_lib
|
||||
yaml-cpp
|
||||
CURL::libcurl
|
||||
${LIBARCHIVE_LIBRARIES}
|
||||
${LIBGIT2_LIBRARIES}
|
||||
ZLIB::ZLIB
|
||||
"${LAUNCHPAD_CPP_LIBRARY}"
|
||||
${UUID_LIB}
|
||||
)
|
||||
|
||||
add_executable(build-packages build-packages.cpp)
|
||||
target_include_directories(build-packages PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp)
|
||||
target_link_libraries(build-packages PRIVATE lubuntuci PkgConfig::LIBGIT2 update_maintainer_lib yaml-cpp::yaml-cpp)
|
||||
#
|
||||
# 3. Build each executable
|
||||
#
|
||||
|
||||
add_executable(fetch-indexes fetch-indexes.cpp utilities.cpp)
|
||||
target_include_directories(fetch-indexes PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp)
|
||||
target_link_libraries(fetch-indexes PRIVATE lubuntuci CURL::libcurl yaml-cpp::yaml-cpp ZLIB::ZLIB /srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so)
|
||||
add_executable(update-maintainer update-maintainer.cpp)
|
||||
target_link_libraries(update-maintainer
|
||||
lubuntuci_lib
|
||||
update_maintainer_lib
|
||||
yaml-cpp
|
||||
${LIBARCHIVE_LIBRARIES}
|
||||
${LIBGIT2_LIBRARIES}
|
||||
ZLIB::ZLIB
|
||||
CURL::libcurl
|
||||
"${LAUNCHPAD_CPP_LIBRARY}"
|
||||
${UUID_LIB}
|
||||
)
|
||||
|
||||
add_executable(lintian-ppa lintian-ppa.cpp)
|
||||
target_include_directories(lintian-ppa PRIVATE /srv/lubuntu-ci/repos/ci-tools/include/launchpadlib-cpp)
|
||||
target_link_libraries(lintian-ppa PRIVATE lubuntuci ${UUID_LIB} /srv/lubuntu-ci/repos/ci-tools/lib/liblaunchpad.so)
|
||||
|
||||
set_target_properties(lubuntuci build-packages fetch-indexes update-maintainer lintian-ppa PROPERTIES
|
||||
BUILD_WITH_INSTALL_RPATH TRUE
|
||||
INSTALL_RPATH "$ORIGIN/lib"
|
||||
target_link_libraries(lintian-ppa
|
||||
lubuntuci_lib
|
||||
yaml-cpp
|
||||
${LIBARCHIVE_LIBRARIES}
|
||||
${LIBGIT2_LIBRARIES}
|
||||
"${LAUNCHPAD_CPP_LIBRARY}"
|
||||
ZLIB::ZLIB
|
||||
CURL::libcurl
|
||||
${UUID_LIB}
|
||||
)
|
||||
|
||||
install(TARGETS lubuntuci
|
||||
LIBRARY DESTINATION lib
|
||||
add_executable(fetch-indexes fetch-indexes.cpp)
|
||||
target_link_libraries(fetch-indexes
|
||||
lubuntuci_lib
|
||||
yaml-cpp
|
||||
${LIBARCHIVE_LIBRARIES}
|
||||
${LIBGIT2_LIBRARIES}
|
||||
"${LAUNCHPAD_CPP_LIBRARY}"
|
||||
ZLIB::ZLIB
|
||||
CURL::libcurl
|
||||
${UUID_LIB}
|
||||
)
|
||||
|
||||
install(TARGETS build-packages fetch-indexes update-maintainer lintian-ppa
|
||||
RUNTIME DESTINATION .
|
||||
add_executable(web_ui main.cpp)
|
||||
target_link_libraries(web_ui
|
||||
lubuntuci_lib
|
||||
yaml-cpp
|
||||
${LIBARCHIVE_LIBRARIES}
|
||||
${LIBGIT2_LIBRARIES}
|
||||
ZLIB::ZLIB
|
||||
CURL::libcurl
|
||||
"${LAUNCHPAD_CPP_LIBRARY}"
|
||||
${UUID_LIB}
|
||||
)
|
||||
|
||||
install(FILES common.h update-maintainer-lib.h utilities.h
|
||||
DESTINATION include/lubuntuci
|
||||
)
|
||||
#
|
||||
# Copy templates
|
||||
#
|
||||
file(COPY ../templates
|
||||
DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
|
||||
file(COPY ../static
|
||||
DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
|
||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,269 @@
|
||||
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
#ifndef CI_DATABASE_OBJS_H
|
||||
#define CI_DATABASE_OBJS_H
|
||||
|
||||
#include <chrono>
|
||||
#include <iostream>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <mutex>
|
||||
|
||||
#include <QDateTime>
|
||||
#include <QSqlDatabase>
|
||||
#include <yaml-cpp/yaml.h>
|
||||
|
||||
#include "common.h"
|
||||
|
||||
class Person {
|
||||
public:
|
||||
int id;
|
||||
std::string username;
|
||||
std::string logo_url;
|
||||
|
||||
Person(int id = 0, const std::string username = "", const std::string logo_url = "");
|
||||
};
|
||||
|
||||
class Release {
|
||||
public:
|
||||
int id;
|
||||
int version;
|
||||
std::string codename;
|
||||
bool isDefault;
|
||||
|
||||
Release(int id = 0, int version = 0, const std::string& codename = "", bool isDefault = false);
|
||||
std::vector<Release> get_releases(QSqlDatabase& p_db);
|
||||
Release get_release_by_id(QSqlDatabase& p_db, int id);
|
||||
bool set_releases(QSqlDatabase& p_db, YAML::Node& releases);
|
||||
};
|
||||
|
||||
class Package {
|
||||
public:
|
||||
int id;
|
||||
std::string name;
|
||||
bool large;
|
||||
std::string upstream_browser;
|
||||
std::string packaging_browser;
|
||||
std::string upstream_url;
|
||||
std::string packaging_branch;
|
||||
std::string packaging_url;
|
||||
|
||||
Package(int id = 0, const std::string& name = "", bool large = false, const std::string& upstream_url = "", const std::string& packaging_branch = "", const std::string& packaging_url = "");
|
||||
std::vector<Package> get_packages(QSqlDatabase& p_db);
|
||||
Package get_package_by_id(QSqlDatabase& p_db, int id);
|
||||
bool set_packages(QSqlDatabase& p_db, YAML::Node& packages);
|
||||
|
||||
private:
|
||||
std::string transform_url(const std::string& url);
|
||||
};
|
||||
|
||||
class Branch {
|
||||
public:
|
||||
int id;
|
||||
std::string name;
|
||||
std::string upload_target;
|
||||
std::string upload_target_ssh;
|
||||
|
||||
Branch(int id = 0, const std::string& name = "", const std::string& upload_target = "", const std::string& upload_target_ssh = "");
|
||||
std::vector<Branch> get_branches(QSqlDatabase& p_db);
|
||||
Branch get_branch_by_id(QSqlDatabase& p_db, int id);
|
||||
};
|
||||
|
||||
class GitCommit {
|
||||
public:
|
||||
int id = 0;
|
||||
std::string commit_hash;
|
||||
std::string commit_summary;
|
||||
std::string commit_message;
|
||||
std::chrono::zoned_time<std::chrono::seconds> commit_datetime;
|
||||
std::string commit_author;
|
||||
std::string commit_committer;
|
||||
|
||||
GitCommit(
|
||||
QSqlDatabase& p_db,
|
||||
const std::string& commit_hash = "",
|
||||
const std::string& commit_summary = "",
|
||||
const std::string& commit_message = "",
|
||||
const std::chrono::zoned_time<std::chrono::seconds>& commit_datetime = std::chrono::zoned_time<std::chrono::seconds>(),
|
||||
const std::string& commit_author = "",
|
||||
const std::string& commit_committer = ""
|
||||
);
|
||||
GitCommit(
|
||||
const int id = 0,
|
||||
const std::string& commit_hash = "",
|
||||
const std::string& commit_summary = "",
|
||||
const std::string& commit_message = "",
|
||||
const std::chrono::zoned_time<std::chrono::seconds>& commit_datetime = std::chrono::zoned_time<std::chrono::seconds>(),
|
||||
const std::string& commit_author = "",
|
||||
const std::string& commit_committer = ""
|
||||
);
|
||||
|
||||
GitCommit get_commit_by_id(QSqlDatabase& p_db, int id);
|
||||
std::optional<GitCommit> get_commit_by_hash(QSqlDatabase& p_db, const std::string commit_hash);
|
||||
|
||||
private:
|
||||
std::chrono::zoned_time<std::chrono::seconds> convert_timestr_to_zonedtime(const std::string& datetime_str);
|
||||
};
|
||||
|
||||
class JobStatus {
|
||||
public:
|
||||
int id;
|
||||
int build_score;
|
||||
std::string name;
|
||||
std::string display_name;
|
||||
|
||||
JobStatus(QSqlDatabase& p_db, int id);
|
||||
};
|
||||
|
||||
class PackageConf {
|
||||
public:
|
||||
int id = 0;
|
||||
std::shared_ptr<Package> package;
|
||||
std::shared_ptr<Release> release;
|
||||
std::shared_ptr<Branch> branch;
|
||||
std::shared_ptr<GitCommit> packaging_commit = std::make_shared<GitCommit>();
|
||||
std::shared_ptr<GitCommit> upstream_commit = std::make_shared<GitCommit>();
|
||||
std::string upstream_version;
|
||||
int ppa_revision = 1;
|
||||
|
||||
bool operator<(const PackageConf& other) const {
|
||||
if (package->id != other.package->id)
|
||||
return package->id < other.package->id;
|
||||
if (release->id != other.release->id)
|
||||
return release->id < other.release->id;
|
||||
if (branch->id != other.branch->id)
|
||||
return branch->id < other.branch->id;
|
||||
return id < other.id;
|
||||
}
|
||||
bool operator==(const PackageConf& other) const {
|
||||
// Intentionally leave out our ID
|
||||
return package->id == other.package->id &&
|
||||
release->id == other.release->id &&
|
||||
branch->id == other.branch->id;
|
||||
}
|
||||
|
||||
PackageConf(int id = 0, std::shared_ptr<Package> package = NULL, std::shared_ptr<Release> release = NULL, std::shared_ptr<Branch> branch = NULL,
|
||||
std::shared_ptr<GitCommit> packaging_commit = NULL, std::shared_ptr<GitCommit> upstream_commit = NULL);
|
||||
std::vector<std::shared_ptr<PackageConf>> get_package_confs(QSqlDatabase& p_db, std::map<std::string, std::shared_ptr<JobStatus>> jobstatus_map);
|
||||
std::vector<std::shared_ptr<PackageConf>> get_package_confs_by_package_name(QSqlDatabase& p_db,
|
||||
std::vector<std::shared_ptr<PackageConf>> packageconfs,
|
||||
const std::string& package_name);
|
||||
void assign_task(std::shared_ptr<JobStatus> jobstatus, std::shared_ptr<Task> task_ptr, std::weak_ptr<PackageConf> packageconf_ptr);
|
||||
int successful_task_count();
|
||||
int total_task_count();
|
||||
std::shared_ptr<Task> get_task_by_jobstatus(std::shared_ptr<JobStatus> jobstatus);
|
||||
bool set_package_confs(QSqlDatabase& p_db);
|
||||
bool set_commit_id(const std::string& _commit_id = "");
|
||||
bool set_commit_time(const std::chrono::zoned_time<std::chrono::seconds>& _commit_time = std::chrono::zoned_time<std::chrono::seconds>{});
|
||||
void sync(QSqlDatabase& p_db);
|
||||
bool can_check_source_upload();
|
||||
bool can_check_builds();
|
||||
|
||||
struct PackageConfPlain {
|
||||
int package_id;
|
||||
int release_id;
|
||||
int branch_id;
|
||||
bool operator<(const PackageConf::PackageConfPlain& other) const {
|
||||
if (package_id != other.package_id)
|
||||
return package_id < other.package_id;
|
||||
if (release_id != other.release_id)
|
||||
return release_id < other.release_id;
|
||||
return branch_id < other.branch_id;
|
||||
}
|
||||
|
||||
bool operator==(const PackageConf::PackageConfPlain& other) const {
|
||||
return package_id == other.package_id &&
|
||||
release_id == other.release_id &&
|
||||
branch_id == other.branch_id;
|
||||
}
|
||||
};
|
||||
|
||||
private:
|
||||
std::unordered_map<std::shared_ptr<JobStatus>, std::shared_ptr<Task>> jobstatus_task_map_;
|
||||
std::unique_ptr<std::mutex> task_mutex_ = std::make_unique<std::mutex>();
|
||||
};
|
||||
|
||||
class Task {
|
||||
public:
|
||||
int id;
|
||||
int build_score = 0;
|
||||
bool successful;
|
||||
std::int64_t queue_time = 0;
|
||||
std::int64_t start_time = 0;
|
||||
std::int64_t finish_time = 0;
|
||||
std::function<void(std::shared_ptr<Log> log)> func;
|
||||
std::shared_ptr<Log> log;
|
||||
std::shared_ptr<JobStatus> jobstatus;
|
||||
std::weak_ptr<PackageConf> parent_packageconf;
|
||||
bool is_running;
|
||||
|
||||
Task(QSqlDatabase& p_db, std::shared_ptr<JobStatus> jobstatus, std::int64_t time, std::shared_ptr<PackageConf> packageconf);
|
||||
Task();
|
||||
|
||||
std::set<std::shared_ptr<Task>> get_completed_tasks(QSqlDatabase& p_db, std::vector<std::shared_ptr<PackageConf>> packageconfs, std::map<std::string, std::shared_ptr<JobStatus>> job_statuses, int page, int per_page);
|
||||
void save(QSqlDatabase& p_db, int _packageconf_id = 0);
|
||||
|
||||
std::shared_ptr<PackageConf> get_parent_packageconf() const {
|
||||
return parent_packageconf.lock();
|
||||
}
|
||||
|
||||
struct TaskComparator {
|
||||
bool operator()(const std::shared_ptr<Task>& lhs, const std::shared_ptr<Task>& rhs) const {
|
||||
return Task::compare(lhs, rhs);
|
||||
}
|
||||
};
|
||||
|
||||
// Custom comparator for task ordering
|
||||
bool operator<(const Task& other) const {
|
||||
if (build_score != other.build_score) {
|
||||
return build_score < other.build_score;
|
||||
} else if (queue_time != other.queue_time) {
|
||||
return queue_time < other.queue_time;
|
||||
} else if (start_time != other.start_time) {
|
||||
return start_time < other.start_time;
|
||||
} else if (finish_time != other.finish_time) {
|
||||
return finish_time < other.finish_time;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool operator<(const std::shared_ptr<Task>& other) const {
|
||||
if (build_score != other->build_score) {
|
||||
return build_score < other->build_score;
|
||||
} else if (queue_time != other->queue_time) {
|
||||
return queue_time < other->queue_time;
|
||||
} else if (start_time != other->start_time) {
|
||||
return start_time < other->start_time;
|
||||
} else if (finish_time != other->finish_time) {
|
||||
return finish_time < other->finish_time;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool compare(const std::shared_ptr<Task>& lhs, const std::shared_ptr<Task>& rhs);
|
||||
};
|
||||
|
||||
inline size_t qHash(const PackageConf::PackageConfPlain& key, size_t seed = 0) {
|
||||
size_t res = 0;
|
||||
res ^= std::hash<int>()(key.package_id) + 0x9e3779b9 + (res << 6) + (res >> 2);
|
||||
res ^= std::hash<int>()(key.release_id) + 0x9e3779b9 + (res << 6) + (res >> 2);
|
||||
res ^= std::hash<int>()(key.branch_id) + 0x9e3779b9 + (res << 6) + (res >> 2);
|
||||
return res;
|
||||
}
|
||||
|
||||
#endif // CI_DATABASE_OBJS_H
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,133 @@
|
||||
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
// cpp/ci_logic.h
|
||||
// [License Header as in original]
|
||||
|
||||
#ifndef CI_LOGIC_H
|
||||
#define CI_LOGIC_H
|
||||
|
||||
#include "ci_database_objs.h"
|
||||
#include "task_queue.h"
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <optional>
|
||||
#include <filesystem>
|
||||
#include <mutex>
|
||||
#include <queue>
|
||||
#include <thread>
|
||||
#include <functional>
|
||||
#include <condition_variable>
|
||||
|
||||
#include <QSqlDatabase>
|
||||
#include <yaml-cpp/yaml.h>
|
||||
|
||||
struct CiProject;
|
||||
|
||||
/**
|
||||
* Data describing one package to pull/build/etc.
|
||||
*/
|
||||
struct CiProject {
|
||||
std::string name;
|
||||
std::string version;
|
||||
std::string time;
|
||||
std::string upload_target;
|
||||
std::string upstream_url;
|
||||
std::string packaging_url;
|
||||
std::optional<std::string> packaging_branch;
|
||||
std::filesystem::path main_tarball;
|
||||
bool large = false;
|
||||
|
||||
// These get populated during build
|
||||
std::vector<std::string> changes_files;
|
||||
std::vector<std::string> devel_changes_files;
|
||||
};
|
||||
|
||||
class CiLogic {
|
||||
public:
|
||||
// Initialize global configurations
|
||||
void init_global();
|
||||
|
||||
// Load YAML configuration from a given path
|
||||
YAML::Node load_yaml_config(const std::filesystem::path &config_path);
|
||||
|
||||
// Convert a YAML node to a CiProject structure
|
||||
CiProject yaml_to_project(const YAML::Node &pkg_node);
|
||||
|
||||
// Clone or fetch a git repository
|
||||
void clone_or_fetch(const std::filesystem::path &repo_dir, const std::string &repo_url, const std::optional<std::string> &branch, std::shared_ptr<Log> log = NULL);
|
||||
|
||||
bool pull_project(std::shared_ptr<PackageConf> &proj, std::shared_ptr<Log> log = NULL);
|
||||
bool create_project_tarball(std::shared_ptr<PackageConf> &proj, std::shared_ptr<Log> log = NULL);
|
||||
std::tuple<bool, std::set<std::string>> build_project(std::shared_ptr<PackageConf> proj, std::shared_ptr<Log> log = NULL);
|
||||
bool upload_and_lint(std::shared_ptr<PackageConf> &proj, const std::set<std::string> changes_files, bool skip_dput, std::shared_ptr<Log> log = NULL);
|
||||
|
||||
// Perform cleanup and summarize the build process
|
||||
void do_summary(bool skip_cleanup);
|
||||
|
||||
// Process the entire pipeline for a given PackageConf ID
|
||||
void process_entire_pipeline(std::shared_ptr<PackageConf> &proj, bool skip_dput, bool skip_cleanup);
|
||||
|
||||
// Retrieve all PackageConf entries from the database
|
||||
std::vector<std::shared_ptr<PackageConf>> get_config(const std::string &repo_name = "", int page = 0, int per_page = 0, const std::string& sort_by = "", const std::string& sort_order = "");
|
||||
|
||||
// Function to enqueue tasks
|
||||
void enqueue(std::function<void()> task);
|
||||
|
||||
// Fetch logs for a specific PackageConf ID
|
||||
std::string get_logs_for_repo_conf(int package_conf_id);
|
||||
|
||||
std::map<std::string, std::shared_ptr<JobStatus>> get_job_statuses();
|
||||
std::vector<std::shared_ptr<PackageConf>> get_packageconfs();
|
||||
std::shared_ptr<PackageConf> get_packageconf_by_id(int id);
|
||||
std::vector<std::shared_ptr<PackageConf>> get_packageconfs_by_ids(std::set<int> ids);
|
||||
void set_packageconfs(std::vector<std::shared_ptr<PackageConf>> _pkgconfs);
|
||||
void sync(std::shared_ptr<PackageConf> pkgconf);
|
||||
|
||||
QSqlDatabase get_thread_connection();
|
||||
|
||||
std::string queue_pull_tarball(std::vector<std::shared_ptr<PackageConf>> repos,
|
||||
std::unique_ptr<TaskQueue>& task_queue,
|
||||
const std::map<std::string, std::shared_ptr<JobStatus>> job_statuses);
|
||||
|
||||
std::vector<Release> releases;
|
||||
std::vector<Package> packages;
|
||||
std::vector<Branch> branches;
|
||||
|
||||
private:
|
||||
// Initialize the database
|
||||
bool init_database(const QString& connectionName = "LubuntuCIConnection",
|
||||
const QString& databasePath = "/srv/lubuntu-ci/repos/ci-tools/lubuntu_ci.db");
|
||||
|
||||
void debuild_package(const fs::path &packaging_dir, std::shared_ptr<Log> log);
|
||||
|
||||
QSqlDatabase p_db;
|
||||
|
||||
mutable std::mutex connection_mutex_;
|
||||
mutable std::mutex packageconfs_mutex_;
|
||||
std::vector<std::shared_ptr<PackageConf>> packageconfs;
|
||||
std::map<std::string, std::shared_ptr<JobStatus>> _cached_job_statuses;
|
||||
|
||||
struct package_conf_item {
|
||||
std::shared_ptr<PackageConf> first_pkgconf;
|
||||
std::shared_ptr<Task> first_pull_task = std::make_shared<Task>();
|
||||
std::shared_ptr<Task> first_tarball_task = std::make_shared<Task>();
|
||||
std::shared_ptr<GitCommit> packaging_commit = std::make_shared<GitCommit>();
|
||||
std::shared_ptr<GitCommit> upstream_commit = std::make_shared<GitCommit>();
|
||||
};
|
||||
};
|
||||
|
||||
#endif // CI_LOGIC_H
|
@ -0,0 +1,92 @@
|
||||
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
#include "lubuntuci_lib.h"
|
||||
#include "ci_logic.h"
|
||||
#include "common.h"
|
||||
#include <yaml-cpp/yaml.h>
|
||||
#include <filesystem>
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
#include <string>
|
||||
#include <mutex>
|
||||
#include <git2.h>
|
||||
|
||||
namespace fs = std::filesystem;
|
||||
|
||||
/**
|
||||
* list_known_repos():
|
||||
* Make sure we call CiLogic::init_global() before reading
|
||||
* the config, otherwise the config node will be empty.
|
||||
*/
|
||||
std::vector<std::shared_ptr<PackageConf>> LubuntuCI::list_known_repos(int page, int per_page, const std::string& sort_by, const std::string& sort_order)
|
||||
{
|
||||
cilogic.init_global();
|
||||
if (page == 0 || per_page == 0 || sort_by.empty() || sort_order.empty()) { return cilogic.get_config(); }
|
||||
return cilogic.get_config("", page, per_page, sort_by, sort_order);
|
||||
}
|
||||
|
||||
/**
|
||||
* pull_repo():
|
||||
* - We do not call init_global() here because list_known_repos()
|
||||
* or build_repo() might do it. But calling it again is safe.
|
||||
*/
|
||||
bool LubuntuCI::pull_repo(const std::string &repo_name, std::shared_ptr<Log> log)
|
||||
{
|
||||
log->append("Ensuring the global config is initialized...\n");
|
||||
cilogic.init_global();
|
||||
log->append("Global config is initialized. Getting the configs for the package name...\n");
|
||||
auto pkgconfs = cilogic.get_config(repo_name);
|
||||
log->append("Configs retrieved. Performing the pull...\n");
|
||||
return cilogic.pull_project(pkgconfs.at(0), log);
|
||||
}
|
||||
|
||||
/**
|
||||
* create_project_tarball
|
||||
*/
|
||||
bool LubuntuCI::create_project_tarball(const std::string &repo_name, std::shared_ptr<Log> log)
|
||||
{
|
||||
cilogic.init_global();
|
||||
log->append("Global config is initialized. Getting the configs for the package name...\n");
|
||||
auto pkgconfs = cilogic.get_config(repo_name);
|
||||
log->append("Configs retrieved. Performing the tarball creation...\n");
|
||||
return cilogic.create_project_tarball(pkgconfs.at(0), log);
|
||||
}
|
||||
|
||||
/**
|
||||
* build_repo():
|
||||
* - Also safely calls init_global().
|
||||
* - Reads skip_dput from config if present (default = false).
|
||||
*/
|
||||
bool LubuntuCI::build_repo(const std::string &repo_name, std::shared_ptr<Log> log)
|
||||
{
|
||||
cilogic.init_global();
|
||||
bool success = true;
|
||||
for (auto pkgconf : cilogic.get_config(repo_name)) {
|
||||
const auto [build_success, changes_files] = cilogic.build_project(pkgconf, log);
|
||||
success = success && build_success && cilogic.upload_and_lint(pkgconf, changes_files, false);
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
||||
/**
|
||||
* get_repo_log():
|
||||
* - Directly opens the repo in /srv/lubuntu-ci/repos/<repo_name>
|
||||
* - Reads HEAD commit message
|
||||
*/
|
||||
std::string LubuntuCI::get_repo_log(const std::string &repo_name)
|
||||
{
|
||||
// FIXME: unused
|
||||
}
|
@ -0,0 +1,53 @@
|
||||
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
#ifndef LUBUNTUCI_LIB_H
|
||||
#define LUBUNTUCI_LIB_H
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include "ci_logic.h"
|
||||
|
||||
class LubuntuCI {
|
||||
public:
|
||||
/**
|
||||
* List all known repositories from the merged config.
|
||||
*/
|
||||
std::vector<std::shared_ptr<PackageConf>> list_known_repos(int page = 0,
|
||||
int per_page = 0,
|
||||
const std::string& sort_by = "",
|
||||
const std::string& sort_order = "");
|
||||
|
||||
/**
|
||||
* Pull a specific repository by name (returns true on success).
|
||||
*/
|
||||
bool pull_repo(const std::string &repo_name, std::shared_ptr<Log> log = NULL);
|
||||
|
||||
bool create_project_tarball(const std::string &repo_name, std::shared_ptr<Log> log);
|
||||
|
||||
/**
|
||||
* Build a specific repository by name (returns true on success).
|
||||
*/
|
||||
bool build_repo(const std::string &repo_name, std::shared_ptr<Log> log = NULL);
|
||||
|
||||
/**
|
||||
* Retrieve the most recent commit log from a named repo.
|
||||
*/
|
||||
std::string get_repo_log(const std::string &repo_name);
|
||||
|
||||
CiLogic cilogic = CiLogic();
|
||||
};
|
||||
|
||||
#endif // LUBUNTUCI_LIB_H
|
@ -0,0 +1,314 @@
|
||||
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
#include "naive_bayes_classifier.h"
|
||||
|
||||
#include <curl/curl.h>
|
||||
#include <zlib.h>
|
||||
#include <algorithm>
|
||||
#include <cctype>
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
#include <numeric>
|
||||
#include <cmath>
|
||||
#include <cstring> // for std::memset
|
||||
|
||||
/******************************************************************************
|
||||
* Constructor / Destructor
|
||||
*****************************************************************************/
|
||||
naive_bayes_classifier::naive_bayes_classifier() = default;
|
||||
naive_bayes_classifier::~naive_bayes_classifier() = default;
|
||||
|
||||
/******************************************************************************
|
||||
* reset
|
||||
*****************************************************************************/
|
||||
void naive_bayes_classifier::reset() {
|
||||
word_freqs_.clear();
|
||||
category_freqs_.clear();
|
||||
vocabulary_.clear();
|
||||
token_categories_map_.clear();
|
||||
total_samples_ = 0.0;
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
* train_from_url
|
||||
*****************************************************************************/
|
||||
bool naive_bayes_classifier::train_from_url(const std::string &url, const std::string &category) {
|
||||
streaming_context ctx;
|
||||
ctx.classifier = this;
|
||||
ctx.is_prediction_mode = false;
|
||||
ctx.category = category;
|
||||
|
||||
bool ok = fetch_and_inflate_gz(url, &naive_bayes_classifier::train_write_cb, &ctx);
|
||||
if (!ok) {
|
||||
std::cerr << "Error: train_from_url failed for " << url << std::endl;
|
||||
return false;
|
||||
}
|
||||
category_freqs_[category]++;
|
||||
total_samples_++;
|
||||
return true;
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
* predict_from_url
|
||||
*****************************************************************************/
|
||||
std::optional<std::string> naive_bayes_classifier::predict_from_url(const std::string &url) const {
|
||||
streaming_context ctx;
|
||||
ctx.classifier = const_cast<naive_bayes_classifier*>(this);
|
||||
ctx.is_prediction_mode = true;
|
||||
|
||||
bool ok = fetch_and_inflate_gz(url, &naive_bayes_classifier::predict_write_cb, &ctx);
|
||||
if (!ok) {
|
||||
return std::nullopt;
|
||||
}
|
||||
std::string best_cat = compute_best_category(ctx.prediction_tokens);
|
||||
return best_cat;
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
* prune_common_tokens
|
||||
*****************************************************************************/
|
||||
void naive_bayes_classifier::prune_common_tokens() {
|
||||
if (category_freqs_.empty()) {
|
||||
return;
|
||||
}
|
||||
size_t category_count = category_freqs_.size();
|
||||
|
||||
std::vector<std::string> tokens_to_remove_vec;
|
||||
tokens_to_remove_vec.reserve(vocabulary_.size());
|
||||
|
||||
for (const auto &[token, cats_set] : token_categories_map_) {
|
||||
if (cats_set.size() == category_count) {
|
||||
tokens_to_remove_vec.push_back(token);
|
||||
}
|
||||
}
|
||||
|
||||
for (const auto &tk : tokens_to_remove_vec) {
|
||||
vocabulary_.erase(tk);
|
||||
for (auto &cat_map : word_freqs_) {
|
||||
cat_map.second.erase(tk);
|
||||
}
|
||||
token_categories_map_.erase(tk);
|
||||
}
|
||||
|
||||
std::cout << "Pruned " << tokens_to_remove_vec.size()
|
||||
<< " common tokens that appeared in all categories.\n";
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
* train_token
|
||||
*****************************************************************************/
|
||||
void naive_bayes_classifier::train_token(const std::string &category, const std::string &token) {
    // Ignore empty tokens; otherwise bump the per-category count and record
    // the token in the global vocabulary and the category index.
    if (!token.empty()) {
        ++word_freqs_[category][token];
        vocabulary_[token] = true;
        token_categories_map_[token].insert(category);
    }
}
|
||||
|
||||
/******************************************************************************
|
||||
* compute_best_category
|
||||
*****************************************************************************/
|
||||
std::string naive_bayes_classifier::compute_best_category(const token_counts_t &tokens) const {
|
||||
if (category_freqs_.empty() || total_samples_ <= 0.0) {
|
||||
return "Unknown";
|
||||
}
|
||||
|
||||
double best_score = -1e308;
|
||||
std::string best_cat = "Unknown";
|
||||
|
||||
for (const auto &[cat, cat_count] : category_freqs_) {
|
||||
double prior_log = std::log(cat_count / total_samples_);
|
||||
|
||||
double total_cat_words = 0.0;
|
||||
auto cat_iter = word_freqs_.find(cat);
|
||||
if (cat_iter != word_freqs_.end()) {
|
||||
total_cat_words = std::accumulate(
|
||||
cat_iter->second.begin(),
|
||||
cat_iter->second.end(),
|
||||
0.0,
|
||||
[](double sum, const auto &p){ return sum + p.second; }
|
||||
);
|
||||
}
|
||||
|
||||
double score = prior_log;
|
||||
for (const auto &[tk, freq] : tokens) {
|
||||
double word_count = 0.0;
|
||||
if (cat_iter != word_freqs_.end()) {
|
||||
auto w_it = cat_iter->second.find(tk);
|
||||
if (w_it != cat_iter->second.end()) {
|
||||
word_count = w_it->second;
|
||||
}
|
||||
}
|
||||
double smoothed = (word_count + 1.0) / (total_cat_words + vocabulary_.size());
|
||||
score += freq * std::log(smoothed);
|
||||
}
|
||||
|
||||
if (score > best_score) {
|
||||
best_score = score;
|
||||
best_cat = cat;
|
||||
}
|
||||
}
|
||||
|
||||
return best_cat;
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
* chunk_to_tokens
|
||||
*****************************************************************************/
|
||||
std::generator<std::string> naive_bayes_classifier::chunk_to_tokens(
    const std::string &chunk, std::string &partial_token)
{
    // Lazily emit lowercase alphabetic tokens from 'chunk'. A token that is
    // still open when the chunk ends is left in 'partial_token' so the next
    // chunk can continue it.
    for (char c : chunk) {
        const unsigned char uc = static_cast<unsigned char>(c);
        if (std::isalpha(uc)) {
            partial_token.push_back(static_cast<char>(std::tolower(uc)));
            continue;
        }
        // Non-alphabetic character closes the current token, if any.
        if (!partial_token.empty()) {
            co_yield partial_token;
            partial_token.clear();
        }
    }
    // Leftover partial_token is intentionally kept for the caller.
}
|
||||
|
||||
/******************************************************************************
|
||||
* train_write_cb
|
||||
*****************************************************************************/
|
||||
size_t naive_bayes_classifier::train_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata) {
|
||||
auto ctx = static_cast<streaming_context*>(userdata);
|
||||
if (!ctx || !ctx->classifier || ctx->is_prediction_mode) {
|
||||
return 0;
|
||||
}
|
||||
size_t bytes = size * nmemb;
|
||||
std::string chunk(ptr, bytes);
|
||||
|
||||
for (auto &&tk : chunk_to_tokens(chunk, ctx->partial_token)) {
|
||||
ctx->classifier->train_token(ctx->category, tk);
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
* predict_write_cb
|
||||
*****************************************************************************/
|
||||
size_t naive_bayes_classifier::predict_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata) {
|
||||
auto ctx = static_cast<streaming_context*>(userdata);
|
||||
if (!ctx || !ctx->classifier || !ctx->is_prediction_mode) {
|
||||
return 0;
|
||||
}
|
||||
size_t bytes = size * nmemb;
|
||||
std::string chunk(ptr, bytes);
|
||||
|
||||
for (auto &&tk : chunk_to_tokens(chunk, ctx->partial_token)) {
|
||||
ctx->prediction_tokens[tk]++;
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
* fetch_and_inflate_gz
|
||||
*****************************************************************************/
|
||||
struct inflating_context {
|
||||
naive_bayes_classifier::streaming_context *user_ctx;
|
||||
size_t (*callback)(char*, size_t, size_t, void*);
|
||||
z_stream strm;
|
||||
std::string decompress_buffer;
|
||||
|
||||
inflating_context() {
|
||||
std::memset(&strm, 0, sizeof(strm));
|
||||
strm.zalloc = Z_NULL;
|
||||
strm.zfree = Z_NULL;
|
||||
strm.opaque = Z_NULL;
|
||||
inflateInit2(&strm, 16 + MAX_WBITS);
|
||||
decompress_buffer.resize(64 * 1024);
|
||||
}
|
||||
|
||||
~inflating_context() {
|
||||
inflateEnd(&strm);
|
||||
}
|
||||
};
|
||||
|
||||
static size_t curl_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata) {
|
||||
auto *inf_ctx = static_cast<inflating_context*>(userdata);
|
||||
size_t total_in = size * nmemb;
|
||||
|
||||
inf_ctx->strm.avail_in = static_cast<uInt>(total_in);
|
||||
inf_ctx->strm.next_in = reinterpret_cast<unsigned char*>(ptr);
|
||||
|
||||
while (inf_ctx->strm.avail_in > 0) {
|
||||
inf_ctx->strm.avail_out = static_cast<uInt>(inf_ctx->decompress_buffer.size());
|
||||
inf_ctx->strm.next_out = reinterpret_cast<unsigned char*>(&inf_ctx->decompress_buffer[0]);
|
||||
|
||||
int ret = inflate(&inf_ctx->strm, Z_NO_FLUSH);
|
||||
if (ret == Z_STREAM_ERROR || ret == Z_MEM_ERROR || ret == Z_DATA_ERROR) {
|
||||
std::cerr << "zlib inflate error: " << inf_ctx->strm.msg << std::endl;
|
||||
return 0;
|
||||
}
|
||||
|
||||
size_t have = inf_ctx->decompress_buffer.size() - inf_ctx->strm.avail_out;
|
||||
if (have > 0) {
|
||||
size_t written = inf_ctx->callback(
|
||||
&inf_ctx->decompress_buffer[0],
|
||||
1,
|
||||
have,
|
||||
inf_ctx->user_ctx
|
||||
);
|
||||
if (written < have) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
return total_in;
|
||||
}
|
||||
|
||||
bool naive_bayes_classifier::fetch_and_inflate_gz(
|
||||
const std::string &url,
|
||||
size_t (*callback)(char*, size_t, size_t, void*),
|
||||
void *user_context)
|
||||
{
|
||||
CURL *curl = curl_easy_init();
|
||||
if (!curl) {
|
||||
std::cerr << "Error: curl_easy_init failed.\n";
|
||||
return false;
|
||||
}
|
||||
|
||||
inflating_context inf_ctx;
|
||||
inf_ctx.callback = callback;
|
||||
inf_ctx.user_ctx = static_cast<streaming_context*>(user_context);
|
||||
|
||||
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
|
||||
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, curl_write_cb);
|
||||
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &inf_ctx);
|
||||
|
||||
CURLcode res = curl_easy_perform(curl);
|
||||
if (res != CURLE_OK) {
|
||||
std::cerr << "cURL error fetching " << url << ": "
|
||||
<< curl_easy_strerror(res) << std::endl;
|
||||
curl_easy_cleanup(curl);
|
||||
return false;
|
||||
}
|
||||
curl_easy_cleanup(curl);
|
||||
|
||||
auto *ctx = static_cast<streaming_context*>(user_context);
|
||||
if (!ctx->partial_token.empty()) {
|
||||
if (!ctx->is_prediction_mode) {
|
||||
ctx->classifier->train_token(ctx->category, ctx->partial_token);
|
||||
} else {
|
||||
ctx->prediction_tokens[ctx->partial_token]++;
|
||||
}
|
||||
ctx->partial_token.clear();
|
||||
}
|
||||
return true;
|
||||
}
|
@ -0,0 +1,124 @@
|
||||
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
#ifndef NAIVE_BAYES_CLASSIFIER_H
|
||||
#define NAIVE_BAYES_CLASSIFIER_H
|
||||
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
#include <unordered_set>
|
||||
#include <optional>
|
||||
#include <generator> // C++23 std::generator
|
||||
#include <cmath>
|
||||
|
||||
/******************************************************************************
|
||||
* Type aliases
|
||||
*****************************************************************************/
|
||||
using token_counts_t = std::unordered_map<std::string, double>;
|
||||
using category_counts_t = std::unordered_map<std::string, double>;
|
||||
|
||||
/******************************************************************************
|
||||
* naive_bayes_classifier
|
||||
*
|
||||
* A streaming-only Naive Bayes text classifier. It fetches .gz logs via cURL,
|
||||
* decompresses them chunk by chunk, tokenizes, and trains or predicts
|
||||
* incrementally without storing entire logs in memory.
|
||||
*****************************************************************************/
|
||||
class naive_bayes_classifier {
public:
    naive_bayes_classifier();
    ~naive_bayes_classifier();

    /**************************************************************************
     * train_from_url
     *
     * Streams the .gz log from 'url', decompresses chunk by chunk, extracts
     * tokens, and updates frequency counts for 'category'. Returns false if
     * the download/decompression fails (in which case no sample is counted).
     **************************************************************************/
    bool train_from_url(const std::string &url, const std::string &category);

    /**************************************************************************
     * predict_from_url
     *
     * Streams the .gz log from 'url', decompresses, extracts tokens, and
     * returns the most likely category. Returns std::nullopt if there's an
     * error fetching or decompressing the log.
     **************************************************************************/
    std::optional<std::string> predict_from_url(const std::string &url) const;

    /**************************************************************************
     * prune_common_tokens
     *
     * Removes tokens that appear in *all* categories from the vocabulary_
     * and per-category frequencies, reducing noise from universal tokens.
     * No-op when no categories have been trained yet.
     **************************************************************************/
    void prune_common_tokens();

    /**************************************************************************
     * reset
     *
     * Clears all training data (word_freqs_, category_freqs_, vocabulary_,
     * token_categories_map_) and zeroes total_samples_.
     **************************************************************************/
    void reset();

    // Number of training samples (logs) observed so far.
    double total_samples() const { return total_samples_; }
    // Number of distinct tokens in the trained vocabulary.
    size_t vocabulary_size() const { return vocabulary_.size(); }

public:
    /**************************************************************************
     * streaming_context
     *
     * Declared *public* so that external structures (like inflating_context)
     * can refer to it. Carries per-transfer state: the target classifier,
     * whether we are training or predicting, the category being trained,
     * the token counts accumulated for prediction, and the token left open
     * at a chunk boundary.
     **************************************************************************/
    struct streaming_context {
        naive_bayes_classifier *classifier = nullptr;
        bool is_prediction_mode = false;
        std::string category;              // used only when training
        token_counts_t prediction_tokens;  // used only when predicting
        std::string partial_token;         // token spanning a chunk boundary
    };

private:
    /**************************************************************************
     * Data
     **************************************************************************/
    std::unordered_map<std::string, token_counts_t> word_freqs_; // cat->(word->freq)
    category_counts_t category_freqs_;                           // cat->count of logs
    std::unordered_map<std::string, bool> vocabulary_;           // global set of words
    double total_samples_ = 0.0;

    // For pruning, track which categories each token has appeared in
    std::unordered_map<std::string, std::unordered_set<std::string>> token_categories_map_;

    /**************************************************************************
     * Internal methods
     **************************************************************************/
    void train_token(const std::string &category, const std::string &token);
    std::string compute_best_category(const token_counts_t &tokens) const;

    // Lazily yields lowercase alphabetic tokens from 'chunk'; a token that is
    // still open at the end of the chunk is left in 'partial_token'.
    static std::generator<std::string> chunk_to_tokens(const std::string &chunk,
                                                       std::string &partial_token);

    // cURL write callbacks for training vs. predicting
    static size_t train_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata);
    static size_t predict_write_cb(char *ptr, size_t size, size_t nmemb, void *userdata);

    // cURL + zlib-based streaming of a .gz file through 'callback'
    static bool fetch_and_inflate_gz(const std::string &url,
                                     size_t (*callback)(char*, size_t, size_t, void*),
                                     void *user_context);
};
|
||||
|
||||
#endif // NAIVE_BAYES_CLASSIFIER_H
|
@ -0,0 +1,581 @@
|
||||
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
#include "sources_parser.h"
#include "utilities.h"

#include <archive.h>  // was "/usr/include/archive.h": use the header search path, not an absolute system path
#include <archive_entry.h>
#include <curl/curl.h>

#include <algorithm>
#include <cctype>
#include <cstring>    // strlen
#include <fstream>    // Added to resolve ofstream errors
#include <iostream>
#include <map>
#include <optional>
#include <ranges>
#include <regex>
#include <set>
#include <sstream>
#include <stdexcept>
#include <vector>

#include <QtCore/QJsonArray>
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonObject>
||||
|
||||
|
||||
|
||||
namespace SourcesParser {
|
||||
|
||||
// Function to write data fetched by libcurl into a std::vector<char>
|
||||
// libcurl write callback: append the bytes just received to the
// std::vector<char> passed through 'userp'. Returns the number of bytes
// consumed (anything less aborts the transfer).
size_t WriteCallback(void* contents, size_t size, size_t nmemb, void* userp) {
    const size_t totalSize = size * nmemb;
    auto* buffer = static_cast<std::vector<char>*>(userp);
    const char* begin = static_cast<char*>(contents);
    buffer->insert(buffer->end(), begin, begin + totalSize);
    return totalSize;
}
|
||||
|
||||
// Function to parse dependency relations
|
||||
// Parse a Debian-style relation field (e.g. Build-Depends) into a list of
// OR-groups: the outer vector is comma-separated (AND), each inner vector is
// pipe-separated alternatives (OR). Version constraints and architecture
// qualifiers are stripped before matching, so only package names survive.
std::vector<std::vector<PackageInfo::ParsedRelation>> parse_relations(const std::string& raw) {
    std::vector<std::vector<PackageInfo::ParsedRelation>> result;

    // Split by comma to get top-level (AND) dependencies.
    std::regex comma_sep_RE(R"(\s*,\s*)");
    std::sregex_token_iterator comma_it(raw.begin(), raw.end(), comma_sep_RE, -1);
    std::sregex_token_iterator comma_end;

    for (; comma_it != comma_end; ++comma_it) {
        std::string top_dep = comma_it->str();
        // Split by pipe to get alternative (OR) dependencies.
        std::regex pipe_sep_RE(R"(\s*\|\s*)");
        std::sregex_token_iterator pipe_it(top_dep.begin(), top_dep.end(), pipe_sep_RE, -1);
        std::sregex_token_iterator pipe_end;

        std::vector<PackageInfo::ParsedRelation> alternatives;

        for (; pipe_it != pipe_end; ++pipe_it) {
            std::string dep = pipe_it->str();
            // Truncate at the first space or '(' — this removes version
            // constraints like "(>= 2.27)" and anything after the name.
            size_t pos_space = dep.find(' ');
            size_t pos_paren = dep.find('(');
            size_t pos = std::string::npos;
            if (pos_space != std::string::npos && pos_paren != std::string::npos) {
                pos = std::min(pos_space, pos_paren);
            }
            else if (pos_space != std::string::npos) {
                pos = pos_space;
            }
            else if (pos_paren != std::string::npos) {
                pos = pos_paren;
            }

            if (pos != std::string::npos) {
                dep = dep.substr(0, pos);
            }

            // Trim surrounding whitespace.
            dep.erase(dep.find_last_not_of(" \t\n\r\f\v") + 1);
            dep.erase(0, dep.find_first_not_of(" \t\n\r\f\v"));

            // NOTE(review): the optional "(...)" group below can only match an
            // alphabetic word before the operator, and parentheses were already
            // stripped above — so in practice only the bare-name branch of this
            // regex fires; archqual/version are effectively never populated
            // here. Names containing ':' (e.g. "pkg:any") also fail this match
            // and are handled in the else branch. Confirm before relying on
            // pr.archqual / pr.version downstream.
            std::regex arch_RE(R"(^([a-zA-Z0-9+\-\.]+)(?:\s*\(\s*([a-zA-Z]+)\s*([<>=]+)\s*([0-9a-zA-Z:\-+~.]+)\s*\))?$)");
            std::smatch match;
            if (std::regex_match(dep, match, arch_RE)) {
                PackageInfo::ParsedRelation pr;
                pr.name = match[1];
                if (match[2].matched && match[3].matched && match[4].matched) {
                    // If architecture qualifier exists, store it
                    pr.archqual = match[2].str() + match[3].str() + match[4].str();
                }
                if (match[3].matched && match[4].matched) {
                    // Store version constraints
                    pr.version = std::make_pair(match[3].str(), match[4].str());
                }
                alternatives.push_back(pr);
            }
            else {
                // Regex did not match: strip multiarch suffixes and keep the
                // raw name so the dependency is not silently dropped.
                dep = remove_suffix(dep, ":any");
                dep = remove_suffix(dep, ":native");
                PackageInfo::ParsedRelation pr;
                pr.name = dep;
                alternatives.push_back(pr);
                std::cerr << "Warning: Cannot parse dependency relation \"" << dep << "\", returning it raw.\n";
            }
        }

        if (!alternatives.empty()) {
            result.push_back(alternatives);
        }
    }

    return result;
}
|
||||
|
||||
// Function to download, decompress, and parse the Sources.gz data
|
||||
// Download a Sources.gz file from 'url', decompress it fully in memory with
// libarchive, and parse it into PackageInfo records (Package, Binary,
// Build-Depends with parsed relations, Provides). Returns std::nullopt on any
// download or decompression error.
std::optional<std::vector<PackageInfo>> fetch_and_parse_sources(const std::string& url) {
    CURL* curl = curl_easy_init();
    if (!curl) {
        std::cerr << "Failed to initialize CURL.\n";
        return std::nullopt;
    }

    std::vector<char> downloadedData;

    curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &downloadedData);
    // Follow redirects if any
    curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
    // Set a user agent
    curl_easy_setopt(curl, CURLOPT_USERAGENT, "SourcesParser/1.0");

    CURLcode res = curl_easy_perform(curl);
    if (res != CURLE_OK) {
        std::cerr << "CURL download error (Sources.gz): " << curl_easy_strerror(res) << "\n";
        curl_easy_cleanup(curl);
        return std::nullopt;
    }

    curl_easy_cleanup(curl);

    // Decompress the whole download with libarchive (raw format + gzip filter).
    struct archive* a = archive_read_new();
    archive_read_support_filter_gzip(a);
    archive_read_support_format_raw(a);

    if (archive_read_open_memory(a, downloadedData.data(), downloadedData.size()) != ARCHIVE_OK) {
        std::cerr << "Failed to open Sources.gz archive: " << archive_error_string(a) << "\n";
        archive_read_free(a);
        return std::nullopt;
    }

    struct archive_entry* entry;
    std::string decompressedData;

    // Read all entries (though there should typically be only one)
    while (archive_read_next_header(a, &entry) == ARCHIVE_OK) {
        const void* buff;
        size_t size;
        la_int64_t offset;

        while (true) {
            int r = archive_read_data_block(a, &buff, &size, &offset);
            if (r == ARCHIVE_EOF)
                break;
            if (r != ARCHIVE_OK) {
                std::cerr << "Error during decompression (Sources.gz): " << archive_error_string(a) << "\n";
                archive_read_free(a);
                return std::nullopt;
            }
            decompressedData.append(static_cast<const char*>(buff), size);
        }
    }

    archive_read_free(a);

    // Parse the decompressed RFC-822-style stanzas: blank lines separate
    // package entries; continuation lines start with whitespace.
    std::vector<PackageInfo> packages;
    std::istringstream stream(decompressedData);
    std::string line;
    PackageInfo currentPackage;
    bool in_entry = false;

    while (std::getline(stream, line)) {
        if (line.empty()) {
            // Blank line ends the current stanza: finalize and store it.
            if (in_entry && !currentPackage.Package.empty()) {
                // Finalize BuildDependsParsed
                currentPackage.BuildDependsParsed = parse_relations(currentPackage.BuildDepends);
                packages.push_back(currentPackage);
                currentPackage = PackageInfo();
                in_entry = false;
            }
            continue;
        }

        in_entry = true;

        if (line.find("Build-Depends:") == 0) {
            // NOTE(review): substr(strlen("Build-Depends: ")) assumes a space
            // follows the colon; a bare "Build-Depends:" line would throw
            // std::out_of_range — confirm against real archive data.
            currentPackage.BuildDepends = line.substr(strlen("Build-Depends: "));
            // Continue reading folded lines that start with a space or tab.
            while (std::getline(stream, line)) {
                if (line.empty() || (!std::isspace(static_cast<unsigned char>(line[0]))))
                    break;
                currentPackage.BuildDepends += " " + line.substr(1);
            }
            // NOTE(review): this seekg "push back" of the non-continuation
            // line is fragile — it assumes exactly one '\n' terminator (no
            // '\r') and fails silently once getline has set eofbit, which can
            // drop fields of the final stanza. Confirm behavior on CRLF input.
            if (!line.empty() && !std::isspace(static_cast<unsigned char>(line[0]))) {
                stream.seekg(-static_cast<int>(line.length()) - 1, std::ios_base::cur);
            }
            continue;
        }

        if (line.find("Binary:") == 0) {
            std::string binary_str;
            binary_str = line.substr(strlen("Binary: "));
            // Continue reading folded lines that start with a space or tab.
            while (std::getline(stream, line)) {
                if (line.empty() || (!std::isspace(static_cast<unsigned char>(line[0]))))
                    break;
                binary_str += " " + line.substr(1);
            }
            // Push the non-continuation line back (same caveat as above).
            if (!line.empty() && !std::isspace(static_cast<unsigned char>(line[0]))) {
                stream.seekg(-static_cast<int>(line.length()) - 1, std::ios_base::cur);
            }
            currentPackage.Binary = split_string(binary_str, ", ");
            continue;
        }

        // Extract Package
        if (line.find("Package:") == 0) {
            currentPackage.Package = line.substr(strlen("Package: "));
            continue;
        }

        // Extract Provides (if any)
        if (line.find("Provides:") == 0) {
            std::string provides_line = line.substr(strlen("Provides: "));
            // Split by commas
            std::regex comma_sep_RE(R"(\s*,\s*)");
            std::sregex_token_iterator provides_it(provides_line.begin(), provides_line.end(), comma_sep_RE, -1);
            std::sregex_token_iterator provides_end;

            for (; provides_it != provides_end; ++provides_it) {
                std::string provide = provides_it->str();
                // Extract the package name before any space or '(' (strips
                // version annotations like "foo (= 1.2)").
                size_t pos_space = provide.find(' ');
                size_t pos_paren = provide.find('(');
                size_t pos = std::string::npos;
                if (pos_space != std::string::npos && pos_paren != std::string::npos) {
                    pos = std::min(pos_space, pos_paren);
                }
                else if (pos_space != std::string::npos) {
                    pos = pos_space;
                }
                else if (pos_paren != std::string::npos) {
                    pos = pos_paren;
                }

                if (pos != std::string::npos) {
                    provide = provide.substr(0, pos);
                }

                // Trim whitespace
                provide.erase(provide.find_last_not_of(" \t\n\r\f\v") + 1);
                provide.erase(0, provide.find_first_not_of(" \t\n\r\f\v"));

                if (!provide.empty()) {
                    currentPackage.Provides.push_back(provide);
                }
            }

            continue;
        }
    }

    // Add the last package if the file doesn't end with a blank line
    if (in_entry && !currentPackage.Package.empty()) {
        // Finalize BuildDependsParsed
        currentPackage.BuildDependsParsed = parse_relations(currentPackage.BuildDepends);
        packages.push_back(currentPackage);
    }

    return packages;
}
|
||||
|
||||
// Function to download, decompress, and parse the Packages.gz data
|
||||
// Download a Packages.gz file from 'url', decompress it fully in memory with
// libarchive, and parse it into PackageInfo records (Package, Source,
// Provides; other fields are ignored). Returns std::nullopt on any download
// or decompression error.
std::optional<std::vector<PackageInfo>> fetch_and_parse_packages(const std::string& url) {
    CURL* curl = curl_easy_init();
    if (!curl) {
        std::cerr << "Failed to initialize CURL.\n";
        return std::nullopt;
    }

    std::vector<char> downloadedData;

    curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &downloadedData);
    // Follow redirects if any
    curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
    // Set a user agent
    curl_easy_setopt(curl, CURLOPT_USERAGENT, "SourcesParser/1.0");

    CURLcode res = curl_easy_perform(curl);
    if (res != CURLE_OK) {
        std::cerr << "CURL download error (Packages.gz): " << curl_easy_strerror(res) << "\n";
        curl_easy_cleanup(curl);
        return std::nullopt;
    }

    curl_easy_cleanup(curl);

    // Decompress the whole download with libarchive (raw format + gzip filter).
    struct archive* a = archive_read_new();
    archive_read_support_filter_gzip(a);
    archive_read_support_format_raw(a);

    if (archive_read_open_memory(a, downloadedData.data(), downloadedData.size()) != ARCHIVE_OK) {
        std::cerr << "Failed to open Packages.gz archive: " << archive_error_string(a) << "\n";
        archive_read_free(a);
        return std::nullopt;
    }

    struct archive_entry* entry;
    std::string decompressedData;

    // Read all entries (though there should typically be only one)
    while (archive_read_next_header(a, &entry) == ARCHIVE_OK) {
        const void* buff;
        size_t size;
        la_int64_t offset;

        while (true) {
            int r = archive_read_data_block(a, &buff, &size, &offset);
            if (r == ARCHIVE_EOF)
                break;
            if (r != ARCHIVE_OK) {
                std::cerr << "Error during decompression (Packages.gz): " << archive_error_string(a) << "\n";
                archive_read_free(a);
                return std::nullopt;
            }
            decompressedData.append(static_cast<const char*>(buff), size);
        }
    }

    archive_read_free(a);

    // Parse the decompressed stanzas: blank lines separate package entries.
    std::vector<PackageInfo> packages;
    std::istringstream stream(decompressedData);
    std::string line;
    PackageInfo currentPackage;
    bool in_entry = false;

    while (std::getline(stream, line)) {
        if (line.empty()) {
            // Blank line ends the current stanza: store it if it has a name.
            if (in_entry && !currentPackage.Package.empty()) {
                packages.push_back(currentPackage);
                currentPackage = PackageInfo();
                in_entry = false;
            }
            continue;
        }

        in_entry = true;

        // Extract Package
        // NOTE(review): substr(strlen("Package: ")) assumes a space follows
        // the colon; a bare "Package:" line would throw std::out_of_range.
        if (line.find("Package:") == 0) {
            currentPackage.Package = line.substr(strlen("Package: "));
            continue;
        }

        // Extract Source
        if (line.find("Source:") == 0) {
            currentPackage.Source = line.substr(strlen("Source: "));
            continue;
        }

        // Extract Provides
        if (line.find("Provides:") == 0) {
            std::string provides_line = line.substr(strlen("Provides: "));
            // Split by commas
            std::regex comma_sep_RE(R"(\s*,\s*)");
            std::sregex_token_iterator provides_it(provides_line.begin(), provides_line.end(), comma_sep_RE, -1);
            std::sregex_token_iterator provides_end;

            for (; provides_it != provides_end; ++provides_it) {
                std::string provide = provides_it->str();
                // Extract the package name before any space or '(' (strips
                // version annotations like "foo (= 1.2)").
                size_t pos_space = provide.find(' ');
                size_t pos_paren = provide.find('(');
                size_t pos = std::string::npos;
                if (pos_space != std::string::npos && pos_paren != std::string::npos) {
                    pos = std::min(pos_space, pos_paren);
                }
                else if (pos_space != std::string::npos) {
                    pos = pos_space;
                }
                else if (pos_paren != std::string::npos) {
                    pos = pos_paren;
                }

                if (pos != std::string::npos) {
                    provide = provide.substr(0, pos);
                }

                // Trim whitespace
                provide.erase(provide.find_last_not_of(" \t\n\r\f\v") + 1);
                provide.erase(0, provide.find_first_not_of(" \t\n\r\f\v"));

                if (!provide.empty()) {
                    currentPackage.Provides.push_back(provide);
                }
            }

            continue;
        }

        // Any other fields are ignored for now
    }

    // Add the last package if the file doesn't end with a blank line
    if (in_entry && !currentPackage.Package.empty()) {
        packages.push_back(currentPackage);
    }

    return packages;
}
|
||||
|
||||
// Build a reversed build-dependency graph between *source* packages.
//
// Each edge (A, B) in the returned set means "source package A must be built
// before source package B": B build-depends, directly or via a virtual
// package, on a binary produced by A.  Self-edges are suppressed, and a
// single-step transitive reduction is applied afterwards (edges implied by a
// two-edge path are dropped; longer chains are not considered).
std::set<std::pair<std::string, std::string>> build_dependency_graph(
    const std::vector<PackageInfo>& sources,
    const std::vector<PackageInfo>& binaries) {

    // Map of virtual package to real binary package(s)
    std::map<std::string, std::vector<std::string>> virtual_to_real;
    // Set of all real binary package names
    std::set<std::string> real_binary_packages;
    // Map of binary package to its source package
    std::map<std::string, std::string> binary_to_source;

    // Populate binary_to_source mapping and virtual_to_real
    for (const auto& source_pkg : sources) {
        for (const auto& binary_pkg : source_pkg.Binary) {
            binary_to_source[binary_pkg] = source_pkg.Package;
            real_binary_packages.insert(binary_pkg);
        }
    }
    for (const auto& binary_pkg : binaries) {
        if (binary_pkg.Source.has_value()) {
            // An explicit Source: field on the binary record overrides the
            // mapping derived from the source records above.
            binary_to_source[binary_pkg.Package] = binary_pkg.Source.value();
        }
        real_binary_packages.insert(binary_pkg.Package);

        // Process Provides
        for (const auto& provide : binary_pkg.Provides) {
            virtual_to_real[provide].push_back(binary_pkg.Package);
        }
    }

    // Dependency graph as a set of edges (dependency -> package)
    std::set<std::pair<std::string, std::string>> graph;

    for (const auto& pkg : sources) {
        if (!pkg.BuildDependsParsed.has_value())
            continue; // Skip if no build dependencies

        for (const auto& or_deps : pkg.BuildDependsParsed.value()) {
            // For each set of alternative dependencies (logical OR).  Note
            // that every alternative contributes an edge, not just one.
            for (const auto& dep : or_deps) {
                std::string dep_name = dep.name;
                // If dep.archqual exists, append it with ':'
                if (dep.archqual.has_value())
                    dep_name += ":" + dep.archqual.value();

                // If dep_name is a virtual package, map it to real binary package(s)
                if (virtual_to_real.find(dep_name) != virtual_to_real.end()) {
                    for (const auto& real_pkg : virtual_to_real[dep_name]) {
                        // Map binary dependency to source package
                        if (binary_to_source.find(real_pkg) != binary_to_source.end()) {
                            std::string source_dep = binary_to_source[real_pkg];
                            // Avoid self-dependency
                            if (source_dep != pkg.Package) {
                                graph.emplace(source_dep, pkg.Package); // Reversed edge
                            }
                        }
                        else {
                            std::cerr << "Warning: Binary package \"" << real_pkg << "\" provided by \""
                                      << dep_name << "\" does not map to any source package.\n";
                        }
                    }
                }
                else if (real_binary_packages.find(dep_name) != real_binary_packages.end()) {
                    // Direct binary dependency
                    if (binary_to_source.find(dep_name) != binary_to_source.end()) {
                        std::string source_dep = binary_to_source[dep_name];
                        // Avoid self-dependency
                        if (source_dep != pkg.Package) {
                            graph.emplace(source_dep, pkg.Package); // Reversed edge
                        }
                    }
                    else {
                        std::cerr << "Warning: Binary dependency \"" << dep_name << "\" does not map to any source package.\n";
                    }
                }
            }
        }
    }

    // Transitive reduction: Collect edges to remove first
    std::vector<std::pair<std::string, std::string>> edges_to_remove;

    // Build adjacency list from the graph
    std::map<std::string, std::set<std::string>> adj;
    for (const auto& edge : graph) {
        adj[edge.first].insert(edge.second);
    }

    // If u -> v and v -> w both exist, the direct edge u -> w is redundant.
    // (adj.find(v) is checked before adj[v] so no new keys are inserted while
    // range-iterating over adj.)
    for (const auto& [u, neighbors] : adj) {
        for (const auto& v : neighbors) {
            if (adj.find(v) != adj.end()) {
                for (const auto& w : adj[v]) {
                    if (adj[u].find(w) != adj[u].end()) {
                        edges_to_remove.emplace_back(u, w);
                    }
                }
            }
        }
    }

    // Now remove the collected edges (duplicates in edges_to_remove are
    // harmless: set::erase of a missing key is a no-op)
    for (const auto& edge : edges_to_remove) {
        graph.erase(edge);
        adj[edge.first].erase(edge.second);
    }

    return graph;
}
|
||||
|
||||
// Serialize the dependency graph into a compact JSON object mapping each
// dependency to the array of packages that depend on it:
//   { "<dependency>": ["<dependent>", ...], ... }
// An empty graph yields "{}" and a warning on stderr.
QString serialize_dependency_graph_to_json(const std::set<std::pair<std::string, std::string>>& graph) {
    // Check if the graph is empty
    if (graph.empty()) {
        std::cerr << "Warning: Dependency graph is empty." << std::endl;
        return "{}"; // Return empty JSON object
    }

    // Group the dependents of every dependency; skip malformed (empty) edges.
    std::map<std::string, QJsonArray> grouped;
    for (const auto& [dependency, dependent] : graph) {
        if (dependency.empty() || dependent.empty()) {
            continue;
        }
        grouped[dependency].append(QString::fromStdString(dependent));
    }

    // Assemble the JSON object from the grouped arrays.
    QJsonObject root;
    for (const auto& [dependency, dependents] : grouped) {
        root[QString::fromStdString(dependency)] = dependents;
    }

    // Emit as a compact (single-line) JSON string.
    return QString(QJsonDocument(root).toJson(QJsonDocument::Compact));
}
|
||||
|
||||
} // namespace SourcesParser
|
@ -0,0 +1,79 @@
|
||||
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
#ifndef SOURCES_PARSER_H
|
||||
#define SOURCES_PARSER_H
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <optional>
|
||||
#include <cstring>
|
||||
#include <set>
|
||||
|
||||
#include <QtCore/QJsonObject>
|
||||
#include <QtCore/QJsonDocument>
|
||||
|
||||
// Structure to hold the required fields of one Debian Sources/Packages index
// entry.  The same struct is used for both source and binary records; fields
// that apply to only one kind are left empty/unset for the other.
struct PackageInfo {
    std::string Package; // Package name
    std::vector<std::string> Provides; // Virtual packages provided
    std::string BuildDepends; // Build dependencies (for source packages)
    std::optional<std::string> Source; // Source package name (for binary packages)
    std::vector<std::string> Binary; // Binaries built by this source package

    // Nested structures for parsing dependencies
    struct ArchRestriction {
        bool enabled;      // true => applies on 'arch', false => negated ("!arch")
        std::string arch;
    };

    struct BuildRestriction {
        bool enabled;      // true => applies when 'condition' holds
        std::string condition;
    };

    // One alternative within a build-dependency relation, e.g.
    // "foo:any (>= 1.2) [amd64] <!nocheck>".
    struct ParsedRelation {
        std::string name; // Dependency package name
        std::optional<std::string> archqual; // Architecture qualifier
        std::optional<std::pair<std::string, std::string>> version; // Version relation and version
        std::optional<std::vector<ArchRestriction>> arch; // Architecture restrictions
        std::optional<std::vector<std::vector<BuildRestriction>>> restrictions; // Build restrictions
    };

    // Parsed BuildDepends and Binary relations: outer vector = comma-separated
    // relations (AND), inner vector = '|' alternatives (OR).
    std::optional<std::vector<std::vector<ParsedRelation>>> BuildDependsParsed;
};

// Namespace to encapsulate the parser functionalities
namespace SourcesParser {
    // Function to download, decompress, and parse the Sources.gz data
    std::optional<std::vector<PackageInfo>> fetch_and_parse_sources(const std::string& url);

    // Function to download, decompress, and parse the Packages.gz data
    std::optional<std::vector<PackageInfo>> fetch_and_parse_packages(const std::string& url);

    // Function to parse dependency relations
    std::vector<std::vector<PackageInfo::ParsedRelation>> parse_relations(const std::string& raw);

    // Function to build dependency graph
    std::set<std::pair<std::string, std::string>> build_dependency_graph(
        const std::vector<PackageInfo>& sources,
        const std::vector<PackageInfo>& binaries);

    // Function to serialize dependency graph to JSON
    QString serialize_dependency_graph_to_json(const std::set<std::pair<std::string, std::string>>& graph);
} // namespace SourcesParser
|
||||
|
||||
#endif // SOURCES_PARSER_H
|
@ -0,0 +1,217 @@
|
||||
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
#include "task_queue.h"
|
||||
#include <iostream>
|
||||
#include <QSqlError>
|
||||
|
||||
// Construct an idle queue; no worker threads run until start() is called.
//
// max_concurrent_tasks: number of worker threads start() will spawn.
TaskQueue::TaskQueue(size_t max_concurrent_tasks)
    : max_concurrent_tasks_(max_concurrent_tasks), stop_(false),
      tasks_(),
      running_tasks_() {}

// Ensure workers are signalled and joined before the queue is destroyed.
TaskQueue::~TaskQueue() {
    stop();
}
|
||||
|
||||
// FIXME: copy of CiLogic::get_thread_connection()
// Counter used to give every thread a unique Qt connection name; starts at
// 1200 (presumably to avoid clashing with CiLogic's own counter — confirm).
std::atomic<unsigned int> TaskQueue::thread_id_counter{1200};

// Return a per-thread SQLite connection, creating and opening it on first
// use.  QSqlDatabase connections must not be shared between threads, hence
// the thread_local connection id.
//
// Throws std::runtime_error if the database cannot be opened.
// NOTE(review): connection_mutex_ is held for the whole call, including the
// open(), which serializes opens across threads — confirm this is intended.
QSqlDatabase TaskQueue::get_thread_connection() {
    std::lock_guard<std::mutex> lock(connection_mutex_);
    // thread_local: each thread draws an id once; the connection name (and
    // thus the connection) is then stable for that thread's lifetime.
    thread_local unsigned int thread_unique_id = thread_id_counter.fetch_add(1);
    QString connectionName = QString("LubuntuCIConnection_%1").arg(thread_unique_id);

    // Check if the connection already exists for this thread
    if (QSqlDatabase::contains(connectionName)) {
        QSqlDatabase db = QSqlDatabase::database(connectionName);
        if (!db.isOpen()) {
            // Reopen a connection that was closed since last use.
            if (!db.open()) {
                throw std::runtime_error("Failed to open thread-specific database connection: " + db.lastError().text().toStdString());
            }
        }
        return db;
    }

    // First use on this thread: register and open a new connection.
    QSqlDatabase threadDb = QSqlDatabase::addDatabase("QSQLITE", connectionName);
    threadDb.setDatabaseName("/srv/lubuntu-ci/repos/ci-tools/lubuntu_ci.db"); // hardcoded deployment path

    if (!threadDb.open()) {
        throw std::runtime_error("Failed to open new database connection for thread: " + threadDb.lastError().text().toStdString());
    }

    return threadDb;
}
|
||||
|
||||
// Queue a task for asynchronous execution and wake the worker threads.
//
// jobstatus   - the job status row the new task belongs to.
// task_func   - the actual work to run; receives the task's Log object.
// packageconf - parent package configuration; worker_thread() runs at most
//               one task per PackageConf at a time.
void TaskQueue::enqueue(std::shared_ptr<JobStatus> jobstatus,
                        std::function<void(std::shared_ptr<Log> log)> task_func,
                        std::shared_ptr<PackageConf> packageconf) {
    {
        auto connection = get_thread_connection();
        // Creation timestamp, milliseconds since the epoch.
        auto now = std::chrono::duration_cast<std::chrono::milliseconds>(
                       std::chrono::system_clock::now().time_since_epoch())
                       .count();

        // Create the task
        std::shared_ptr<Task> task_ptr = std::make_shared<Task>(connection, jobstatus, now, packageconf);
        // The wrapper closure is stored on the task itself, so the task is
        // captured via weak_ptr — a shared_ptr capture would form a
        // shared_ptr cycle (task -> func -> task) and leak the task.
        task_ptr->func = [task_func, self_weak = std::weak_ptr<Task>(task_ptr)](std::shared_ptr<Log> log) {
            std::shared_ptr<Task> task_locked = self_weak.lock();
            if (task_locked) {
                log->assign_task_context(task_locked);
                task_func(log);
            }
        };
        packageconf->assign_task(jobstatus, task_ptr, packageconf);

        std::unique_lock<std::mutex> lock(tasks_mutex_);
        tasks_.emplace(task_ptr);
    }
    cv_.notify_all(); // Notify worker threads
}
|
||||
|
||||
// Spawn max_concurrent_tasks_ worker threads and clear the stop flag.
//
// NOTE(review): calling start() twice without an intervening stop() appends
// a second batch of workers on top of the first — confirm callers invoke it
// only once per stop().
void TaskQueue::start() {
    stop_ = false;
    for (size_t i = 0; i < max_concurrent_tasks_; ++i) {
        workers_.emplace_back(&TaskQueue::worker_thread, this);
    }
}
|
||||
|
||||
void TaskQueue::stop() {
|
||||
{
|
||||
std::unique_lock<std::mutex> tasks_lock(tasks_mutex_);
|
||||
std::unique_lock<std::mutex> pkgconfs_lock(running_pkgconfs_mutex_);
|
||||
std::unique_lock<std::mutex> running_tasks_lock(running_tasks_mutex_);
|
||||
stop_ = true;
|
||||
}
|
||||
cv_.notify_all(); // Wake up all threads
|
||||
for (auto& worker : workers_) {
|
||||
if (worker.joinable()) {
|
||||
worker.join();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Snapshot (copy) of the queued-but-not-started tasks.
std::set<std::shared_ptr<Task>, Task::TaskComparator> TaskQueue::get_tasks() const {
    std::lock_guard<std::mutex> lock(tasks_mutex_);
    return tasks_;
}

// Snapshot (copy) of the tasks currently executing on worker threads.
std::set<std::shared_ptr<Task>, Task::TaskComparator> TaskQueue::get_running_tasks() const {
    std::lock_guard<std::mutex> lock(running_tasks_mutex_);
    return running_tasks_;
}
|
||||
|
||||
void TaskQueue::worker_thread() {
|
||||
int worker_id = max_worker_id++;
|
||||
while (true) {
|
||||
std::shared_ptr<Task> task_to_execute;
|
||||
{
|
||||
std::lock_guard<std::mutex> tasks_lock(tasks_mutex_);
|
||||
|
||||
if (stop_ && tasks_.empty()) {
|
||||
return; // Exit thread if stopping and no tasks left
|
||||
}
|
||||
|
||||
auto it = tasks_.begin();
|
||||
bool found_valid = false;
|
||||
// Iterate through the set until a valid task is found
|
||||
while (it != tasks_.end()) {
|
||||
std::lock_guard<std::mutex> lock(running_pkgconfs_mutex_);
|
||||
std::shared_ptr<Task> it_task = *it;
|
||||
task_to_execute = it_task;
|
||||
|
||||
int pkgconf_id = task_to_execute->get_parent_packageconf()->id;
|
||||
auto running_pkgconf_it = std::find_if(running_pkgconfs_.begin(), running_pkgconfs_.end(),
|
||||
[&pkgconf_id](const std::shared_ptr<PackageConf>& pkgconf) { return pkgconf->id == pkgconf_id; });
|
||||
|
||||
if (running_pkgconf_it != running_pkgconfs_.end()) {
|
||||
++it; // Move to the next task
|
||||
continue;
|
||||
}
|
||||
|
||||
// Task is valid to execute
|
||||
found_valid = true;
|
||||
it = tasks_.erase(it);
|
||||
break;
|
||||
}
|
||||
if (!found_valid) { continue; }
|
||||
}
|
||||
|
||||
if (!task_to_execute || !task_to_execute->func) {
|
||||
continue;
|
||||
} else {
|
||||
std::lock_guard<std::mutex> pkgconfslock(running_pkgconfs_mutex_);
|
||||
running_pkgconfs_.insert(task_to_execute->get_parent_packageconf());
|
||||
std::lock_guard<std::mutex> tasks_lock(running_tasks_mutex_);
|
||||
running_tasks_.insert(task_to_execute);
|
||||
}
|
||||
|
||||
// Set the start time
|
||||
{
|
||||
auto now = std::chrono::duration_cast<std::chrono::milliseconds>(
|
||||
std::chrono::system_clock::now().time_since_epoch())
|
||||
.count();
|
||||
task_to_execute->start_time = now;
|
||||
auto connection = get_thread_connection();
|
||||
task_to_execute->save(connection, 0);
|
||||
}
|
||||
|
||||
try {
|
||||
task_to_execute->func(task_to_execute->log); // Execute the task
|
||||
task_to_execute->successful = true;
|
||||
} catch (const std::exception& e) {
|
||||
task_to_execute->successful = false;
|
||||
std::ostringstream oss;
|
||||
oss << "Exception type: " << typeid(e).name() << "\n"
|
||||
<< "What: " << e.what();
|
||||
task_to_execute->log->append(oss.str());
|
||||
} catch (...) {
|
||||
task_to_execute->successful = false;
|
||||
task_to_execute->log->append("Unknown exception occurred");
|
||||
}
|
||||
|
||||
{
|
||||
auto now = std::chrono::duration_cast<std::chrono::milliseconds>(
|
||||
std::chrono::system_clock::now().time_since_epoch())
|
||||
.count();
|
||||
task_to_execute->finish_time = now;
|
||||
auto connection = get_thread_connection();
|
||||
task_to_execute->save(connection, 0);
|
||||
}
|
||||
|
||||
{
|
||||
// Remove the task from running_tasks_
|
||||
std::lock_guard<std::mutex> lock(running_tasks_mutex_);
|
||||
int id = task_to_execute->id;
|
||||
auto running_task_it = std::find_if(running_tasks_.begin(), running_tasks_.end(),
|
||||
[&id](const std::shared_ptr<Task>& task) { return task->id == id; });
|
||||
|
||||
if (running_task_it != running_tasks_.end()) {
|
||||
running_tasks_.erase(running_task_it);
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
// Remove packageconf from running_pkgconfs_ by id
|
||||
std::lock_guard<std::mutex> lock(running_pkgconfs_mutex_);
|
||||
int pkgconf_id = task_to_execute->get_parent_packageconf()->id;
|
||||
auto running_pkgconf_it = std::find_if(running_pkgconfs_.begin(), running_pkgconfs_.end(),
|
||||
[&pkgconf_id](const std::shared_ptr<PackageConf>& pkgconf) { return pkgconf->id == pkgconf_id; });
|
||||
|
||||
if (running_pkgconf_it != running_pkgconfs_.end()) {
|
||||
running_pkgconfs_.erase(running_pkgconf_it);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,64 @@
|
||||
// Copyright (C) 2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
#ifndef TASK_QUEUE_H
|
||||
#define TASK_QUEUE_H
|
||||
|
||||
#include "ci_database_objs.h"
|
||||
|
||||
#include <set>
|
||||
#include <vector>
|
||||
#include <thread>
|
||||
#include <mutex>
|
||||
#include <condition_variable>
|
||||
#include <functional>
|
||||
#include <string>
|
||||
#include <queue>
|
||||
|
||||
#include <QSqlDatabase>
|
||||
|
||||
// Thread-pool style task queue used by the CI to run per-package jobs.
//
// Tasks are ordered by Task::TaskComparator; task_queue.cpp ensures at most
// one task per PackageConf runs at any time.  Public methods are guarded by
// the per-structure mutexes below.
class TaskQueue {
public:
    // max_concurrent_tasks: number of worker threads start() spawns.
    TaskQueue(size_t max_concurrent_tasks = 10);
    ~TaskQueue();

    // Add a task to the queue and wake the workers.
    void enqueue(std::shared_ptr<JobStatus> jobstatus, std::function<void(std::shared_ptr<Log> log)> task_func, std::shared_ptr<PackageConf> packageconf);
    // Spawn the worker threads.
    void start();
    // Signal shutdown and join all workers (pending tasks drain first).
    void stop();

    // Snapshot accessors for the queued / currently running task sets.
    std::set<std::shared_ptr<Task>, Task::TaskComparator> get_tasks() const;
    std::set<std::shared_ptr<Task>, Task::TaskComparator> get_running_tasks() const;

private:
    size_t max_concurrent_tasks_;                                         // worker-thread count
    std::set<std::shared_ptr<Task>, Task::TaskComparator> tasks_;         // queued, not yet started (guarded by tasks_mutex_)
    std::set<std::shared_ptr<Task>, Task::TaskComparator> running_tasks_; // executing now (guarded by running_tasks_mutex_)
    std::set<std::shared_ptr<PackageConf>> running_pkgconfs_;             // PackageConfs with an active task (guarded by running_pkgconfs_mutex_)
    // NOTE(review): thread_pool_tasks_ is not referenced in task_queue.cpp —
    // confirm whether it can be removed.
    std::queue<std::function<void()>> thread_pool_tasks_;
    mutable std::mutex tasks_mutex_;
    mutable std::mutex running_pkgconfs_mutex_;
    mutable std::mutex running_tasks_mutex_;
    std::condition_variable cv_;   // signals new work / shutdown to workers
    bool stop_;                    // set by stop(); read by worker threads
    std::vector<std::thread> workers_;
    static std::atomic<unsigned int> thread_id_counter; // per-thread DB connection ids
    mutable std::mutex connection_mutex_;               // guards connection creation
    int max_worker_id = 1;                              // hands out worker ids in worker_thread()

    void worker_thread();
    QSqlDatabase get_thread_connection();
};
|
||||
|
||||
#endif // TASK_QUEUE_H
|
@ -0,0 +1,544 @@
|
||||
/*
|
||||
* A minimal Jinja2-like template engine in one file, supporting:
|
||||
* - {% extends "base.html" %}
|
||||
* - {% block content %} ... {% endblock %}
|
||||
* - {{ scalarVariable }}
|
||||
* - {% if expr %} ... {% elif expr %} ... {% else %} ... {% endif %}
|
||||
* - {% for item in list %} ... {% endfor %}
|
||||
* - Basic expression parsing with ==, !=, >, <, >=, <=
|
||||
* - Simple filter usage: {{ var|add:-1 }}
|
||||
*
|
||||
* Updated to support nested variable access using dot notation (e.g., repo.packaging_commit).
|
||||
*
|
||||
* Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
*/
|
||||
|
||||
#include "template_renderer.h"
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <regex>
|
||||
#include <fstream>
|
||||
#include <sstream>
|
||||
#include <iostream>
|
||||
#include <filesystem>
|
||||
#include <stdexcept>
|
||||
#include <cstdlib>
|
||||
#include <algorithm>
|
||||
#include <exception>
|
||||
#include <shared_mutex>
|
||||
#include <mutex>
|
||||
|
||||
namespace fs = std::filesystem;
|
||||
static std::mutex file_mutex;
|
||||
|
||||
// Resolve a template name to a filesystem path: absolute paths ('/'-rooted)
// are used verbatim, everything else is looked up under "templates/".
std::string TemplateRenderer::build_template_path(const std::string &tplName)
{
    const bool is_absolute = !tplName.empty() && tplName.front() == '/';
    return is_absolute ? tplName : "templates/" + tplName;
}
|
||||
|
||||
// Read an entire file into a string.
//
// All reads are serialized behind a single global mutex, and the path is
// resolved to an absolute path before opening.  Any failure (missing file,
// I/O error) is logged to stderr and reported as an empty string — callers
// such as render_jinja() treat "" as "template not found".
std::string TemplateRenderer::file_get_contents(const std::string &path)
{
    std::unique_lock lock(file_mutex);
    try {
        fs::path rel(path);
        fs::path abs = fs::absolute(rel);
        // Helper that throws instead of returning a bad stream.
        auto open_file = [](const fs::path& file_path) -> std::ifstream {
            std::ifstream file(file_path, std::ios::in);
            if (!file) {
                throw std::ios_base::failure("File could not be opened: " + file_path.string());
            }
            return file;
        };

        std::ifstream file = open_file(abs);

        // Slurp the whole stream in one shot via the stream buffer.
        std::ostringstream contents;
        contents << file.rdbuf();
        return contents.str();
    } catch (const std::exception& e) {
        std::cerr << "Unable to get file contents in template_renderer: " << e.what() << "\n";
        return "";
    } catch (...) {
        std::cerr << "Unable to get file contents in template_renderer (unknown exception.)\n";
        return "";
    }
}
||||
|
||||
// Apply a single template filter ("name" or "name:arg") to a value.
// Currently only "add:<int>" is implemented; an unknown filter, or a
// non-numeric value/argument for "add", passes the value through unchanged.
std::string TemplateRenderer::apply_filter(const std::string &value, const std::string &filterPart)
{
    const size_t sep = filterPart.find(':');
    const std::string name = (sep == std::string::npos)
        ? filterPart
        : filterPart.substr(0, sep);
    const std::string arg = (sep == std::string::npos)
        ? std::string{}
        : filterPart.substr(sep + 1);

    if (name == "add") {
        try {
            return std::to_string(std::stoi(value) + std::stoi(arg));
        } catch (...) {
            return value; // not numeric — pass through untouched
        }
    }
    // Additional filters can be added here.
    return value; // Unknown filter => pass through
}
|
||||
|
||||
// Resolve a "var|filter1|filter2:arg" expression (already stripped of the
// surrounding {{ }}): the first '|'-separated token names the variable
// (dotted composite keys included), the remaining tokens are filters applied
// left to right.
std::string TemplateRenderer::apply_all_filters(const std::string &valueWithFilters,
                                                const std::map<std::string,std::string> &ctx)
{
    // Split on '|'
    std::vector<std::string> parts;
    size_t start = 0;
    while (true) {
        size_t pos = valueWithFilters.find('|', start);
        if (pos == std::string::npos) {
            parts.push_back(valueWithFilters.substr(start));
            break;
        }
        parts.push_back(valueWithFilters.substr(start, pos - start));
        start = pos + 1;
    }
    if (parts.empty()) {
        return "";
    }
    std::string varExpression = parts[0];
    // Look the variable up in the scalar context.
    std::string value = get_variable_value(varExpression, ctx);

    // Apply filters if any
    for (size_t i = 1; i < parts.size(); i++) {
        value = apply_filter(value, parts[i]);
    }
    return value;
}
|
||||
|
||||
// Evaluate a template condition such as
//   "a == 'x' and count|add:-1 > 0 and flag"
// against the scalar context.  Sub-conditions are joined with 'and' only
// (no 'or', no parentheses); an operand is either a quoted literal or a
// variable expression run through apply_all_filters().  If both sides of a
// comparison look like integers they are compared numerically, otherwise
// lexicographically.  An operand without an operator is a truthiness test
// (non-empty string => true).
bool TemplateRenderer::evaluate_condition(const std::string &expr,
                                          const std::map<std::string,std::string> &ctx)
{
    // Define helper lambdas
    auto trim = [](const std::string &s) -> std::string {
        size_t start = 0;
        while (start < s.size() && isspace(static_cast<unsigned char>(s[start]))) start++;
        size_t end = s.size();
        while (end > start && isspace(static_cast<unsigned char>(s[end - 1]))) end--;
        return s.substr(start, end - start);
    };

    // True if the string is an optionally-negative base-10 integer.
    auto isInteger = [&](const std::string &s) -> bool {
        if (s.empty()) return false;
        size_t start = (s[0] == '-') ? 1 : 0;
        for (size_t i = start; i < s.size(); ++i) {
            if (!isdigit(static_cast<unsigned char>(s[i]))) return false;
        }
        return true;
    };

    // Strip one layer of matching single or double quotes, if present.
    auto unquoteIfNeeded = [&](const std::string &tok) -> std::string {
        auto t = trim(tok);
        if (t.size() >= 2 &&
            ((t.front() == '\'' && t.back() == '\'') ||
             (t.front() == '\"' && t.back() == '\"'))) {
            return t.substr(1, t.size() - 2);
        }
        return t;
    };

    // Quoted token => literal; anything else => variable lookup + filters.
    auto parse_token_value = [&](const std::string &rawToken) -> std::string {
        auto t = trim(rawToken);
        if (t.size() >= 2 && ((t.front() == '\'' && t.back() == '\'') ||
                              (t.front() == '\"' && t.back() == '\"'))) {
            // Literal string
            return unquoteIfNeeded(t);
        } else {
            // Apply filters
            return apply_all_filters(t, ctx);
        }
    };

    // Split the expression by 'and'
    std::vector<std::string> conditions;
    std::regex andRe("\\s+and\\s+");
    std::sregex_token_iterator it(expr.begin(), expr.end(), andRe, -1);
    std::sregex_token_iterator end;
    while (it != end) {
        conditions.push_back(trim(*it));
        ++it;
    }

    // Evaluate each sub-condition
    for (const auto &subExpr : conditions) {
        std::string e = trim(subExpr);
        if (e.empty()) continue;

        // Operators — two-character forms listed first so "<=" is preferred
        // over "<" at the same position.
        static std::vector<std::string> ops = {"==", "!=", "<=", ">=", ">", "<"};
        size_t opPos = std::string::npos;
        std::string opFound;
        // Pick the leftmost operator occurrence in the sub-expression.
        for (const auto &cand : ops) {
            size_t p = e.find(cand);
            if (p != std::string::npos) {
                if (opPos == std::string::npos || p < opPos) {
                    opPos = p;
                    opFound = cand;
                }
            }
        }

        if (opPos == std::string::npos) {
            // No operator => check truthiness of var
            std::string val = parse_token_value(e);
            if (val.empty()) return false;
            continue;
        }

        std::string left = trim(e.substr(0, opPos));
        std::string right = trim(e.substr(opPos + opFound.size()));

        // Directly handle dot notation by using the entire composite key
        std::string lv = parse_token_value(left);
        std::string rv = parse_token_value(right);

        bool li = isInteger(lv);
        bool ri = isInteger(rv);
        bool result = false;

        if (li && ri) {
            // Numeric comparison when both operands are integers.
            int lnum = std::stoi(lv);
            int rnum = std::stoi(rv);
            if (opFound == "==") result = (lnum == rnum);
            else if (opFound == "!=") result = (lnum != rnum);
            else if (opFound == ">") result = (lnum > rnum);
            else if (opFound == "<") result = (lnum < rnum);
            else if (opFound == ">=") result = (lnum >= rnum);
            else if (opFound == "<=") result = (lnum <= rnum);
        } else {
            // String compare
            if (opFound == "==") result = (lv == rv);
            else if (opFound == "!=") result = (lv != rv);
            else if (opFound == ">") result = (lv > rv);
            else if (opFound == "<") result = (lv < rv);
            else if (opFound == ">=") result = (lv >= rv);
            else if (opFound == "<=") result = (lv <= rv);
        }

        if (!result) return false; // Short-circuit for 'and'
    }

    return true; // All sub-conditions passed
}
|
||||
|
||||
// Expand every {% if %} / {% elif %} / {% else %} / {% endif %} region in
// the input against the scalar context.
//
// Nesting is handled innermost-first: on each pass all {% if %} positions
// are collected, the last one (the innermost open) is paired with the next
// {% endif %} after it, that region is evaluated and replaced, and the scan
// repeats until no {% if %} remains.
std::string TemplateRenderer::expand_conditionals(std::string input,
                                                  const std::map<std::string,std::string> &ctx)
{
    static std::regex ifOpenRe("\\{\\%\\s*if\\s+[^\\}]+\\%\\}");
    static std::regex ifCloseRe("\\{\\%\\s*endif\\s*\\%\\}");

    while (true) {
        // Gather all if-positions
        std::vector<size_t> ifPositions;
        {
            size_t searchStart = 0;
            while (true) {
                std::smatch mOpen;
                std::string sub = input.substr(searchStart);
                if (!std::regex_search(sub, mOpen, ifOpenRe)) {
                    break;
                }
                size_t posAbsolute = searchStart + mOpen.position(0);
                ifPositions.push_back(posAbsolute);
                searchStart = posAbsolute + mOpen.length(0);
            }
        }
        if (ifPositions.empty()) {
            break;
        }

        // The last one is the innermost
        size_t ifPos = ifPositions.back();

        {
            std::string sub2 = input.substr(ifPos);
            std::smatch mclose;
            if (!std::regex_search(sub2, mclose, ifCloseRe)) {
                // No matching endif — leave the remainder untouched.
                break;
            }

            size_t closePosRelative = mclose.position(0);
            size_t ifClosePos = ifPos + closePosRelative;
            size_t blockLen = (ifClosePos - ifPos) + mclose.length(0);

            // Entire block
            std::string blockText = input.substr(ifPos, blockLen);

            // Main regex to match the entire if-endif block
            static std::regex mainRe(
                "\\{\\%\\s*if\\s+([^\\}]+)\\s*\\%\\}([\\s\\S]*?)\\{\\%\\s*endif\\s*\\%\\}"
            );
            std::smatch blockMatch;
            if (!std::regex_match(blockText, blockMatch, mainRe)) {
                break;
            }

            std::string condition = blockMatch[1].str();
            std::string innerBlock = blockMatch[2].str();

            // Parse out any {% elif ... %} / {% else %}
            struct ConditionBlock {
                std::string cond; // Empty => else
                std::string content;
            };
            std::vector<ConditionBlock> blocks;
            blocks.emplace_back(ConditionBlock{ condition, "" });

            static std::regex elifElseRe("\\{\\%\\s*elif\\s+([^\\}]+)\\s*\\%\\}|\\{\\%\\s*else\\s*\\%\\}");
            size_t lastPos = 0;
            auto bBegin = std::sregex_iterator(innerBlock.begin(), innerBlock.end(), elifElseRe);
            auto bEnd = std::sregex_iterator();
            for (auto i = bBegin; i != bEnd; ++i) {
                auto m2 = *i;
                size_t pos2 = m2.position(0);
                // Text up to pos2 is the previous block's content
                blocks.back().content.append(innerBlock.substr(lastPos, pos2 - lastPos));
                if (m2[1].matched) {
                    // Elif
                    blocks.emplace_back(ConditionBlock{ m2[1].str(), "" });
                } else {
                    // Else
                    blocks.emplace_back(ConditionBlock{ "", "" });
                }
                lastPos = pos2 + m2.length(0);
            }
            // Leftover text after the last elif/else marker
            if (!blocks.empty()) {
                blocks.back().content.append(innerBlock.substr(lastPos));
            }

            // Evaluate the branches in order; first true condition wins, an
            // empty cond is the else branch.
            std::string finalText;
            bool used = false;
            for (auto &b : blocks) {
                if (b.cond.empty()) {
                    // Else
                    if (!used) {
                        finalText = b.content;
                    }
                    break;
                } else {
                    if (evaluate_condition(b.cond, ctx)) {
                        finalText = b.content;
                        used = true;
                        break;
                    }
                }
            }

            // Replace that block region with finalText
            input.replace(ifPos, blockLen, finalText);
        }
    }

    return input;
}
|
||||
|
||||
// Expand every {% for alias in list %} ... {% endfor %} block.
//
// For each item of the named list, the loop body is rendered with the item's
// keys exposed as "alias.key" in a copy of the scalar context; conditionals,
// nested loops, and variable substitution are all applied per item.  A loop
// over a list that is absent from listContext is removed entirely.
std::string TemplateRenderer::expand_loops(const std::string &input,
                                           const std::map<std::string,std::string> &scalarContext,
                                           const std::map<std::string,
                                                          std::vector<std::map<std::string,std::string>>> &listContext)
{
    std::string result = input;
    static std::regex loopRegex("\\{\\%\\s*for\\s+(\\S+)\\s+in\\s+(\\S+)\\s*\\%\\}([\\s\\S]*?)\\{\\%\\s*endfor\\s*\\%\\}");
    while (true) {
        std::smatch m;
        if (!std::regex_search(result, m, loopRegex)) {
            break;
        }
        std::string aliasName = m[1].str(); // e.g., 'repo'
        std::string arrayName = m[2].str(); // e.g., 'repos'
        std::string loopBody = m[3].str();
        auto it = listContext.find(arrayName);
        if (it == listContext.end()) {
            // No such array => remove the block
            result.replace(m.position(0), m.length(0), "");
            continue;
        }
        std::string expanded;
        for (const auto &oneItem : it->second) {
            // Create a per-item scalar context with prefixed keys
            std::map<std::string, std::string> perItemScalarContext = scalarContext;
            for (const auto &kv : oneItem) {
                perItemScalarContext[aliasName + "." + kv.first] = kv.second;
            }

            std::string chunk = loopBody;

            // Expand conditionals with per-item scalar context
            chunk = expand_conditionals(chunk, perItemScalarContext);

            // Expand nested loops if any with per-item scalar context
            chunk = expand_loops(chunk, perItemScalarContext, listContext);

            // Final scalar expansions with per-item scalar context
            chunk = replace_variables(chunk, perItemScalarContext);

            // Remove excess whitespace
            chunk = strip_excess_whitespace(chunk);

            expanded += chunk;
        }
        // Splice the rendered items in place of the loop block and rescan.
        result.replace(m.position(0), m.length(0), expanded);
    }
    return result;
}
|
||||
|
||||
// Substitute every {{ expression }} occurrence with its value from the
// context (filters included via apply_all_filters), then strip any remaining
// {% ... %} tags that earlier passes did not consume.
std::string TemplateRenderer::replace_variables(const std::string &input,
                                                const std::map<std::string,std::string> &context)
{
    static std::regex varRe("\\{\\{\\s*(.*?)\\s*\\}\\}");
    std::string output;
    output.reserve(input.size());
    size_t lastPos = 0;
    auto begin = std::sregex_iterator(input.begin(), input.end(), varRe);
    auto end = std::sregex_iterator();
    for (auto it = begin; it != end; ++it) {
        auto match = *it;
        // Copy the literal text between the previous match and this one.
        output.append(input, lastPos, match.position(0) - lastPos);
        std::string expr = match[1].str();

        // Directly apply all filters (which now handle composite keys)
        std::string value = apply_all_filters(expr, context);

        output.append(value);
        lastPos = match.position(0) + match.length(0);
    }
    output.append(input, lastPos);

    // Remove leftover {% ... %} if any
    static std::regex leftover("\\{\\%.*?\\%\\}");
    output = std::regex_replace(output, leftover, "");
    return output;
}
|
||||
|
||||
std::string TemplateRenderer::render_jinja(
|
||||
const std::string &tplPath,
|
||||
const std::map<std::string,std::string> &scalarContext,
|
||||
const std::map<std::string,
|
||||
std::vector<std::map<std::string,std::string>>> &listContext)
|
||||
{
|
||||
std::string tpl = file_get_contents(tplPath);
|
||||
if (tpl.empty()) {
|
||||
return "<html><body><p>Template not found: " + tplPath + "</p></body></html>";
|
||||
}
|
||||
std::string step0 = expand_conditionals(tpl, scalarContext);
|
||||
std::string step1 = expand_loops(step0, scalarContext, listContext);
|
||||
std::string result = replace_variables(step1, scalarContext);
|
||||
return result;
|
||||
}
|
||||
|
||||
std::string TemplateRenderer::render_with_inheritance(
|
||||
const std::string &childTplName,
|
||||
const std::map<std::string,std::string> &scalarContext,
|
||||
const std::map<std::string,
|
||||
std::vector<std::map<std::string,std::string>>> &listContext)
|
||||
{
|
||||
// Load child template
|
||||
std::string childText = file_get_contents(build_template_path(childTplName));
|
||||
if (childText.empty()) {
|
||||
return "<html><body><h1>Missing child template:</h1>"
|
||||
+ build_template_path(childTplName) + "</body></html>";
|
||||
}
|
||||
|
||||
// Check for {% extends "base.html" %}
|
||||
static std::regex extendsRe("\\{\\%\\s*extends\\s*\"([^\"]+)\"\\s*\\%\\}");
|
||||
std::smatch exm;
|
||||
if (!std::regex_search(childText, exm, extendsRe)) {
|
||||
// No extends => just do expansions
|
||||
std::string step0 = expand_conditionals(childText, scalarContext);
|
||||
std::string step1 = expand_loops(step0, scalarContext, listContext);
|
||||
std::string result = replace_variables(step1, scalarContext);
|
||||
return result;
|
||||
}
|
||||
|
||||
// If extends => load base
|
||||
std::string baseName = exm[1].str();
|
||||
std::string baseText = file_get_contents(build_template_path(baseName));
|
||||
if (baseText.empty()) {
|
||||
return "<html><body><h1>Missing base template:</h1>"
|
||||
+ baseName + "</body></html>";
|
||||
}
|
||||
|
||||
// Extract child block content
|
||||
static std::regex blockRe("\\{\\%\\s*block\\s+content\\s*\\%\\}([\\s\\S]*?)\\{\\%\\s*endblock\\s*\\%\\}");
|
||||
std::smatch blockMatch;
|
||||
std::string childBlock;
|
||||
if (std::regex_search(childText, blockMatch, blockRe)) {
|
||||
childBlock = blockMatch[1].str();
|
||||
}
|
||||
|
||||
// Process loops first, which handle their own conditionals with loop variables
|
||||
std::string expandedChildBlock = expand_loops(childBlock, scalarContext, listContext);
|
||||
// Then process any conditionals outside loops
|
||||
expandedChildBlock = expand_conditionals(expandedChildBlock, scalarContext);
|
||||
// Finally, replace variables in the child block
|
||||
expandedChildBlock = replace_variables(expandedChildBlock, scalarContext);
|
||||
|
||||
// Replace {{BLOCK content}} in base with expanded child block
|
||||
const std::string marker = "{{BLOCK content}}";
|
||||
size_t pos = baseText.find(marker);
|
||||
if (pos != std::string::npos) {
|
||||
baseText.replace(pos, marker.size(), expandedChildBlock);
|
||||
}
|
||||
|
||||
// Replace variables in the entire base template (to handle {{PAGE_TITLE}})
|
||||
baseText = replace_variables(baseText, scalarContext);
|
||||
|
||||
// Remove any remaining {% ... %} tags
|
||||
static std::regex leftover("\\{\\%.*?\\%\\}");
|
||||
baseText = std::regex_replace(baseText, leftover, "");
|
||||
|
||||
return baseText;
|
||||
}
|
||||
|
||||
std::string TemplateRenderer::strip_excess_whitespace(const std::string &str) {
    // Collapse every run of whitespace into a single space, then trim
    // leading/trailing spaces from the result.
    std::string collapsed;
    collapsed.reserve(str.size());

    bool lastWasSpace = false;
    for (const char c : str) {
        if (isspace(static_cast<unsigned char>(c))) {
            if (!lastWasSpace) {
                collapsed.push_back(' ');
                lastWasSpace = true;
            }
        } else {
            collapsed.push_back(c);
            lastWasSpace = false;
        }
    }

    // After collapsing, the only whitespace left is ' ', so trimming
    // reduces to locating the first and last non-space characters.
    const std::size_t first = collapsed.find_first_not_of(' ');
    if (first == std::string::npos) {
        return "";  // input was empty or all whitespace
    }
    const std::size_t last = collapsed.find_last_not_of(' ');
    return collapsed.substr(first, last - first + 1);
}
|
||||
|
||||
std::string TemplateRenderer::get_variable_value(const std::string &var,
                                                 const std::map<std::string, std::string> &ctx) {
    // Look the name up in the scalar context; unknown names render as "".
    const auto entry = ctx.find(var);
    return (entry != ctx.end()) ? entry->second : std::string{};
}
|
@ -0,0 +1,85 @@
|
||||
// Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

#ifndef TEMPLATE_RENDERER_H
#define TEMPLATE_RENDERER_H

#include <string>
#include <map>
#include <vector>
#include <filesystem>

/**
 * This class provides two styles of rendering:
 *
 * 1) render_jinja(...) -- A naive Jinja-like expansion for loops/variables.
 * 2) render_with_inheritance(...) -- A minimal approach to handle
 *    {% extends "base.html" %} and {% block content %} usage, plus
 *    {{VARIABLE}} expansions.
 *
 * The "base.html" template is expected to contain something like:
 *    <html>... {{BLOCK content}} ...</html>
 * And the child template might do:
 *    {% extends "base.html" %}
 *    {% block content %}Hello world{% endblock %}
 */
class TemplateRenderer {
public:
    // Render the template at tplPath using the given scalar (string) and
    // list (vector-of-map) contexts. Returns an error page on missing file.
    static std::string render_jinja(
        const std::string &tplPath,
        const std::map<std::string,std::string> &scalarContext,
        const std::map<std::string,
            std::vector<std::map<std::string,std::string>>> &listContext
    );

    // Render childTplName, honoring an optional {% extends "..." %} tag
    // by splicing its {% block content %} body into the base template.
    static std::string render_with_inheritance(
        const std::string &childTplName,
        const std::map<std::string,std::string> &scalarContext,
        const std::map<std::string,
            std::vector<std::map<std::string,std::string>>> &listContext
    );

private:
    // Resolve a bare template name to its on-disk path.
    static std::string build_template_path(const std::string &tplName);
    // Read a whole file into a string; empty string on failure.
    static std::string file_get_contents(const std::string &path);

    // Filters
    static std::string apply_filter(const std::string &value, const std::string &filterPart);
    static std::string apply_all_filters(const std::string &valueWithFilters,
                                         const std::map<std::string,std::string> &ctx);

    // Conditionals: expand {% if %}/{% else %}/{% endif %} blocks.
    static std::string expand_conditionals(std::string input,
                                           const std::map<std::string,std::string> &ctx);
    static bool evaluate_condition(const std::string &expr,
                                   const std::map<std::string,std::string> &ctx);

    // For loops: expand {% for x in xs %}...{% endfor %} blocks.
    static std::string expand_loops(const std::string &input,
                                    const std::map<std::string,std::string> &scalarContext,
                                    const std::map<std::string,
                                        std::vector<std::map<std::string,std::string>>> &listContext);

    // Final expansions: substitute {{ variable }} placeholders.
    static std::string replace_variables(const std::string &input,
                                         const std::map<std::string,std::string> &context);

    // Helper: strip extraneous whitespace from final expansions.
    static std::string strip_excess_whitespace(const std::string &str);

    // Plain context lookup; returns "" for unknown names.
    static std::string get_variable_value(const std::string &var, const std::map<std::string, std::string> &ctx);
};

#endif // TEMPLATE_RENDERER_H
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,54 @@
|
||||
// Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

#ifndef WEB_SERVER_H
#define WEB_SERVER_H

#include "ci_database_objs.h"
#include "task_queue.h"

#include <QDateTime>
#include <QObject>
#include <QHttpServer>
#include <QMap>
#include <QSqlDatabase>
#include <QString>
#include <QTcpServer>
// <memory> and <thread> were missing: this header declares
// std::unique_ptr and std::jthread members below.
#include <memory>
#include <string>
#include <thread>

// HTTP front end for the CI: owns the QHttpServer, the task queue,
// and session-token bookkeeping.
class WebServer : public QObject {
    Q_OBJECT
public:
    explicit WebServer(QObject *parent = nullptr);
    // Bind and start serving on the given port; false on failure.
    bool start_server(quint16 port);

private:
    // Split a raw query string into key/value pairs.
    [[nodiscard]] std::map<QString, QString> parse_query_parameters(const QString &query);
    // Check a session token against the active-token set.
    [[nodiscard]] bool validate_token(const QString& token);
    // Produce an auth-failure response when the request's token is invalid.
    [[nodiscard]] QHttpServerResponse verify_session_token(const QHttpServerRequest &request, const QHttpHeaders &headers);
    // Populate the token maps from the database.
    void load_tokens(QSqlDatabase& p_db);

    QHttpServer http_server_;
    QTcpServer tcp_server_;
    std::unique_ptr<TaskQueue> task_queue;
    // Background workers; std::jthread joins automatically on destruction.
    std::jthread expire_tokens_thread_;
    std::jthread process_sources_thread_;

    // Tokens currently mid-handshake, keyed by id, with issue time.
    QMap<int, QDateTime> _in_progress_tokens;
    // Fully-established tokens and their expiry bookkeeping timestamp.
    QMap<QString, QDateTime> _active_tokens;
    // Token-to-person mapping for authorization lookups.
    QMap<QString, Person> _token_person;
};

#endif // WEB_SERVER_H
|
@ -0,0 +1,176 @@
|
||||
/**
 * Return the values of all currently checked "repoSelect" checkboxes.
 */
function getSelectedRepos() {
    const checked = document.querySelectorAll('input[name="repoSelect"]:checked');
    return Array.from(checked, cb => cb.value);
}
|
||||
|
||||
/**
 * Log a quick status message (replace with a fancier UI element if desired).
 */
function showStatus(message) {
    console.log('[STATUS]', message);
}
|
||||
|
||||
/**
 * Display a server response: logged to the console and shown in an alert.
 */
function handleServerResponse(text) {
    // Logged for debugging; could also be injected into the DOM instead.
    console.log('[SERVER RESPONSE]', text);
    alert(text);
}
|
||||
|
||||
|
||||
///////////////////////////////
|
||||
// Individual Action Handlers
|
||||
///////////////////////////////
|
||||
|
||||
/**
 * Pull one repository via GET /pull?repo=<repoName>.
 */
function doPull(repoName, buttonElem) {
    // Guard: a repo name is required.
    if (!repoName) {
        alert('No repo specified!');
        return;
    }
    showStatus(`Pulling repo: ${repoName}...`);
    fetch(`/pull?repo=${encodeURIComponent(repoName)}`)
        .then(resp => resp.text())
        .then(handleServerResponse)
        .catch(err => console.error('[ERROR]', err));
}
|
||||
|
||||
/**
 * Build one repository via GET /build?repo=<repoName>.
 */
function doBuild(repoName, buttonElem) {
    // Guard: a repo name is required.
    if (!repoName) {
        alert('No repo specified!');
        return;
    }
    showStatus(`Building repo: ${repoName}...`);
    fetch(`/build?repo=${encodeURIComponent(repoName)}`)
        .then(resp => resp.text())
        .then(handleServerResponse)
        .catch(err => console.error('[ERROR]', err));
}
|
||||
|
||||
/**
 * Open the log view for one repository (GET /logs?repo=<repoName>)
 * in a new browser tab.
 */
function doViewLog(repoName, buttonElem) {
    // Guard: a repo name is required.
    if (!repoName) {
        alert('No repo specified!');
        return;
    }
    // Opens in a new tab; a fetch-and-display-in-modal approach
    // would also work here.
    window.open('/logs?repo=' + encodeURIComponent(repoName), '_blank');
}
|
||||
|
||||
/**
 * Pull every repository at once via GET /pull-all.
 */
function doPullAll(buttonElem) {
    showStatus('Pulling ALL repositories...');
    fetch('/pull-all')
        .then(resp => resp.text())
        .then(handleServerResponse)
        .catch(err => console.error('[ERROR]', err));
}
|
||||
|
||||
/**
 * Build every repository at once via GET /build-all.
 */
function doBuildAll(buttonElem) {
    showStatus('Building ALL repositories...');
    fetch('/build-all')
        .then(resp => resp.text())
        .then(handleServerResponse)
        .catch(err => console.error('[ERROR]', err));
}
|
||||
|
||||
/**
 * Pull AND build every repository at once via GET /pull-and-build-all.
 */
function doPullAndBuildAll(buttonElem) {
    showStatus('Pulling & building ALL repositories...');
    fetch('/pull-and-build-all')
        .then(resp => resp.text())
        .then(handleServerResponse)
        .catch(err => console.error('[ERROR]', err));
}
|
||||
|
||||
/**
 * Pull the checked repositories via
 * GET /pull-selected?repos=<comma-separated-list>.
 */
function doPullSelected(buttonElem) {
    const repos = getSelectedRepos();
    // Guard: nothing to do without a selection.
    if (repos.length === 0) {
        alert('No repositories selected!');
        return;
    }
    showStatus('Pulling selected repos: ' + repos.join(', '));
    fetch('/pull-selected?repos=' + encodeURIComponent(repos.join(',')))
        .then(resp => resp.text())
        .then(handleServerResponse)
        .catch(err => console.error('[ERROR]', err));
}
|
||||
|
||||
/**
 * Build the checked repositories via
 * GET /build-selected?repos=<comma-separated-list>.
 */
function doBuildSelected(buttonElem) {
    const repos = getSelectedRepos();
    // Guard: nothing to do without a selection.
    if (repos.length === 0) {
        alert('No repositories selected!');
        return;
    }
    showStatus('Building selected repos: ' + repos.join(', '));
    fetch('/build-selected?repos=' + encodeURIComponent(repos.join(',')))
        .then(resp => resp.text())
        .then(handleServerResponse)
        .catch(err => console.error('[ERROR]', err));
}
|
||||
|
||||
/**
 * Pull AND build the checked repositories via
 * GET /pull-and-build-selected?repos=<comma-separated-list>.
 */
function doPullAndBuildSelected(buttonElem) {
    const repos = getSelectedRepos();
    // Guard: nothing to do without a selection.
    if (repos.length === 0) {
        alert('No repositories selected!');
        return;
    }
    showStatus('Pulling & building selected repos: ' + repos.join(', '));
    fetch('/pull-and-build-selected?repos=' + encodeURIComponent(repos.join(',')))
        .then(resp => resp.text())
        .then(handleServerResponse)
        .catch(err => console.error('[ERROR]', err));
}
|
||||
|
||||
|
||||
///////////////////////////////
|
||||
// "Select All" checkbox logic
|
||||
///////////////////////////////
|
||||
window.addEventListener('DOMContentLoaded', () => {
    // Wire the "select all" checkbox so toggling it checks/unchecks
    // every per-repo checkbox.
    const master = document.getElementById('selectAll');
    if (!master) {
        return;
    }
    master.addEventListener('change', () => {
        document.querySelectorAll('input[name="repoSelect"]')
            .forEach(cb => { cb.checked = master.checked; });
    });
});
|
@ -0,0 +1,31 @@
|
||||
<!--
|
||||
Copyright (C) 2024-2025 Simon Quigley <tsimonq2@ubuntu.com>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
...
|
||||
-->
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>{{ PAGE_TITLE }}</title>
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css">
|
||||
<link rel="stylesheet"
|
||||
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.7.2/css/all.min.css">
|
||||
</head>
|
||||
<body class="bg-light">
|
||||
<nav class="navbar navbar-expand-lg navbar-light bg-white border-bottom mb-3">
|
||||
<div class="container-fluid">
|
||||
<a class="navbar-brand" href="/">Lubuntu CI</a>
|
||||
</div>
|
||||
</nav>
|
||||
<div class="mt-2 px-5">
|
||||
{{BLOCK content}}
|
||||
</div>
|
||||
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js"></script>
|
||||
<script src="/static/main.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,10 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block content %}
|
||||
<div class="alert alert-danger" role="alert">
|
||||
<h4 class="alert-heading">Error</h4>
|
||||
<p>{{ERROR_MESSAGE}}</p>
|
||||
<hr>
|
||||
<p class="mb-0">Please check your configuration or contact support.</p>
|
||||
</div>
|
||||
{% endblock %}
|
@ -0,0 +1,170 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/cytoscape/3.30.4/cytoscape.min.js"></script>
|
||||
<style>
|
||||
/* Reset default margins and paddings */
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
/* Set body and html to take full height */
|
||||
html, body {
|
||||
height: 100%;
|
||||
font-family: Arial, sans-serif;
|
||||
}
|
||||
|
||||
/* Container for the entire content */
|
||||
.container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
height: 100%;
|
||||
padding: 20px;
|
||||
background-color: #f9f9f9;
|
||||
}
|
||||
|
||||
/* Style for the graph container */
|
||||
#cy {
|
||||
flex: 1;
|
||||
width: 100%;
|
||||
max-width: 1200px;
|
||||
height: 600px;
|
||||
border: 1px solid #ccc;
|
||||
background-color: #fff;
|
||||
}
|
||||
|
||||
/* Style for control buttons */
|
||||
.controls {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.controls button {
|
||||
padding: 10px 20px;
|
||||
margin: 0 5px;
|
||||
font-size: 16px;
|
||||
cursor: pointer;
|
||||
border: none;
|
||||
background-color: #0074D9;
|
||||
color: #fff;
|
||||
border-radius: 4px;
|
||||
transition: background-color 0.3s;
|
||||
}
|
||||
|
||||
.controls button:hover {
|
||||
background-color: #005fa3;
|
||||
}
|
||||
</style>
|
||||
|
||||
<h1>{{ PAGE_TITLE }}</h1>
|
||||
|
||||
<div class="container">
|
||||
<!-- Control Buttons -->
|
||||
<div class="controls">
|
||||
<button id="zoom-in">Zoom In</button>
|
||||
<button id="zoom-out">Zoom Out</button>
|
||||
<button id="reset">Reset Zoom</button>
|
||||
</div>
|
||||
|
||||
<!-- Graph Container -->
|
||||
<div id="cy"></div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// Wait for the DOM to fully load
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
// Initialize Cytoscape with elements and style
|
||||
const data = {{ GRAPH_JSON }};
|
||||
const elements = [];
|
||||
|
||||
// Iterate through each key-value pair in the JSON object
|
||||
for (const [key, values] of Object.entries(data)) {
|
||||
// Add a node for the key
|
||||
elements.push({ data: { id: key, label: key } });
|
||||
|
||||
// Add nodes and edges for each value
|
||||
values.forEach(value => {
|
||||
elements.push({ data: { id: value, label: value } }); // Node for value
|
||||
elements.push({ data: { source: value, target: key } }); // Edge from value to key
|
||||
});
|
||||
}
|
||||
var cy = cytoscape({
|
||||
container: document.getElementById('cy'), // Container to render in
|
||||
elements: elements,
|
||||
style: [ // Styling for nodes and edges
|
||||
{
|
||||
selector: 'node',
|
||||
style: {
|
||||
'background-color': '#0074D9',
|
||||
'label': 'data(label)',
|
||||
'color': '#fff',
|
||||
'text-valign': 'center',
|
||||
'text-halign': 'center',
|
||||
'font-size': '10px',
|
||||
'width': '60px',
|
||||
'height': '60px'
|
||||
}
|
||||
},
|
||||
{
|
||||
selector: 'edge',
|
||||
style: {
|
||||
'width': 2,
|
||||
'line-color': '#ccc',
|
||||
'target-arrow-color': '#ccc',
|
||||
'target-arrow-shape': 'triangle',
|
||||
'curve-style': 'bezier'
|
||||
}
|
||||
},
|
||||
{
|
||||
selector: ':selected',
|
||||
style: {
|
||||
'background-color': '#FF4136',
|
||||
'line-color': '#FF4136',
|
||||
'target-arrow-color': '#FF4136',
|
||||
'source-arrow-color': '#FF4136'
|
||||
}
|
||||
}
|
||||
],
|
||||
|
||||
layout: {
|
||||
name: 'breadthfirst',
|
||||
directed: true,
|
||||
spacingFactor: 2.75,
|
||||
animate: true
|
||||
}
|
||||
});
|
||||
|
||||
// Fit the graph within the container
|
||||
cy.on('layoutready', function(){
|
||||
cy.fit(cy.elements(), 50);
|
||||
});
|
||||
|
||||
// Optional: Add interactivity
|
||||
cy.on('tap', 'node', function(evt){
|
||||
var node = evt.target;
|
||||
alert('Tapped node: ' + node.id());
|
||||
});
|
||||
|
||||
// Zoom and Pan Controls
|
||||
document.getElementById('zoom-in').addEventListener('click', function(){
|
||||
cy.zoom({
|
||||
level: cy.zoom() * 1.2, // Zoom in by 20%
|
||||
renderedPosition: { x: cy.width() / 2, y: cy.height() / 2 }
|
||||
});
|
||||
});
|
||||
|
||||
document.getElementById('zoom-out').addEventListener('click', function(){
|
||||
cy.zoom({
|
||||
level: cy.zoom() / 1.2, // Zoom out by ~16.7%
|
||||
renderedPosition: { x: cy.width() / 2, y: cy.height() / 2 }
|
||||
});
|
||||
});
|
||||
|
||||
document.getElementById('reset').addEventListener('click', function(){
|
||||
cy.fit(cy.elements(), 50); // Fit the graph to the container with padding
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
{% endblock %}
|
@ -0,0 +1,155 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<h1>{{PAGE_TITLE}}</h1>
|
||||
<p class="lead">Below is the list of repositories we can build & pull.</p>
|
||||
<div class="btn-group mb-3" role="group">
|
||||
<button class="btn btn-lg btn-success" onclick="doPullAll(this)">Pull All</button>
|
||||
<button class="btn btn-lg btn-secondary" onclick="doBuildAll(this)">Build All</button>
|
||||
<button class="btn btn-lg btn-dark" onclick="doPullAndBuildAll(this)">Pull & Build All</button>
|
||||
</div>
|
||||
<hr/>
|
||||
<div class="d-flex justify-content-between align-items-center mb-3">
|
||||
<div class="fw-bold">Page {{page}} of {{total_pages}}</div>
|
||||
<nav>
|
||||
<ul class="pagination pagination-sm mb-0">
|
||||
{% if page>1 %}<li class="page-item"><a class="page-link" href="?page={{page|add:-1}}&sort_by={{sort_by}}&sort_order={{sort_order}}" aria-label="Previous"><span aria-hidden="true">«</span></a></li>{% endif %}
|
||||
{% for p in pages %}<li class="page-item{% if p==page %}active{% endif %}"><a class="page-link" href="?page={{p}}&sort_by={{sort_by}}&sort_order={{sort_order}}">{{p}}</a></li>{% endfor %}
|
||||
{% if page<total_pages %}<li class="page-item"><a class="page-link" href="?page={{page|add:1}}&sort_by={{sort_by}}&sort_order={{sort_order}}" aria-label="Next"><span aria-hidden="true">»</span></a></li>{% endif %}
|
||||
</ul>
|
||||
</nav>
|
||||
</div>
|
||||
<table class="table table-striped">
|
||||
<thead>
|
||||
<tr>
|
||||
<th><input style="width: 1.5em; height: 1.5em;" type="checkbox" id="selectAll"></th>
|
||||
<th class="align-middle">
|
||||
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=branch_name&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}">Branch{% if sort_by=='branch_name' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='branch_name' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
|
||||
</th>
|
||||
<th class="align-middle">
|
||||
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=codename&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}">Codename{% if sort_by=='codename' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='codename' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
|
||||
</th>
|
||||
<th class="align-middle">
|
||||
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=name&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}">Repository{% if sort_by=='name' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='name' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
|
||||
</th>
|
||||
<th class="align-middle">
|
||||
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=packaging_commit&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}"><i class="fa-brands fa-git-alt"></i> Latest Packaging Commit{% if sort_by=='packaging_commit' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='packaging_commit' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
|
||||
</th>
|
||||
<th class="align-middle">
|
||||
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=upstream_commit&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}"><i class="fa-brands fa-git-alt"></i> Latest Upstream Commit{% if sort_by=='upstream_commit' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='upstream_commit' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
|
||||
</th>
|
||||
<th class="align-middle">
|
||||
<a class="text-decoration-none text-body" href="?page={{page}}&sort_by=build_status&sort_order={% if sort_order=='asc' %}desc{% else %}asc{% endif %}"><i class="fa-brands fa-git-alt"></i> Build Status{% if sort_by=='build_status' and sort_order=='asc' %}<i class="fa-solid fa-arrow-up"></i>{% elif sort_by=='build_status' and sort_order=='desc' %}<i class="fa-solid fa-arrow-down"></i>{% endif %}</a>
|
||||
</th>
|
||||
<th class="align-middle">Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for repo in repos %}<tr>
|
||||
<td class="align-middle"><input style="width: 1.25em; height: 1.25em;" type="checkbox" name="repoSelect" value="{{repo.id}}"></td>
|
||||
<td class="align-middle">{{repo.branch_name}}</td>
|
||||
<td class="align-middle">{{repo.codename}}</td>
|
||||
<td class="align-middle">{{repo.name}}</td>
|
||||
<td class="align-middle">
|
||||
{% if repo.packaging_commit != "" %}
|
||||
<a href="{{repo.packaging_commit_url}}">
|
||||
<i class="fa-solid fa-code-commit"></i> {{repo.packaging_commit}}
|
||||
</a>
|
||||
{% else %}
|
||||
No commit found.
|
||||
{% endif %}
|
||||
</td>
|
||||
<td class="align-middle">
|
||||
{% if repo.upstream_commit != "" %}
|
||||
<a href="{{repo.upstream_commit_url}}">
|
||||
<i class="fa-solid fa-code-commit"></i> {{repo.upstream_commit}}
|
||||
</a>
|
||||
{% else %}
|
||||
No commit found.
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>
|
||||
<table class="table text-center">
|
||||
<tbody>
|
||||
<tr>
|
||||
{% if repo.pull_class != "" %}
|
||||
<td>
|
||||
<div class="justify-content-center align-items-center align-middle {{repo.pull_class}} text-white">
|
||||
Pull
|
||||
</div>
|
||||
</td>
|
||||
{% endif %}
|
||||
{% if repo.tarball_class != "" %}
|
||||
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
|
||||
<td>
|
||||
<div class="justify-content-center align-items-center align-middle {{repo.tarball_class}} text-white">
|
||||
Tarball
|
||||
</div>
|
||||
</td>
|
||||
{% endif %}
|
||||
{% if repo.source_build_class != "" %}
|
||||
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
|
||||
<td>
|
||||
<div class="justify-content-center align-items-center align-middle {{repo.source_build_class}} text-white">
|
||||
Source Build
|
||||
</div>
|
||||
</td>
|
||||
{% endif %}
|
||||
{% if repo.upload_class != "" %}
|
||||
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
|
||||
<td>
|
||||
<div class="justify-content-center align-items-center align-middle {{repo.upload_class}} text-white">
|
||||
Upload
|
||||
</div>
|
||||
</td>
|
||||
{% endif %}
|
||||
</tr>
|
||||
<tr>
|
||||
{% if repo.source_check_class != "" %}
|
||||
<td>
|
||||
<div class="justify-content-center align-items-center align-middle {{repo.source_check_class}} text-white">
|
||||
Source Check
|
||||
</div>
|
||||
</td>
|
||||
{% endif %}
|
||||
{% if repo.build_check_class != "" %}
|
||||
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
|
||||
<td>
|
||||
<div class="justify-content-center align-items-center align-middle {{repo.build_check_class}} text-white">
|
||||
Build Check
|
||||
</div>
|
||||
</td>
|
||||
{% endif %}
|
||||
{% if repo.lintian_class != "" %}
|
||||
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
|
||||
<td>
|
||||
<div class="justify-content-center align-items-center align-middle {{repo.lintian_class}} text-white">
|
||||
Lintian
|
||||
</div>
|
||||
</td>
|
||||
{% endif %}
|
||||
{% if repo.britney_class != "" %}
|
||||
<td><i class="fa-solid fa-right-long" style="font-size: 1.5rem;"></i></td>
|
||||
<td>
|
||||
<div class="justify-content-center align-items-center align-middle {{repo.britney_class}} text-white">
|
||||
Britney
|
||||
</div>
|
||||
</td>
|
||||
{% endif %}
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
<td class="align-middle">
|
||||
<button class="btn btn-outline-secondary" onclick="doPull('{{repo.id}}', this)">Pull</button>
|
||||
<button class="btn btn-primary" onclick="doBuild('{{repo.id}}', this)">Build</button>
|
||||
<button class="btn btn-secondary" onclick="doViewLog('{{repo.id}}', this)">View Log</button>
|
||||
</td>
|
||||
</tr>{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="btn-group" role="group">
|
||||
<button class="btn btn-lg btn-primary" onclick="doPullSelected(this)">Pull Selected</button>
|
||||
<button class="btn btn-lg btn-success" onclick="doBuildSelected(this)">Build Selected</button>
|
||||
<button class="btn btn-lg btn-info" onclick="doPullAndBuildSelected(this)">Pull & Build Selected</button>
|
||||
</div>
|
||||
{% endblock %}
|
@ -0,0 +1,142 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<h1>{{PAGE_TITLE}}</h1>
|
||||
<ul class="nav nav-tabs">
|
||||
<li class="nav-item">
|
||||
{% if PAGE_TYPE == 'running' %}
|
||||
<a class="nav-link active" aria-current="page" href="/tasks">Running</a>
|
||||
{% else %}
|
||||
<a class="nav-link" href="/tasks">Running</a>
|
||||
{% endif %}
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
{% if PAGE_TYPE == 'queued' %}
|
||||
<a class="nav-link active" aria-current="page" href="/tasks?type=queued">Queued</a>
|
||||
{% else %}
|
||||
<a class="nav-link" href="/tasks?type=queued">Queued</a>
|
||||
{% endif %}
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
{% if PAGE_TYPE == 'complete' %}
|
||||
<a class="nav-link active" aria-current="page" href="/tasks?type=complete">Complete</a>
|
||||
{% else %}
|
||||
<a class="nav-link" href="/tasks?type=complete">Complete</a>
|
||||
{% endif %}
|
||||
</li>
|
||||
</ul>
|
||||
<div class="table-responsive">
|
||||
<table class="table table-striped table-bordered">
|
||||
<thead class="table-dark">
|
||||
<tr>
|
||||
<th scope="col">Score</th>
|
||||
<th scope="col">Queued</th>
|
||||
<th scope="col">Package</th>
|
||||
<th scope="col">Status</th>
|
||||
{% if PAGE_TYPE != 'queued' %}
|
||||
<th scope="col">Log</th>
|
||||
{% endif %}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for task in tasks %}
|
||||
<tr>
|
||||
<td>{{ task.score }}</td>
|
||||
<td>
|
||||
{% if PAGE_TYPE == 'running' %}
|
||||
Started at <span data-timestamp="{{ task.start_timestamp }}"></span><br />
|
||||
(Duration: <span data-timedelta="{{ task.running_timedelta }}"></span>)
|
||||
{% elif PAGE_TYPE == 'queued' %}
|
||||
<span data-timestamp="{{ task.queued_timestamp }}"></span>
|
||||
{% else %}
|
||||
{% if task.successful == 'true' %}
|
||||
<i class="fas fa-check" style="color: green;"></i> <b>Task Succeeded</b><br />
|
||||
{% else %}
|
||||
<i class="fas fa-times-circle" style="color: red;"></i> <b>Task Failed</b><br />
|
||||
{% endif %}
|
||||
Started at <span data-timestamp="{{ task.start_timestamp }}"></span><br />
|
||||
Finished at <span data-timestamp="{{ task.finish_timestamp }}"></span><br />
|
||||
(Duration: <span data-timedelta="{{ task.running_timedelta }}"></span>)
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>
|
||||
Name: {{ task.package_name }}<br />
|
||||
Release: {{ task.package_codename }}
|
||||
</td>
|
||||
<td>{{ task.job_status }}</td>
|
||||
{% if PAGE_TYPE != 'queued' %}
|
||||
<td>
|
||||
<div class="bg-light font-monospace p-2 rounded overflow-scroll" style="max-height: 15em; white-space: pre-wrap;">{{ task.log }}</div>
|
||||
</td>
|
||||
{% endif %}
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
function formatDuration(ms) {
|
||||
if (typeof ms !== "number" || ms < 0) {
|
||||
throw new Error("Input must be a non-negative number representing milliseconds.");
|
||||
}
|
||||
|
||||
// statics
|
||||
const millisecondsInOneSecond = 1000;
|
||||
const millisecondsInOneMinute = 60 * millisecondsInOneSecond;
|
||||
const millisecondsInOneHour = 60 * millisecondsInOneMinute;
|
||||
const millisecondsInOneDay = 24 * millisecondsInOneHour;
|
||||
|
||||
// calculate
|
||||
const days = Math.floor(ms / millisecondsInOneDay);
|
||||
const hours = Math.floor((ms % millisecondsInOneDay) / millisecondsInOneHour);
|
||||
const minutes = Math.floor((ms % millisecondsInOneHour) / millisecondsInOneMinute);
|
||||
const seconds = Math.floor((ms % millisecondsInOneMinute) / millisecondsInOneSecond);
|
||||
const milliseconds = ms % millisecondsInOneSecond;
|
||||
|
||||
/**
|
||||
* Pads a number with leading zeros to achieve the desired length.
|
||||
*
|
||||
* @param {number} num - The number to pad.
|
||||
* @param {number} size - The desired string length.
|
||||
* @returns {string} - The padded string.
|
||||
*/
|
||||
const pad = (num, size) => {
|
||||
let s = num.toString();
|
||||
while (s.length < size) s = "0" + s;
|
||||
return s;
|
||||
};
|
||||
|
||||
// Construct the formatted string
|
||||
let formatted = "";
|
||||
|
||||
if (days > 0) {
|
||||
formatted += `${days}:`;
|
||||
}
|
||||
|
||||
formatted += `${pad(hours, 2)}:${pad(minutes, 2)}:${pad(seconds, 2)}.${pad(milliseconds, 3)}`;
|
||||
|
||||
return formatted;
|
||||
}
|
||||
|
||||
document.querySelectorAll("[data-timestamp]").forEach((el) => {
|
||||
const timestamp = parseInt(el.getAttribute("data-timestamp"), 10);
|
||||
|
||||
if (!isNaN(timestamp)) {
|
||||
const date = new Date(timestamp);
|
||||
const formattedDate = date.toLocaleString(undefined, {
|
||||
timeZoneName: "short"
|
||||
});
|
||||
el.textContent = formattedDate;
|
||||
}
|
||||
});
|
||||
|
||||
document.querySelectorAll("[data-timedelta]").forEach((el) => {
|
||||
const timestamp = parseInt(el.getAttribute("data-timedelta"));
|
||||
|
||||
if (!isNaN(timestamp)) {
|
||||
el.textContent = formatDuration(timestamp);
|
||||
}
|
||||
});
|
||||
|
||||
</script>
|
||||
{% endblock %}
|
Loading…
Reference in new issue