Port pending-packages to C++ using launchpadlib-cpp

main
Simon Quigley 7 days ago
parent fd6b2603ba
commit 2aea573803

@ -15,6 +15,12 @@
#include "utilities.h" #include "utilities.h"
#include "launchpad.h"
#include "archive.h"
#include "distribution.h"
#include "person.h"
#include "source.h"
#include <iostream>
#include <fstream>
#include <string>
@ -33,8 +39,9 @@
#include <getopt.h>
#include <yaml-cpp/yaml.h>
#include <sstream>
#include <optional>
#include "utilities.h" #include <unordered_set>
#include <set>
namespace fs = std::filesystem;
@ -44,6 +51,10 @@ void processRelease(const std::string& release, const YAML::Node& config);
void refresh(const std::string& url, const std::string& pocket, const std::string& britneyCache, std::mutex& logMutex);
int executeAndLog(const std::string& command);
// Global Launchpad variable, initialized once for reuse
static std::optional<launchpad> global_lp_opt;
static launchpad* global_lp = nullptr;
// Execute a command and stream its output to std::cout in real time
int executeAndLog(const std::string& command) {
    std::string fullCommand = command + " 2>&1"; // Redirect stderr to stdout
@ -56,7 +67,7 @@ int executeAndLog(const std::string& command) {
    char buffer[256];
    while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
        std::cout << buffer;
        std::cout.flush();
    }
    int exitCode = pclose(pipe);
@ -67,6 +78,194 @@ int executeAndLog(const std::string& command) {
    }
}
// This function replaces the Python "pending-packages" script logic.
// Everything is done in one function, using the Launchpad C++ logic shown before.
int check_pending_packages(const std::string& release) {
// Ensure global_lp is initialized once globally
if (!global_lp_opt.has_value()) {
global_lp_opt = launchpad::login();
if (!global_lp_opt.has_value()) {
std::cerr << "Failed to authenticate with Launchpad.\n";
return 1;
}
global_lp = &global_lp_opt.value();
}
auto lp = global_lp;
std::cout << "Logging into Launchpad..." << std::endl;
// We already logged in globally above.
std::cout << "Logged in. Initializing repositories..." << std::endl;
auto ubuntu_opt = lp->distributions["ubuntu"];
if (!ubuntu_opt.has_value()) {
std::cerr << "Failed to retrieve ubuntu.\n";
return 1;
}
distribution ubuntu = std::move(ubuntu_opt.value());
auto lubuntu_ci_opt = lp->people["lubuntu-ci"];
if (!lubuntu_ci_opt.has_value()) {
std::cerr << "Failed to retrieve lubuntu-ci.\n";
return 1;
}
person lubuntu_ci = std::move(lubuntu_ci_opt.value());
std::optional<archive> regular_opt = lubuntu_ci.getPPAByName(ubuntu, "unstable-ci");
if (!regular_opt.has_value()) {
std::cerr << "Failed to retrieve regular PPA.\n";
return 1;
}
archive regular = std::move(regular_opt.value());
std::optional<archive> proposed_opt = lubuntu_ci.getPPAByName(ubuntu, "unstable-ci-proposed");
if (!proposed_opt.has_value()) {
std::cerr << "Failed to retrieve proposed PPA.\n";
return 1;
}
archive proposed = std::move(proposed_opt.value());
// We need to get the distro series for "release"
auto series_opt = ubuntu.getSeries(release);
if (!series_opt.has_value()) {
std::cerr << "Failed to retrieve series for: " << release << std::endl;
return 1;
}
distro_series series = std::move(series_opt.value());
std::cout << "Repositories initialized. Checking for pending sources..." << std::endl;
// Check if any sources are pending in either regular or proposed
int total_pending = 0;
{
auto reg_pending = regular.getPublishedSources("Pending", series);
auto prop_pending = proposed.getPublishedSources("Pending", series);
total_pending = (int)reg_pending.size() + (int)prop_pending.size();
}
bool has_pending = (total_pending != 0);
if (has_pending) {
std::cout << "Total sources pending: " << total_pending << std::endl;
std::cout << "Sources are still pending, not running Britney" << std::endl;
return 1;
}
std::cout << "No pending sources, continuing. Checking for pending builds..." << std::endl;
total_pending = 0;
int total_retried = 0;
{
using namespace std::chrono;
auto now_utc = std::chrono::system_clock::now();
auto one_hour_ago = now_utc - std::chrono::hours(1);
// Convert one_hour_ago to a time_point that matches the build date time usage
// Our build.date_started presumably returns a std::chrono::system_clock::time_point
auto archives = {proposed, regular};
for (auto& archv : archives) {
for (auto src : archv.getPublishedSources("Published", series)) {
for (auto build : src.getBuilds()) {
auto bs = build.buildstate;
if (bs == "Currently building") {
if (build.date_started.has_value() && build.date_started.value() >= one_hour_ago) {
total_pending += 1;
}
} else if (bs == "Needs building") {
total_pending += 1;
} else if (bs == "Chroot problem" ||
(bs == "Failed to build" && build.build_log_url.empty())) {
// Retry failed builds without logs
if (build.can_be_retried) {
if (build.retry()) {
total_pending += 1;
total_retried += 1;
}
}
}
}
}
}
}
if (total_retried != 0) {
std::cout << "Total builds retried due to builder flakiness: " << total_retried << std::endl;
}
if (total_pending != 0) {
std::cout << "Total builds pending: " << total_pending << std::endl;
std::cout << "Builds are still running, not running Britney" << std::endl;
return 1;
}
std::cout << "No pending builds, continuing. Checking for pending binaries..." << std::endl;
// Check if binaries are pending
// This logic replicates the python code:
// For each of [proposed, regular], if binaries are not all published after a certain grace period,
// we consider them pending.
has_pending = false;
{
auto archives = {proposed, regular};
for (auto& pocket : archives) {
if (has_pending) break;
auto three_hours_ago = std::chrono::system_clock::now() - std::chrono::hours(3);
std::set<std::string> check_builds;
std::set<std::string> current_builds;
std::vector<source> source_packages;
// Get successfully built records
for (auto build_record : pocket.getBuildRecords("Successfully built")) {
if (build_record.datebuilt.has_value() && build_record.datebuilt.value() < three_hours_ago) {
// If any build is older than 3 hours, the python code breaks and doesn't check
// further. We'll do the same.
source_packages.clear();
break;
}
check_builds.insert(build_record.title);
auto src_pub = build_record.current_source_publication;
if (src_pub.has_value() && src_pub.value().distro_series.name_or_version == series.name_or_version) {
bool found = false;
for (auto& sp : source_packages) {
if (sp.self_link == src_pub.value().self_link) {
found = true;
break;
}
}
if (!found && src_pub.has_value()) {
source_packages.push_back(src_pub.value());
}
}
}
// For each source package, get their published binaries and see if they're all in check_builds
for (auto& s : source_packages) {
for (auto bin : s.getPublishedBinaries()) {
current_builds.insert(bin.build.title);
}
}
// If check_builds does not fully cover current_builds, we have pending binaries
for (auto& cb : current_builds) {
if (check_builds.find(cb) == check_builds.end()) {
has_pending = true;
break;
}
}
}
}
if (has_pending) {
std::cout << "Binaries are still pending, not running Britney" << std::endl;
return 1;
}
std::cout << "All clear. Starting Britney." << std::endl;
return 0;
}
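For comparison with the Python script removed below: its check_builds.issuperset(current_builds) test corresponds to the manual set-difference loop above. A minimal equivalent using std::includes, shown only as an illustrative alternative rather than what this commit ships, relies on std::set keeping its elements sorted:

// Illustrative only: superset test equivalent to Python's issuperset().
// std::includes requires sorted ranges; std::set already provides that.
#include <algorithm>
#include <set>
#include <string>

static bool covers_all_binaries(const std::set<std::string>& check_builds,
                                const std::set<std::string>& current_builds) {
    // True when every current build title is also present in check_builds.
    return std::includes(check_builds.begin(), check_builds.end(),
                         current_builds.begin(), current_builds.end());
}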
int main(int argc, char* argv[]) {
    std::string configFilePath = "config.yaml";
@ -110,7 +309,7 @@ int main(int argc, char* argv[]) {
    YAML::Node config;
    try {
        config = YAML::LoadFile(configFilePath);
    } catch (const YAML::BadFile&) {
        std::cerr << "Error: Unable to open config file: " << configFilePath << std::endl;
        return 1;
    } catch (const YAML::ParserException& e) {
@ -200,20 +399,10 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) {
    std::string SOURCE_PPA_URL = "https://ppa.launchpadcontent.net/" + LP_TEAM + "/" + SOURCE_PPA + "/ubuntu/dists/" + RELEASE + "/main";
    std::string DEST_PPA_URL = "https://ppa.launchpadcontent.net/" + LP_TEAM + "/" + DEST_PPA + "/ubuntu/dists/" + RELEASE + "/main";

    // Instead of calling the pending-packages script, run the integrated check_pending_packages() function
    int pendingResult = check_pending_packages(RELEASE);
    if (pendingResult != 0) {
        std::cerr << "Error: pending-packages (now integrated check) failed for release " << RELEASE << std::endl;
        return;
    }
@ -222,11 +411,9 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) {
    std::vector<std::thread> threads;
    std::mutex logMutex;

    std::vector<std::string> pockets = {RELEASE, RELEASE + "-updates"};
    std::vector<std::string> components = {"main", "restricted", "universe", "multiverse"};

    for (const auto& pocket : pockets) {
        for (const auto& component : components) {
            for (const auto& arch : ARCHES) {
@ -242,42 +429,45 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) {
        }
    }

    {
        // Treat DEST_PPA as proposed pocket
        std::string pocket = RELEASE + "-ppa-proposed";
        for (const auto& arch : ARCHES) {
            std::string url = DEST_PPA_URL + "/binary-" + arch + "/Packages.gz";
            threads.emplace_back(refresh, url, pocket, BRITNEY_CACHE, std::ref(logMutex));
        }
        for (const auto& arch : PORTS_ARCHES) {
            std::string url = DEST_PPA_URL + "/binary-" + arch + "/Packages.gz";
            threads.emplace_back(refresh, url, pocket, BRITNEY_CACHE, std::ref(logMutex));
        }
        {
            std::string url = DEST_PPA_URL + "/source/Sources.gz";
            threads.emplace_back(refresh, url, pocket, BRITNEY_CACHE, std::ref(logMutex));
        }
    }

    {
        // SOURCE_PPA as unstable pocket
        std::string pocket = SOURCE_PPA + "-" + RELEASE;
        for (const auto& arch : ARCHES) {
            std::string url = SOURCE_PPA_URL + "/binary-" + arch + "/Packages.gz";
            threads.emplace_back(refresh, url, pocket, BRITNEY_CACHE, std::ref(logMutex));
        }
        for (const auto& arch : PORTS_ARCHES) {
            std::string url = SOURCE_PPA_URL + "/binary-" + arch + "/Packages.gz";
            threads.emplace_back(refresh, url, pocket, BRITNEY_CACHE, std::ref(logMutex));
        }
        {
            std::string url = SOURCE_PPA_URL + "/source/Sources.gz";
            threads.emplace_back(refresh, url, pocket, BRITNEY_CACHE, std::ref(logMutex));
        }
    }

    for (auto& th : threads) {
        th.join();
    }

    // Process logs
    pid_t pid = getpid();
    std::string logPattern = std::to_string(pid) + "-wget-log";
@ -285,7 +475,6 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) {
        if (p.is_regular_file()) {
            std::string filename = p.path().filename().string();
            if (filename.find(logPattern) != std::string::npos) {
                std::ifstream logFile(p.path());
                if (logFile.is_open()) {
                    std::cerr << logFile.rdbuf();
@ -298,109 +487,107 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) {
std::cout << "Building britney indexes..." << std::endl; std::cout << "Building britney indexes..." << std::endl;
// Create output directory fs::create_directories(fs::path(BRITNEY_OUTDIR) / getCurrentTimestamp());
fs::create_directories(fs::path(BRITNEY_OUTDIR) / BRITNEY_TIMESTAMP);
// "Unstable" is SOURCE_PPA
std::string DEST = BRITNEY_DATADIR + RELEASE + "-proposed"; std::string DEST = BRITNEY_DATADIR + RELEASE + "-proposed";
fs::create_directories(DEST); fs::create_directories(DEST);
fs::create_directories(fs::path(BRITNEY_DATADIR) / (RELEASE + "-proposed") / "state"); fs::create_directories(fs::path(BRITNEY_DATADIR) / (RELEASE + "-proposed") / "state");
writeFile(fs::path(BRITNEY_DATADIR) / (RELEASE + "-proposed") / "state" / "age-policy-dates", ""); writeFile(fs::path(BRITNEY_DATADIR) / (RELEASE + "-proposed") / "state" / "age-policy-dates", "");
// Create symlink for Hints
fs::remove(fs::path(DEST) / "Hints"); fs::remove(fs::path(DEST) / "Hints");
fs::create_symlink(BRITNEY_HINTDIR, fs::path(DEST) / "Hints"); fs::create_symlink(BRITNEY_HINTDIR, fs::path(DEST) / "Hints");
// Concatenate Sources.gz files for SOURCE_PPA // Concatenate Sources from SOURCE_PPA
std::string sourcesContent; {
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) { std::string sourcesContent;
if (p.path().filename() == "Sources.gz") { for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) {
sourcesContent += decompressGzip(p.path()); if (p.path().filename() == "Sources.gz") {
sourcesContent += decompressGzip(p.path());
}
} }
} writeFile(fs::path(DEST) / "Sources", sourcesContent);
writeFile(fs::path(DEST) / "Sources", sourcesContent);
// Concatenate Packages.gz files for SOURCE_PPA for (const auto& arch : ARCHES) {
for (const auto& arch : ARCHES) { std::string packagesContent;
std::string packagesContent; for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) {
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) { if (p.path().filename() == "Packages.gz" && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) {
if (p.path().filename() == "Packages.gz" && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) { packagesContent += decompressGzip(p.path());
packagesContent += decompressGzip(p.path()); }
} }
writeFile(fs::path(DEST) / ("Packages_" + arch), packagesContent);
} }
writeFile(fs::path(DEST) / ("Packages_" + arch), packagesContent); for (const auto& arch : PORTS_ARCHES) {
} std::string packagesContent;
for (const auto& arch : PORTS_ARCHES) { for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) {
std::string packagesContent; if (p.path().filename() == "Packages.gz" && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) {
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE + SOURCE_PPA + "-" + RELEASE)) { packagesContent += decompressGzip(p.path());
if (p.path().filename() == "Packages.gz" && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) { }
packagesContent += decompressGzip(p.path());
} }
writeFile(fs::path(DEST) / ("Packages_" + arch), packagesContent);
} }
writeFile(fs::path(DEST) / ("Packages_" + arch), packagesContent);
}
writeFile(fs::path(DEST) / "Blocks", "");
writeFile(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", "");
// Similar steps for "Testing" writeFile(fs::path(DEST) / "Blocks", "");
DEST = BRITNEY_DATADIR + RELEASE; writeFile(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", "");
fs::create_directories(DEST); }
fs::create_directories(fs::path(BRITNEY_DATADIR) / RELEASE / "state");
writeFile(fs::path(BRITNEY_DATADIR) / RELEASE / "state" / "age-policy-dates", "");
fs::remove(fs::path(DEST) / "Hints");
fs::create_symlink(BRITNEY_HINTDIR, fs::path(DEST) / "Hints");
// Concatenate Sources.gz files for RELEASE // Process Testing
sourcesContent.clear(); {
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) { DEST = BRITNEY_DATADIR + RELEASE;
if (p.path().filename() == "Sources.gz" && p.path().string().find(RELEASE) != std::string::npos) { fs::create_directories(DEST);
sourcesContent += decompressGzip(p.path()); fs::create_directories(fs::path(BRITNEY_DATADIR) / RELEASE / "state");
writeFile(fs::path(BRITNEY_DATADIR) / RELEASE / "state" / "age-policy-dates", "");
fs::remove(fs::path(DEST) / "Hints");
fs::create_symlink(BRITNEY_HINTDIR, fs::path(DEST) / "Hints");
// Concatenate Sources for RELEASE
{
std::string sourcesContent;
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) {
if (p.path().filename() == "Sources.gz" && p.path().string().find(RELEASE) != std::string::npos) {
sourcesContent += decompressGzip(p.path());
}
}
writeFile(fs::path(DEST) / "Sources", sourcesContent);
regexReplaceInFile(fs::path(DEST) / "Sources", "Section: universe/", "Section: ");
} }
}
writeFile(fs::path(DEST) / "Sources", sourcesContent);
// Replace "Section: universe/" with "Section: "
regexReplaceInFile(fs::path(DEST) / "Sources", "Section: universe/", "Section: ");
// Concatenate Packages.gz files for RELEASE for (const auto& arch : ARCHES) {
for (const auto& arch : ARCHES) { std::string packagesContent;
std::string packagesContent; for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) {
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) { if (p.path().filename() == "Packages.gz" && p.path().string().find(RELEASE) != std::string::npos && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) {
if (p.path().filename() == "Packages.gz" && p.path().string().find(RELEASE) != std::string::npos && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) { packagesContent += decompressGzip(p.path());
packagesContent += decompressGzip(p.path()); }
} }
fs::path packagesFilePath = fs::path(DEST) / ("Packages_" + arch);
writeFile(packagesFilePath, packagesContent);
regexReplaceInFile(packagesFilePath, "Section: universe/", "Section: ");
} }
fs::path packagesFilePath = fs::path(DEST) / ("Packages_" + arch);
writeFile(packagesFilePath, packagesContent); for (const auto& arch : PORTS_ARCHES) {
// Replace "Section: universe/" with "Section: " std::string packagesContent;
regexReplaceInFile(packagesFilePath, "Section: universe/", "Section: "); for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) {
} if (p.path().filename() == "Packages.gz" && p.path().string().find(RELEASE) != std::string::npos && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) {
for (const auto& arch : PORTS_ARCHES) { packagesContent += decompressGzip(p.path());
std::string packagesContent; }
for (auto& p : fs::recursive_directory_iterator(BRITNEY_CACHE)) {
if (p.path().filename() == "Packages.gz" && p.path().string().find(RELEASE) != std::string::npos && p.path().parent_path().string().find("binary-" + arch) != std::string::npos) {
packagesContent += decompressGzip(p.path());
} }
fs::path packagesFilePath = fs::path(DEST) / ("Packages_" + arch);
writeFile(packagesFilePath, packagesContent);
regexReplaceInFile(packagesFilePath, "Section: universe/", "Section: ");
} }
fs::path packagesFilePath = fs::path(DEST) / ("Packages_" + arch);
writeFile(packagesFilePath, packagesContent);
// Replace "Section: universe/" with "Section: "
regexReplaceInFile(packagesFilePath, "Section: universe/", "Section: ");
}
writeFile(fs::path(DEST) / "Blocks", ""); writeFile(fs::path(DEST) / "Blocks", "");
writeFile(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", ""); writeFile(fs::path(BRITNEY_DATADIR) / (SOURCE_PPA + "-" + RELEASE) / "Dates", "");
}
// Create config file atomically // Create britney.conf
std::string configContent = readFile(BRITNEY_CONF); {
// Replace variables in configContent using configuration variables std::string configContent = readFile(BRITNEY_CONF);
configContent = std::regex_replace(configContent, std::regex("%\\{SERIES\\}"), RELEASE); configContent = std::regex_replace(configContent, std::regex("%\\{SERIES\\}"), RELEASE);
writeFile("britney.conf", configContent); writeFile("britney.conf", configContent);
}
std::cout << "Running britney..." << std::endl; std::cout << "Running britney..." << std::endl;
// Run britney.py
std::string britneyCmd = BRITNEY_LOC + " -v --config britney.conf --series " + RELEASE; std::string britneyCmd = BRITNEY_LOC + " -v --config britney.conf --series " + RELEASE;
int britneyResult = executeAndLog(britneyCmd); int britneyResult = executeAndLog(britneyCmd);
if (britneyResult != 0) { if (britneyResult != 0) {
@ -413,67 +600,66 @@ void processRelease(const std::string& RELEASE, const YAML::Node& config) {
    fs::remove_all(BRITNEY_OUTDIR);

    std::cout << "Moving packages..." << std::endl;
    {
        std::ifstream heidiFile("output/" + RELEASE + "/HeidiResultDelta");
        if (!heidiFile.is_open()) {
            std::cout << "No candidates found for release " << RELEASE << "." << std::endl;
        } else {
            std::ofstream candidatesFile("candidates");
            std::string line;
            while (std::getline(heidiFile, line)) {
                if (line.empty() || line[0] == '#') continue;
                candidatesFile << line << std::endl;
            }
            heidiFile.close();
            candidatesFile.close();

            std::ifstream candidates("candidates");
            while (std::getline(candidates, line)) {
                std::istringstream iss(line);
                std::vector<std::string> packageInfo;
                std::string word;
                while (iss >> word) {
                    packageInfo.push_back(word);
                }
                if (packageInfo.size() == 2) {
                    std::string COPY = "../ubuntu-archive-tools/copy-package";
                    std::string REMOVE = "../ubuntu-archive-tools/remove-package";
                    if (packageInfo[0][0] == '-') {
                        std::string PACKAGE = packageInfo[0].substr(1);
                        std::cout << "Demoting " << PACKAGE << "..." << std::endl;
                        std::string copyCmd = COPY + " -y -b -s " + RELEASE + " --from ppa:" + LP_TEAM + "/ubuntu/" + DEST_PPA +
                            " --to ppa:" + LP_TEAM + "/ubuntu/" + SOURCE_PPA + " --version " + packageInfo[1] + " " + PACKAGE;
                        std::string removeCmd = REMOVE + " -y -s " + RELEASE + " --archive ppa:" + LP_TEAM + "/ubuntu/" + DEST_PPA +
                            " --version " + packageInfo[1] + " --removal-comment=\"demoted to proposed\" " + PACKAGE;
                        int copyResult = executeAndLog(copyCmd);
                        if (copyResult != 0) {
                            std::cerr << "Error: Copy command failed for package " << PACKAGE << std::endl;
                        }
                        int removeResult = executeAndLog(removeCmd);
                        if (removeResult != 0) {
                            std::cerr << "Error: Remove command failed for package " << PACKAGE << std::endl;
                        }
                    } else {
                        std::cout << "Migrating " << packageInfo[0] << "..." << std::endl;
                        std::string copyCmd = COPY + " -y -b -s " + RELEASE + " --from ppa:" + LP_TEAM + "/ubuntu/" + SOURCE_PPA +
                            " --to ppa:" + LP_TEAM + "/ubuntu/" + DEST_PPA + " --version " + packageInfo[1] + " " + packageInfo[0];
                        std::string removeCmd = REMOVE + " -y -s " + RELEASE + " --archive ppa:" + LP_TEAM + "/ubuntu/" + SOURCE_PPA +
                            " --version " + packageInfo[1] + " --removal-comment=\"moved to release\" " + packageInfo[0];
                        int copyResult = executeAndLog(copyCmd);
                        if (copyResult != 0) {
                            std::cerr << "Error: Copy command failed for package " << packageInfo[0] << std::endl;
                        }
                        int removeResult = executeAndLog(removeCmd);
                        if (removeResult != 0) {
                            std::cerr << "Error: Remove command failed for package " << packageInfo[0] << std::endl;
                        }
                    }
                }
            }
            candidates.close();
            fs::remove("candidates");
        }
    }

    std::cout << "Run the grim reaper..." << std::endl;
@ -494,30 +680,23 @@ void printHelp(const char* programName) {
std::cout << " -h, --help Show this help message and exit\n"; std::cout << " -h, --help Show this help message and exit\n";
} }
// Refresh package indexes by downloading files in parallel
void refresh(const std::string& url, const std::string& pocket, const std::string& britneyCache, std::mutex& logMutex) { void refresh(const std::string& url, const std::string& pocket, const std::string& britneyCache, std::mutex& logMutex) {
// Compute directory path based on the URL
fs::path urlPath(url); fs::path urlPath(url);
std::string lastTwoComponents = urlPath.parent_path().parent_path().filename().string() + "/" + urlPath.parent_path().filename().string(); std::string lastTwoComponents = urlPath.parent_path().parent_path().filename().string() + "/" + urlPath.parent_path().filename().string();
fs::path dir = fs::path(britneyCache) / pocket / lastTwoComponents; fs::path dir = fs::path(britneyCache) / pocket / lastTwoComponents;
// Create necessary directories
fs::create_directories(dir); fs::create_directories(dir);
// Update timestamps to prevent expiration auto now = fs::file_time_type::clock::now();
auto now = fs::file_time_type::clock::now(); // Use the same clock
fs::last_write_time(britneyCache, now); fs::last_write_time(britneyCache, now);
fs::last_write_time(fs::path(britneyCache) / pocket, now); fs::last_write_time(fs::path(britneyCache) / pocket, now);
fs::last_write_time(dir.parent_path(), now); fs::last_write_time(dir.parent_path(), now);
fs::last_write_time(dir, now); fs::last_write_time(dir, now);
// Log file path (uses process ID)
pid_t pid = getpid(); pid_t pid = getpid();
fs::path logFilePath = dir / (std::to_string(pid) + "-wget-log"); fs::path logFilePath = dir / (std::to_string(pid) + "-wget-log");
// Output file path
fs::path outputPath = dir / urlPath.filename(); fs::path outputPath = dir / urlPath.filename();
// Download the file
downloadFileWithTimestamping(url, outputPath, logFilePath, logMutex); downloadFileWithTimestamping(url, outputPath, logFilePath, logMutex);
} }
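As a worked example of the cache layout refresh() produces (the URL, pocket name, and cache root below are illustrative placeholders, not values taken from this commit): only the last two path components of the download URL are kept, so a Packages.gz for binary-amd64 under main lands in <britneyCache>/<pocket>/main/binary-amd64/ next to its <pid>-wget-log.

// Standalone sketch of the URL-to-cache-path mapping performed by refresh().
// The URL and the "noble" pocket are made-up placeholders.
#include <filesystem>
#include <string>

static std::filesystem::path example_cache_path() {
    namespace fs = std::filesystem;
    fs::path urlPath("https://ppa.launchpadcontent.net/lubuntu-ci/unstable-ci/ubuntu/dists/noble/main/binary-amd64/Packages.gz");
    std::string lastTwoComponents = urlPath.parent_path().parent_path().filename().string()
                                  + "/" + urlPath.parent_path().filename().string();
    // lastTwoComponents == "main/binary-amd64"
    fs::path dir = fs::path("britney_cache") / "noble" / lastTwoComponents;
    // Result: britney_cache/noble/main/binary-amd64/Packages.gz
    return dir / urlPath.filename();
}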

@ -1,103 +0,0 @@
#!/usr/bin/env python3
#
# Copyright (C) 2024 Simon Quigley <tsimonq2@ubuntu.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import argparse
from datetime import datetime, timedelta, timezone
from launchpadlib.launchpad import Launchpad
print(f"Logging into Launchpad...")
launchpad = Launchpad.login_with("pending-packages", "production", version="devel")
print("Logged in. Initializing repositories...")
ubuntu = launchpad.distributions["ubuntu"]
lubuntu_ci = launchpad.people["lubuntu-ci"]
regular = lubuntu_ci.getPPAByName(distribution=ubuntu, name="unstable-ci")
proposed = lubuntu_ci.getPPAByName(distribution=ubuntu, name="unstable-ci-proposed")
parser = argparse.ArgumentParser()
parser.add_argument("release")
args = parser.parse_args()
series = ubuntu.getSeries(name_or_version=args.release)
# First, check if any sources are still publishing
print("Repositories initialized. Checking for pending sources...")
records = [regular.getPublishedSources(status="Pending", distro_series=series),
proposed.getPublishedSources(status="Pending", distro_series=series)]
total_pending = sum([len(i) for i in records])
has_pending = total_pending != 0
if has_pending:
print(f"Total sources pending: {total_pending}")
print("Sources are still pending, not running Britney")
exit(1)
# Finally, check if any builds are still running/queued
print("No pending sources, continuing. Checking for pending builds...")
total_pending = 0
total_retried = 0
for archive in [proposed, regular]:
one_hour_ago = datetime.now(timezone.utc) - timedelta(hours=1)
for source in archive.getPublishedSources(status="Published", distro_series=series):
for build in source.getBuilds():
if build.buildstate == "Currently building":
if build.date_started >= one_hour_ago:
total_pending += 1
elif build.buildstate == "Needs building":
total_pending += 1
# This isn't technically related, but retry failed builds without logs
elif build.buildstate == "Chroot problem" or (build.buildstate == "Failed to build" and not build.build_log_url):
if build.can_be_retried:
build.retry()
total_pending += 1
total_retried += 1
if total_retried != 0:
print(f"Total builds retried due to builder flakiness: {total_retried}")
if total_pending != 0:
print(f"Total builds pending: {total_pending}")
print("Builds are still running, not running Britney")
exit(1)
print("No pending builds, continuing. Checking for pending binaries...")
has_pending = False
for pocket in [proposed, regular]:
if has_pending:
break
three_hours_ago = datetime.now(timezone.utc) - timedelta(hours=3)
check_builds = set()
current_builds = set()
source_packages = []
for build in pocket.getBuildRecords(build_state="Successfully built"):
if build.datebuilt < three_hours_ago:
del source_packages
break
check_builds.add(build.title)
source_package = build.current_source_publication
if source_package and source_package.distro_series == series and source_package not in source_packages:
source_packages.append(source_package)
for binary in source_package.getPublishedBinaries():
current_builds.add(binary.build.title)
has_pending = not check_builds.issuperset(current_builds) or has_pending
if has_pending:
print("Binaries are still pending, not running Britney")
exit(1)
print("All clear. Starting Britney.")