From c3b4638d5f70880374dfa312061b9eeacc435d70 Mon Sep 17 00:00:00 2001
From: Bill Thiede
Date: Tue, 2 Dec 2025 15:20:20 -0800
Subject: [PATCH] WIP

---
 src/lib.rs  | 143 +++++++++++++++++++++++++++++++-
 src/main.rs | 229 ----------------------------------------------------
 2 files changed, 142 insertions(+), 230 deletions(-)
 delete mode 100644 src/main.rs

diff --git a/src/lib.rs b/src/lib.rs
index 0994a37..dd28966 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,12 +1,14 @@
 use std::{
     collections::HashMap,
+    fs,
+    fs::File,
     io,
     path::{Path, PathBuf},
     process::{Command, Output},
     time::{Duration, Instant},
 };
 
-use log::info;
+use log::{error, info};
 use rocket::response::Responder;
 use serde::Deserialize;
 use thiserror::Error;
@@ -91,3 +93,142 @@ pub fn git_checkout(build_path: &Path, rev: &str) -> Result<TaskStatus, SyncError> {
+
+// From https://stackoverflow.com/questions/26958489/how-to-copy-a-folder-recursively-in-rust
+fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
+    fs::create_dir_all(&dst)?;
+    for entry in fs::read_dir(src)? {
+        let entry = entry?;
+        let ty = entry.file_type()?;
+        if ty.is_dir() {
+            copy_dir_all(entry.path(), dst.as_ref().join(entry.file_name()))?;
+        } else {
+            fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
+        }
+    }
+    Ok(())
+}
+
+fn bench_at_commit(
+    commit: &str,
+    build_path: &Path,
+    target_path: &Path,
+) -> Result<Vec<TaskStatus>, SyncError> {
+    let mut output = Vec::new();
+    output.push(git_checkout(build_path, commit)?);
+    // Run `cargo aoc bench`
+    output.push(logging_run(
+        Command::new("cargo")
+            .env("CARGO_TARGET_DIR", target_path)
+            .current_dir(&build_path)
+            .arg("aoc")
+            .arg("bench"),
+    )?);
+    output.push(git_checkout(build_path, "HEAD")?);
+
+    Ok(output)
+}
+
+pub fn reload(config: &Config, name: &str) -> Result<String, SyncError> {
+    let repo = &config.repos[name];
+    info!("Need to reload '{}': {:?}\n{:#?}", name, config, repo);
+
+    let commits_root = config.build_root.join("commits");
+    let git_root = config.build_root.join("git").join(name);
+    let www_root = config.www_root.join(name);
+    let target_root = config.build_root.join("target");
+
+    let checkout_path = git_root.join(&repo.name);
+    let build_path = if let Some(subdir) = &repo.subdir {
+        git_root.join(&repo.name).join(subdir)
+    } else {
+        git_root.join(&repo.name)
+    };
+    let target_path = target_root.join(name);
+
+    dbg!(
+        &build_path,
+        &target_path,
+        &commits_root,
+        &git_root,
+        &www_root
+    );
+
+    if !commits_root.exists() {
+        info!("Creating {}", commits_root.display());
+        fs::create_dir_all(&commits_root)?;
+    }
+    if !git_root.exists() {
+        info!("Creating {}", git_root.display());
+        fs::create_dir_all(&git_root)?;
+    }
+    if !target_root.exists() {
+        info!("Creating {}", target_root.display());
+        fs::create_dir_all(&target_root)?;
+    }
+    if !www_root.exists() {
+        info!("Creating {}", www_root.display());
+        fs::create_dir_all(&www_root)?;
+    }
+    let mut output = vec![logging_run(Command::new("git").arg("version"))?];
+    let needs_clone = !checkout_path.exists();
+    if needs_clone {
+        output.push(logging_run(
+            Command::new("git")
+                .current_dir(&git_root)
+                .arg("clone")
+                .arg(&repo.url)
+                .arg(&checkout_path),
+        )?);
+    }
+    output.push(logging_run(
+        Command::new("git")
+            .current_dir(&checkout_path)
+            .arg("checkout")
+            .arg("-f")
+            .arg(&repo.branch),
+    )?);
+    output.push(logging_run(
+        Command::new("git")
+            .current_dir(&checkout_path)
+            .arg("checkout")
+            .arg("HEAD"),
+    )?);
+    dbg!(&checkout_path);
+    // Make sure buildable clone is up to date
+    output.push(logging_run(
+        Command::new("git").current_dir(&checkout_path).arg("pull"),
+    )?);
+    let commits = logging_run(Command::new("git").current_dir(&checkout_path).args([
+        "log",
+        "--format=%H",
+        &repo.branch,
+    ]))?;
+    let binding = String::from_utf8_lossy(&commits.output.stdout).into_owned();
+    let mut unknown_commits: Vec<_> = binding
+        .lines()
+        .filter(|commit| !commits_root.join(commit).exists())
+        .collect();
+    unknown_commits.reverse();
+    output.push(commits);
+    info!("Need to bench commits: {:?}", unknown_commits);
+    for commit in unknown_commits {
+        match bench_at_commit(commit, &build_path, &target_path) {
+            Ok(outputs) => {
+                output.extend(outputs);
+                File::create(commits_root.join(commit))?;
+            }
+            Err(err) => error!("Failed to bench {}@{}: {}", name, commit, err),
+        }
+    }
+    // Copy files from `target/` to serving directory
+    let bench_path = target_path.join("criterion");
+    info!("Copying {} -> {}", bench_path.display(), www_root.display());
+    copy_dir_all(bench_path, www_root)?;
+    let response = output
+        .iter()
+        .map(|ts| format!("{}", format_task_status(ts)))
+        .collect::<Vec<_>>()
+        .join("\n");
+    Ok(response)
+}
diff --git a/src/main.rs b/src/main.rs
deleted file mode 100644
index c579e2d..0000000
--- a/src/main.rs
+++ /dev/null
@@ -1,229 +0,0 @@
-#[macro_use]
-extern crate rocket;
-use std::{
-    collections::HashMap,
-    fs,
-    fs::File,
-    io,
-    path::{Path, PathBuf},
-    process::{Command, Output},
-};
-
-use aocsync::{format_task_status, git_checkout, logging_run, Config, SyncError, TaskStatus};
-use glog::Flags;
-use log::{error, info};
-use rocket::{fairing::AdHoc, fs::FileServer, response::Responder, State};
-use rocket_dyn_templates::{context, Template};
-use serde::Deserialize;
-
-// For testing
-#[get("/-/reload/<repo>")]
-fn get_reload(
-    lock: &State<ReloadLock>,
-    config: &State<Config>,
-    repo: &str,
-) -> Result<String, SyncError> {
-    reload(lock, config, repo)
-}
-
-#[post("/-/reload/<repo>")]
-fn post_reload(
-    lock: &State<ReloadLock>,
-    config: &State<Config>,
-    repo: &str,
-) -> Result<String, SyncError> {
-    reload(lock, config, repo)
-}
-
-fn bench_at_commit(
-    commit: &str,
-    build_path: &Path,
-    target_path: &Path,
-) -> Result<Vec<TaskStatus>, SyncError> {
-    let mut output = Vec::new();
-    output.push(git_checkout(build_path, commit)?);
-    // Run `cargo aoc bench`
-    output.push(logging_run(
-        Command::new("cargo")
-            .env("CARGO_TARGET_DIR", target_path)
-            .current_dir(&build_path)
-            .arg("aoc")
-            .arg("bench"),
-    )?);
-    output.push(git_checkout(build_path, "HEAD")?);
-
-    Ok(output)
-}
-
-use std::sync::{Arc, Mutex};
-
-fn reload(
-    lock: &State<ReloadLock>,
-    config: &State<Config>,
-    name: &str,
-) -> Result<String, SyncError> {
-    let _locked = lock.0.lock().unwrap();
-    let repo = &config.repos[name];
-    info!("Need to reload '{}': {:?}\n{:#?}", name, config, repo);
-
-    let commits_root = config.build_root.join("commits");
-    let git_root = config.build_root.join("git").join(name);
-    let www_root = config.www_root.join(name);
-    let target_root = config.build_root.join("target");
-
-    let checkout_path = git_root.join(&repo.name);
-    let build_path = if let Some(subdir) = &repo.subdir {
-        git_root.join(&repo.name).join(subdir)
-    } else {
-        git_root.join(&repo.name)
-    };
-    let target_path = target_root.join(name);
-
-    dbg!(
-        &build_path,
-        &target_path,
-        &commits_root,
-        &git_root,
-        &www_root
-    );
-
-    if !commits_root.exists() {
-        info!("Creating {}", commits_root.display());
-        fs::create_dir_all(&commits_root)?;
-    }
-    if !git_root.exists() {
-        info!("Creating {}", git_root.display());
-        fs::create_dir_all(&git_root)?;
-    }
-    if !target_root.exists() {
-        info!("Creating {}", target_root.display());
-        fs::create_dir_all(&target_root)?;
-    }
-    if !www_root.exists() {
-        info!("Creating {}", www_root.display());
-        fs::create_dir_all(&www_root)?;
-    }
-    let mut output = vec![logging_run(Command::new("git").arg("version"))?];
-    let needs_clone = !checkout_path.exists();
-    if needs_clone {
-        output.push(logging_run(
-            Command::new("git")
-                .current_dir(&git_root)
-                .arg("clone")
-                .arg(&repo.url)
-                .arg(&checkout_path),
-        )?);
-    }
-    output.push(logging_run(
-        Command::new("git")
-            .current_dir(&checkout_path)
-            .arg("checkout")
-            .arg("-f")
-            .arg(&repo.branch),
-    )?);
-    output.push(logging_run(
-        Command::new("git")
-            .current_dir(&checkout_path)
-            .arg("checkout")
-            .arg("HEAD"),
-    )?);
-    dbg!(&checkout_path);
-    // Make sure buildable clone is up to date
-    output.push(logging_run(
-        Command::new("git").current_dir(&checkout_path).arg("pull"),
-    )?);
-    let commits = logging_run(Command::new("git").current_dir(&checkout_path).args([
-        "log",
-        "--format=%H",
-        &repo.branch,
-    ]))?;
-    let binding = String::from_utf8_lossy(&commits.output.stdout).into_owned();
-    let mut unknown_commits: Vec<_> = binding
-        .lines()
-        .filter(|commit| !commits_root.join(commit).exists())
-        .collect();
-    unknown_commits.reverse();
-    output.push(commits);
-    info!("Need to bench commits: {:?}", unknown_commits);
-    for commit in unknown_commits {
-        match bench_at_commit(commit, &build_path, &target_path) {
-            Ok(outputs) => {
-                output.extend(outputs);
-                File::create(commits_root.join(commit))?;
-            }
-            Err(err) => error!("Failed to bench {}@{}: {}", name, commit, err),
-        }
-    }
-    // Copy files from `target/` to serving directory
-    let bench_path = target_path.join("criterion");
-    info!("Copying {} -> {}", bench_path.display(), www_root.display());
-    copy_dir_all(bench_path, www_root)?;
-    let response = output
-        .iter()
-        .map(|ts| format!("{}", format_task_status(ts)))
-        .collect::<Vec<_>>()
-        .join("\n");
-    Ok(response)
-}
-
-// From https://stackoverflow.com/questions/26958489/how-to-copy-a-folder-recursively-in-rust
-fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
-    fs::create_dir_all(&dst)?;
-    for entry in fs::read_dir(src)? {
-        let entry = entry?;
-        let ty = entry.file_type()?;
-        if ty.is_dir() {
-            copy_dir_all(entry.path(), dst.as_ref().join(entry.file_name()))?;
-        } else {
-            fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
-        }
-    }
-    Ok(())
-}
-
-#[get("/")]
-fn index(config: &State<Config>) -> Result<Template, SyncError> {
-    let dirs: Vec<_> = fs::read_dir(&config.www_root)?
-        .filter_map(|ent| ent.ok())
-        .map(|ent| ent.file_name().into_string())
-        .filter_map(|ent| ent.ok())
-        .collect();
-    Ok(Template::render(
-        "index",
-        context! {
-            dirs
-        },
-    ))
-}
-
-#[catch(500)]
-fn http500(req: &rocket::Request) -> String {
-    // TODO(wathiede): figure out a way to retrieve the Error that got us here?
-    format!("{:?}", req)
-}
-
-struct ReloadLock(Arc<Mutex<()>>);
-
-#[launch]
-fn rocket() -> _ {
-    glog::new()
-        .init(Flags {
-            colorlogtostderr: true,
-            //alsologtostderr: true, // use logtostderr to only write to stderr and not to files
-            logtostderr: true,
-            ..Default::default()
-        })
-        .unwrap();
-
-    let config = rocket::Config::figment()
-        .extract::<Config>()
-        .expect("Couldn't parse config");
-    info!("Config:\n{:#?}", config);
-    rocket::build()
-        .mount("/", routes![index, get_reload, post_reload])
-        .mount("/results/", FileServer::from(config.www_root))
-        .manage(ReloadLock(Arc::new(Mutex::new(()))))
-        //.register("/", catchers![http500])
-        .attach(AdHoc::config::<Config>())
-        .attach(Template::fairing())
-}
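
A minimal sketch of the configuration `reload` expects, assuming `Config`/`Repo` shapes inferred from the fields this patch touches (`build_root`, `www_root`, `repos`, and per-repo `name`, `url`, `branch`, `subdir`) and assuming `serde` derive plus the `toml` crate; the real definitions live elsewhere in src/lib.rs and may differ:

// Sketch only: Config/Repo shapes inferred from the fields reload() uses;
// the actual definitions are not part of this patch.
use std::{collections::HashMap, path::PathBuf};

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Repo {
    name: String,           // directory the clone lives in under <build_root>/git/<key>/
    url: String,            // remote passed to `git clone`
    branch: String,         // branch that is checked out and walked with `git log`
    subdir: Option<String>, // optional subdirectory to run `cargo aoc bench` from
}

#[derive(Debug, Deserialize)]
struct Config {
    build_root: PathBuf, // holds commits/, git/, and the per-repo CARGO_TARGET_DIR
    www_root: PathBuf,   // criterion reports are copied under <www_root>/<key>/
    repos: HashMap<String, Repo>,
}

fn main() {
    // Illustrative values only; the server reads the equivalent from Rocket's
    // figment (Rocket.toml / ROCKET_* environment) rather than a literal string.
    let cfg: Config = toml::from_str(
        r#"
        build_root = "/var/lib/aocsync/build"
        www_root = "/var/lib/aocsync/www"

        [repos.aoc2025]
        name = "advent-of-code"
        url = "https://example.com/advent-of-code.git"
        branch = "main"
        subdir = "2025"
        "#,
    )
    .expect("example config should parse");
    println!("{cfg:#?}");
    // A caller would then invoke: aocsync::reload(&cfg, "aoc2025")
}

In the server the same value would come from Rocket's figment (`rocket::Config::figment().extract::<Config>()` and `AdHoc::config::<Config>()` in the deleted main.rs), and a follow-up main.rs (not in this WIP patch) would presumably keep the `/-/reload/<repo>` handlers as thin wrappers around `aocsync::reload(&config, repo)` returning `Result<String, SyncError>`.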