From 82dcf88a05dc63d67fd567eaf9a00757eb96adc1 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 10 Dec 2025 14:57:09 -0500 Subject: [PATCH 1/6] Sort Cargo deps and remove stray comment It was not about `lapin`, it was about `http`. --- ofborg/Cargo.toml | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/ofborg/Cargo.toml b/ofborg/Cargo.toml index e6fd5b11..38aad489 100644 --- a/ofborg/Cargo.toml +++ b/ofborg/Cargo.toml @@ -15,25 +15,24 @@ chrono = { version = "0.4.38", default-features = false, features = [ either = "1.13.0" fs2 = "0.4.3" futures-util = "0.3.31" +hex = "0.4.3" +hmac = "0.12.1" +http = "1" #hubcaps = "0.6" # for Conclusion::Skipped which is in master hubcaps = { git = "https://github.com/ofborg/hubcaps.git", rev = "50dbe6ec45c9dfea4e3cfdf27bbadfa565f69dec", default-features = false, features = ["app", "rustls-tls"] } -http = "1" # hyper = { version = "0.14", features = ["full"] } hyper = "=0.10.*" -# maybe can be removed when hyper is updated lapin = "2.5.4" lru-cache = "0.1.2" md5 = "0.8.0" nom = "4.2.3" regex = "1.11.1" +rustls-pemfile = "2.2.0" serde = { version = "1.0.217", features = ["derive"] } serde_json = "1.0.135" +sha2 = "0.10.8" tempfile = "3.15.0" tracing = "0.1.41" tracing-subscriber = { version = "0.3.19", features = ["json", "env-filter"] } uuid = { version = "1.12", features = ["v4"] } -rustls-pemfile = "2.2.0" -hmac = "0.12.1" -sha2 = "0.10.8" -hex = "0.4.3" From 15022bf59eb9600579043ac52c278b0a88955eec Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 10 Dec 2025 15:53:44 -0500 Subject: [PATCH 2/6] cargo update --- Cargo.lock | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cedfdaa8..33ed7add 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1223,9 +1223,9 @@ checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = "icu_properties" -version = "2.1.1" +version = "2.1.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" dependencies = [ "icu_collections", "icu_locale_core", @@ -1237,9 +1237,9 @@ dependencies = [ [[package]] name = "icu_properties_data" -version = "2.1.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" [[package]] name = "icu_provider" @@ -2060,9 +2060,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "reqwest" -version = "0.12.24" +version = "0.12.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" +checksum = "b6eff9328d40131d43bd911d42d79eb6a47312002a4daefc9e37f17e74a7701a" dependencies = [ "base64 0.22.1", "bytes", @@ -2701,9 +2701,9 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf146f99d442e8e68e585f5d798ccd3cad9a7835b917e09728880a862706456" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ "bitflags 2.10.0", "bytes", From 8e925470ceafac6cc309992cf58e5e2722aef7c0 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 30 Nov 2025 12:11:41 -0500 Subject: [PATCH 3/6] Pull out some functions before switching to tokio This should have absolutely no change in behavior, but it will make the commit doing the switch easier to review. 
--- ofborg/src/bin/github-webhook-receiver.rs | 225 +++++++++++----------- ofborg/src/bin/logapi.rs | 181 +++++++++-------- ofborg/src/bin/stats.rs | 27 ++- 3 files changed, 229 insertions(+), 204 deletions(-) diff --git a/ofborg/src/bin/github-webhook-receiver.rs b/ofborg/src/bin/github-webhook-receiver.rs index 7e911126..626e2313 100644 --- a/ofborg/src/bin/github-webhook-receiver.rs +++ b/ofborg/src/bin/github-webhook-receiver.rs @@ -1,7 +1,6 @@ use std::env; use std::error::Error; use std::io::Read as _; -use std::sync::Arc; #[macro_use] extern crate hyper; @@ -86,6 +85,117 @@ fn setup_amqp(chan: &mut Channel) -> Result<(), Box> { Ok(()) } +fn handle_request(mut req: Request, mut res: Response, webhook_secret: &str, chan: &Channel) { + // HTTP 405 + if req.method != hyper::Post { + *res.status_mut() = StatusCode::MethodNotAllowed; + return; + } + let hdr = req.headers.clone(); + + // Read body + let mut raw = Vec::new(); + if req.read_to_end(&mut raw).is_err() { + warn!("Failed to read body from client"); + *res.status_mut() = StatusCode::InternalServerError; + return; + } + let raw = raw.as_slice(); + + // Validate signature + { + let Some(sig) = hdr.get::() else { + *res.status_mut() = StatusCode::BadRequest; + let _ = res.send(b"Missing signature header"); + return; + }; + let mut components = sig.splitn(2, '='); + let Some(algo) = components.next() else { + *res.status_mut() = StatusCode::BadRequest; + let _ = res.send(b"Signature hash method missing"); + return; + }; + let Some(hash) = components.next() else { + *res.status_mut() = StatusCode::BadRequest; + let _ = res.send(b"Signature hash missing"); + return; + }; + let Ok(hash) = hex::decode(hash) else { + *res.status_mut() = StatusCode::BadRequest; + let _ = res.send(b"Invalid signature hash hex"); + return; + }; + + if algo != "sha256" { + *res.status_mut() = StatusCode::BadRequest; + let _ = res.send(b"Invalid signature hash method"); + return; + } + + let Ok(mut mac) = 
Hmac::::new_from_slice(webhook_secret.as_bytes()) else { + *res.status_mut() = StatusCode::InternalServerError; + error!("Unable to create HMAC from secret"); + return; + }; + mac.update(raw); + if mac.verify_slice(hash.as_slice()).is_err() { + *res.status_mut() = StatusCode::BadRequest; + let _ = res.send(b"Signature verification failed"); + return; + } + } + + // Parse body + let Some(ct) = hdr.get::() else { + *res.status_mut() = StatusCode::BadRequest; + let _ = res.send(b"No Content-Type header passed"); + return; + }; + if ct + != &ContentType(mime::Mime( + mime::TopLevel::Application, + mime::SubLevel::Json, + Vec::new(), + )) + { + *res.status_mut() = StatusCode::BadRequest; + let _ = res.send(b"Content-Type is not application/json. Webhook misconfigured?"); + return; + } + let input = match serde_json::from_slice::(raw) { + Ok(i) => i, + Err(e) => { + *res.status_mut() = StatusCode::BadRequest; + let _ = res.send(b"Invalid JSON"); + error!("Invalid JSON received: {e}"); + return; + } + }; + + // Build routing key + let Some(event_type) = hdr.get::() else { + *res.status_mut() = StatusCode::BadRequest; + let _ = res.send(b"Missing event type"); + return; + }; + let routing_key = format!("{event_type}.{}", input.repository.full_name.to_lowercase()); + + // Publish message + let _confirmation = task::block_on(async { + chan.basic_publish( + "github-events", + &routing_key, + BasicPublishOptions::default(), + raw, + BasicProperties::default() + .with_content_type("application/json".into()) + .with_delivery_mode(2), // persistent + ) + .await + }); + *res.status_mut() = StatusCode::NoContent; +} + fn main() -> Result<(), Box> { ofborg::setup_log(); @@ -99,7 +209,7 @@ fn main() -> Result<(), Box> { let webhook_secret = std::fs::read_to_string(cfg.webhook_secret_file) .expect("Unable to read webhook secret file"); - let webhook_secret = Arc::new(webhook_secret.trim().to_string()); + let webhook_secret = webhook_secret.trim().to_string(); let conn = 
easylapin::from_config(&cfg.rabbitmq)?; let mut chan = task::block_on(conn.create_channel())?; @@ -111,115 +221,8 @@ fn main() -> Result<(), Box> { .unwrap_or(1); info!("Will listen on {} with {threads} threads", cfg.listen); Server::http(cfg.listen)?.handle_threads( - move |mut req: Request, mut res: Response| { - // HTTP 405 - if req.method != hyper::Post { - *res.status_mut() = StatusCode::MethodNotAllowed; - return; - } - let hdr = req.headers.clone(); - - // Read body - let mut raw = Vec::new(); - if req.read_to_end(&mut raw).is_err() { - warn!("Failed to read body from client"); - *res.status_mut() = StatusCode::InternalServerError; - return; - } - let raw = raw.as_slice(); - - // Validate signature - { - let Some(sig) = hdr.get::() else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Missing signature header"); - return; - }; - let mut components = sig.splitn(2, '='); - let Some(algo) = components.next() else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Signature hash method missing"); - return; - }; - let Some(hash) = components.next() else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Signature hash missing"); - return; - }; - let Ok(hash) = hex::decode(hash) else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Invalid signature hash hex"); - return; - }; - - if algo != "sha256" { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Invalid signature hash method"); - return; - } - - let Ok(mut mac) = Hmac::::new_from_slice(webhook_secret.as_bytes()) else { - *res.status_mut() = StatusCode::InternalServerError; - error!("Unable to create HMAC from secret"); - return; - }; - mac.update(raw); - if mac.verify_slice(hash.as_slice()).is_err() { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Signature verification failed"); - return; - } - } - - // Parse body - let Some(ct) = hdr.get::() else { - *res.status_mut() = StatusCode::BadRequest; - 
let _ = res.send(b"No Content-Type header passed"); - return; - }; - if ct - != &ContentType(mime::Mime( - mime::TopLevel::Application, - mime::SubLevel::Json, - Vec::new(), - )) - { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Content-Type is not application/json. Webhook misconfigured?"); - return; - } - let input = match serde_json::from_slice::(raw) { - Ok(i) => i, - Err(e) => { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Invalid JSON"); - error!("Invalid JSON received: {e}"); - return; - } - }; - - // Build routing key - let Some(event_type) = hdr.get::() else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Missing event type"); - return; - }; - let routing_key = format!("{event_type}.{}", input.repository.full_name.to_lowercase()); - - // Publish message - let _confirmation = task::block_on(async { - chan.basic_publish( - "github-events", - &routing_key, - BasicPublishOptions::default(), - raw, - BasicProperties::default() - .with_content_type("application/json".into()) - .with_delivery_mode(2), // persistent - ) - .await - }); - *res.status_mut() = StatusCode::NoContent; + move |req: Request, res: Response| { + handle_request(req, res, &webhook_secret, &chan); }, threads, )?; diff --git a/ofborg/src/bin/logapi.rs b/ofborg/src/bin/logapi.rs index 7797c59d..966def22 100644 --- a/ofborg/src/bin/logapi.rs +++ b/ofborg/src/bin/logapi.rs @@ -21,6 +21,96 @@ struct LogResponse { attempts: HashMap, } +#[derive(Clone)] +struct LogApiConfig { + logs_path: String, + serve_root: String, +} + +fn handle_request(req: Request, mut res: Response, cfg: &LogApiConfig) { + if req.method != hyper::Get { + *res.status_mut() = StatusCode::MethodNotAllowed; + return; + } + + let uri = req.uri.to_string(); + let Some(reqd) = uri.strip_prefix("/logs/").map(ToOwned::to_owned) else { + *res.status_mut() = StatusCode::NotFound; + let _ = res.send(b"invalid uri"); + return; + }; + let path: PathBuf = [&cfg.logs_path, 
&reqd].iter().collect(); + let Ok(path) = std::fs::canonicalize(&path) else { + *res.status_mut() = StatusCode::NotFound; + let _ = res.send(b"absent"); + return; + }; + let Ok(iter) = std::fs::read_dir(path) else { + *res.status_mut() = StatusCode::NotFound; + let _ = res.send(b"non dir"); + return; + }; + + let mut attempts = HashMap::::new(); + for e in iter { + let Ok(e) = e else { continue }; + let e_metadata = e.metadata(); + if e_metadata.as_ref().map(|v| v.is_dir()).unwrap_or(true) { + *res.status_mut() = StatusCode::InternalServerError; + let _ = res.send(b"dir found"); + return; + } + + if e_metadata.as_ref().map(|v| v.is_file()).unwrap_or_default() { + let Ok(file_name) = e.file_name().into_string() else { + warn!("entry filename is not a utf-8 string: {:?}", e.file_name()); + continue; + }; + + if file_name.ends_with(".metadata.json") || file_name.ends_with(".result.json") { + let Ok(file) = std::fs::File::open(e.path()) else { + warn!("could not open file: {file_name}"); + continue; + }; + let Ok(json) = serde_json::from_reader::<_, serde_json::Value>(file) else { + warn!("file is not a valid json file: {file_name}"); + continue; + }; + let Some(attempt_id) = json + .get("attempt_id") + .and_then(|v| v.as_str()) + .map(ToOwned::to_owned) + else { + warn!("attempt_id not found in file: {file_name}"); + continue; + }; + let attempt_obj = attempts.entry(attempt_id).or_default(); + if file_name.ends_with(".metadata.json") { + attempt_obj.metadata = Some(json); + } else { + attempt_obj.result = Some(json); + } + } else { + let attempt_obj = attempts.entry(file_name.clone()).or_default(); + attempt_obj.log_url = Some(format!("{}/{reqd}/{file_name}", &cfg.serve_root)); + } + } + } + + *res.status_mut() = StatusCode::Ok; + res.headers_mut() + .set::(hyper::header::ContentType(mime::Mime( + mime::TopLevel::Application, + mime::SubLevel::Json, + Vec::new(), + ))); + let _ = res.send( + serde_json::to_string(&LogResponse { attempts }) + .unwrap_or_default() + 
.as_bytes(), + ); +} + fn main() -> Result<(), Box> { ofborg::setup_log(); @@ -32,95 +122,18 @@ fn main() -> Result<(), Box> { panic!(); }; + let api_cfg = LogApiConfig { + logs_path: cfg.logs_path, + serve_root: cfg.serve_root, + }; + let threads = std::thread::available_parallelism() .map(|x| x.get()) .unwrap_or(1); info!("Will listen on {} with {threads} threads", cfg.listen); Server::http(cfg.listen)?.handle_threads( - move |req: Request, mut res: Response| { - if req.method != hyper::Get { - *res.status_mut() = StatusCode::MethodNotAllowed; - return; - } - - let uri = req.uri.to_string(); - let Some(reqd) = uri.strip_prefix("/logs/").map(ToOwned::to_owned) else { - *res.status_mut() = StatusCode::NotFound; - let _ = res.send(b"invalid uri"); - return; - }; - let path: PathBuf = [&cfg.logs_path, &reqd].iter().collect(); - let Ok(path) = std::fs::canonicalize(&path) else { - *res.status_mut() = StatusCode::NotFound; - let _ = res.send(b"absent"); - return; - }; - let Ok(iter) = std::fs::read_dir(path) else { - *res.status_mut() = StatusCode::NotFound; - let _ = res.send(b"non dir"); - return; - }; - - let mut attempts = HashMap::::new(); - for e in iter { - let Ok(e) = e else { continue }; - let e_metadata = e.metadata(); - if e_metadata.as_ref().map(|v| v.is_dir()).unwrap_or(true) { - *res.status_mut() = StatusCode::InternalServerError; - let _ = res.send(b"dir found"); - return; - } - - if e_metadata.as_ref().map(|v| v.is_file()).unwrap_or_default() { - let Ok(file_name) = e.file_name().into_string() else { - warn!("entry filename is not a utf-8 string: {:?}", e.file_name()); - continue; - }; - - if file_name.ends_with(".metadata.json") || file_name.ends_with(".result.json") - { - let Ok(file) = std::fs::File::open(e.path()) else { - warn!("could not open file: {file_name}"); - continue; - }; - let Ok(json) = serde_json::from_reader::<_, serde_json::Value>(file) else { - warn!("file is not a valid json file: {file_name}"); - continue; - }; - let 
Some(attempt_id) = json - .get("attempt_id") - .and_then(|v| v.as_str()) - .map(ToOwned::to_owned) - else { - warn!("attempt_id not found in file: {file_name}"); - continue; - }; - let attempt_obj = attempts.entry(attempt_id).or_default(); - if file_name.ends_with(".metadata.json") { - attempt_obj.metadata = Some(json); - } else { - attempt_obj.result = Some(json); - } - } else { - let attempt_obj = attempts.entry(file_name.clone()).or_default(); - attempt_obj.log_url = - Some(format!("{}/{reqd}/{file_name}", &cfg.serve_root)); - } - } - } - - *res.status_mut() = StatusCode::Ok; - res.headers_mut() - .set::(hyper::header::ContentType(mime::Mime( - mime::TopLevel::Application, - mime::SubLevel::Json, - Vec::new(), - ))); - let _ = res.send( - serde_json::to_string(&LogResponse { attempts }) - .unwrap_or_default() - .as_bytes(), - ); + move |req: Request, res: Response| { + handle_request(req, res, &api_cfg); }, threads, )?; diff --git a/ofborg/src/bin/stats.rs b/ofborg/src/bin/stats.rs index 779db455..b24958c4 100644 --- a/ofborg/src/bin/stats.rs +++ b/ofborg/src/bin/stats.rs @@ -1,5 +1,6 @@ use std::env; use std::error::Error; +use std::sync::Arc; use std::thread; use async_std::task; @@ -9,6 +10,17 @@ use tracing::{error, info}; use ofborg::easyamqp::{ChannelExt, ConsumerExt}; use ofborg::{config, easyamqp, easylapin, stats, tasks}; +fn run_http_server(metrics: Arc) { + let addr = "0.0.0.0:9898"; + info!("HTTP server listening on {}", addr); + Server::http(addr) + .expect("Failed to bind HTTP server") + .handle(move |_: Request, res: Response| { + res.send(metrics.prometheus_output().as_bytes()).unwrap(); + }) + .expect("Failed to start HTTP server"); +} + fn main() -> Result<(), Box> { ofborg::setup_log(); @@ -28,8 +40,8 @@ fn main() -> Result<(), Box> { let events = stats::RabbitMq::from_lapin(&cfg.whoami(), task::block_on(conn.create_channel())?); - let metrics = stats::MetricCollector::new(); - let collector = 
tasks::statscollector::StatCollectorWorker::new(events, metrics.clone()); + let metrics = Arc::new(stats::MetricCollector::new()); + let collector = tasks::statscollector::StatCollectorWorker::new(events, (*metrics).clone()); chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "stats".to_owned(), @@ -70,13 +82,10 @@ fn main() -> Result<(), Box> { }, )?; - thread::spawn(|| { - let addr = "0.0.0.0:9898"; - info!("listening addr {:?}", addr); - Server::http(addr)?.handle(move |_: Request, res: Response| { - res.send(metrics.prometheus_output().as_bytes()).unwrap(); - })?; - Ok::<_, Box>(()) + // Spawn HTTP server in a separate thread + let metrics_clone = metrics.clone(); + thread::spawn(move || { + run_http_server(metrics_clone); }); info!("Fetching jobs from {}", &queue_name); From 054cbf26759d4aa8a9c1d4538994de5a1b905314 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 10 Dec 2025 16:05:09 -0500 Subject: [PATCH 4/6] Wrap `task::block_on` in preparation for switching to Tokio This will lessen the diff. 
--- ofborg/src/bin/build-faker.rs | 4 ++-- ofborg/src/bin/builder.rs | 9 +++++---- ofborg/src/bin/evaluation-filter.rs | 6 +++--- ofborg/src/bin/github-comment-filter.rs | 6 +++--- ofborg/src/bin/github-comment-poster.rs | 6 +++--- ofborg/src/bin/github-webhook-receiver.rs | 8 ++++---- ofborg/src/bin/log-message-collector.rs | 6 +++--- ofborg/src/bin/mass-rebuilder.rs | 8 ++++---- ofborg/src/bin/stats.rs | 8 ++++---- ofborg/src/commitstatus.rs | 2 +- ofborg/src/config.rs | 2 +- ofborg/src/easylapin.rs | 24 +++++++++++++---------- ofborg/src/lib.rs | 6 ++++++ ofborg/src/stats.rs | 3 +-- ofborg/src/tasks/eval/nixpkgs.rs | 2 +- ofborg/src/tasks/evaluate.rs | 12 ++++++------ ofborg/src/tasks/githubcommentfilter.rs | 2 +- ofborg/src/tasks/githubcommentposter.rs | 2 +- 18 files changed, 63 insertions(+), 53 deletions(-) diff --git a/ofborg/src/bin/build-faker.rs b/ofborg/src/bin/build-faker.rs index e5f03505..bf510e5b 100644 --- a/ofborg/src/bin/build-faker.rs +++ b/ofborg/src/bin/build-faker.rs @@ -1,10 +1,10 @@ use std::env; use std::error::Error; -use async_std::task; use lapin::BasicProperties; use lapin::message::Delivery; +use ofborg::block_on; use ofborg::commentparser; use ofborg::config; use ofborg::easylapin; @@ -19,7 +19,7 @@ fn main() -> Result<(), Box> { let cfg = config::load(arg.as_ref()); let conn = easylapin::from_config(&cfg.builder.unwrap().rabbitmq)?; - let mut chan = task::block_on(conn.create_channel())?; + let mut chan = block_on(conn.create_channel())?; let repo_msg = Repo { clone_url: "https://github.com/nixos/ofborg.git".to_owned(), diff --git a/ofborg/src/bin/builder.rs b/ofborg/src/bin/builder.rs index e2d7a7a6..25778a08 100644 --- a/ofborg/src/bin/builder.rs +++ b/ofborg/src/bin/builder.rs @@ -2,8 +2,9 @@ use std::env; use std::error::Error; use std::path::Path; -use async_std::task::{self, JoinHandle}; +use async_std::task::{JoinHandle, spawn}; use futures_util::future; +use ofborg::block_on; use tracing::{error, info, warn}; use 
ofborg::easyamqp::{self, ChannelExt, ConsumerExt}; @@ -31,7 +32,7 @@ fn main() -> Result<(), Box> { handles.push(handle_ext); } - task::block_on(future::join_all(handles)); + block_on(future::join_all(handles)); drop(conn); // Close connection. info!("Closed the session... EOF"); @@ -43,7 +44,7 @@ fn create_handle( cfg: &config::Config, system: String, ) -> Result, Box> { - let mut chan = task::block_on(conn.create_channel())?; + let mut chan = block_on(conn.create_channel())?; let cloner = checkout::cached_cloner(Path::new(&cfg.checkout.root)); let nix = cfg.nix().with_system(system.clone()); @@ -104,5 +105,5 @@ fn create_handle( )?; info!("Fetching jobs from {}", &queue_name); - Ok(task::spawn(handle)) + Ok(spawn(handle)) } diff --git a/ofborg/src/bin/evaluation-filter.rs b/ofborg/src/bin/evaluation-filter.rs index d391f237..8c8b41a9 100644 --- a/ofborg/src/bin/evaluation-filter.rs +++ b/ofborg/src/bin/evaluation-filter.rs @@ -1,7 +1,7 @@ use std::env; use std::error::Error; -use async_std::task; +use ofborg::block_on; use tracing::{error, info}; use ofborg::config; @@ -23,7 +23,7 @@ fn main() -> Result<(), Box> { }; let conn = easylapin::from_config(&filter_cfg.rabbitmq)?; - let mut chan = task::block_on(conn.create_channel())?; + let mut chan = block_on(conn.create_channel())?; chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "github-events".to_owned(), @@ -74,7 +74,7 @@ fn main() -> Result<(), Box> { )?; info!("Fetching jobs from {}", &queue_name); - task::block_on(handle); + block_on(handle); drop(conn); // Close connection. info!("Closed the session... 
EOF"); diff --git a/ofborg/src/bin/github-comment-filter.rs b/ofborg/src/bin/github-comment-filter.rs index 1ddf045c..95a17dfb 100644 --- a/ofborg/src/bin/github-comment-filter.rs +++ b/ofborg/src/bin/github-comment-filter.rs @@ -1,7 +1,7 @@ use std::env; use std::error::Error; -use async_std::task; +use ofborg::block_on; use ofborg::systems::System; use tracing::{error, info}; @@ -24,7 +24,7 @@ fn main() -> Result<(), Box> { }; let conn = easylapin::from_config(&filter_cfg.rabbitmq)?; - let mut chan = task::block_on(conn.create_channel())?; + let mut chan = block_on(conn.create_channel())?; chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "github-events".to_owned(), @@ -98,7 +98,7 @@ fn main() -> Result<(), Box> { )?; info!("Fetching jobs from {}", &queue_name); - task::block_on(handle); + block_on(handle); drop(conn); // Close connection. info!("Closed the session... EOF"); diff --git a/ofborg/src/bin/github-comment-poster.rs b/ofborg/src/bin/github-comment-poster.rs index cf183d3d..579fddbd 100644 --- a/ofborg/src/bin/github-comment-poster.rs +++ b/ofborg/src/bin/github-comment-poster.rs @@ -1,7 +1,7 @@ use std::env; use std::error::Error; -use async_std::task; +use ofborg::block_on; use tracing::{error, info}; use ofborg::config; @@ -23,7 +23,7 @@ fn main() -> Result<(), Box> { }; let conn = easylapin::from_config(&poster_cfg.rabbitmq)?; - let mut chan = task::block_on(conn.create_channel())?; + let mut chan = block_on(conn.create_channel())?; chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "build-results".to_owned(), @@ -63,7 +63,7 @@ fn main() -> Result<(), Box> { }, )?; - task::block_on(handle); + block_on(handle); drop(conn); // Close connection. info!("Closed the session... 
EOF"); diff --git a/ofborg/src/bin/github-webhook-receiver.rs b/ofborg/src/bin/github-webhook-receiver.rs index 626e2313..22a56845 100644 --- a/ofborg/src/bin/github-webhook-receiver.rs +++ b/ofborg/src/bin/github-webhook-receiver.rs @@ -4,7 +4,6 @@ use std::io::Read as _; #[macro_use] extern crate hyper; -use async_std::task; use hmac::{Hmac, Mac}; use hyper::header::ContentType; use hyper::mime; @@ -14,6 +13,7 @@ use hyper::{ }; use lapin::options::BasicPublishOptions; use lapin::{BasicProperties, Channel}; +use ofborg::block_on; use ofborg::ghevent::GenericWebhook; use ofborg::{config, easyamqp, easyamqp::ChannelExt, easylapin}; use sha2::Sha256; @@ -181,7 +181,7 @@ fn handle_request(mut req: Request, mut res: Response, webhook_secret: &str, cha let routing_key = format!("{event_type}.{}", input.repository.full_name.to_lowercase()); // Publish message - let _confirmation = task::block_on(async { + let _confirmation = block_on(async { chan.basic_publish( "github-events", &routing_key, @@ -212,10 +212,10 @@ fn main() -> Result<(), Box> { let webhook_secret = webhook_secret.trim().to_string(); let conn = easylapin::from_config(&cfg.rabbitmq)?; - let mut chan = task::block_on(conn.create_channel())?; + let mut chan = block_on(conn.create_channel())?; setup_amqp(&mut chan)?; - //let events = stats::RabbitMq::from_lapin(&cfg.whoami(), task::block_on(conn.create_channel())?); + //let events = stats::RabbitMq::from_lapin(&cfg.whoami(), block_on(conn.create_channel())?); let threads = std::thread::available_parallelism() .map(|x| x.get()) .unwrap_or(1); diff --git a/ofborg/src/bin/log-message-collector.rs b/ofborg/src/bin/log-message-collector.rs index fec6cb56..19bfadb9 100644 --- a/ofborg/src/bin/log-message-collector.rs +++ b/ofborg/src/bin/log-message-collector.rs @@ -2,7 +2,7 @@ use std::env; use std::error::Error; use std::path::PathBuf; -use async_std::task; +use ofborg::block_on; use tracing::{error, info}; use ofborg::config; @@ -24,7 +24,7 @@ fn main() -> 
Result<(), Box> { }; let conn = easylapin::from_config(&collector_cfg.rabbitmq)?; - let mut chan = task::block_on(conn.create_channel())?; + let mut chan = block_on(conn.create_channel())?; chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "logs".to_owned(), @@ -70,7 +70,7 @@ fn main() -> Result<(), Box> { )?; info!("Fetching jobs from {}", &queue_name); - task::block_on(handle); + block_on(handle); drop(conn); // Close connection. info!("Closed the session... EOF"); diff --git a/ofborg/src/bin/mass-rebuilder.rs b/ofborg/src/bin/mass-rebuilder.rs index 902cb940..f96e45e3 100644 --- a/ofborg/src/bin/mass-rebuilder.rs +++ b/ofborg/src/bin/mass-rebuilder.rs @@ -2,7 +2,7 @@ use std::env; use std::error::Error; use std::path::Path; -use async_std::task; +use ofborg::block_on; use tracing::{error, info}; use ofborg::checkout; @@ -26,12 +26,12 @@ fn main() -> Result<(), Box> { }; let conn = easylapin::from_config(&rebuilder_cfg.rabbitmq)?; - let mut chan = task::block_on(conn.create_channel())?; + let mut chan = block_on(conn.create_channel())?; let root = Path::new(&cfg.checkout.root); let cloner = checkout::cached_cloner(&root.join(cfg.runner.instance.to_string())); - let events = stats::RabbitMq::from_lapin(&cfg.whoami(), task::block_on(conn.create_channel())?); + let events = stats::RabbitMq::from_lapin(&cfg.whoami(), block_on(conn.create_channel())?); let queue_name = String::from("mass-rebuild-check-jobs"); chan.declare_queue(easyamqp::QueueConfig { @@ -62,7 +62,7 @@ fn main() -> Result<(), Box> { )?; info!("Fetching jobs from {}", queue_name); - task::block_on(handle); + block_on(handle); drop(conn); // Close connection. info!("Closed the session... 
EOF"); diff --git a/ofborg/src/bin/stats.rs b/ofborg/src/bin/stats.rs index b24958c4..848852d4 100644 --- a/ofborg/src/bin/stats.rs +++ b/ofborg/src/bin/stats.rs @@ -3,8 +3,8 @@ use std::error::Error; use std::sync::Arc; use std::thread; -use async_std::task; use hyper::server::{Request, Response, Server}; +use ofborg::block_on; use tracing::{error, info}; use ofborg::easyamqp::{ChannelExt, ConsumerExt}; @@ -36,9 +36,9 @@ fn main() -> Result<(), Box> { let conn = easylapin::from_config(&stats_cfg.rabbitmq)?; - let mut chan = task::block_on(conn.create_channel())?; + let mut chan = block_on(conn.create_channel())?; - let events = stats::RabbitMq::from_lapin(&cfg.whoami(), task::block_on(conn.create_channel())?); + let events = stats::RabbitMq::from_lapin(&cfg.whoami(), block_on(conn.create_channel())?); let metrics = Arc::new(stats::MetricCollector::new()); let collector = tasks::statscollector::StatCollectorWorker::new(events, (*metrics).clone()); @@ -89,7 +89,7 @@ fn main() -> Result<(), Box> { }); info!("Fetching jobs from {}", &queue_name); - task::block_on(handle); + block_on(handle); drop(conn); // Close connection. info!("Closed the session... 
EOF"); diff --git a/ofborg/src/commitstatus.rs b/ofborg/src/commitstatus.rs index 12c4b350..b982d904 100644 --- a/ofborg/src/commitstatus.rs +++ b/ofborg/src/commitstatus.rs @@ -57,7 +57,7 @@ impl CommitStatus { } else { self.description.clone() }; - async_std::task::block_on( + crate::block_on( self.api .create( self.sha.as_ref(), diff --git a/ofborg/src/config.rs b/ofborg/src/config.rs index 3300e925..fd4cbd45 100644 --- a/ofborg/src/config.rs +++ b/ofborg/src/config.rs @@ -339,7 +339,7 @@ impl GithubAppVendingMachine { let lookup_gh = Github::new(useragent, Credentials::JWT(jwt)).unwrap(); - match async_std::task::block_on(lookup_gh.app().find_repo_installation(owner, repo)) { + match crate::block_on(lookup_gh.app().find_repo_installation(owner, repo)) { Ok(install_id) => { debug!("Received install ID {:?}", install_id); Some(install_id.id) diff --git a/ofborg/src/easylapin.rs b/ofborg/src/easylapin.rs index f53dc6d4..4daf4fee 100644 --- a/ofborg/src/easylapin.rs +++ b/ofborg/src/easylapin.rs @@ -11,7 +11,6 @@ use crate::worker::{Action, SimpleWorker}; use async_std::future::Future; use async_std::stream::StreamExt; -use async_std::task; use lapin::message::Delivery; use lapin::options::{ BasicAckOptions, BasicConsumeOptions, BasicNackOptions, BasicPublishOptions, BasicQosOptions, @@ -31,7 +30,7 @@ pub fn from_config(cfg: &RabbitMqConfig) -> Result { client_properties: props, ..Default::default() }; - task::block_on(Connection::connect(&cfg.as_uri()?, opts)) + crate::block_on(Connection::connect(&cfg.as_uri()?, opts)) } impl ChannelExt for Channel { @@ -51,7 +50,12 @@ impl ChannelExt for Channel { ExchangeType::Fanout => ExchangeKind::Fanout, _ => panic!("exchange kind"), }; - task::block_on(self.exchange_declare(&config.exchange, kind, opts, FieldTable::default()))?; + crate::block_on(self.exchange_declare( + &config.exchange, + kind, + opts, + FieldTable::default(), + ))?; Ok(()) } @@ -64,7 +68,7 @@ impl ChannelExt for Channel { nowait: config.no_wait, }; - 
task::block_on(self.queue_declare(&config.queue, opts, FieldTable::default()))?; + crate::block_on(self.queue_declare(&config.queue, opts, FieldTable::default()))?; Ok(()) } @@ -73,7 +77,7 @@ impl ChannelExt for Channel { nowait: config.no_wait, }; - task::block_on(self.queue_bind( + crate::block_on(self.queue_bind( &config.queue, &config.exchange, &config.routing_key.unwrap_or_else(|| "".into()), @@ -89,7 +93,7 @@ impl<'a, W: SimpleWorker + 'a> ConsumerExt<'a, W> for Channel { type Handle = Pin + 'a>>; fn consume(self, mut worker: W, config: ConsumeConfig) -> Result { - let mut consumer = task::block_on(self.basic_consume( + let mut consumer = crate::block_on(self.basic_consume( &config.queue, &config.consumer_tag, BasicConsumeOptions::default(), @@ -127,7 +131,7 @@ impl<'a, W: SimpleWorker + 'a> ConsumerExt<'a, W> for WorkerChannel { type Handle = Pin + 'a>>; fn consume(self, worker: W, config: ConsumeConfig) -> Result { - task::block_on(self.0.basic_qos(1, BasicQosOptions::default()))?; + crate::block_on(self.0.basic_qos(1, BasicQosOptions::default()))?; self.0.consume(worker, config) } } @@ -145,7 +149,7 @@ impl<'a> ChannelNotificationReceiver<'a> { impl NotificationReceiver for ChannelNotificationReceiver<'_> { fn tell(&mut self, action: Action) { - task::block_on(action_deliver(self.channel, self.deliver, action)) + crate::block_on(action_deliver(self.channel, self.deliver, action)) .expect("action deliver failure"); } } @@ -159,9 +163,9 @@ impl<'a, W: SimpleNotifyWorker + 'a + Send> ConsumerExt<'a, W> for NotifyChannel type Handle = Pin + 'a + Send>>; fn consume(self, worker: W, config: ConsumeConfig) -> Result { - task::block_on(self.0.basic_qos(1, BasicQosOptions::default()))?; + crate::block_on(self.0.basic_qos(1, BasicQosOptions::default()))?; - let mut consumer = task::block_on(self.0.basic_consume( + let mut consumer = crate::block_on(self.0.basic_consume( &config.queue, &config.consumer_tag, BasicConsumeOptions::default(), diff --git 
a/ofborg/src/lib.rs b/ofborg/src/lib.rs index 51d00f9f..21374903 100644 --- a/ofborg/src/lib.rs +++ b/ofborg/src/lib.rs @@ -9,6 +9,7 @@ extern crate nom; use std::env; +use std::future::Future; use tracing_subscriber::EnvFilter; use tracing_subscriber::prelude::*; @@ -108,3 +109,8 @@ pub fn setup_log() { tracing::info!("Logging configured"); } + +/// Block on a future from synchronous code. +pub fn block_on(f: F) -> F::Output { + async_std::task::block_on(f) +} diff --git a/ofborg/src/stats.rs b/ofborg/src/stats.rs index 618a9013..08deb27e 100644 --- a/ofborg/src/stats.rs +++ b/ofborg/src/stats.rs @@ -1,4 +1,3 @@ -use async_std::task; use lapin::options::BasicPublishOptions; include!(concat!(env!("OUT_DIR"), "/events.rs")); @@ -36,7 +35,7 @@ impl RabbitMq { impl SysEvents for RabbitMq { fn notify(&mut self, event: Event) { let props = lapin::BasicProperties::default().with_content_type("application/json".into()); - task::block_on(async { + crate::block_on(async { let _confirmaton = self .channel .basic_publish( diff --git a/ofborg/src/tasks/eval/nixpkgs.rs b/ofborg/src/tasks/eval/nixpkgs.rs index b0140c0e..52f85fae 100644 --- a/ofborg/src/tasks/eval/nixpkgs.rs +++ b/ofborg/src/tasks/eval/nixpkgs.rs @@ -49,7 +49,7 @@ impl<'a> NixpkgsStrategy<'a> { } fn tag_from_title(&self) { - let title = match async_std::task::block_on(self.issue_ref.get()) { + let title = match crate::block_on(self.issue_ref.get()) { Ok(issue) => issue.title.to_lowercase(), Err(_) => return, }; diff --git a/ofborg/src/tasks/evaluate.rs b/ofborg/src/tasks/evaluate.rs index 51b80e56..dc40e440 100644 --- a/ofborg/src/tasks/evaluate.rs +++ b/ofborg/src/tasks/evaluate.rs @@ -158,7 +158,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { &self.job.pr.number, &self.job.pr.head_sha, &description ); - async_std::task::block_on( + crate::block_on( self.repo .statuses() .create(&self.job.pr.head_sha, &builder.build()) @@ -231,7 +231,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { let 
issue_ref = repo.issue(job.pr.number); let auto_schedule_build_archs: Vec; - match async_std::task::block_on(issue_ref.get()) { + match crate::block_on(issue_ref.get()) { Ok(iss) => { if iss.state == "closed" { self.events.notify(Event::IssueAlreadyClosed); @@ -452,7 +452,7 @@ fn schedule_builds( pub fn update_labels(issueref: &hubcaps::issues::IssueRef, add: &[String], remove: &[String]) { let l = issueref.labels(); - let issue = async_std::task::block_on(issueref.get()).expect("Failed to get issue"); + let issue = crate::block_on(issueref.get()).expect("Failed to get issue"); let existing: Vec = issue.labels.iter().map(|l| l.name.clone()).collect(); @@ -472,11 +472,11 @@ pub fn update_labels(issueref: &hubcaps::issues::IssueRef, add: &[String], remov info!("Labeling issue #{issue}: + {to_add:?} , - {to_remove:?}, = {existing:?}"); - async_std::task::block_on(l.add(to_add.clone())) + crate::block_on(l.add(to_add.clone())) .unwrap_or_else(|err| panic!("Failed to add labels {to_add:?} to issue #{issue}: {err:?}")); for label in to_remove { - async_std::task::block_on(l.remove(&label)).unwrap_or_else(|err| { + crate::block_on(l.remove(&label)).unwrap_or_else(|err| { panic!("Failed to remove label {label:?} from issue #{issue}: {err:?}") }); } @@ -497,7 +497,7 @@ pub fn get_prefix( statuses: hubcaps::statuses::Statuses, sha: &str, ) -> Result<&str, CommitStatusError> { - if async_std::task::block_on(statuses.list(sha))? + if crate::block_on(statuses.list(sha))? 
.iter() .any(|s| s.context.starts_with("grahamcofborg-")) { diff --git a/ofborg/src/tasks/githubcommentfilter.rs b/ofborg/src/tasks/githubcommentfilter.rs index bd23b234..fd7b1ecb 100644 --- a/ofborg/src/tasks/githubcommentfilter.rs +++ b/ofborg/src/tasks/githubcommentfilter.rs @@ -65,7 +65,7 @@ impl worker::SimpleWorker for GitHubCommentWorker { let instructions = commentparser::parse(&job.comment.body); info!("Instructions: {:?}", instructions); - let pr = async_std::task::block_on( + let pr = crate::block_on( self.github .repo( job.repository.owner.login.clone(), diff --git a/ofborg/src/tasks/githubcommentposter.rs b/ofborg/src/tasks/githubcommentposter.rs index 8de1613b..c4381ce1 100644 --- a/ofborg/src/tasks/githubcommentposter.rs +++ b/ofborg/src/tasks/githubcommentposter.rs @@ -78,7 +78,7 @@ impl worker::SimpleWorker for GitHubCommentPoster { ); debug!("{:?}", check); - let check_create_attempt = async_std::task::block_on( + let check_create_attempt = crate::block_on( self.github_vend .for_repo(&repo.owner, &repo.name) .unwrap() From 5ceda763da0baa11856ccb6cff77fff12ea28307 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 30 Nov 2025 12:06:24 -0500 Subject: [PATCH 5/6] Bump manually deps, async-std -> tokio The LLM did this, and I cannot (before or after) run all the tests without things hanging, so we should procede with caution. 
--- Cargo.lock | 472 ++++++---------------- ofborg/Cargo.toml | 9 +- ofborg/src/bin/build-faker.rs | 5 +- ofborg/src/bin/builder.rs | 10 +- ofborg/src/bin/github-webhook-receiver.rs | 257 +++++++----- ofborg/src/bin/logapi.rs | 112 ++--- ofborg/src/bin/stats.rs | 61 ++- ofborg/src/commitstatus.rs | 3 +- ofborg/src/easylapin.rs | 3 +- ofborg/src/lib.rs | 16 +- 10 files changed, 402 insertions(+), 546 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 33ed7add..fec5c0be 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -66,8 +66,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f89f8273826a676282208e5af38461a07fe939def57396af6ad5997fcf56577d" dependencies = [ "amq-protocol-types", - "percent-encoding 2.3.2", - "url 2.5.7", + "percent-encoding", + "url", ] [[package]] @@ -92,7 +92,7 @@ dependencies = [ "num-traits", "rusticata-macros", "thiserror", - "time 0.3.44", + "time", ] [[package]] @@ -118,17 +118,6 @@ dependencies = [ "syn", ] -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener 2.5.3", - "futures-core", -] - [[package]] name = "async-channel" version = "2.5.0" @@ -155,29 +144,13 @@ dependencies = [ "slab", ] -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.5.0", - "async-executor", - "async-io 2.6.0", - "async-lock 3.4.1", - "blocking", - "futures-lite 2.6.1", - "once_cell", - "tokio", -] - [[package]] name = "async-global-executor" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13f937e26114b93193065fd44f507aa2e9169ad0cdabbb996920b1fe1ddea7ba" dependencies = [ - "async-channel 2.5.0", + 
"async-channel", "async-executor", "async-io 2.6.0", "async-lock 3.4.1", @@ -191,7 +164,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9af57045d58eeb1f7060e7025a1631cbc6399e0a1d10ad6735b3d0ea7f8346ce" dependencies = [ - "async-global-executor 3.1.0", + "async-global-executor", "async-trait", "executor-trait", ] @@ -207,7 +180,7 @@ dependencies = [ "cfg-if", "concurrent-queue", "futures-lite 1.13.0", - "log 0.4.29", + "log", "parking", "polling 2.8.0", "rustix 0.37.28", @@ -254,23 +227,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "async-process" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" -dependencies = [ - "async-io 1.13.0", - "async-lock 2.8.0", - "async-signal", - "blocking", - "cfg-if", - "event-listener 3.1.0", - "futures-lite 1.13.0", - "rustix 0.38.44", - "windows-sys 0.48.0", -] - [[package]] name = "async-reactor-trait" version = "1.1.0" @@ -283,51 +239,6 @@ dependencies = [ "reactor-trait", ] -[[package]] -name = "async-signal" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" -dependencies = [ - "async-io 2.6.0", - "async-lock 3.4.1", - "atomic-waker", - "cfg-if", - "futures-core", - "futures-io", - "rustix 1.1.2", - "signal-hook-registry", - "slab", - "windows-sys 0.61.2", -] - -[[package]] -name = "async-std" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" -dependencies = [ - "async-channel 1.9.0", - "async-global-executor 2.4.1", - "async-io 1.13.0", - "async-lock 2.8.0", - "async-process", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite 1.13.0", - "gloo-timers", - "kv-log-macro", - "log 
0.4.29", - "memchr", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - [[package]] name = "async-task" version = "4.7.1" @@ -357,16 +268,6 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" -[[package]] -name = "base64" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "489d6c0ed21b11d038c31b6ceccca973e65d73ba3bd8ecb9a2babf5546164643" -dependencies = [ - "byteorder", - "safemem", -] - [[package]] name = "base64" version = "0.13.1" @@ -427,7 +328,7 @@ version = "1.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" dependencies = [ - "async-channel 2.5.0", + "async-channel", "async-task", "futures-io", "futures-lite 2.6.1", @@ -446,12 +347,6 @@ version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - [[package]] name = "bytes" version = "1.11.0" @@ -680,6 +575,12 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + [[package]] name = "errno" version = "0.3.14" @@ -696,17 +597,6 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" -[[package]] -name = 
"event-listener" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - [[package]] name = "event-listener" version = "5.4.1" @@ -787,7 +677,7 @@ version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ - "percent-encoding 2.3.2", + "percent-encoding", ] [[package]] @@ -921,7 +811,7 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.1+wasi-snapshot-preview1", + "wasi", "wasm-bindgen", ] @@ -940,17 +830,30 @@ dependencies = [ ] [[package]] -name = "gloo-timers" -version = "0.2.6" +name = "h2" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" dependencies = [ - "futures-channel", + "atomic-waker", + "bytes", + "fnv", "futures-core", - "js-sys", - "wasm-bindgen", + "futures-sink", + "http 1.4.0", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", ] +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + [[package]] name = "hermit-abi" version = "0.3.9" @@ -1045,33 +948,14 @@ dependencies = [ "http 1.4.0", "hyperx", "jsonwebtoken", - "log 0.4.29", - "mime 0.3.17", - "percent-encoding 2.3.2", + "log", + "mime", + "percent-encoding", "reqwest", "serde", "serde_derive", "serde_json", - "url 2.5.7", -] - -[[package]] -name = "hyper" -version = "0.10.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a0652d9a2609a968c14be1a9ea00bf4b1d64e2e1f53a1b51b6fff3a6e829273" -dependencies = [ - 
"base64 0.9.3", - "httparse", - "language-tags 0.2.2", - "log 0.3.9", - "mime 0.2.6", - "num_cpus", - "time 0.1.45", - "traitobject", - "typeable", - "unicase 1.4.2", - "url 1.7.2", + "url", ] [[package]] @@ -1084,9 +968,11 @@ dependencies = [ "bytes", "futures-channel", "futures-core", + "h2", "http 1.4.0", "http-body", "httparse", + "httpdate", "itoa", "pin-project-lite", "pin-utils", @@ -1102,7 +988,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ "http 1.4.0", - "hyper 1.8.1", + "hyper", "hyper-util", "rustls", "rustls-pki-types", @@ -1125,10 +1011,10 @@ dependencies = [ "futures-util", "http 1.4.0", "http-body", - "hyper 1.8.1", + "hyper", "ipnet", "libc", - "percent-encoding 2.3.2", + "percent-encoding", "pin-project-lite", "socket2 0.6.1", "tokio", @@ -1145,10 +1031,10 @@ dependencies = [ "bytes", "http 0.2.12", "httpdate", - "language-tags 0.3.2", - "mime 0.3.17", - "percent-encoding 2.3.2", - "unicase 2.8.1", + "language-tags", + "mime", + "percent-encoding", + "unicase", ] [[package]] @@ -1161,7 +1047,7 @@ dependencies = [ "core-foundation-sys", "iana-time-zone-haiku", "js-sys", - "log 0.4.29", + "log", "wasm-bindgen", "windows-core", ] @@ -1256,17 +1142,6 @@ dependencies = [ "zerovec", ] -[[package]] -name = "idna" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" -dependencies = [ - "matches", - "unicode-bidi", - "unicode-normalization", -] - [[package]] name = "idna" version = "1.1.0" @@ -1288,6 +1163,16 @@ dependencies = [ "icu_properties", ] +[[package]] +name = "indexmap" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" +dependencies = [ + "equivalent", + "hashbrown", +] + [[package]] name = "inout" version = 
"0.1.4" @@ -1364,21 +1249,6 @@ dependencies = [ "simple_asn1", ] -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log 0.4.29", -] - -[[package]] -name = "language-tags" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a" - [[package]] name = "language-tags" version = "0.3.2" @@ -1431,12 +1301,6 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" -[[package]] -name = "linux-raw-sys" -version = "0.4.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" - [[package]] name = "linux-raw-sys" version = "0.11.0" @@ -1458,23 +1322,11 @@ dependencies = [ "scopeguard", ] -[[package]] -name = "log" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b" -dependencies = [ - "log 0.4.29", -] - [[package]] name = "log" version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" -dependencies = [ - "value-bag", -] [[package]] name = "lru-cache" @@ -1500,12 +1352,6 @@ dependencies = [ "regex-automata", ] -[[package]] -name = "matches" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" - [[package]] name = "md5" version = "0.8.0" @@ -1518,15 +1364,6 @@ version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" -[[package]] -name = "mime" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba626b8a6de5da682e1caa06bdb42a335aee5a84db8e5046a3e8ab17ba0a3ae0" -dependencies = [ - "log 0.3.9", -] - [[package]] name = "mime" version = "0.3.17" @@ -1546,7 +1383,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ "libc", - "wasi 0.11.1+wasi-snapshot-preview1", + "wasi", "windows-sys 0.61.2", ] @@ -1613,21 +1450,10 @@ dependencies = [ "autocfg", ] -[[package]] -name = "num_cpus" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" -dependencies = [ - "hermit-abi 0.5.2", - "libc", -] - [[package]] name = "ofborg" version = "0.1.9" dependencies = [ - "async-std", "brace-expand", "chrono", "either", @@ -1636,11 +1462,14 @@ dependencies = [ "hex", "hmac", "http 1.4.0", + "http-body-util", "hubcaps", - "hyper 0.10.16", + "hyper", + "hyper-util", "lapin", "lru-cache", "md5", + "mime", "nom 4.2.3", "regex", "rustls-pemfile", @@ -1648,6 +1477,8 @@ dependencies = [ "serde_json", "sha2", "tempfile", + "tokio", + "tokio-stream", "tracing", "tracing-subscriber", "uuid", @@ -1657,7 +1488,7 @@ dependencies = [ name = "ofborg-simple-build" version = "0.1.0" dependencies = [ - "log 0.4.29", + "log", "ofborg", ] @@ -1761,12 +1592,6 @@ dependencies = [ "base64ct", ] -[[package]] -name = "percent-encoding" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" - [[package]] name = "percent-encoding" version = "2.3.2" @@ -1849,7 +1674,7 @@ dependencies = [ "cfg-if", "concurrent-queue", "libc", - "log 0.4.29", + "log", "pin-project-lite", "windows-sys 0.48.0", ] @@ 
-2070,12 +1895,12 @@ dependencies = [ "http 1.4.0", "http-body", "http-body-util", - "hyper 1.8.1", + "hyper", "hyper-rustls", "hyper-util", "js-sys", - "log 0.4.29", - "percent-encoding 2.3.2", + "log", + "percent-encoding", "pin-project-lite", "quinn", "rustls", @@ -2089,7 +1914,7 @@ dependencies = [ "tower", "tower-http", "tower-service", - "url 2.5.7", + "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -2154,19 +1979,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "rustix" -version = "0.38.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" -dependencies = [ - "bitflags 2.10.0", - "errno", - "libc", - "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", -] - [[package]] name = "rustix" version = "1.1.2" @@ -2200,7 +2012,7 @@ version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70cc376c6ba1823ae229bacf8ad93c136d93524eab0e4e5e0e4f96b9c4e5b212" dependencies = [ - "log 0.4.29", + "log", "rustls", "rustls-native-certs", "rustls-pki-types", @@ -2262,12 +2074,6 @@ version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" -[[package]] -name = "safemem" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" - [[package]] name = "salsa20" version = "0.10.2" @@ -2418,15 +2224,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" -[[package]] -name = "signal-hook-registry" -version = "1.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7664a098b8e616bdfcc2dc0e9ac44eb231eedf41db4e9fe95d8d32ec728dedad" -dependencies = [ - "libc", -] - [[package]] name = 
"simple_asn1" version = "0.6.3" @@ -2436,7 +2233,7 @@ dependencies = [ "num-bigint", "num-traits", "thiserror", - "time 0.3.44", + "time", ] [[package]] @@ -2593,17 +2390,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - [[package]] name = "time" version = "0.3.44" @@ -2671,9 +2457,21 @@ dependencies = [ "mio", "pin-project-lite", "socket2 0.6.1", + "tokio-macros", "windows-sys 0.61.2", ] +[[package]] +name = "tokio-macros" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "tokio-rustls" version = "0.26.4" @@ -2684,6 +2482,30 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-stream" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + [[package]] name = "tower" version = "0.5.2" @@ -2767,7 +2589,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ - "log 0.4.29", + "log", "once_cell", "tracing-core", ] @@ -2803,66 +2625,30 @@ dependencies = [ "tracing-serde", ] -[[package]] -name = "traitobject" -version 
= "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04a79e25382e2e852e8da874249358d382ebaf259d0d34e75d8db16a7efabbc7" - [[package]] name = "try-lock" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" -[[package]] -name = "typeable" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887" - [[package]] name = "typenum" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" -[[package]] -name = "unicase" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4765f83163b74f957c797ad9253caf97f103fb064d3999aea9568d09fc8a33" -dependencies = [ - "version_check 0.1.5", -] - [[package]] name = "unicase" version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" -[[package]] -name = "unicode-bidi" -version = "0.3.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" - [[package]] name = "unicode-ident" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" -[[package]] -name = "unicode-normalization" -version = "0.1.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" -dependencies = [ - "tinyvec", -] - [[package]] name = "untrusted" version = "0.7.1" @@ -2875,17 +2661,6 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" -[[package]] -name = "url" -version = "1.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a" -dependencies = [ - "idna 0.1.5", - "matches", - "percent-encoding 1.0.1", -] - [[package]] name = "url" version = "2.5.7" @@ -2893,8 +2668,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", - "idna 1.1.0", - "percent-encoding 2.3.2", + "idna", + "percent-encoding", "serde", ] @@ -2921,12 +2696,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" -[[package]] -name = "value-bag" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba6f5989077681266825251a52748b8c1d8a4ad098cc37e440103d0ea717fc0" - [[package]] name = "version_check" version = "0.1.5" @@ -2954,12 +2723,6 @@ dependencies = [ "try-lock", ] -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" @@ -3161,15 +2924,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - [[package]] name = "windows-sys" version = "0.60.2" @@ -3411,7 +3165,7 @@ dependencies = [ "oid-registry", "rusticata-macros", "thiserror", - "time 0.3.44", + "time", ] [[package]] diff --git a/ofborg/Cargo.toml b/ofborg/Cargo.toml 
index 38aad489..b485b106 100644 --- a/ofborg/Cargo.toml +++ b/ofborg/Cargo.toml @@ -6,7 +6,6 @@ build = "build.rs" edition = "2024" [dependencies] -async-std = { version = "=1.12.0", features = ["unstable", "tokio1"] } brace-expand = "0.1.0" chrono = { version = "0.4.38", default-features = false, features = [ "clock", @@ -18,14 +17,16 @@ futures-util = "0.3.31" hex = "0.4.3" hmac = "0.12.1" http = "1" +http-body-util = "0.1" #hubcaps = "0.6" # for Conclusion::Skipped which is in master hubcaps = { git = "https://github.com/ofborg/hubcaps.git", rev = "50dbe6ec45c9dfea4e3cfdf27bbadfa565f69dec", default-features = false, features = ["app", "rustls-tls"] } -# hyper = { version = "0.14", features = ["full"] } -hyper = "=0.10.*" +hyper = { version = "1.0", features = ["full", "server", "http1"] } +hyper-util = { version = "0.1", features = ["server", "tokio", "http1"] } lapin = "2.5.4" lru-cache = "0.1.2" md5 = "0.8.0" +mime = "0.3" nom = "4.2.3" regex = "1.11.1" rustls-pemfile = "2.2.0" @@ -33,6 +34,8 @@ serde = { version = "1.0.217", features = ["derive"] } serde_json = "1.0.135" sha2 = "0.10.8" tempfile = "3.15.0" +tokio = { version = "1", features = ["rt-multi-thread", "net", "macros", "sync"] } +tokio-stream = "0.1" tracing = "0.1.41" tracing-subscriber = { version = "0.3.19", features = ["json", "env-filter"] } uuid = { version = "1.12", features = ["v4"] } diff --git a/ofborg/src/bin/build-faker.rs b/ofborg/src/bin/build-faker.rs index bf510e5b..e543e15b 100644 --- a/ofborg/src/bin/build-faker.rs +++ b/ofborg/src/bin/build-faker.rs @@ -1,8 +1,7 @@ -use std::env; -use std::error::Error; - use lapin::BasicProperties; use lapin::message::Delivery; +use std::env; +use std::error::Error; use ofborg::block_on; use ofborg::commentparser; diff --git a/ofborg/src/bin/builder.rs b/ofborg/src/bin/builder.rs index 25778a08..a671cae3 100644 --- a/ofborg/src/bin/builder.rs +++ b/ofborg/src/bin/builder.rs @@ -1,8 +1,9 @@ use std::env; use std::error::Error; +use 
std::future::Future; use std::path::Path; +use std::pin::Pin; -use async_std::task::{JoinHandle, spawn}; use futures_util::future; use ofborg::block_on; use tracing::{error, info, warn}; @@ -25,7 +26,7 @@ fn main() -> Result<(), Box> { }; let conn = easylapin::from_config(&builder_cfg.rabbitmq)?; - let mut handles = Vec::new(); + let mut handles: Vec + Send>>> = Vec::new(); for system in &cfg.nix.system { let handle_ext = self::create_handle(&conn, &cfg, system.to_string())?; @@ -39,11 +40,12 @@ fn main() -> Result<(), Box> { Ok(()) } +#[allow(clippy::type_complexity)] fn create_handle( conn: &lapin::Connection, cfg: &config::Config, system: String, -) -> Result, Box> { +) -> Result + Send>>, Box> { let mut chan = block_on(conn.create_channel())?; let cloner = checkout::cached_cloner(Path::new(&cfg.checkout.root)); @@ -105,5 +107,5 @@ fn create_handle( )?; info!("Fetching jobs from {}", &queue_name); - Ok(spawn(handle)) + Ok(handle) } diff --git a/ofborg/src/bin/github-webhook-receiver.rs b/ofborg/src/bin/github-webhook-receiver.rs index 22a56845..ef570f68 100644 --- a/ofborg/src/bin/github-webhook-receiver.rs +++ b/ofborg/src/bin/github-webhook-receiver.rs @@ -1,30 +1,28 @@ use std::env; use std::error::Error; -use std::io::Read as _; -#[macro_use] -extern crate hyper; +use std::net::SocketAddr; +use std::sync::Arc; use hmac::{Hmac, Mac}; -use hyper::header::ContentType; -use hyper::mime; -use hyper::{ - server::{Request, Response, Server}, - status::StatusCode, -}; +use http::{Method, StatusCode}; +use http_body_util::{BodyExt, Full}; +use hyper::body::{Bytes, Incoming}; +use hyper::server::conn::http1; +use hyper::service::service_fn; +use hyper::{Request, Response}; +use hyper_util::rt::TokioIo; use lapin::options::BasicPublishOptions; use lapin::{BasicProperties, Channel}; -use ofborg::block_on; use ofborg::ghevent::GenericWebhook; use ofborg::{config, easyamqp, easyamqp::ChannelExt, easylapin}; use sha2::Sha256; +use tokio::net::TcpListener; +use 
tokio::sync::Mutex; use tracing::{error, info, warn}; -header! { (XHubSignature256, "X-Hub-Signature-256") => [String] } -header! { (XGithubEvent, "X-Github-Event") => [String] } - /// Prepares the the exchange we will write to, the queues that are bound to it /// and binds them. -fn setup_amqp(chan: &mut Channel) -> Result<(), Box> { +fn setup_amqp(chan: &mut Channel) -> Result<(), Box> { chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "github-events".to_owned(), exchange_type: easyamqp::ExchangeType::Topic, @@ -85,118 +83,152 @@ fn setup_amqp(chan: &mut Channel) -> Result<(), Box> { Ok(()) } -fn handle_request(mut req: Request, mut res: Response, webhook_secret: &str, chan: &Channel) { +fn response(status: StatusCode, body: &'static str) -> Response> { + Response::builder() + .status(status) + .body(Full::new(Bytes::from(body))) + .unwrap() +} + +fn empty_response(status: StatusCode) -> Response> { + Response::builder() + .status(status) + .body(Full::new(Bytes::new())) + .unwrap() +} + +async fn handle_request( + req: Request, + webhook_secret: Arc, + chan: Arc>, +) -> Result>, hyper::Error> { // HTTP 405 - if req.method != hyper::Post { - *res.status_mut() = StatusCode::MethodNotAllowed; - return; + if req.method() != Method::POST { + return Ok(empty_response(StatusCode::METHOD_NOT_ALLOWED)); } - let hdr = req.headers.clone(); + + // Get headers before consuming body + let sig_header = req + .headers() + .get("X-Hub-Signature-256") + .and_then(|v| v.to_str().ok()) + .map(|s| s.to_string()); + let event_type = req + .headers() + .get("X-Github-Event") + .and_then(|v| v.to_str().ok()) + .map(|s| s.to_string()); + let content_type = req + .headers() + .get("Content-Type") + .and_then(|v| v.to_str().ok()) + .map(|s| s.to_string()); // Read body - let mut raw = Vec::new(); - if req.read_to_end(&mut raw).is_err() { - warn!("Failed to read body from client"); - *res.status_mut() = StatusCode::InternalServerError; - return; - } - let raw = raw.as_slice(); + 
let raw = match req.collect().await { + Ok(collected) => collected.to_bytes(), + Err(e) => { + warn!("Failed to read body from client: {e}"); + return Ok(response( + StatusCode::INTERNAL_SERVER_ERROR, + "Failed to read body", + )); + } + }; // Validate signature - { - let Some(sig) = hdr.get::() else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Missing signature header"); - return; - }; - let mut components = sig.splitn(2, '='); - let Some(algo) = components.next() else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Signature hash method missing"); - return; - }; - let Some(hash) = components.next() else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Signature hash missing"); - return; - }; - let Ok(hash) = hex::decode(hash) else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Invalid signature hash hex"); - return; - }; - - if algo != "sha256" { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Invalid signature hash method"); - return; - } + let Some(sig) = sig_header else { + return Ok(response( + StatusCode::BAD_REQUEST, + "Missing signature header", + )); + }; + let mut components = sig.splitn(2, '='); + let Some(algo) = components.next() else { + return Ok(response( + StatusCode::BAD_REQUEST, + "Signature hash method missing", + )); + }; + let Some(hash) = components.next() else { + return Ok(response(StatusCode::BAD_REQUEST, "Signature hash missing")); + }; + let Ok(hash) = hex::decode(hash) else { + return Ok(response( + StatusCode::BAD_REQUEST, + "Invalid signature hash hex", + )); + }; - let Ok(mut mac) = Hmac::::new_from_slice(webhook_secret.as_bytes()) else { - *res.status_mut() = StatusCode::InternalServerError; - error!("Unable to create HMAC from secret"); - return; - }; - mac.update(raw); - if mac.verify_slice(hash.as_slice()).is_err() { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Signature verification failed"); - return; 
- } + if algo != "sha256" { + return Ok(response( + StatusCode::BAD_REQUEST, + "Invalid signature hash method", + )); + } + + let Ok(mut mac) = Hmac::::new_from_slice(webhook_secret.as_bytes()) else { + error!("Unable to create HMAC from secret"); + return Ok(response( + StatusCode::INTERNAL_SERVER_ERROR, + "Internal error", + )); + }; + mac.update(&raw); + if mac.verify_slice(hash.as_slice()).is_err() { + return Ok(response( + StatusCode::BAD_REQUEST, + "Signature verification failed", + )); } // Parse body - let Some(ct) = hdr.get::() else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"No Content-Type header passed"); - return; + let Some(ct) = content_type else { + return Ok(response( + StatusCode::BAD_REQUEST, + "No Content-Type header passed", + )); }; - if ct - != &ContentType(mime::Mime( - mime::TopLevel::Application, - mime::SubLevel::Json, - Vec::new(), - )) - { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Content-Type is not application/json. Webhook misconfigured?"); - return; + if !ct.contains("application/json") { + return Ok(response( + StatusCode::BAD_REQUEST, + "Content-Type is not application/json. 
Webhook misconfigured?", + )); } - let input = match serde_json::from_slice::(raw) { + + let input = match serde_json::from_slice::(&raw) { Ok(i) => i, Err(e) => { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Invalid JSON"); error!("Invalid JSON received: {e}"); - return; + return Ok(response(StatusCode::BAD_REQUEST, "Invalid JSON")); } }; // Build routing key - let Some(event_type) = hdr.get::() else { - *res.status_mut() = StatusCode::BadRequest; - let _ = res.send(b"Missing event type"); - return; + let Some(event_type) = event_type else { + return Ok(response(StatusCode::BAD_REQUEST, "Missing event type")); }; let routing_key = format!("{event_type}.{}", input.repository.full_name.to_lowercase()); // Publish message - let _confirmation = block_on(async { - chan.basic_publish( + let chan = chan.lock().await; + let _confirmation = chan + .basic_publish( "github-events", &routing_key, BasicPublishOptions::default(), - raw, + &raw, BasicProperties::default() .with_content_type("application/json".into()) .with_delivery_mode(2), // persistent ) - .await - }); - *res.status_mut() = StatusCode::NoContent; + .await; + + Ok(empty_response(StatusCode::NO_CONTENT)) } -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> Result<(), Box> { ofborg::setup_log(); let arg = env::args() @@ -209,22 +241,31 @@ fn main() -> Result<(), Box> { let webhook_secret = std::fs::read_to_string(cfg.webhook_secret_file) .expect("Unable to read webhook secret file"); - let webhook_secret = webhook_secret.trim().to_string(); + let webhook_secret = Arc::new(webhook_secret.trim().to_string()); let conn = easylapin::from_config(&cfg.rabbitmq)?; - let mut chan = block_on(conn.create_channel())?; + let mut chan = conn.create_channel().await?; setup_amqp(&mut chan)?; + let chan = Arc::new(Mutex::new(chan)); - //let events = stats::RabbitMq::from_lapin(&cfg.whoami(), block_on(conn.create_channel())?); - let threads = std::thread::available_parallelism() - .map(|x| 
x.get()) - .unwrap_or(1); - info!("Will listen on {} with {threads} threads", cfg.listen); - Server::http(cfg.listen)?.handle_threads( - move |req: Request, res: Response| { - handle_request(req, res, &webhook_secret, &chan); - }, - threads, - )?; - Ok(()) + let addr: SocketAddr = cfg.listen.parse()?; + let listener = TcpListener::bind(addr).await?; + info!("Listening on {}", addr); + + loop { + let (stream, _) = listener.accept().await?; + let io = TokioIo::new(stream); + + let webhook_secret = webhook_secret.clone(); + let chan = chan.clone(); + + tokio::task::spawn(async move { + let service = + service_fn(move |req| handle_request(req, webhook_secret.clone(), chan.clone())); + + if let Err(err) = http1::Builder::new().serve_connection(io, service).await { + warn!("Error serving connection: {:?}", err); + } + }); + } } diff --git a/ofborg/src/bin/logapi.rs b/ofborg/src/bin/logapi.rs index 966def22..46a80058 100644 --- a/ofborg/src/bin/logapi.rs +++ b/ofborg/src/bin/logapi.rs @@ -1,12 +1,15 @@ -use std::{collections::HashMap, error::Error, path::PathBuf}; - -use hyper::{ - header::ContentType, - mime, - server::{Request, Response, Server}, - status::StatusCode, -}; +use std::net::SocketAddr; +use std::{collections::HashMap, error::Error, path::PathBuf, sync::Arc}; + +use http::{Method, StatusCode}; +use http_body_util::Full; +use hyper::body::Bytes; +use hyper::server::conn::http1; +use hyper::service::service_fn; +use hyper::{Request, Response}; +use hyper_util::rt::TokioIo; use ofborg::config; +use tokio::net::TcpListener; use tracing::{error, info, warn}; #[derive(serde::Serialize, Default)] @@ -27,28 +30,39 @@ struct LogApiConfig { serve_root: String, } -fn handle_request(req: Request, mut res: Response, cfg: &LogApiConfig) { - if req.method != hyper::Get { - *res.status_mut() = StatusCode::MethodNotAllowed; - return; +fn response(status: StatusCode, body: &'static str) -> Response> { + Response::builder() + .status(status) + 
.body(Full::new(Bytes::from(body))) + .unwrap() +} + +fn json_response(status: StatusCode, body: String) -> Response> { + Response::builder() + .status(status) + .header("Content-Type", "application/json") + .body(Full::new(Bytes::from(body))) + .unwrap() +} + +async fn handle_request( + req: Request, + cfg: Arc, +) -> Result>, hyper::Error> { + if req.method() != Method::GET { + return Ok(response(StatusCode::METHOD_NOT_ALLOWED, "")); } - let uri = req.uri.to_string(); + let uri = req.uri().path().to_string(); let Some(reqd) = uri.strip_prefix("/logs/").map(ToOwned::to_owned) else { - *res.status_mut() = StatusCode::NotFound; - let _ = res.send(b"invalid uri"); - return; + return Ok(response(StatusCode::NOT_FOUND, "invalid uri")); }; let path: PathBuf = [&cfg.logs_path, &reqd].iter().collect(); let Ok(path) = std::fs::canonicalize(&path) else { - *res.status_mut() = StatusCode::NotFound; - let _ = res.send(b"absent"); - return; + return Ok(response(StatusCode::NOT_FOUND, "absent")); }; let Ok(iter) = std::fs::read_dir(path) else { - *res.status_mut() = StatusCode::NotFound; - let _ = res.send(b"non dir"); - return; + return Ok(response(StatusCode::NOT_FOUND, "non dir")); }; let mut attempts = HashMap::::new(); @@ -56,9 +70,7 @@ fn handle_request(req: Request, mut res: Response, cfg: &LogApiConfig) { let Ok(e) = e else { continue }; let e_metadata = e.metadata(); if e_metadata.as_ref().map(|v| v.is_dir()).unwrap_or(true) { - *res.status_mut() = StatusCode::InternalServerError; - let _ = res.send(b"dir found"); - return; + return Ok(response(StatusCode::INTERNAL_SERVER_ERROR, "dir found")); } if e_metadata.as_ref().map(|v| v.is_file()).unwrap_or_default() { @@ -97,21 +109,12 @@ fn handle_request(req: Request, mut res: Response, cfg: &LogApiConfig) { } } - *res.status_mut() = StatusCode::Ok; - res.headers_mut() - .set::(hyper::header::ContentType(mime::Mime( - mime::TopLevel::Application, - mime::SubLevel::Json, - Vec::new(), - ))); - let _ = res.send( - 
serde_json::to_string(&LogResponse { attempts }) - .unwrap_or_default() - .as_bytes(), - ); + let body = serde_json::to_string(&LogResponse { attempts }).unwrap_or_default(); + Ok(json_response(StatusCode::OK, body)) } -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> Result<(), Box> { ofborg::setup_log(); let arg = std::env::args() @@ -122,20 +125,27 @@ fn main() -> Result<(), Box> { panic!(); }; - let api_cfg = LogApiConfig { + let api_cfg = Arc::new(LogApiConfig { logs_path: cfg.logs_path, serve_root: cfg.serve_root, - }; + }); - let threads = std::thread::available_parallelism() - .map(|x| x.get()) - .unwrap_or(1); - info!("Will listen on {} with {threads} threads", cfg.listen); - Server::http(cfg.listen)?.handle_threads( - move |req: Request, res: Response| { - handle_request(req, res, &api_cfg); - }, - threads, - )?; - Ok(()) + let addr: SocketAddr = cfg.listen.parse()?; + let listener = TcpListener::bind(addr).await?; + info!("Listening on {}", addr); + + loop { + let (stream, _) = listener.accept().await?; + let io = TokioIo::new(stream); + + let api_cfg = api_cfg.clone(); + + tokio::task::spawn(async move { + let service = service_fn(move |req| handle_request(req, api_cfg.clone())); + + if let Err(err) = http1::Builder::new().serve_connection(io, service).await { + warn!("Error serving connection: {:?}", err); + } + }); + } } diff --git a/ofborg/src/bin/stats.rs b/ofborg/src/bin/stats.rs index 848852d4..62a29651 100644 --- a/ofborg/src/bin/stats.rs +++ b/ofborg/src/bin/stats.rs @@ -1,24 +1,53 @@ use std::env; use std::error::Error; +use std::net::SocketAddr; use std::sync::Arc; -use std::thread; -use hyper::server::{Request, Response, Server}; +use http::StatusCode; +use http_body_util::Full; +use hyper::body::Bytes; +use hyper::server::conn::http1; +use hyper::service::service_fn; +use hyper::{Request, Response}; +use hyper_util::rt::TokioIo; use ofborg::block_on; -use tracing::{error, info}; +use tokio::net::TcpListener; +use 
tracing::{error, info, warn}; use ofborg::easyamqp::{ChannelExt, ConsumerExt}; use ofborg::{config, easyamqp, easylapin, stats, tasks}; -fn run_http_server(metrics: Arc) { - let addr = "0.0.0.0:9898"; +fn response(body: String) -> Response> { + Response::builder() + .status(StatusCode::OK) + .body(Full::new(Bytes::from(body))) + .unwrap() +} + +async fn run_http_server( + addr: SocketAddr, + metrics: Arc, +) -> Result<(), Box> { + let listener = TcpListener::bind(addr).await?; info!("HTTP server listening on {}", addr); - Server::http(addr) - .expect("Failed to bind HTTP server") - .handle(move |_: Request, res: Response| { - res.send(metrics.prometheus_output().as_bytes()).unwrap(); - }) - .expect("Failed to start HTTP server"); + + loop { + let (stream, _) = listener.accept().await?; + let io = TokioIo::new(stream); + + let metrics = metrics.clone(); + + tokio::task::spawn(async move { + let service = service_fn(move |_req: Request| { + let metrics = metrics.clone(); + async move { Ok::<_, hyper::Error>(response(metrics.prometheus_output())) } + }); + + if let Err(err) = http1::Builder::new().serve_connection(io, service).await { + warn!("Error serving connection: {:?}", err); + } + }); + } } fn main() -> Result<(), Box> { @@ -82,10 +111,14 @@ fn main() -> Result<(), Box> { }, )?; - // Spawn HTTP server in a separate thread + // Spawn HTTP server in a separate thread with its own tokio runtime let metrics_clone = metrics.clone(); - thread::spawn(move || { - run_http_server(metrics_clone); + std::thread::spawn(move || { + let rt = tokio::runtime::Runtime::new().expect("Failed to create tokio runtime"); + let addr: SocketAddr = "0.0.0.0:9898".parse().unwrap(); + if let Err(e) = rt.block_on(run_http_server(addr, metrics_clone)) { + error!("HTTP server error: {:?}", e); + } }); info!("Fetching jobs from {}", &queue_name); diff --git a/ofborg/src/commitstatus.rs b/ofborg/src/commitstatus.rs index b982d904..53de8881 100644 --- a/ofborg/src/commitstatus.rs +++ 
b/ofborg/src/commitstatus.rs @@ -1,3 +1,4 @@ +use crate::block_on; use futures_util::future::TryFutureExt; use tracing::warn; @@ -57,7 +58,7 @@ impl CommitStatus { } else { self.description.clone() }; - crate::block_on( + block_on( self.api .create( self.sha.as_ref(), diff --git a/ofborg/src/easylapin.rs b/ofborg/src/easylapin.rs index 4daf4fee..e4d36906 100644 --- a/ofborg/src/easylapin.rs +++ b/ofborg/src/easylapin.rs @@ -9,8 +9,6 @@ use crate::notifyworker::{NotificationReceiver, SimpleNotifyWorker}; use crate::ofborg; use crate::worker::{Action, SimpleWorker}; -use async_std::future::Future; -use async_std::stream::StreamExt; use lapin::message::Delivery; use lapin::options::{ BasicAckOptions, BasicConsumeOptions, BasicNackOptions, BasicPublishOptions, BasicQosOptions, @@ -18,6 +16,7 @@ use lapin::options::{ }; use lapin::types::{AMQPValue, FieldTable}; use lapin::{BasicProperties, Channel, Connection, ConnectionProperties, ExchangeKind}; +use tokio_stream::StreamExt; use tracing::{debug, trace}; pub fn from_config(cfg: &RabbitMqConfig) -> Result { diff --git a/ofborg/src/lib.rs b/ofborg/src/lib.rs index 21374903..9dbf68a1 100644 --- a/ofborg/src/lib.rs +++ b/ofborg/src/lib.rs @@ -111,6 +111,20 @@ pub fn setup_log() { } /// Block on a future from synchronous code. +/// +/// This helper bridges sync and async code throughout the codebase, +/// used for both RabbitMQ (lapin) and GitHub API (hubcaps) operations. 
pub fn block_on(f: F) -> F::Output { - async_std::task::block_on(f) + // Try to use the current runtime if we're already in one + if let Ok(handle) = tokio::runtime::Handle::try_current() { + // We're inside a tokio runtime, use block_in_place + tokio::task::block_in_place(|| handle.block_on(f)) + } else { + // Create a new runtime for this blocking call + tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .expect("Failed to create tokio runtime") + .block_on(f) + } } From a9334310add8699211deb204e24ad2b93fcbfb0e Mon Sep 17 00:00:00 2001 From: Simon Hauser Date: Fri, 16 Jan 2026 19:43:53 +0100 Subject: [PATCH 6/6] feat: replace usage of block_on with async functions everywhere --- Cargo.lock | 231 +++++++++++--------- ofborg/Cargo.toml | 3 + ofborg/src/asynccmd.rs | 4 + ofborg/src/bin/build-faker.rs | 13 +- ofborg/src/bin/builder.rs | 53 ++--- ofborg/src/bin/evaluation-filter.rs | 46 ++-- ofborg/src/bin/github-comment-filter.rs | 52 +++-- ofborg/src/bin/github-comment-poster.rs | 43 ++-- ofborg/src/bin/github-webhook-receiver.rs | 27 ++- ofborg/src/bin/log-message-collector.rs | 49 +++-- ofborg/src/bin/mass-rebuilder.rs | 51 ++--- ofborg/src/bin/stats.rs | 52 ++--- ofborg/src/commitstatus.rs | 33 ++- ofborg/src/config.rs | 42 ++-- ofborg/src/easyamqp.rs | 23 +- ofborg/src/easylapin.rs | 126 ++++++----- ofborg/src/lib.rs | 20 -- ofborg/src/notifyworker.rs | 20 +- ofborg/src/stats.rs | 40 ++-- ofborg/src/tasks/build.rs | 210 ++++++++++-------- ofborg/src/tasks/eval/mod.rs | 15 +- ofborg/src/tasks/eval/nixpkgs.rs | 40 ++-- ofborg/src/tasks/evaluate.rs | 246 +++++++++++++--------- ofborg/src/tasks/evaluationfilter.rs | 15 +- ofborg/src/tasks/githubcommentfilter.rs | 29 +-- ofborg/src/tasks/githubcommentposter.rs | 26 ++- ofborg/src/tasks/log_message_collector.rs | 84 ++++---- ofborg/src/tasks/statscollector.rs | 21 +- ofborg/src/worker.rs | 12 +- 29 files changed, 918 insertions(+), 708 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 
fec5c0be..2208108a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -153,7 +153,7 @@ dependencies = [ "async-channel", "async-executor", "async-io 2.6.0", - "async-lock 3.4.1", + "async-lock 3.4.2", "blocking", "futures-lite 2.6.1", ] @@ -202,7 +202,7 @@ dependencies = [ "futures-lite 2.6.1", "parking", "polling 3.11.0", - "rustix 1.1.2", + "rustix 1.1.3", "slab", "windows-sys 0.61.2", ] @@ -218,9 +218,9 @@ dependencies = [ [[package]] name = "async-lock" -version = "3.4.1" +version = "3.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" dependencies = [ "event-listener 5.4.1", "event-listener-strategy", @@ -288,9 +288,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.8.1" +version = "1.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e050f626429857a27ddccb31e0aca21356bfa709c04041aefddac081a8f068a" +checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" [[package]] name = "bitflags" @@ -343,9 +343,9 @@ checksum = "c3adb80ee272c844254166ea32c8ae11c211b3639a293fdde41b1645b6be2c62" [[package]] name = "bumpalo" -version = "3.19.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" +checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" [[package]] name = "bytes" @@ -364,9 +364,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.49" +version = "1.2.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215" +checksum = "755d2fce177175ffca841e9a06afdb2c4ab0f593d53b4dee48147dfaade85932" dependencies = [ "find-msvc-tools", "shlex", @@ -386,9 
+386,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.42" +version = "0.4.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" +checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" dependencies = [ "iana-time-zone", "num-traits", @@ -481,9 +481,9 @@ dependencies = [ [[package]] name = "data-encoding" -version = "2.9.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" +checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" [[package]] name = "der" @@ -644,9 +644,9 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "find-msvc-tools" -version = "0.1.5" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" +checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" [[package]] name = "flagset" @@ -698,6 +698,7 @@ checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", + "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -720,6 +721,17 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + [[package]] name = "futures-io" version = "0.3.31" @@ -783,10 +795,13 @@ version = "0.3.31" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ + "futures-channel", "futures-core", + "futures-io", "futures-macro", "futures-sink", "futures-task", + "memchr", "pin-project-lite", "pin-utils", "slab", @@ -804,9 +819,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" dependencies = [ "cfg-if", "js-sys", @@ -831,9 +846,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" dependencies = [ "atomic-waker", "bytes", @@ -1165,9 +1180,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.12.1" +version = "2.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" dependencies = [ "equivalent", "hashbrown", @@ -1211,9 +1226,9 @@ checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "iri-string" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" dependencies = [ "memchr", "serde", @@ -1221,15 +1236,15 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.15" +version = "1.0.17" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" [[package]] name = "js-sys" -version = "0.3.83" +version = "0.3.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" +checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" dependencies = [ "once_cell", "wasm-bindgen", @@ -1285,9 +1300,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.178" +version = "0.2.180" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" +checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" [[package]] name = "linked-hash-map" @@ -1454,10 +1469,12 @@ dependencies = [ name = "ofborg" version = "0.1.9" dependencies = [ + "async-trait", "brace-expand", "chrono", "either", "fs2", + "futures", "futures-util", "hex", "hmac", @@ -1471,6 +1488,7 @@ dependencies = [ "md5", "mime", "nom 4.2.3", + "parking_lot", "regex", "rustls-pemfile", "serde", @@ -1689,7 +1707,7 @@ dependencies = [ "concurrent-queue", "hermit-abi 0.5.2", "pin-project-lite", - "rustix 1.1.2", + "rustix 1.1.3", "windows-sys 0.61.2", ] @@ -1719,9 +1737,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.103" +version = "1.0.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" +checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" dependencies = [ "unicode-ident", ] @@ -1783,9 +1801,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.42" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" +checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" dependencies = [ "proc-macro2", ] @@ -1818,9 +1836,9 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.9.3" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" dependencies = [ "getrandom 0.3.4", ] @@ -1885,9 +1903,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "reqwest" -version = "0.12.25" +version = "0.12.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6eff9328d40131d43bd911d42d79eb6a47312002a4daefc9e37f17e74a7701a" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" dependencies = [ "base64 0.22.1", "bytes", @@ -1944,7 +1962,7 @@ checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.16", + "getrandom 0.2.17", "libc", "untrusted 0.9.0", "windows-sys 0.52.0", @@ -1981,9 +1999,9 @@ dependencies = [ [[package]] name = "rustix" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" +checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" dependencies = [ "bitflags 2.10.0", "errno", @@ -1994,9 +2012,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.35" +version = "0.23.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" +checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" dependencies = [ "once_cell", "ring 0.17.14", @@ -2043,9 +2061,9 @@ 
dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.13.1" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "708c0f9d5f54ba0272468c1d306a52c495b31fa155e91bc25371e6df7996908c" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" dependencies = [ "web-time", "zeroize", @@ -2053,9 +2071,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.8" +version = "0.103.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" +checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" dependencies = [ "ring 0.17.14", "rustls-pki-types", @@ -2070,9 +2088,9 @@ checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" -version = "1.0.20" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" +checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" [[package]] name = "salsa20" @@ -2164,15 +2182,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.145" +version = "1.0.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ "itoa", "memchr", - "ryu", "serde", "serde_core", + "zmij", ] [[package]] @@ -2307,9 +2325,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.111" +version = "2.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" +checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" dependencies 
= [ "proc-macro2", "quote", @@ -2350,14 +2368,14 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.23.0" +version = "3.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" +checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" dependencies = [ "fastrand 2.3.0", "getrandom 0.3.4", "once_cell", - "rustix 1.1.2", + "rustix 1.1.3", "windows-sys 0.61.2", ] @@ -2392,30 +2410,30 @@ dependencies = [ [[package]] name = "time" -version = "0.3.44" +version = "0.3.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" +checksum = "f9e442fc33d7fdb45aa9bfeb312c095964abdf596f7567261062b2a7107aaabd" dependencies = [ "deranged", "itoa", "num-conv", "powerfmt", - "serde", + "serde_core", "time-core", "time-macros", ] [[package]] name = "time-core" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" +checksum = "8b36ee98fd31ec7426d599183e8fe26932a8dc1fb76ddb6214d05493377d34ca" [[package]] name = "time-macros" -version = "0.2.24" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" +checksum = "71e552d1249bf61ac2a52db88179fd0673def1e1ad8243a00d9ec9ed71fee3dd" dependencies = [ "num-conv", "time-core", @@ -2448,9 +2466,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.48.0" +version = "1.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" +checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" dependencies = [ "bytes", "libc", @@ 
-2484,9 +2502,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.17" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" dependencies = [ "futures-core", "pin-project-lite", @@ -2495,9 +2513,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.17" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ "bytes", "futures-core", @@ -2508,9 +2526,9 @@ dependencies = [ [[package]] name = "tower" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" dependencies = [ "futures-core", "futures-util", @@ -2553,9 +2571,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.43" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ "pin-project-lite", "tracing-attributes", @@ -2575,9 +2593,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.35" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" dependencies = [ "once_cell", "valuable", @@ -2639,9 +2657,9 @@ checksum = 
"562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "unicase" -version = "2.8.1" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" +checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142" [[package]] name = "unicode-ident" @@ -2663,9 +2681,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.7" +version = "2.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" dependencies = [ "form_urlencoded", "idna", @@ -2731,18 +2749,18 @@ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasip2" -version = "1.0.1+wasi-0.2.4" +version = "1.0.2+wasi-0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" dependencies = [ "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.106" +version = "0.2.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" +checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" dependencies = [ "cfg-if", "once_cell", @@ -2753,11 +2771,12 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.56" +version = "0.4.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" +checksum = "70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f" dependencies = [ "cfg-if", + "futures-util", 
"js-sys", "once_cell", "wasm-bindgen", @@ -2766,9 +2785,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.106" +version = "0.2.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" +checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2776,9 +2795,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.106" +version = "0.2.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" +checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" dependencies = [ "bumpalo", "proc-macro2", @@ -2789,18 +2808,18 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.106" +version = "0.2.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" +checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" dependencies = [ "unicode-ident", ] [[package]] name = "web-sys" -version = "0.3.83" +version = "0.3.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" +checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" dependencies = [ "js-sys", "wasm-bindgen", @@ -2818,9 +2837,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2878ef029c47c6e8cf779119f20fcf52bde7ad42a731b2a304bc221df17571e" +checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c" dependencies = [ "rustls-pki-types", ] @@ -3130,9 +3149,9 @@ checksum = 
"d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "wit-bindgen" -version = "0.46.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" [[package]] name = "writeable" @@ -3193,18 +3212,18 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.31" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3" +checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.31" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" +checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" dependencies = [ "proc-macro2", "quote", @@ -3270,3 +3289,9 @@ dependencies = [ "quote", "syn", ] + +[[package]] +name = "zmij" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd8f3f50b848df28f887acb68e41201b5aea6bc8a8dacc00fb40635ff9a72fea" diff --git a/ofborg/Cargo.toml b/ofborg/Cargo.toml index b485b106..4efbddf9 100644 --- a/ofborg/Cargo.toml +++ b/ofborg/Cargo.toml @@ -6,6 +6,7 @@ build = "build.rs" edition = "2024" [dependencies] +async-trait = "0.1.89" brace-expand = "0.1.0" chrono = { version = "0.4.38", default-features = false, features = [ "clock", @@ -13,6 +14,7 @@ chrono = { version = "0.4.38", default-features = false, features = [ ] } either = "1.13.0" fs2 = "0.4.3" +futures = "0.3.31" futures-util = "0.3.31" hex = "0.4.3" hmac = "0.12.1" @@ -28,6 +30,7 @@ lru-cache = "0.1.2" md5 = "0.8.0" mime = "0.3" nom = "4.2.3" +parking_lot = "0.12.4" regex = 
"1.11.1" rustls-pemfile = "2.2.0" serde = { version = "1.0.217", features = ["derive"] } diff --git a/ofborg/src/asynccmd.rs b/ofborg/src/asynccmd.rs index b538f8d0..52cd20da 100644 --- a/ofborg/src/asynccmd.rs +++ b/ofborg/src/asynccmd.rs @@ -136,6 +136,10 @@ impl SpawnedAsyncCmd { self.rx.iter() } + pub fn get_next_line(&mut self) -> Result { + self.rx.recv() + } + pub fn wait(self) -> Result { self.waiter .join() diff --git a/ofborg/src/bin/build-faker.rs b/ofborg/src/bin/build-faker.rs index e543e15b..3911b68a 100644 --- a/ofborg/src/bin/build-faker.rs +++ b/ofborg/src/bin/build-faker.rs @@ -3,7 +3,6 @@ use lapin::message::Delivery; use std::env; use std::error::Error; -use ofborg::block_on; use ofborg::commentparser; use ofborg::config; use ofborg::easylapin; @@ -11,14 +10,15 @@ use ofborg::message::{Pr, Repo, buildjob}; use ofborg::notifyworker::NotificationReceiver; use ofborg::worker; -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> Result<(), Box> { ofborg::setup_log(); let arg = env::args().nth(1).expect("usage: build-faker "); let cfg = config::load(arg.as_ref()); - let conn = easylapin::from_config(&cfg.builder.unwrap().rabbitmq)?; - let mut chan = block_on(conn.create_channel())?; + let conn = easylapin::from_config(&cfg.builder.unwrap().rabbitmq).await?; + let chan = conn.create_channel().await?; let repo_msg = Repo { clone_url: "https://github.com/nixos/ofborg.git".to_owned(), @@ -55,14 +55,15 @@ fn main() -> Result<(), Box> { data: vec![], acker: Default::default(), }; - let mut recv = easylapin::ChannelNotificationReceiver::new(&mut chan, &deliver); + let recv = easylapin::ChannelNotificationReceiver::new(chan.clone(), deliver); for _i in 1..2 { recv.tell(worker::publish_serde_action( None, Some("build-inputs-x86_64-darwin".to_owned()), &msg, - )); + )) + .await; } } diff --git a/ofborg/src/bin/builder.rs b/ofborg/src/bin/builder.rs index a671cae3..85c7b1e2 100644 --- a/ofborg/src/bin/builder.rs +++ b/ofborg/src/bin/builder.rs @@ 
-5,14 +5,14 @@ use std::path::Path; use std::pin::Pin; use futures_util::future; -use ofborg::block_on; use tracing::{error, info, warn}; use ofborg::easyamqp::{self, ChannelExt, ConsumerExt}; use ofborg::easylapin; use ofborg::{checkout, config, tasks}; -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> Result<(), Box> { ofborg::setup_log(); let arg = env::args() @@ -25,15 +25,14 @@ fn main() -> Result<(), Box> { panic!(); }; - let conn = easylapin::from_config(&builder_cfg.rabbitmq)?; + let conn = easylapin::from_config(&builder_cfg.rabbitmq).await?; let mut handles: Vec + Send>>> = Vec::new(); for system in &cfg.nix.system { - let handle_ext = self::create_handle(&conn, &cfg, system.to_string())?; - handles.push(handle_ext); + handles.push(self::create_handle(&conn, &cfg, system.to_string()).await?); } - block_on(future::join_all(handles)); + future::join_all(handles).await; drop(conn); // Close connection. info!("Closed the session... EOF"); @@ -41,12 +40,12 @@ fn main() -> Result<(), Box> { } #[allow(clippy::type_complexity)] -fn create_handle( +async fn create_handle( conn: &lapin::Connection, cfg: &config::Config, system: String, ) -> Result + Send>>, Box> { - let mut chan = block_on(conn.create_channel())?; + let mut chan = conn.create_channel().await?; let cloner = checkout::cached_cloner(Path::new(&cfg.checkout.root)); let nix = cfg.nix().with_system(system.clone()); @@ -59,7 +58,8 @@ fn create_handle( auto_delete: false, no_wait: false, internal: false, - })?; + }) + .await?; let queue_name = if cfg.runner.build_all_jobs != Some(true) { let queue_name = format!("build-inputs-{system}"); @@ -70,7 +70,8 @@ fn create_handle( exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; queue_name } else { warn!("Building all jobs, please don't use this unless you're"); @@ -83,7 +84,8 @@ fn create_handle( exclusive: true, auto_delete: true, no_wait: false, - })?; + }) + .await?; queue_name }; @@ -92,19 +94,22 @@ fn 
create_handle( exchange: "build-jobs".to_owned(), routing_key: None, no_wait: false, - })?; - - let handle = easylapin::NotifyChannel(chan).consume( - tasks::build::BuildWorker::new(cloner, nix, system, cfg.runner.identity.clone()), - easyamqp::ConsumeConfig { - queue: queue_name.clone(), - consumer_tag: format!("{}-builder", cfg.whoami()), - no_local: false, - no_ack: false, - no_wait: false, - exclusive: false, - }, - )?; + }) + .await?; + + let handle = easylapin::NotifyChannel(chan) + .consume( + tasks::build::BuildWorker::new(cloner, nix, system, cfg.runner.identity.clone()), + easyamqp::ConsumeConfig { + queue: queue_name.clone(), + consumer_tag: format!("{}-builder", cfg.whoami()), + no_local: false, + no_ack: false, + no_wait: false, + exclusive: false, + }, + ) + .await?; info!("Fetching jobs from {}", &queue_name); Ok(handle) diff --git a/ofborg/src/bin/evaluation-filter.rs b/ofborg/src/bin/evaluation-filter.rs index 8c8b41a9..eda925f9 100644 --- a/ofborg/src/bin/evaluation-filter.rs +++ b/ofborg/src/bin/evaluation-filter.rs @@ -1,7 +1,6 @@ use std::env; use std::error::Error; -use ofborg::block_on; use tracing::{error, info}; use ofborg::config; @@ -9,7 +8,8 @@ use ofborg::easyamqp::{self, ChannelExt, ConsumerExt}; use ofborg::easylapin; use ofborg::tasks; -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> Result<(), Box> { ofborg::setup_log(); let arg = env::args() @@ -22,8 +22,8 @@ fn main() -> Result<(), Box> { panic!(); }; - let conn = easylapin::from_config(&filter_cfg.rabbitmq)?; - let mut chan = block_on(conn.create_channel())?; + let conn = easylapin::from_config(&filter_cfg.rabbitmq).await?; + let mut chan = conn.create_channel().await?; chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "github-events".to_owned(), @@ -33,7 +33,8 @@ fn main() -> Result<(), Box> { auto_delete: false, no_wait: false, internal: false, - })?; + }) + .await?; chan.declare_queue(easyamqp::QueueConfig { queue: 
"mass-rebuild-check-jobs".to_owned(), @@ -42,7 +43,8 @@ fn main() -> Result<(), Box> { exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; let queue_name = String::from("mass-rebuild-check-inputs"); chan.declare_queue(easyamqp::QueueConfig { @@ -52,29 +54,33 @@ fn main() -> Result<(), Box> { exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; chan.bind_queue(easyamqp::BindQueueConfig { queue: queue_name.clone(), exchange: "github-events".to_owned(), routing_key: Some("pull_request.nixos/*".to_owned()), no_wait: false, - })?; + }) + .await?; - let handle = easylapin::WorkerChannel(chan).consume( - tasks::evaluationfilter::EvaluationFilterWorker::new(cfg.acl()), - easyamqp::ConsumeConfig { - queue: queue_name.clone(), - consumer_tag: format!("{}-evaluation-filter", cfg.whoami()), - no_local: false, - no_ack: false, - no_wait: false, - exclusive: false, - }, - )?; + let handle = easylapin::WorkerChannel(chan) + .consume( + tasks::evaluationfilter::EvaluationFilterWorker::new(cfg.acl()), + easyamqp::ConsumeConfig { + queue: queue_name.clone(), + consumer_tag: format!("{}-evaluation-filter", cfg.whoami()), + no_local: false, + no_ack: false, + no_wait: false, + exclusive: false, + }, + ) + .await?; info!("Fetching jobs from {}", &queue_name); - block_on(handle); + handle.await; drop(conn); // Close connection. info!("Closed the session... 
EOF"); diff --git a/ofborg/src/bin/github-comment-filter.rs b/ofborg/src/bin/github-comment-filter.rs index 95a17dfb..4d1427e0 100644 --- a/ofborg/src/bin/github-comment-filter.rs +++ b/ofborg/src/bin/github-comment-filter.rs @@ -1,7 +1,6 @@ use std::env; use std::error::Error; -use ofborg::block_on; use ofborg::systems::System; use tracing::{error, info}; @@ -10,7 +9,8 @@ use ofborg::easyamqp::{self, ChannelExt, ConsumerExt}; use ofborg::easylapin; use ofborg::tasks; -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> Result<(), Box> { ofborg::setup_log(); let arg = env::args() @@ -23,8 +23,8 @@ fn main() -> Result<(), Box> { panic!(); }; - let conn = easylapin::from_config(&filter_cfg.rabbitmq)?; - let mut chan = block_on(conn.create_channel())?; + let conn = easylapin::from_config(&filter_cfg.rabbitmq).await?; + let mut chan = conn.create_channel().await?; chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "github-events".to_owned(), @@ -34,7 +34,8 @@ fn main() -> Result<(), Box> { auto_delete: false, no_wait: false, internal: false, - })?; + }) + .await?; chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "build-jobs".to_owned(), @@ -44,7 +45,8 @@ fn main() -> Result<(), Box> { auto_delete: false, no_wait: false, internal: false, - })?; + }) + .await?; let queue_name = "build-inputs"; chan.declare_queue(easyamqp::QueueConfig { @@ -54,14 +56,16 @@ fn main() -> Result<(), Box> { exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; chan.bind_queue(easyamqp::BindQueueConfig { queue: "build-inputs".to_owned(), exchange: "github-events".to_owned(), routing_key: Some("issue_comment.*".to_owned()), no_wait: false, - })?; + }) + .await?; chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "build-results".to_owned(), @@ -71,7 +75,8 @@ fn main() -> Result<(), Box> { auto_delete: false, no_wait: false, internal: false, - })?; + }) + .await?; // Create build job queues for sys in 
System::all_known_systems().iter().map(System::to_string) { @@ -82,23 +87,26 @@ fn main() -> Result<(), Box> { exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; } - let handle = easylapin::WorkerChannel(chan).consume( - tasks::githubcommentfilter::GitHubCommentWorker::new(cfg.acl(), cfg.github()), - easyamqp::ConsumeConfig { - queue: "build-inputs".to_owned(), - consumer_tag: format!("{}-github-comment-filter", cfg.whoami()), - no_local: false, - no_ack: false, - no_wait: false, - exclusive: false, - }, - )?; + let handle = easylapin::WorkerChannel(chan) + .consume( + tasks::githubcommentfilter::GitHubCommentWorker::new(cfg.acl(), cfg.github()), + easyamqp::ConsumeConfig { + queue: "build-inputs".to_owned(), + consumer_tag: format!("{}-github-comment-filter", cfg.whoami()), + no_local: false, + no_ack: false, + no_wait: false, + exclusive: false, + }, + ) + .await?; info!("Fetching jobs from {}", &queue_name); - block_on(handle); + handle.await; drop(conn); // Close connection. info!("Closed the session... 
EOF"); diff --git a/ofborg/src/bin/github-comment-poster.rs b/ofborg/src/bin/github-comment-poster.rs index 579fddbd..3b26b83c 100644 --- a/ofborg/src/bin/github-comment-poster.rs +++ b/ofborg/src/bin/github-comment-poster.rs @@ -1,7 +1,6 @@ use std::env; use std::error::Error; -use ofborg::block_on; use tracing::{error, info}; use ofborg::config; @@ -9,7 +8,8 @@ use ofborg::easyamqp::{self, ChannelExt, ConsumerExt}; use ofborg::easylapin; use ofborg::tasks; -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> Result<(), Box> { ofborg::setup_log(); let arg = env::args() @@ -22,8 +22,8 @@ fn main() -> Result<(), Box> { panic!(); }; - let conn = easylapin::from_config(&poster_cfg.rabbitmq)?; - let mut chan = block_on(conn.create_channel())?; + let conn = easylapin::from_config(&poster_cfg.rabbitmq).await?; + let mut chan = conn.create_channel().await?; chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "build-results".to_owned(), @@ -33,7 +33,8 @@ fn main() -> Result<(), Box> { auto_delete: false, no_wait: false, internal: false, - })?; + }) + .await?; chan.declare_queue(easyamqp::QueueConfig { queue: "build-results".to_owned(), @@ -42,28 +43,32 @@ fn main() -> Result<(), Box> { exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; chan.bind_queue(easyamqp::BindQueueConfig { queue: "build-results".to_owned(), exchange: "build-results".to_owned(), routing_key: None, no_wait: false, - })?; + }) + .await?; - let handle = easylapin::WorkerChannel(chan).consume( - tasks::githubcommentposter::GitHubCommentPoster::new(cfg.github_app_vendingmachine()), - easyamqp::ConsumeConfig { - queue: "build-results".to_owned(), - consumer_tag: format!("{}-github-comment-poster", cfg.whoami()), - no_local: false, - no_ack: false, - no_wait: false, - exclusive: false, - }, - )?; + let handle = easylapin::WorkerChannel(chan) + .consume( + tasks::githubcommentposter::GitHubCommentPoster::new(cfg.github_app_vendingmachine()), + 
easyamqp::ConsumeConfig { + queue: "build-results".to_owned(), + consumer_tag: format!("{}-github-comment-poster", cfg.whoami()), + no_local: false, + no_ack: false, + no_wait: false, + exclusive: false, + }, + ) + .await?; - block_on(handle); + handle.await; drop(conn); // Close connection. info!("Closed the session... EOF"); diff --git a/ofborg/src/bin/github-webhook-receiver.rs b/ofborg/src/bin/github-webhook-receiver.rs index ef570f68..5bbf2687 100644 --- a/ofborg/src/bin/github-webhook-receiver.rs +++ b/ofborg/src/bin/github-webhook-receiver.rs @@ -22,7 +22,7 @@ use tracing::{error, info, warn}; /// Prepares the the exchange we will write to, the queues that are bound to it /// and binds them. -fn setup_amqp(chan: &mut Channel) -> Result<(), Box> { +async fn setup_amqp(chan: &mut Channel) -> Result<(), Box> { chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "github-events".to_owned(), exchange_type: easyamqp::ExchangeType::Topic, @@ -31,7 +31,8 @@ fn setup_amqp(chan: &mut Channel) -> Result<(), Box> { auto_delete: false, no_wait: false, internal: false, - })?; + }) + .await?; let queue_name = String::from("build-inputs"); chan.declare_queue(easyamqp::QueueConfig { @@ -41,13 +42,15 @@ fn setup_amqp(chan: &mut Channel) -> Result<(), Box> { exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; chan.bind_queue(easyamqp::BindQueueConfig { queue: queue_name.clone(), exchange: "github-events".to_owned(), routing_key: Some(String::from("issue_comment.*")), no_wait: false, - })?; + }) + .await?; let queue_name = String::from("github-events-unknown"); chan.declare_queue(easyamqp::QueueConfig { @@ -57,13 +60,15 @@ fn setup_amqp(chan: &mut Channel) -> Result<(), Box> { exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; chan.bind_queue(easyamqp::BindQueueConfig { queue: queue_name.clone(), exchange: "github-events".to_owned(), routing_key: Some(String::from("unknown.*")), no_wait: false, - })?; + }) + .await?; let 
queue_name = String::from("mass-rebuild-check-inputs"); chan.declare_queue(easyamqp::QueueConfig { @@ -73,13 +78,15 @@ fn setup_amqp(chan: &mut Channel) -> Result<(), Box> { exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; chan.bind_queue(easyamqp::BindQueueConfig { queue: queue_name.clone(), exchange: "github-events".to_owned(), routing_key: Some(String::from("pull_request.*")), no_wait: false, - })?; + }) + .await?; Ok(()) } @@ -243,9 +250,9 @@ async fn main() -> Result<(), Box> { .expect("Unable to read webhook secret file"); let webhook_secret = Arc::new(webhook_secret.trim().to_string()); - let conn = easylapin::from_config(&cfg.rabbitmq)?; + let conn = easylapin::from_config(&cfg.rabbitmq).await?; let mut chan = conn.create_channel().await?; - setup_amqp(&mut chan)?; + setup_amqp(&mut chan).await?; let chan = Arc::new(Mutex::new(chan)); let addr: SocketAddr = cfg.listen.parse()?; diff --git a/ofborg/src/bin/log-message-collector.rs b/ofborg/src/bin/log-message-collector.rs index 19bfadb9..3ad668cb 100644 --- a/ofborg/src/bin/log-message-collector.rs +++ b/ofborg/src/bin/log-message-collector.rs @@ -2,7 +2,6 @@ use std::env; use std::error::Error; use std::path::PathBuf; -use ofborg::block_on; use tracing::{error, info}; use ofborg::config; @@ -10,7 +9,8 @@ use ofborg::easyamqp::{self, ChannelExt, ConsumerExt}; use ofborg::easylapin; use ofborg::tasks; -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> Result<(), Box> { ofborg::setup_log(); let arg = env::args() @@ -23,8 +23,8 @@ fn main() -> Result<(), Box> { panic!(); }; - let conn = easylapin::from_config(&collector_cfg.rabbitmq)?; - let mut chan = block_on(conn.create_channel())?; + let conn = easylapin::from_config(&collector_cfg.rabbitmq).await?; + let mut chan = conn.create_channel().await?; chan.declare_exchange(easyamqp::ExchangeConfig { exchange: "logs".to_owned(), @@ -34,7 +34,8 @@ fn main() -> Result<(), Box> { auto_delete: false, no_wait: false, 
internal: false, - })?; + }) + .await?; let queue_name = "logs".to_owned(); chan.declare_queue(easyamqp::QueueConfig { @@ -44,33 +45,37 @@ fn main() -> Result<(), Box> { exclusive: true, auto_delete: true, no_wait: false, - })?; + }) + .await?; chan.bind_queue(easyamqp::BindQueueConfig { queue: queue_name.clone(), exchange: "logs".to_owned(), routing_key: Some("*.*".to_owned()), no_wait: false, - })?; + }) + .await?; // Regular channel, we want prefetching here. - let handle = chan.consume( - tasks::log_message_collector::LogMessageCollector::new( - PathBuf::from(collector_cfg.logs_path), - 100, - ), - easyamqp::ConsumeConfig { - queue: queue_name.clone(), - consumer_tag: format!("{}-log-collector", cfg.whoami()), - no_local: false, - no_ack: false, - no_wait: false, - exclusive: false, - }, - )?; + let handle = chan + .consume( + tasks::log_message_collector::LogMessageCollector::new( + PathBuf::from(collector_cfg.logs_path), + 100, + ), + easyamqp::ConsumeConfig { + queue: queue_name.clone(), + consumer_tag: format!("{}-log-collector", cfg.whoami()), + no_local: false, + no_ack: false, + no_wait: false, + exclusive: false, + }, + ) + .await?; info!("Fetching jobs from {}", &queue_name); - block_on(handle); + handle.await; drop(conn); // Close connection. info!("Closed the session... 
EOF"); diff --git a/ofborg/src/bin/mass-rebuilder.rs b/ofborg/src/bin/mass-rebuilder.rs index f96e45e3..10877590 100644 --- a/ofborg/src/bin/mass-rebuilder.rs +++ b/ofborg/src/bin/mass-rebuilder.rs @@ -2,7 +2,6 @@ use std::env; use std::error::Error; use std::path::Path; -use ofborg::block_on; use tracing::{error, info}; use ofborg::checkout; @@ -12,7 +11,8 @@ use ofborg::easylapin; use ofborg::stats; use ofborg::tasks; -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> Result<(), Box> { ofborg::setup_log(); let arg = env::args() @@ -25,13 +25,13 @@ fn main() -> Result<(), Box> { panic!(); }; - let conn = easylapin::from_config(&rebuilder_cfg.rabbitmq)?; - let mut chan = block_on(conn.create_channel())?; + let conn = easylapin::from_config(&rebuilder_cfg.rabbitmq).await?; + let mut chan = conn.create_channel().await?; let root = Path::new(&cfg.checkout.root); let cloner = checkout::cached_cloner(&root.join(cfg.runner.instance.to_string())); - let events = stats::RabbitMq::from_lapin(&cfg.whoami(), block_on(conn.create_channel())?); + let events = stats::RabbitMq::from_lapin(&cfg.whoami(), conn.create_channel().await?); let queue_name = String::from("mass-rebuild-check-jobs"); chan.declare_queue(easyamqp::QueueConfig { @@ -41,28 +41,31 @@ fn main() -> Result<(), Box> { exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; - let handle = easylapin::WorkerChannel(chan).consume( - tasks::evaluate::EvaluationWorker::new( - cloner, - cfg.github_app_vendingmachine(), - cfg.acl(), - cfg.runner.identity.clone(), - events, - ), - easyamqp::ConsumeConfig { - queue: queue_name.clone(), - consumer_tag: format!("{}-mass-rebuild-checker", cfg.whoami()), - no_local: false, - no_ack: false, - no_wait: false, - exclusive: false, - }, - )?; + let handle = easylapin::WorkerChannel(chan) + .consume( + tasks::evaluate::EvaluationWorker::new( + cloner, + cfg.github_app_vendingmachine(), + cfg.acl(), + cfg.runner.identity.clone(), + events, + ), + 
easyamqp::ConsumeConfig { + queue: queue_name.clone(), + consumer_tag: format!("{}-mass-rebuild-checker", cfg.whoami()), + no_local: false, + no_ack: false, + no_wait: false, + exclusive: false, + }, + ) + .await?; info!("Fetching jobs from {}", queue_name); - block_on(handle); + handle.await; drop(conn); // Close connection. info!("Closed the session... EOF"); diff --git a/ofborg/src/bin/stats.rs b/ofborg/src/bin/stats.rs index 62a29651..72b04e97 100644 --- a/ofborg/src/bin/stats.rs +++ b/ofborg/src/bin/stats.rs @@ -10,7 +10,6 @@ use hyper::server::conn::http1; use hyper::service::service_fn; use hyper::{Request, Response}; use hyper_util::rt::TokioIo; -use ofborg::block_on; use tokio::net::TcpListener; use tracing::{error, info, warn}; @@ -50,7 +49,8 @@ async fn run_http_server( } } -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> Result<(), Box> { ofborg::setup_log(); let arg = env::args() @@ -63,11 +63,11 @@ fn main() -> Result<(), Box> { panic!(); }; - let conn = easylapin::from_config(&stats_cfg.rabbitmq)?; + let conn = easylapin::from_config(&stats_cfg.rabbitmq).await?; - let mut chan = block_on(conn.create_channel())?; + let mut chan = conn.create_channel().await?; - let events = stats::RabbitMq::from_lapin(&cfg.whoami(), block_on(conn.create_channel())?); + let events = stats::RabbitMq::from_lapin(&cfg.whoami(), conn.create_channel().await?); let metrics = Arc::new(stats::MetricCollector::new()); let collector = tasks::statscollector::StatCollectorWorker::new(events, (*metrics).clone()); @@ -80,7 +80,8 @@ fn main() -> Result<(), Box> { auto_delete: false, no_wait: false, internal: false, - })?; + }) + .await?; let queue_name = String::from("stats-events"); chan.declare_queue(easyamqp::QueueConfig { @@ -90,39 +91,42 @@ fn main() -> Result<(), Box> { exclusive: false, auto_delete: false, no_wait: false, - })?; + }) + .await?; chan.bind_queue(easyamqp::BindQueueConfig { queue: queue_name.clone(), exchange: "stats".to_owned(), routing_key: 
None, no_wait: false, - })?; - - let handle = chan.consume( - collector, - easyamqp::ConsumeConfig { - queue: "stats-events".to_owned(), - consumer_tag: format!("{}-prometheus-stats-collector", cfg.whoami()), - no_local: false, - no_ack: false, - no_wait: false, - exclusive: false, - }, - )?; + }) + .await?; + + let handle = chan + .consume( + collector, + easyamqp::ConsumeConfig { + queue: "stats-events".to_owned(), + consumer_tag: format!("{}-prometheus-stats-collector", cfg.whoami()), + no_local: false, + no_ack: false, + no_wait: false, + exclusive: false, + }, + ) + .await?; // Spawn HTTP server in a separate thread with its own tokio runtime let metrics_clone = metrics.clone(); - std::thread::spawn(move || { - let rt = tokio::runtime::Runtime::new().expect("Failed to create tokio runtime"); + std::thread::spawn(async move || { let addr: SocketAddr = "0.0.0.0:9898".parse().unwrap(); - if let Err(e) = rt.block_on(run_http_server(addr, metrics_clone)) { + if let Err(e) = run_http_server(addr, metrics_clone).await { error!("HTTP server error: {:?}", e); } }); info!("Fetching jobs from {}", &queue_name); - block_on(handle); + handle.await; drop(conn); // Close connection. info!("Closed the session... 
EOF"); diff --git a/ofborg/src/commitstatus.rs b/ofborg/src/commitstatus.rs index 53de8881..6747f3b0 100644 --- a/ofborg/src/commitstatus.rs +++ b/ofborg/src/commitstatus.rs @@ -1,4 +1,3 @@ -use crate::block_on; use futures_util::future::TryFutureExt; use tracing::warn; @@ -35,20 +34,20 @@ impl CommitStatus { self.url = url.unwrap_or_else(|| String::from("")) } - pub fn set_with_description( + pub async fn set_with_description( &mut self, description: &str, state: hubcaps::statuses::State, ) -> Result<(), CommitStatusError> { self.set_description(description.to_owned()); - self.set(state) + self.set(state).await } pub fn set_description(&mut self, description: String) { self.description = description; } - pub fn set(&self, state: hubcaps::statuses::State) -> Result<(), CommitStatusError> { + pub async fn set(&self, state: hubcaps::statuses::State) -> Result<(), CommitStatusError> { let desc = if self.description.len() >= 140 { warn!( "description is over 140 char; truncating: {:?}", @@ -58,19 +57,19 @@ impl CommitStatus { } else { self.description.clone() }; - block_on( - self.api - .create( - self.sha.as_ref(), - &hubcaps::statuses::StatusOptions::builder(state) - .context(self.context.clone()) - .description(desc) - .target_url(self.url.clone()) - .build(), - ) - .map_ok(|_| ()) - .map_err(|e| CommitStatusError::from(e)), - ) + self.api + .create( + self.sha.as_ref(), + &hubcaps::statuses::StatusOptions::builder(state) + .context(self.context.clone()) + .description(desc) + .target_url(self.url.clone()) + .build(), + ) + .map_ok(|_| ()) + .map_err(|e| CommitStatusError::from(e)) + .await?; + Ok(()) } } diff --git a/ofborg/src/config.rs b/ofborg/src/config.rs index fd4cbd45..f158f1c1 100644 --- a/ofborg/src/config.rs +++ b/ofborg/src/config.rs @@ -1,7 +1,7 @@ use crate::acl; use crate::nix::Nix; -use std::collections::HashMap; +use std::collections::{HashMap, hash_map::Entry}; use std::fmt; use std::fs::File; use std::io::{BufReader, Read}; @@ -328,34 +328,38 @@ 
impl GithubAppVendingMachine { .expect("Unable to create JWTCredentials") } - fn install_id_for_repo(&mut self, owner: &str, repo: &str) -> Option { + async fn install_id_for_repo(&mut self, owner: &str, repo: &str) -> Option { let useragent = self.useragent(); let jwt = self.jwt(); let key = (owner.to_owned(), repo.to_owned()); - *self.id_cache.entry(key).or_insert_with(|| { - info!("Looking up install ID for {}/{}", owner, repo); - - let lookup_gh = Github::new(useragent, Credentials::JWT(jwt)).unwrap(); - - match crate::block_on(lookup_gh.app().find_repo_installation(owner, repo)) { - Ok(install_id) => { - debug!("Received install ID {:?}", install_id); - Some(install_id.id) - } - Err(e) => { - warn!("Error during install ID lookup: {:?}", e); - None - } + match self.id_cache.entry(key) { + Entry::Occupied(entry) => *entry.get(), + Entry::Vacant(entry) => { + info!("Looking up install ID for {}/{}", owner, repo); + + let lookup_gh = Github::new(useragent, Credentials::JWT(jwt)).unwrap(); + + let v = match lookup_gh.app().find_repo_installation(owner, repo).await { + Ok(install_id) => { + debug!("Received install ID {:?}", install_id); + Some(install_id.id) + } + Err(e) => { + warn!("Error during install ID lookup: {:?}", e); + None + } + }; + *entry.insert(v) } - }) + } } - pub fn for_repo<'a>(&'a mut self, owner: &str, repo: &str) -> Option<&'a Github> { + pub async fn for_repo<'a>(&'a mut self, owner: &str, repo: &str) -> Option<&'a Github> { let useragent = self.useragent(); let jwt = self.jwt(); - let install_id = self.install_id_for_repo(owner, repo)?; + let install_id = self.install_id_for_repo(owner, repo).await?; Some(self.client_cache.entry(install_id).or_insert_with(|| { Github::new( diff --git a/ofborg/src/easyamqp.rs b/ofborg/src/easyamqp.rs index ccdba9f8..2a84bb84 100644 --- a/ofborg/src/easyamqp.rs +++ b/ofborg/src/easyamqp.rs @@ -260,13 +260,28 @@ pub struct QueueConfig { pub trait ChannelExt { type Error; - fn declare_exchange(&mut self, config: 
ExchangeConfig) -> Result<(), Self::Error>; - fn declare_queue(&mut self, config: QueueConfig) -> Result<(), Self::Error>; - fn bind_queue(&mut self, config: BindQueueConfig) -> Result<(), Self::Error>; + + fn declare_exchange( + &mut self, + config: ExchangeConfig, + ) -> impl std::future::Future>; + fn declare_queue( + &mut self, + config: QueueConfig, + ) -> impl std::future::Future>; + fn bind_queue( + &mut self, + config: BindQueueConfig, + ) -> impl std::future::Future>; } pub trait ConsumerExt<'a, C> { type Error; type Handle; - fn consume(self, callback: C, config: ConsumeConfig) -> Result; + + fn consume( + self, + callback: C, + config: ConsumeConfig, + ) -> impl std::future::Future>; } diff --git a/ofborg/src/easylapin.rs b/ofborg/src/easylapin.rs index e4d36906..29e9d00d 100644 --- a/ofborg/src/easylapin.rs +++ b/ofborg/src/easylapin.rs @@ -1,4 +1,5 @@ use std::pin::Pin; +use std::sync::Arc; use crate::config::RabbitMqConfig; use crate::easyamqp::{ @@ -19,7 +20,7 @@ use lapin::{BasicProperties, Channel, Connection, ConnectionProperties, Exchange use tokio_stream::StreamExt; use tracing::{debug, trace}; -pub fn from_config(cfg: &RabbitMqConfig) -> Result { +pub async fn from_config(cfg: &RabbitMqConfig) -> Result { let mut props = FieldTable::default(); props.insert( "ofborg_version".into(), @@ -29,13 +30,13 @@ pub fn from_config(cfg: &RabbitMqConfig) -> Result { client_properties: props, ..Default::default() }; - crate::block_on(Connection::connect(&cfg.as_uri()?, opts)) + Connection::connect(&cfg.as_uri()?, opts).await } impl ChannelExt for Channel { type Error = lapin::Error; - fn declare_exchange(&mut self, config: ExchangeConfig) -> Result<(), Self::Error> { + async fn declare_exchange(&mut self, config: ExchangeConfig) -> Result<(), Self::Error> { let opts = ExchangeDeclareOptions { passive: config.passive, durable: config.durable, @@ -49,16 +50,12 @@ impl ChannelExt for Channel { ExchangeType::Fanout => ExchangeKind::Fanout, _ => panic!("exchange 
kind"), }; - crate::block_on(self.exchange_declare( - &config.exchange, - kind, - opts, - FieldTable::default(), - ))?; + self.exchange_declare(&config.exchange, kind, opts, FieldTable::default()) + .await?; Ok(()) } - fn declare_queue(&mut self, config: QueueConfig) -> Result<(), Self::Error> { + async fn declare_queue(&mut self, config: QueueConfig) -> Result<(), Self::Error> { let opts = QueueDeclareOptions { passive: config.passive, durable: config.durable, @@ -67,22 +64,24 @@ impl ChannelExt for Channel { nowait: config.no_wait, }; - crate::block_on(self.queue_declare(&config.queue, opts, FieldTable::default()))?; + self.queue_declare(&config.queue, opts, FieldTable::default()) + .await?; Ok(()) } - fn bind_queue(&mut self, config: BindQueueConfig) -> Result<(), Self::Error> { + async fn bind_queue(&mut self, config: BindQueueConfig) -> Result<(), Self::Error> { let opts = QueueBindOptions { nowait: config.no_wait, }; - crate::block_on(self.queue_bind( + self.queue_bind( &config.queue, &config.exchange, &config.routing_key.unwrap_or_else(|| "".into()), opts, FieldTable::default(), - ))?; + ) + .await?; Ok(()) } } @@ -91,13 +90,19 @@ impl<'a, W: SimpleWorker + 'a> ConsumerExt<'a, W> for Channel { type Error = lapin::Error; type Handle = Pin + 'a>>; - fn consume(self, mut worker: W, config: ConsumeConfig) -> Result { - let mut consumer = crate::block_on(self.basic_consume( - &config.queue, - &config.consumer_tag, - BasicConsumeOptions::default(), - FieldTable::default(), - ))?; + async fn consume( + self, + mut worker: W, + config: ConsumeConfig, + ) -> Result { + let mut consumer = self + .basic_consume( + &config.queue, + &config.consumer_tag, + BasicConsumeOptions::default(), + FieldTable::default(), + ) + .await?; Ok(Box::pin(async move { while let Some(Ok(deliver)) = consumer.next().await { debug!(?deliver.delivery_tag, "consumed delivery"); @@ -108,9 +113,10 @@ impl<'a, W: SimpleWorker + 'a> ConsumerExt<'a, W> for Channel { &content_type.as_ref().map(|s| 
s.to_string()), &deliver.data, ) + .await .expect("worker unexpected message consumed"); - for action in worker.consumer(&job) { + for action in worker.consumer(&job).await { action_deliver(&self, &deliver, action) .await .expect("action deliver failure"); @@ -129,26 +135,28 @@ impl<'a, W: SimpleWorker + 'a> ConsumerExt<'a, W> for WorkerChannel { type Error = lapin::Error; type Handle = Pin + 'a>>; - fn consume(self, worker: W, config: ConsumeConfig) -> Result { - crate::block_on(self.0.basic_qos(1, BasicQosOptions::default()))?; - self.0.consume(worker, config) + async fn consume(self, worker: W, config: ConsumeConfig) -> Result { + self.0.basic_qos(1, BasicQosOptions::default()).await?; + self.0.consume(worker, config).await } } -pub struct ChannelNotificationReceiver<'a> { - channel: &'a mut lapin::Channel, - deliver: &'a Delivery, +pub struct ChannelNotificationReceiver { + channel: lapin::Channel, + deliver: Delivery, } -impl<'a> ChannelNotificationReceiver<'a> { - pub fn new(channel: &'a mut lapin::Channel, deliver: &'a Delivery) -> Self { +impl ChannelNotificationReceiver { + pub fn new(channel: lapin::Channel, deliver: Delivery) -> Self { ChannelNotificationReceiver { channel, deliver } } } -impl NotificationReceiver for ChannelNotificationReceiver<'_> { - fn tell(&mut self, action: Action) { - crate::block_on(action_deliver(self.channel, self.deliver, action)) +#[async_trait::async_trait] +impl NotificationReceiver for ChannelNotificationReceiver { + async fn tell(&self, action: Action) { + action_deliver(&self.channel, &self.deliver, action) + .await .expect("action deliver failure"); } } @@ -161,35 +169,39 @@ impl<'a, W: SimpleNotifyWorker + 'a + Send> ConsumerExt<'a, W> for NotifyChannel type Error = lapin::Error; type Handle = Pin + 'a + Send>>; - fn consume(self, worker: W, config: ConsumeConfig) -> Result { - crate::block_on(self.0.basic_qos(1, BasicQosOptions::default()))?; + async fn consume(self, worker: W, config: ConsumeConfig) -> Result { + 
self.0.basic_qos(1, BasicQosOptions::default()).await?; - let mut consumer = crate::block_on(self.0.basic_consume( - &config.queue, - &config.consumer_tag, - BasicConsumeOptions::default(), - FieldTable::default(), - ))?; - let mut chan = self.0; + let mut consumer = self + .0 + .basic_consume( + &config.queue, + &config.consumer_tag, + BasicConsumeOptions::default(), + FieldTable::default(), + ) + .await?; + let chan = self.0; Ok(Box::pin(async move { while let Some(Ok(deliver)) = consumer.next().await { - debug!(?deliver.delivery_tag, "consumed delivery"); - let mut receiver = ChannelNotificationReceiver { - channel: &mut chan, - deliver: &deliver, + let delivery_tag = deliver.delivery_tag; + debug!(?delivery_tag, "consumed delivery"); + let receiver = ChannelNotificationReceiver { + channel: chan.clone(), + deliver, }; - let content_type = deliver.properties.content_type(); + let content_type = receiver.deliver.properties.content_type(); let job = worker .msg_to_job( - deliver.routing_key.as_str(), + receiver.deliver.routing_key.as_str(), &content_type.as_ref().map(|s| s.to_string()), - &deliver.data, + &receiver.deliver.data, ) .expect("worker unexpected message consumed"); - worker.consumer(&job, &mut receiver); - debug!(?deliver.delivery_tag, "done"); + worker.consumer(job, Arc::new(receiver)).await; + debug!(?delivery_tag, "done"); } })) } @@ -219,21 +231,21 @@ async fn action_deliver( chan.basic_nack(deliver.delivery_tag, BasicNackOptions::default()) .await } - Action::Publish(mut msg) => { - let exch = msg.exchange.take().unwrap_or_else(|| "".to_owned()); - let key = msg.routing_key.take().unwrap_or_else(|| "".to_owned()); + Action::Publish(msg) => { + let exch = msg.exchange.as_deref().unwrap_or(""); + let key = msg.routing_key.as_deref().unwrap_or(""); trace!(?exch, ?key, "action publish"); let mut props = BasicProperties::default().with_delivery_mode(2); // persistent. 
- if let Some(s) = msg.content_type { + if let Some(s) = msg.content_type.as_deref() { props = props.with_content_type(s.into()); } let _confirmaton = chan .basic_publish( - &exch, - &key, + exch, + key, BasicPublishOptions::default(), &msg.content, props, diff --git a/ofborg/src/lib.rs b/ofborg/src/lib.rs index 9dbf68a1..51d00f9f 100644 --- a/ofborg/src/lib.rs +++ b/ofborg/src/lib.rs @@ -9,7 +9,6 @@ extern crate nom; use std::env; -use std::future::Future; use tracing_subscriber::EnvFilter; use tracing_subscriber::prelude::*; @@ -109,22 +108,3 @@ pub fn setup_log() { tracing::info!("Logging configured"); } - -/// Block on a future from synchronous code. -/// -/// This helper bridges sync and async code throughout the codebase, -/// used for both RabbitMQ (lapin) and GitHub API (hubcaps) operations. -pub fn block_on(f: F) -> F::Output { - // Try to use the current runtime if we're already in one - if let Ok(handle) = tokio::runtime::Handle::try_current() { - // We're inside a tokio runtime, use block_in_place - tokio::task::block_in_place(|| handle.block_on(f)) - } else { - // Create a new runtime for this blocking call - tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .expect("Failed to create tokio runtime") - .block_on(f) - } -} diff --git a/ofborg/src/notifyworker.rs b/ofborg/src/notifyworker.rs index c076342d..f83ab04a 100644 --- a/ofborg/src/notifyworker.rs +++ b/ofborg/src/notifyworker.rs @@ -1,9 +1,16 @@ +use std::sync::Arc; + use crate::worker::Action; +#[async_trait::async_trait] pub trait SimpleNotifyWorker { type J; - fn consumer(&self, job: &Self::J, notifier: &mut dyn NotificationReceiver); + async fn consumer( + &self, + job: Self::J, + notifier: Arc, + ); fn msg_to_job( &self, @@ -13,13 +20,14 @@ pub trait SimpleNotifyWorker { ) -> Result; } +#[async_trait::async_trait] pub trait NotificationReceiver { - fn tell(&mut self, action: Action); + async fn tell(&self, action: Action); } #[derive(Default)] pub struct 
DummyNotificationReceiver { - pub actions: Vec, + pub actions: parking_lot::Mutex>, } impl DummyNotificationReceiver { @@ -28,8 +36,10 @@ impl DummyNotificationReceiver { } } +#[async_trait::async_trait] impl NotificationReceiver for DummyNotificationReceiver { - fn tell(&mut self, action: Action) { - self.actions.push(action); + async fn tell(&self, action: Action) { + let mut actions = self.actions.lock(); + actions.push(action); } } diff --git a/ofborg/src/stats.rs b/ofborg/src/stats.rs index 08deb27e..5a7034d9 100644 --- a/ofborg/src/stats.rs +++ b/ofborg/src/stats.rs @@ -9,7 +9,7 @@ mod macros { } pub trait SysEvents: Send { - fn notify(&mut self, event: Event); + fn notify(&mut self, event: Event) -> impl std::future::Future; } #[derive(serde::Serialize, serde::Deserialize, Debug)] @@ -33,27 +33,25 @@ impl RabbitMq { } impl SysEvents for RabbitMq { - fn notify(&mut self, event: Event) { + async fn notify(&mut self, event: Event) { let props = lapin::BasicProperties::default().with_content_type("application/json".into()); - crate::block_on(async { - let _confirmaton = self - .channel - .basic_publish( - &String::from("stats"), - "", - BasicPublishOptions::default(), - &serde_json::to_string(&EventMessage { - sender: self.identity.clone(), - events: vec![event], - }) - .unwrap() - .into_bytes(), - props, - ) - .await + let _confirmaton = self + .channel + .basic_publish( + &String::from("stats"), + "", + BasicPublishOptions::default(), + &serde_json::to_string(&EventMessage { + sender: self.identity.clone(), + events: vec![event], + }) .unwrap() - .await - .unwrap(); - }); + .into_bytes(), + props, + ) + .await + .unwrap() + .await + .unwrap(); } } diff --git a/ofborg/src/tasks/build.rs b/ofborg/src/tasks/build.rs index 268a44f7..9ea59de3 100644 --- a/ofborg/src/tasks/build.rs +++ b/ofborg/src/tasks/build.rs @@ -7,6 +7,8 @@ use crate::notifyworker; use crate::worker; use std::collections::VecDeque; +use std::sync::Arc; +use std::sync::atomic::{AtomicU64, 
Ordering}; use tracing::{debug, debug_span, error, info}; use uuid::Uuid; @@ -33,22 +35,24 @@ impl BuildWorker { } } - fn actions<'a, 'b>( + fn actions( &self, - job: &'b buildjob::BuildJob, - receiver: &'a mut dyn notifyworker::NotificationReceiver, - ) -> JobActions<'a, 'b> { + job: buildjob::BuildJob, + receiver: Arc< + dyn notifyworker::NotificationReceiver + std::marker::Send + std::marker::Sync, + >, + ) -> JobActions { JobActions::new(&self.system, &self.identity, job, receiver) } } -pub struct JobActions<'a, 'b> { +pub struct JobActions { system: String, identity: String, - receiver: &'a mut dyn notifyworker::NotificationReceiver, - job: &'b buildjob::BuildJob, - line_counter: u64, - snippet_log: VecDeque, + receiver: Arc, + job: buildjob::BuildJob, + line_counter: AtomicU64, + snippet_log: parking_lot::RwLock>, attempt_id: String, log_exchange: Option, log_routing_key: Option, @@ -56,13 +60,15 @@ pub struct JobActions<'a, 'b> { result_routing_key: Option, } -impl<'a, 'b> JobActions<'a, 'b> { +impl JobActions { pub fn new( system: &str, identity: &str, - job: &'b buildjob::BuildJob, - receiver: &'a mut dyn notifyworker::NotificationReceiver, - ) -> JobActions<'a, 'b> { + job: buildjob::BuildJob, + receiver: Arc< + dyn notifyworker::NotificationReceiver + std::marker::Send + std::marker::Sync, + >, + ) -> JobActions { let (log_exchange, log_routing_key) = job .logs .clone() @@ -78,8 +84,8 @@ impl<'a, 'b> JobActions<'a, 'b> { identity: identity.to_owned(), receiver, job, - line_counter: 0, - snippet_log: VecDeque::with_capacity(10), + line_counter: 0.into(), + snippet_log: parking_lot::RwLock::new(VecDeque::with_capacity(10)), attempt_id: Uuid::new_v4().to_string(), log_exchange, log_routing_key, @@ -89,22 +95,22 @@ impl<'a, 'b> JobActions<'a, 'b> { } pub fn log_snippet(&self) -> Vec { - self.snippet_log.clone().into() + self.snippet_log.read().clone().into() } - pub fn pr_head_missing(&mut self) { - self.tell(worker::Action::Ack); + pub async fn 
pr_head_missing(&self) { + self.tell(worker::Action::Ack).await; } - pub fn commit_missing(&mut self) { - self.tell(worker::Action::Ack); + pub async fn commit_missing(&self) { + self.tell(worker::Action::Ack).await; } - pub fn nothing_to_do(&mut self) { - self.tell(worker::Action::Ack); + pub async fn nothing_to_do(&self) { + self.tell(worker::Action::Ack).await; } - pub fn merge_failed(&mut self) { + pub async fn merge_failed(&self) { let msg = BuildResult::V1 { tag: V1Tag::V1, repo: self.job.repo.clone(), @@ -125,11 +131,12 @@ impl<'a, 'b> JobActions<'a, 'b> { result_exchange, result_routing_key, &msg, - )); - self.tell(worker::Action::Ack); + )) + .await; + self.tell(worker::Action::Ack).await; } - pub fn log_started(&mut self, can_build: Vec, cannot_build: Vec) { + pub async fn log_started(&self, can_build: Vec, cannot_build: Vec) { let msg = buildlogmsg::BuildLogStart { identity: self.identity.clone(), system: self.system.clone(), @@ -145,34 +152,39 @@ impl<'a, 'b> JobActions<'a, 'b> { log_exchange, log_routing_key, &msg, - )); + )) + .await; } - pub fn log_instantiation_errors(&mut self, cannot_build: Vec<(String, Vec)>) { - for (attr, log) in &cannot_build { - self.log_line(&format!("Cannot nix-instantiate `{attr}` because:")); + pub async fn log_instantiation_errors(&self, cannot_build: Vec<(String, Vec)>) { + for (attr, log) in cannot_build { + self.log_line(format!("Cannot nix-instantiate `{attr}` because:")) + .await; for line in log { - self.log_line(line); + self.log_line(line).await; } - self.log_line(""); + self.log_line("".into()).await; } } - pub fn log_line(&mut self, line: &str) { - self.line_counter += 1; + pub async fn log_line(&self, line: String) { + self.line_counter.fetch_add(1, Ordering::SeqCst); - if self.snippet_log.len() >= 10 { - self.snippet_log.pop_front(); + { + let mut snippet_log = self.snippet_log.write(); + if snippet_log.len() >= 10 { + snippet_log.pop_front(); + } + snippet_log.push_back(line.clone()); } - 
self.snippet_log.push_back(line.to_owned()); let msg = buildlogmsg::BuildLogMsg { identity: self.identity.clone(), system: self.system.clone(), attempt_id: self.attempt_id.clone(), - line_number: self.line_counter, - output: line.to_owned(), + line_number: self.line_counter.load(Ordering::SeqCst), + output: line, }; let log_exchange = self.log_exchange.clone(); @@ -182,10 +194,11 @@ impl<'a, 'b> JobActions<'a, 'b> { log_exchange, log_routing_key, &msg, - )); + )) + .await; } - pub fn build_not_attempted(&mut self, not_attempted_attrs: Vec) { + pub async fn build_not_attempted(&self, not_attempted_attrs: Vec) { let msg = BuildResult::V1 { tag: V1Tag::V1, repo: self.job.repo.clone(), @@ -205,7 +218,8 @@ impl<'a, 'b> JobActions<'a, 'b> { result_exchange, result_routing_key, &msg, - )); + )) + .await; let log_exchange = self.log_exchange.clone(); let log_routing_key = self.log_routing_key.clone(); @@ -213,13 +227,14 @@ impl<'a, 'b> JobActions<'a, 'b> { log_exchange, log_routing_key, &msg, - )); + )) + .await; - self.tell(worker::Action::Ack); + self.tell(worker::Action::Ack).await; } - pub fn build_finished( - &mut self, + pub async fn build_finished( + &self, status: BuildStatus, attempted_attrs: Vec, not_attempted_attrs: Vec, @@ -243,7 +258,8 @@ impl<'a, 'b> JobActions<'a, 'b> { result_exchange, result_routing_key, &msg, - )); + )) + .await; let log_exchange = self.log_exchange.clone(); let log_routing_key = self.log_routing_key.clone(); @@ -251,16 +267,18 @@ impl<'a, 'b> JobActions<'a, 'b> { log_exchange, log_routing_key, &msg, - )); + )) + .await; - self.tell(worker::Action::Ack); + self.tell(worker::Action::Ack).await; } - fn tell(&mut self, action: worker::Action) { - self.receiver.tell(action); + async fn tell(&self, action: worker::Action) { + self.receiver.tell(action).await; } } +#[async_trait::async_trait] impl notifyworker::SimpleNotifyWorker for BuildWorker { type J = buildjob::BuildJob; @@ -277,60 +295,63 @@ impl notifyworker::SimpleNotifyWorker for 
BuildWorker { // FIXME: remove with rust/cargo update #[allow(clippy::cognitive_complexity)] - fn consumer( + async fn consumer( &self, - job: &buildjob::BuildJob, - notifier: &mut dyn notifyworker::NotificationReceiver, + job: buildjob::BuildJob, + notifier: Arc< + dyn notifyworker::NotificationReceiver + std::marker::Send + std::marker::Sync, + >, ) { let span = debug_span!("job", pr = ?job.pr.number); let _enter = span.enter(); - let mut actions = self.actions(job, notifier); + let actions = self.actions(job, notifier); - if job.attrs.is_empty() { + if actions.job.attrs.is_empty() { debug!("No attrs to build"); - actions.nothing_to_do(); + actions.nothing_to_do().await; return; } info!( "Working on https://github.com/{}/pull/{}", - job.repo.full_name, job.pr.number + actions.job.repo.full_name, actions.job.pr.number + ); + let project = self.cloner.project( + &actions.job.repo.full_name, + actions.job.repo.clone_url.clone(), ); - let project = self - .cloner - .project(&job.repo.full_name, job.repo.clone_url.clone()); let co = project .clone_for("builder".to_string(), self.identity.clone()) .unwrap(); - let target_branch = match job.pr.target_branch.clone() { + let target_branch = match actions.job.pr.target_branch.clone() { Some(x) => x, None => String::from("origin/master"), }; - let buildfile = match job.subset { + let buildfile = match actions.job.subset { Some(commentparser::Subset::NixOS) => nix::File::ReleaseNixOS, _ => nix::File::DefaultNixpkgs, }; let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap(); - if co.fetch_pr(job.pr.number).is_err() { - info!("Failed to fetch {}", job.pr.number); - actions.pr_head_missing(); + if co.fetch_pr(actions.job.pr.number).is_err() { + info!("Failed to fetch {}", actions.job.pr.number); + actions.pr_head_missing().await; return; } - if !co.commit_exists(job.pr.head_sha.as_ref()) { - info!("Commit {} doesn't exist", job.pr.head_sha); - actions.commit_missing(); + if 
!co.commit_exists(actions.job.pr.head_sha.as_ref()) { + info!("Commit {} doesn't exist", actions.job.pr.head_sha); + actions.commit_missing().await; return; } - if co.merge_commit(job.pr.head_sha.as_ref()).is_err() { - info!("Failed to merge {}", job.pr.head_sha); - actions.merge_failed(); + if co.merge_commit(actions.job.pr.head_sha.as_ref()).is_err() { + info!("Failed to merge {}", actions.job.pr.head_sha); + actions.merge_failed().await; return; } @@ -341,7 +362,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker { let (can_build, cannot_build) = self.nix.safely_partition_instantiable_attrs( refpath.as_ref(), buildfile, - job.attrs.clone(), + actions.job.attrs.clone(), ); let cannot_build_attrs: Vec = cannot_build @@ -356,11 +377,13 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker { cannot_build_attrs.join(", ") ); - actions.log_started(can_build.clone(), cannot_build_attrs.clone()); - actions.log_instantiation_errors(cannot_build); + actions + .log_started(can_build.clone(), cannot_build_attrs.clone()) + .await; + actions.log_instantiation_errors(cannot_build).await; if can_build.is_empty() { - actions.build_not_attempted(cannot_build_attrs); + actions.build_not_attempted(cannot_build_attrs).await; return; } @@ -368,8 +391,8 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker { self.nix .safely_build_attrs_async(refpath.as_ref(), buildfile, can_build.clone()); - for line in spawned.lines() { - actions.log_line(&line); + while let Ok(line) = spawned.get_next_line() { + actions.log_line(line).await; } let status = nix::wait_for_build_status(spawned); @@ -384,7 +407,9 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker { .next_back(); info!("----->8-----"); - actions.build_finished(status, can_build, cannot_build_attrs); + actions + .build_finished(status, can_build, cannot_build_attrs) + .await; info!("Build done!"); } } @@ -474,8 +499,8 @@ mod tests { }); } - #[test] - pub fn test_simple_build() { + #[tokio::test] + pub async fn 
test_simple_build() { let p = TestScratch::new_dir("build-simple-build-working"); let bare_repo = TestScratch::new_dir("build-simple-build-bare"); let co_repo = TestScratch::new_dir("build-simple-build-co"); @@ -502,12 +527,13 @@ mod tests { request_id: "bogus-request-id".to_owned(), }; - let mut dummyreceiver = notifyworker::DummyNotificationReceiver::new(); + let dummyreceiver = Arc::new(notifyworker::DummyNotificationReceiver::new()); - worker.consumer(&job, &mut dummyreceiver); + worker.consumer(job, dummyreceiver.clone()).await; - println!("Total actions: {:?}", dummyreceiver.actions.len()); - let mut actions = dummyreceiver.actions.into_iter(); + println!("Total actions: {:?}", dummyreceiver.actions.lock().len()); + let actions_vec = dummyreceiver.actions.lock().clone(); + let mut actions = actions_vec.into_iter(); assert_contains_job(&mut actions, "output\":\"hi"); assert_contains_job(&mut actions, "output\":\"1"); @@ -519,8 +545,8 @@ mod tests { assert_eq!(actions.next(), Some(worker::Action::Ack)); } - #[test] - pub fn test_all_jobs_skipped() { + #[tokio::test] + pub async fn test_all_jobs_skipped() { let p = TestScratch::new_dir("no-attempt"); let bare_repo = TestScratch::new_dir("no-attempt-bare"); let co_repo = TestScratch::new_dir("no-attempt-co"); @@ -547,12 +573,14 @@ mod tests { request_id: "bogus-request-id".to_owned(), }; - let mut dummyreceiver = notifyworker::DummyNotificationReceiver::new(); + let dummyreceiver = Arc::new(notifyworker::DummyNotificationReceiver::new()); + + worker.consumer(job, dummyreceiver.clone()).await; - worker.consumer(&job, &mut dummyreceiver); + println!("Total actions: {:?}", dummyreceiver.actions.lock().len()); + let actions_vec = dummyreceiver.actions.lock().clone(); + let mut actions = actions_vec.into_iter(); - println!("Total actions: {:?}", dummyreceiver.actions.len()); - let mut actions = dummyreceiver.actions.into_iter(); assert_contains_job( &mut actions, r#""line_number":1,"output":"Cannot nix-instantiate 
`not-real` because:""#, diff --git a/ofborg/src/tasks/eval/mod.rs b/ofborg/src/tasks/eval/mod.rs index 06f14fd2..b312db9a 100644 --- a/ofborg/src/tasks/eval/mod.rs +++ b/ofborg/src/tasks/eval/mod.rs @@ -9,16 +9,23 @@ use crate::message::buildjob::BuildJob; use std::path::Path; pub trait EvaluationStrategy { - fn pre_clone(&mut self) -> StepResult<()>; + fn pre_clone(&mut self) -> impl std::future::Future>; - fn on_target_branch(&mut self, co: &Path, status: &mut CommitStatus) -> StepResult<()>; + fn on_target_branch( + &mut self, + co: &Path, + status: &mut CommitStatus, + ) -> impl std::future::Future>; fn after_fetch(&mut self, co: &CachedProjectCo) -> StepResult<()>; - fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()>; + fn after_merge( + &mut self, + status: &mut CommitStatus, + ) -> impl std::future::Future>; fn evaluation_checks(&self) -> Vec; fn all_evaluations_passed( &mut self, status: &mut CommitStatus, - ) -> StepResult; + ) -> impl std::future::Future>; } pub type StepResult = Result; diff --git a/ofborg/src/tasks/eval/nixpkgs.rs b/ofborg/src/tasks/eval/nixpkgs.rs index 52f85fae..8f0fd7d7 100644 --- a/ofborg/src/tasks/eval/nixpkgs.rs +++ b/ofborg/src/tasks/eval/nixpkgs.rs @@ -48,8 +48,8 @@ impl<'a> NixpkgsStrategy<'a> { } } - fn tag_from_title(&self) { - let title = match crate::block_on(self.issue_ref.get()) { + async fn tag_from_title(&self) { + let title = match self.issue_ref.get().await { Ok(issue) => issue.title.to_lowercase(), Err(_) => return, }; @@ -60,7 +60,7 @@ impl<'a> NixpkgsStrategy<'a> { return; } - update_labels(self.issue_ref, &labels, &[]); + update_labels(self.issue_ref, &labels, &[]).await; } fn check_outpaths_before(&mut self, _dir: &Path) -> StepResult<()> { @@ -104,16 +104,18 @@ impl<'a> NixpkgsStrategy<'a> { } impl EvaluationStrategy for NixpkgsStrategy<'_> { - fn pre_clone(&mut self) -> StepResult<()> { - self.tag_from_title(); + async fn pre_clone(&mut self) -> StepResult<()> { + 
self.tag_from_title().await; Ok(()) } - fn on_target_branch(&mut self, dir: &Path, status: &mut CommitStatus) -> StepResult<()> { - status.set_with_description( - "Checking original out paths", - hubcaps::statuses::State::Pending, - )?; + async fn on_target_branch(&mut self, dir: &Path, status: &mut CommitStatus) -> StepResult<()> { + status + .set_with_description( + "Checking original out paths", + hubcaps::statuses::State::Pending, + ) + .await?; self.check_outpaths_before(dir)?; Ok(()) @@ -128,8 +130,10 @@ impl EvaluationStrategy for NixpkgsStrategy<'_> { Ok(()) } - fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()> { - status.set_with_description("Checking new out paths", hubcaps::statuses::State::Pending)?; + async fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()> { + status + .set_with_description("Checking new out paths", hubcaps::statuses::State::Pending) + .await?; self.check_outpaths_after()?; Ok(()) @@ -139,14 +143,16 @@ impl EvaluationStrategy for NixpkgsStrategy<'_> { vec![] } - fn all_evaluations_passed( + async fn all_evaluations_passed( &mut self, status: &mut CommitStatus, ) -> StepResult { - status.set_with_description( - "Calculating Changed Outputs", - hubcaps::statuses::State::Pending, - )?; + status + .set_with_description( + "Calculating Changed Outputs", + hubcaps::statuses::State::Pending, + ) + .await?; let builds = self.queue_builds()?; Ok(EvaluationComplete { builds }) diff --git a/ofborg/src/tasks/evaluate.rs b/ofborg/src/tasks/evaluate.rs index dc40e440..f34cf2b5 100644 --- a/ofborg/src/tasks/evaluate.rs +++ b/ofborg/src/tasks/evaluate.rs @@ -9,17 +9,17 @@ use crate::systems; use crate::tasks::eval; use crate::tasks::eval::EvaluationStrategy; use crate::worker; +use futures::stream::StreamExt; use futures_util::TryFutureExt; use std::path::Path; -use std::sync::RwLock; use std::time::Instant; use tracing::{debug_span, error, info, warn}; pub struct EvaluationWorker { cloner: 
checkout::CachedCloner, - github_vend: RwLock, + github_vend: tokio::sync::RwLock, acl: Acl, identity: String, events: E, @@ -35,7 +35,7 @@ impl EvaluationWorker { ) -> EvaluationWorker { EvaluationWorker { cloner, - github_vend: RwLock::new(github_vend), + github_vend: tokio::sync::RwLock::new(github_vend), acl, identity, events, @@ -46,15 +46,20 @@ impl EvaluationWorker { impl worker::SimpleWorker for EvaluationWorker { type J = evaluationjob::EvaluationJob; - fn msg_to_job(&mut self, _: &str, _: &Option, body: &[u8]) -> Result { - self.events.notify(Event::JobReceived); + async fn msg_to_job( + &mut self, + _: &str, + _: &Option, + body: &[u8], + ) -> Result { + self.events.notify(Event::JobReceived).await; match evaluationjob::from(body) { Ok(job) => { - self.events.notify(Event::JobDecodeSuccess); + self.events.notify(Event::JobDecodeSuccess).await; Ok(job) } Err(err) => { - self.events.notify(Event::JobDecodeFailure); + self.events.notify(Event::JobDecodeFailure).await; error!( "Failed to decode message: {}, Err: {err:?}", std::str::from_utf8(body).unwrap_or("") @@ -64,17 +69,15 @@ impl worker::SimpleWorker for EvaluationWorker } } - fn consumer(&mut self, job: &evaluationjob::EvaluationJob) -> worker::Actions { + async fn consumer(&mut self, job: &evaluationjob::EvaluationJob) -> worker::Actions { let span = debug_span!("job", pr = ?job.pr.number); let _enter = span.enter(); - let mut vending_machine = self - .github_vend - .write() - .expect("Failed to get write lock on github vending machine"); + let mut vending_machine = self.github_vend.write().await; let github_client = vending_machine .for_repo(&job.repo.owner, &job.repo.name) + .await .expect("Failed to get a github client token"); OneEval::new( @@ -86,6 +89,7 @@ impl worker::SimpleWorker for EvaluationWorker job, ) .worker_actions() + .await } } @@ -125,7 +129,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { evaluationjob::Actions {} } - fn update_status( + async fn update_status( &self, 
description: String, url: Option, @@ -143,7 +147,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { let repo = self .client_app .repo(self.job.repo.owner.clone(), self.job.repo.name.clone()); - let prefix = get_prefix(repo.statuses(), &self.job.pr.head_sha)?; + let prefix = get_prefix(repo.statuses(), &self.job.pr.head_sha).await?; let mut builder = hubcaps::statuses::StatusOptions::builder(state); builder.context(format!("{prefix}-eval")); @@ -158,30 +162,32 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { &self.job.pr.number, &self.job.pr.head_sha, &description ); - crate::block_on( - self.repo - .statuses() - .create(&self.job.pr.head_sha, &builder.build()) - .map_ok(|_| ()) - .map_err(|e| CommitStatusError::from(e)), - ) + self.repo + .statuses() + .create(&self.job.pr.head_sha, &builder.build()) + .map_ok(|_| ()) + .map_err(|e| CommitStatusError::from(e)) + .await } - fn worker_actions(&mut self) -> worker::Actions { - let eval_result = self.evaluate_job().map_err(|eval_error| match eval_error { - // Handle error cases which expect us to post statuses - // to github. Convert Eval Errors in to Result<_, CommitStatusWrite> - EvalWorkerError::EvalError(eval::Error::Fail(msg)) => { - self.update_status(msg, None, hubcaps::statuses::State::Failure) - } - EvalWorkerError::EvalError(eval::Error::CommitStatusWrite(e)) => Err(e), - EvalWorkerError::CommitStatusWrite(e) => Err(e), - }); + async fn worker_actions(&mut self) -> worker::Actions { + let eval_result = match self.evaluate_job().await { + Ok(v) => Ok(v), + Err(eval_error) => match eval_error { + // Handle error cases which expect us to post statuses + // to github. 
Convert Eval Errors in to Result<_, CommitStatusWrite> + EvalWorkerError::EvalError(eval::Error::Fail(msg)) => Err(self + .update_status(msg, None, hubcaps::statuses::State::Failure) + .await), + EvalWorkerError::EvalError(eval::Error::CommitStatusWrite(e)) => Err(Err(e)), + EvalWorkerError::CommitStatusWrite(e) => Err(Err(e)), + }, + }; match eval_result { Ok(eval_actions) => { let issue_ref = self.repo.issue(self.job.pr.number); - update_labels(&issue_ref, &[], &[String::from("ofborg-internal-error")]); + update_labels(&issue_ref, &[], &[String::from("ofborg-internal-error")]).await; eval_actions } @@ -190,7 +196,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { // updated the PR. let issue_ref = self.repo.issue(self.job.pr.number); - update_labels(&issue_ref, &[], &[String::from("ofborg-internal-error")]); + update_labels(&issue_ref, &[], &[String::from("ofborg-internal-error")]).await; self.actions().skip(self.job) } @@ -216,14 +222,14 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { cswerr ); let issue_ref = self.repo.issue(self.job.pr.number); - update_labels(&issue_ref, &[String::from("ofborg-internal-error")], &[]); + update_labels(&issue_ref, &[String::from("ofborg-internal-error")], &[]).await; self.actions().skip(self.job) } } } - fn evaluate_job(&mut self) -> Result { + async fn evaluate_job(&mut self) -> Result { let job = self.job; let repo = self .client_app @@ -231,10 +237,10 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { let issue_ref = repo.issue(job.pr.number); let auto_schedule_build_archs: Vec; - match crate::block_on(issue_ref.get()) { + match issue_ref.get().await { Ok(iss) => { if iss.state == "closed" { - self.events.notify(Event::IssueAlreadyClosed); + self.events.notify(Event::IssueAlreadyClosed).await; info!("Skipping {} because it is closed", job.pr.number); return Ok(self.actions().skip(job)); } @@ -250,7 +256,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { } Err(e) => { - 
self.events.notify(Event::IssueFetchFailed); + self.events.notify(Event::IssueFetchFailed).await; error!("Error fetching {}!", job.pr.number); error!("E: {:?}", e); return Ok(self.actions().skip(job)); @@ -259,7 +265,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { let mut evaluation_strategy = eval::NixpkgsStrategy::new(job, &issue_ref); - let prefix = get_prefix(repo.statuses(), &job.pr.head_sha)?; + let prefix = get_prefix(repo.statuses(), &job.pr.head_sha).await?; let mut overall_status = CommitStatus::new( repo.statuses(), @@ -269,16 +275,19 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { None, ); - overall_status.set_with_description("Starting", hubcaps::statuses::State::Pending)?; + overall_status + .set_with_description("Starting", hubcaps::statuses::State::Pending) + .await?; - evaluation_strategy.pre_clone()?; + evaluation_strategy.pre_clone().await?; let project = self .cloner .project(&job.repo.full_name, job.repo.clone_url.clone()); overall_status - .set_with_description("Cloning project", hubcaps::statuses::State::Pending)?; + .set_with_description("Cloning project", hubcaps::statuses::State::Pending) + .await?; info!("Working on {}", job.pr.number); let co = project @@ -295,20 +304,24 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { }; if target_branch.starts_with("nixos-") || target_branch.starts_with("nixpkgs-") { - overall_status.set_with_description( - "The branch you have targeted is a read-only mirror for channels. \ + overall_status + .set_with_description( + "The branch you have targeted is a read-only mirror for channels. 
\ Please target release-* or master.", - hubcaps::statuses::State::Error, - )?; + hubcaps::statuses::State::Error, + ) + .await?; info!("PR targets a nixos-* or nixpkgs-* branch"); return Ok(self.actions().skip(job)); }; - overall_status.set_with_description( - format!("Checking out {}", &target_branch).as_ref(), - hubcaps::statuses::State::Pending, - )?; + overall_status + .set_with_description( + format!("Checking out {}", &target_branch).as_ref(), + hubcaps::statuses::State::Pending, + ) + .await?; info!("Checking out target branch {}", &target_branch); let refpath = co .checkout_origin_ref(target_branch.as_ref()) @@ -318,18 +331,25 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { ))) })?; - evaluation_strategy.on_target_branch(Path::new(&refpath), &mut overall_status)?; + evaluation_strategy + .on_target_branch(Path::new(&refpath), &mut overall_status) + .await?; let target_branch_rebuild_sniff_start = Instant::now(); - self.events.notify(Event::EvaluationDuration( - target_branch.clone(), - target_branch_rebuild_sniff_start.elapsed().as_secs(), - )); self.events - .notify(Event::EvaluationDurationCount(target_branch)); + .notify(Event::EvaluationDuration( + target_branch.clone(), + target_branch_rebuild_sniff_start.elapsed().as_secs(), + )) + .await; + self.events + .notify(Event::EvaluationDurationCount(target_branch)) + .await; - overall_status.set_with_description("Fetching PR", hubcaps::statuses::State::Pending)?; + overall_status + .set_with_description("Fetching PR", hubcaps::statuses::State::Pending) + .await?; co.fetch_pr(job.pr.number).map_err(|e| { EvalWorkerError::CommitStatusWrite(CommitStatusError::InternalError(format!( @@ -339,7 +359,8 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { if !co.commit_exists(job.pr.head_sha.as_ref()) { overall_status - .set_with_description("Commit not found", hubcaps::statuses::State::Error)?; + .set_with_description("Commit not found", hubcaps::statuses::State::Error) + .await?; info!("Commit 
{} doesn't exist", job.pr.head_sha); return Ok(self.actions().skip(job)); @@ -347,71 +368,89 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { evaluation_strategy.after_fetch(&co)?; - overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending)?; + overall_status + .set_with_description("Merging PR", hubcaps::statuses::State::Pending) + .await?; if co.merge_commit(job.pr.head_sha.as_ref()).is_err() { overall_status - .set_with_description("Failed to merge", hubcaps::statuses::State::Failure)?; + .set_with_description("Failed to merge", hubcaps::statuses::State::Failure) + .await?; info!("Failed to merge {}", job.pr.head_sha); return Ok(self.actions().skip(job)); } - evaluation_strategy.after_merge(&mut overall_status)?; + evaluation_strategy.after_merge(&mut overall_status).await?; info!("Got path: {:?}, building", refpath); overall_status - .set_with_description("Beginning Evaluations", hubcaps::statuses::State::Pending)?; + .set_with_description("Beginning Evaluations", hubcaps::statuses::State::Pending) + .await?; - let eval_results: bool = evaluation_strategy - .evaluation_checks() - .into_iter() + let eval_results: bool = futures::stream::iter(evaluation_strategy.evaluation_checks()) .map(|check| { - let status = CommitStatus::new( - repo.statuses(), - job.pr.head_sha.clone(), - format!("{prefix}-eval-{}", check.name()), - check.cli_cmd(), - None, - ); - - status - .set(hubcaps::statuses::State::Pending) - .expect("Failed to set status on eval strategy"); - - let state = match check.execute(Path::new(&refpath)) { - Ok(_) => hubcaps::statuses::State::Success, - Err(_) => hubcaps::statuses::State::Failure, - }; - - status - .set(state.clone()) - .expect("Failed to set status on eval strategy"); + // We need to clone or move variables into the async block + let repo_statuses = repo.statuses(); + let head_sha = job.pr.head_sha.clone(); + let refpath = refpath.clone(); + + async move { + let status = CommitStatus::new( + 
repo_statuses, + head_sha, + format!("{prefix}-eval-{}", check.name()), + check.cli_cmd(), + None, + ); - if state == hubcaps::statuses::State::Success { - Ok(()) - } else { - Err(()) + status + .set(hubcaps::statuses::State::Pending) + .await + .expect("Failed to set status on eval strategy"); + + let state = match check.execute(Path::new(&refpath)) { + Ok(_) => hubcaps::statuses::State::Success, + Err(_) => hubcaps::statuses::State::Failure, + }; + + status + .set(state.clone()) + .await + .expect("Failed to set status on eval strategy"); + + if state == hubcaps::statuses::State::Success { + Ok(()) + } else { + Err(()) + } } }) - .all(|status| status == Ok(())); + .buffered(1) + .all(|res| async move { res.is_ok() }) + .await; info!("Finished evaluations"); let mut response: worker::Actions = vec![]; if eval_results { - let complete = evaluation_strategy.all_evaluations_passed(&mut overall_status)?; + let complete = evaluation_strategy + .all_evaluations_passed(&mut overall_status) + .await?; response.extend(schedule_builds(complete.builds, auto_schedule_build_archs)); - overall_status.set_with_description("^.^!", hubcaps::statuses::State::Success)?; + overall_status + .set_with_description("^.^!", hubcaps::statuses::State::Success) + .await?; } else { overall_status - .set_with_description("Complete, with errors", hubcaps::statuses::State::Failure)?; + .set_with_description("Complete, with errors", hubcaps::statuses::State::Failure) + .await?; } - self.events.notify(Event::TaskEvaluationCheckComplete); + self.events.notify(Event::TaskEvaluationCheckComplete).await; info!("Evaluations done!"); Ok(self.actions().done(job, response)) @@ -450,9 +489,13 @@ fn schedule_builds( response } -pub fn update_labels(issueref: &hubcaps::issues::IssueRef, add: &[String], remove: &[String]) { +pub async fn update_labels( + issueref: &hubcaps::issues::IssueRef, + add: &[String], + remove: &[String], +) { let l = issueref.labels(); - let issue = 
crate::block_on(issueref.get()).expect("Failed to get issue"); + let issue = issueref.get().await.expect("Failed to get issue"); let existing: Vec = issue.labels.iter().map(|l| l.name.clone()).collect(); @@ -472,11 +515,12 @@ pub fn update_labels(issueref: &hubcaps::issues::IssueRef, add: &[String], remov info!("Labeling issue #{issue}: + {to_add:?} , - {to_remove:?}, = {existing:?}"); - crate::block_on(l.add(to_add.clone())) + l.add(to_add.clone()) + .await .unwrap_or_else(|err| panic!("Failed to add labels {to_add:?} to issue #{issue}: {err:?}")); for label in to_remove { - crate::block_on(l.remove(&label)).unwrap_or_else(|err| { + l.remove(&label).await.unwrap_or_else(|err| { panic!("Failed to remove label {label:?} from issue #{issue}: {err:?}") }); } @@ -493,11 +537,13 @@ fn issue_is_wip(issue: &hubcaps::issues::Issue) -> bool { /// that (e.g. if someone used `@ofborg eval`, `@ofborg build`, `@ofborg test`). /// Otherwise, if it's a new PR or was recently force-pushed (and therefore /// doesn't have any old `grahamcofborg`-prefixed statuses), use the new prefix. -pub fn get_prefix( +pub async fn get_prefix( statuses: hubcaps::statuses::Statuses, sha: &str, ) -> Result<&str, CommitStatusError> { - if crate::block_on(statuses.list(sha))? + if statuses + .list(sha) + .await? 
.iter() .any(|s| s.context.starts_with("grahamcofborg-")) { diff --git a/ofborg/src/tasks/evaluationfilter.rs b/ofborg/src/tasks/evaluationfilter.rs index 37d9b8f5..91ca4f49 100644 --- a/ofborg/src/tasks/evaluationfilter.rs +++ b/ofborg/src/tasks/evaluationfilter.rs @@ -18,7 +18,12 @@ impl EvaluationFilterWorker { impl worker::SimpleWorker for EvaluationFilterWorker { type J = ghevent::PullRequestEvent; - fn msg_to_job(&mut self, _: &str, _: &Option, body: &[u8]) -> Result { + async fn msg_to_job( + &mut self, + _: &str, + _: &Option, + body: &[u8], + ) -> Result { match serde_json::from_slice(body) { Ok(event) => Ok(event), Err(err) => Err(format!( @@ -28,7 +33,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker { } } - fn consumer(&mut self, job: &ghevent::PullRequestEvent) -> worker::Actions { + async fn consumer(&mut self, job: &ghevent::PullRequestEvent) -> worker::Actions { let span = debug_span!("job", pr = ?job.number); let _enter = span.enter(); @@ -102,8 +107,8 @@ mod tests { use super::*; use crate::worker::SimpleWorker; - #[test] - fn changed_base() { + #[tokio::test] + async fn changed_base() { let data = include_str!("../../test-srcs/events/pr-changed-base.json"); let job: ghevent::PullRequestEvent = @@ -115,7 +120,7 @@ mod tests { )); assert_eq!( - worker.consumer(&job), + worker.consumer(&job).await, vec![ worker::publish_serde_action( None, diff --git a/ofborg/src/tasks/githubcommentfilter.rs b/ofborg/src/tasks/githubcommentfilter.rs index fd7b1ecb..191953aa 100644 --- a/ofborg/src/tasks/githubcommentfilter.rs +++ b/ofborg/src/tasks/githubcommentfilter.rs @@ -21,7 +21,12 @@ impl GitHubCommentWorker { impl worker::SimpleWorker for GitHubCommentWorker { type J = ghevent::IssueComment; - fn msg_to_job(&mut self, _: &str, _: &Option, body: &[u8]) -> Result { + async fn msg_to_job( + &mut self, + _: &str, + _: &Option, + body: &[u8], + ) -> Result { match serde_json::from_slice(body) { Ok(comment) => Ok(comment), Err(err) => { @@ -36,7 +41,7 @@ 
impl worker::SimpleWorker for GitHubCommentWorker { // FIXME: remove with rust/cargo update #[allow(clippy::cognitive_complexity)] - fn consumer(&mut self, job: &ghevent::IssueComment) -> worker::Actions { + async fn consumer(&mut self, job: &ghevent::IssueComment) -> worker::Actions { let span = debug_span!("job", pr = ?job.issue.number); let _enter = span.enter(); @@ -65,16 +70,16 @@ impl worker::SimpleWorker for GitHubCommentWorker { let instructions = commentparser::parse(&job.comment.body); info!("Instructions: {:?}", instructions); - let pr = crate::block_on( - self.github - .repo( - job.repository.owner.login.clone(), - job.repository.name.clone(), - ) - .pulls() - .get(job.issue.number) - .get(), - ); + let pr = self + .github + .repo( + job.repository.owner.login.clone(), + job.repository.name.clone(), + ) + .pulls() + .get(job.issue.number) + .get() + .await; if let Err(x) = pr { info!( diff --git a/ofborg/src/tasks/githubcommentposter.rs b/ofborg/src/tasks/githubcommentposter.rs index c4381ce1..12c7c7fd 100644 --- a/ofborg/src/tasks/githubcommentposter.rs +++ b/ofborg/src/tasks/githubcommentposter.rs @@ -42,11 +42,16 @@ impl PostableEvent { impl worker::SimpleWorker for GitHubCommentPoster { type J = PostableEvent; - fn msg_to_job(&mut self, _: &str, _: &Option, body: &[u8]) -> Result { + async fn msg_to_job( + &mut self, + _: &str, + _: &Option, + body: &[u8], + ) -> Result { PostableEvent::from(body) } - fn consumer(&mut self, job: &PostableEvent) -> worker::Actions { + async fn consumer(&mut self, job: &PostableEvent) -> worker::Actions { let mut checks: Vec = vec![]; let repo: Repo; @@ -78,14 +83,15 @@ impl worker::SimpleWorker for GitHubCommentPoster { ); debug!("{:?}", check); - let check_create_attempt = crate::block_on( - self.github_vend - .for_repo(&repo.owner, &repo.name) - .unwrap() - .repo(repo.owner.clone(), repo.name.clone()) - .checkruns() - .create(&check), - ); + let check_create_attempt = self + .github_vend + .for_repo(&repo.owner, 
&repo.name) + .await + .unwrap() + .repo(repo.owner.clone(), repo.name.clone()) + .checkruns() + .create(&check) + .await; match check_create_attempt { Ok(_) => info!("Successfully sent."), diff --git a/ofborg/src/tasks/log_message_collector.rs b/ofborg/src/tasks/log_message_collector.rs index c3e5ad22..f359ffe7 100644 --- a/ofborg/src/tasks/log_message_collector.rs +++ b/ofborg/src/tasks/log_message_collector.rs @@ -169,7 +169,7 @@ impl LogMessageCollector { impl worker::SimpleWorker for LogMessageCollector { type J = LogMessage; - fn msg_to_job( + async fn msg_to_job( &mut self, routing_key: &str, _: &Option, @@ -207,7 +207,7 @@ impl worker::SimpleWorker for LogMessageCollector { }) } - fn consumer(&mut self, job: &LogMessage) -> worker::Actions { + async fn consumer(&mut self, job: &LogMessage) -> worker::Actions { match job.message { MsgType::Start(ref start) => { self.write_metadata(&job.from, start) @@ -374,8 +374,8 @@ mod tests { ); } - #[test] - pub fn test_logs_collect() { + #[tokio::test] + pub async fn test_logs_collect() { let mut logmsg = BuildLogMsg { attempt_id: String::from("my-attempt-id"), identity: String::from("my-identity"), @@ -394,59 +394,63 @@ mod tests { let mut worker = make_worker(p.path()); assert_eq!( vec![worker::Action::Ack], - worker.consumer(&LogMessage { - from: make_from("foo"), - message: MsgType::Start(BuildLogStart { - attempt_id: String::from("my-attempt-id"), - identity: String::from("my-identity"), - system: String::from("foobar-x8664"), - attempted_attrs: Some(vec!["foo".to_owned()]), - skipped_attrs: Some(vec!["bar".to_owned()]), + worker + .consumer(&LogMessage { + from: make_from("foo"), + message: MsgType::Start(BuildLogStart { + attempt_id: String::from("my-attempt-id"), + identity: String::from("my-identity"), + system: String::from("foobar-x8664"), + attempted_attrs: Some(vec!["foo".to_owned()]), + skipped_attrs: Some(vec!["bar".to_owned()]), + }) }) - }) + .await ); 
assert!(p.path().join("routing-key-foo/attempt-id-foo").exists()); - assert_eq!(vec![worker::Action::Ack], worker.consumer(&job)); + assert_eq!(vec![worker::Action::Ack], worker.consumer(&job).await); logmsg.line_number = 5; logmsg.output = String::from("line-5"); job.message = MsgType::Msg(logmsg.clone()); - assert_eq!(vec![worker::Action::Ack], worker.consumer(&job)); + assert_eq!(vec![worker::Action::Ack], worker.consumer(&job).await); job.from.attempt_id = String::from("my-other-attempt"); logmsg.attempt_id = String::from("my-other-attempt"); logmsg.line_number = 3; logmsg.output = String::from("line-3"); job.message = MsgType::Msg(logmsg); - assert_eq!(vec![worker::Action::Ack], worker.consumer(&job)); + assert_eq!(vec![worker::Action::Ack], worker.consumer(&job).await); assert_eq!( vec![worker::Action::Ack], - worker.consumer(&LogMessage { - from: make_from("foo"), - message: MsgType::Finish(Box::new(BuildResult::V1 { - tag: V1Tag::V1, - repo: Repo { - clone_url: "https://github.com/nixos/ofborg.git".to_owned(), - full_name: "NixOS/ofborg".to_owned(), - owner: "NixOS".to_owned(), - name: "ofborg".to_owned(), - }, - pr: Pr { - number: 42, - head_sha: "6dd9f0265d52b946dd13daf996f30b64e4edb446".to_owned(), - target_branch: Some("scratch".to_owned()), - }, - system: "x86_64-linux".to_owned(), - output: vec![], - attempt_id: "attempt-id-foo".to_owned(), - request_id: "bogus-request-id".to_owned(), - status: BuildStatus::Success, - attempted_attrs: Some(vec!["foo".to_owned()]), - skipped_attrs: Some(vec!["bar".to_owned()]), - })) - }) + worker + .consumer(&LogMessage { + from: make_from("foo"), + message: MsgType::Finish(Box::new(BuildResult::V1 { + tag: V1Tag::V1, + repo: Repo { + clone_url: "https://github.com/nixos/ofborg.git".to_owned(), + full_name: "NixOS/ofborg".to_owned(), + owner: "NixOS".to_owned(), + name: "ofborg".to_owned(), + }, + pr: Pr { + number: 42, + head_sha: "6dd9f0265d52b946dd13daf996f30b64e4edb446".to_owned(), + target_branch: 
Some("scratch".to_owned()), + }, + system: "x86_64-linux".to_owned(), + output: vec![], + attempt_id: "attempt-id-foo".to_owned(), + request_id: "bogus-request-id".to_owned(), + status: BuildStatus::Success, + attempted_attrs: Some(vec!["foo".to_owned()]), + skipped_attrs: Some(vec!["bar".to_owned()]), + })) + }) + .await ); } diff --git a/ofborg/src/tasks/statscollector.rs b/ofborg/src/tasks/statscollector.rs index 0372e9a6..fef23ad3 100644 --- a/ofborg/src/tasks/statscollector.rs +++ b/ofborg/src/tasks/statscollector.rs @@ -17,7 +17,12 @@ impl StatCollectorWorker { impl worker::SimpleWorker for StatCollectorWorker { type J = stats::EventMessage; - fn msg_to_job(&mut self, _: &str, _: &Option, body: &[u8]) -> Result { + async fn msg_to_job( + &mut self, + _: &str, + _: &Option, + body: &[u8], + ) -> Result { match serde_json::from_slice(body) { Ok(e) => Ok(e), Err(_) => { @@ -27,16 +32,20 @@ impl worker::SimpleWorker for StatCollectorWorker match serde_json::from_slice(&modified_body) { Ok(event) => { - self.events.notify(stats::Event::StatCollectorLegacyEvent( - stats::event_metric_name(&event), - )); + self.events + .notify(stats::Event::StatCollectorLegacyEvent( + stats::event_metric_name(&event), + )) + .await; Ok(stats::EventMessage { sender: "".to_owned(), events: vec![event], }) } Err(err) => { - self.events.notify(stats::Event::StatCollectorBogusEvent); + self.events + .notify(stats::Event::StatCollectorBogusEvent) + .await; error!( "Failed to decode message: {:?}, Err: {err:?}", std::str::from_utf8(body).unwrap_or("") @@ -48,7 +57,7 @@ impl worker::SimpleWorker for StatCollectorWorker } } - fn consumer(&mut self, job: &stats::EventMessage) -> worker::Actions { + async fn consumer(&mut self, job: &stats::EventMessage) -> worker::Actions { let sender = job.sender.clone(); for event in job.events.iter() { self.collector.record(sender.clone(), event.clone()); diff --git a/ofborg/src/worker.rs b/ofborg/src/worker.rs index 48b70525..9569b450 100644 --- 
a/ofborg/src/worker.rs +++ b/ofborg/src/worker.rs @@ -1,4 +1,4 @@ -use std::marker::Send; +use std::{marker::Send, sync::Arc}; use serde::Serialize; @@ -6,12 +6,12 @@ pub struct Response {} pub type Actions = Vec; -#[derive(Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum Action { Ack, NackRequeue, NackDump, - Publish(Box), + Publish(Arc), } #[derive(Debug, PartialEq, Eq)] @@ -29,7 +29,7 @@ pub fn publish_serde_action( routing_key: Option, msg: &T, ) -> Action { - Action::Publish(Box::new(QueueMsg { + Action::Publish(Arc::new(QueueMsg { exchange, routing_key, mandatory: false, @@ -42,12 +42,12 @@ pub fn publish_serde_action( pub trait SimpleWorker: Send { type J: Send; - fn consumer(&mut self, job: &Self::J) -> Actions; + fn consumer(&mut self, job: &Self::J) -> impl std::future::Future; fn msg_to_job( &mut self, method: &str, headers: &Option, body: &[u8], - ) -> Result; + ) -> impl std::future::Future>; }