From 4456203433d7c4aef463371b2e71ce9e69d7b3c0 Mon Sep 17 00:00:00 2001 From: Jose Celano <josecelano@gmail.com> Date: Tue, 30 Jan 2024 15:59:20 +0000 Subject: [PATCH] feat: [#640] Tracker Checker: scrape check --- src/console/clients/checker/service.rs | 88 ++++++++++++++----- .../tracker/http/client/requests/scrape.rs | 17 ++++ 2 files changed, 85 insertions(+), 20 deletions(-) diff --git a/src/console/clients/checker/service.rs b/src/console/clients/checker/service.rs index 02bf1926b..1cb4725e0 100644 --- a/src/console/clients/checker/service.rs +++ b/src/console/clients/checker/service.rs @@ -12,7 +12,8 @@ use crate::console::clients::checker::printer::Printer; use crate::shared::bit_torrent::info_hash::InfoHash; use crate::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder; use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce; -use crate::shared::bit_torrent::tracker::http::client::Client; +use crate::shared::bit_torrent::tracker::http::client::responses::scrape; +use crate::shared::bit_torrent::tracker::http::client::{requests, Client}; pub struct Service { pub(crate) config: Arc<Configuration>, @@ -58,9 +59,32 @@ impl Service { self.console.println("HTTP trackers ..."); for http_tracker in &self.config.http_trackers { - match self.check_http_tracker(http_tracker).await { - Ok(()) => check_results.push(Ok(())), - Err(err) => check_results.push(Err(err)), + let colored_tracker_url = http_tracker.to_string().yellow(); + + match self.check_http_announce(http_tracker).await { + Ok(()) => { + check_results.push(Ok(())); + self.console + .println(&format!("{} - Announce at {} is OK", "✓".green(), colored_tracker_url)); + } + Err(err) => { + check_results.push(Err(err)); + self.console + .println(&format!("{} - Announce at {} is failing", "✗".red(), colored_tracker_url)); + } + } + + match self.check_http_scrape(http_tracker).await { + Ok(()) => { + check_results.push(Ok(())); + self.console + .println(&format!("{} - Scrape at {} is OK", 
"✓".green(), colored_tracker_url)); + } + Err(err) => { + check_results.push(Err(err)); + self.console + .println(&format!("{} - Scrape at {} is failing", "✗".red(), colored_tracker_url)); + } } } } @@ -80,57 +104,81 @@ impl Service { // todo: // - Make announce request // - Make scrape request - self.console - .println(&format!("{} - UDP tracker at udp://{:?} is OK (TODO)", "✓".green(), address)); + + let colored_address = address.to_string().yellow(); + + self.console.println(&format!( + "{} - UDP tracker at udp://{} is OK ({})", + "✓".green(), + colored_address, + "TODO".red(), + )); } - async fn check_http_tracker(&self, url: &Url) -> Result<(), CheckError> { + async fn check_http_announce(&self, url: &Url) -> Result<(), CheckError> { let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237 let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required"); - // Announce request - let response = Client::new(url.clone()) .announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query()) .await; if let Ok(body) = response.bytes().await { if let Ok(_announce_response) = serde_bencode::from_bytes::<Announce>(&body) { - self.console.println(&format!("{} - Announce at {} is OK", "✓".green(), url)); - Ok(()) } else { - self.console.println(&format!("{} - Announce at {} failing", "✗".red(), url)); Err(CheckError::HttpError { url: url.clone() }) } } else { - self.console.println(&format!("{} - Announce at {} failing", "✗".red(), url)); Err(CheckError::HttpError { url: url.clone() }) } + } + + async fn check_http_scrape(&self, url: &Url) -> Result<(), CheckError> { + let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237 + let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required"); - // Scrape request + let response = Client::new(url.clone()).scrape(&query).await; - // todo + if let 
Ok(body) = response.bytes().await { if let Ok(_scrape_response) = scrape::Response::try_from_bencoded(&body) { Ok(()) } else { Err(CheckError::HttpError { url: url.clone() }) } } else { Err(CheckError::HttpError { url: url.clone() }) } } async fn run_health_check(&self, url: Url) -> Result<(), CheckError> { let client = HttpClient::builder().timeout(Duration::from_secs(5)).build().unwrap(); + let colored_url = url.to_string().yellow(); + match client.get(url.clone()).send().await { Ok(response) => { if response.status().is_success() { self.console - .println(&format!("{} - Health API at {} is OK", "✓".green(), url)); + .println(&format!("{} - Health API at {} is OK", "✓".green(), colored_url)); Ok(()) } else { - self.console - .eprintln(&format!("{} - Health API at {} failing: {:?}", "✗".red(), url, response)); + self.console.eprintln(&format!( + "{} - Health API at {} is failing: {:?}", + "✗".red(), + colored_url, + response )); Err(CheckError::HealthCheckError { url }) } } Err(err) => { - self.console - .eprintln(&format!("{} - Health API at {} failing: {:?}", "✗".red(), url, err)); + self.console.eprintln(&format!( + "{} - Health API at {} is failing: {:?}", + "✗".red(), + colored_url, + err + )); Err(CheckError::HealthCheckError { url }) } } diff --git a/src/shared/bit_torrent/tracker/http/client/requests/scrape.rs b/src/shared/bit_torrent/tracker/http/client/requests/scrape.rs index 771b3a45e..d0268d1f8 100644 --- a/src/shared/bit_torrent/tracker/http/client/requests/scrape.rs +++ b/src/shared/bit_torrent/tracker/http/client/requests/scrape.rs @@ -45,6 +45,23 @@ impl TryFrom<&[String]> for Query { } } +impl TryFrom<Vec<String>> for Query { + type Error = ConversionError; + + fn try_from(info_hashes: Vec<String>) -> Result<Self, Self::Error> { + let mut validated_info_hashes: Vec<ByteArray20> = Vec::new(); + + for info_hash in info_hashes { + let validated_info_hash = InfoHash::from_str(&info_hash).map_err(|_| ConversionError(info_hash.clone()))?; + 
validated_info_hashes.push(validated_info_hash.0); + } + + Ok(Self { + info_hash: validated_info_hashes, + }) + } +} + /// HTTP Tracker Scrape Request: /// /// <https://www.bittorrent.org/beps/bep_0048.html>