diff --git a/src/databases/mysql.rs b/src/databases/mysql.rs
index f793e5cb..f7dff1ff 100644
--- a/src/databases/mysql.rs
+++ b/src/databases/mysql.rs
@@ -436,7 +436,7 @@ impl Database for Mysql {
         title: &str,
         description: &str,
     ) -> Result {
-        let info_hash = torrent.info_hash_hex();
+        let info_hash = torrent.canonical_info_hash_hex();
         let canonical_info_hash = torrent.canonical_info_hash();
 
         // open pool connection
diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs
index 980e7a3b..ec8d2c01 100644
--- a/src/databases/sqlite.rs
+++ b/src/databases/sqlite.rs
@@ -426,7 +426,7 @@ impl Database for Sqlite {
         title: &str,
         description: &str,
     ) -> Result {
-        let info_hash = torrent.info_hash_hex();
+        let info_hash = torrent.canonical_info_hash_hex();
         let canonical_info_hash = torrent.canonical_info_hash();
 
         // open pool connection
diff --git a/src/errors.rs b/src/errors.rs
index 25ea007d..29eb12f0 100644
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -5,6 +5,7 @@ use derive_more::{Display, Error};
 use hyper::StatusCode;
 
 use crate::databases::database;
+use crate::models::torrent::MetadataError;
 
 pub type ServiceResult = Result;
 
@@ -204,6 +205,18 @@ impl From for ServiceError {
     }
 }
 
+impl From<MetadataError> for ServiceError {
+    fn from(e: MetadataError) -> Self {
+        eprintln!("{e}");
+        match e {
+            MetadataError::MissingTorrentTitle | MetadataError::MissingTorrentCategoryName => {
+                ServiceError::MissingMandatoryMetadataFields
+            }
+            MetadataError::InvalidTorrentTitleLength => ServiceError::InvalidTorrentTitleLength,
+        }
+    }
+}
+
 #[must_use]
 pub fn http_status_code_for_service_error(error: &ServiceError) -> StatusCode {
     #[allow(clippy::match_same_arms)]
diff --git a/src/models/torrent.rs b/src/models/torrent.rs
index 150d2bba..b86eb519 100644
--- a/src/models/torrent.rs
+++ b/src/models/torrent.rs
@@ -1,7 +1,9 @@
+use derive_more::{Display, Error};
 use serde::{Deserialize, Serialize};
 
 use super::torrent_tag::TagId;
-use crate::errors::ServiceError;
+
+const MIN_TORRENT_TITLE_LENGTH: usize = 3;
 
 #[allow(clippy::module_name_repetitions)]
 pub type TorrentId = i64;
@@ -24,6 +26,18 @@ pub struct TorrentListing {
     pub comment: Option,
 }
 
+#[derive(Debug, Display, PartialEq, Eq, Error)]
+pub enum MetadataError {
+    #[display(fmt = "Missing mandatory torrent title")]
+    MissingTorrentTitle,
+
+    #[display(fmt = "Missing mandatory torrent category name")]
+    MissingTorrentCategoryName,
+
+    #[display(fmt = "Torrent title is too short.")]
+    InvalidTorrentTitleLength,
+}
+
 #[derive(Debug, Deserialize)]
 pub struct Metadata {
     pub title: String,
@@ -33,16 +47,48 @@ impl Metadata {
-    /// Returns the verify of this [`Metadata`].
+    /// Create a new struct.
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if the metadata fields do not have a
+    /// valid format.
+    pub fn new(title: &str, description: &str, category: &str, tag_ids: &[TagId]) -> Result<Self, MetadataError> {
+        Self::validate_format(title, description, category, tag_ids)?;
+
+        Ok(Self {
+            title: title.to_owned(),
+            description: description.to_owned(),
+            category: category.to_owned(),
+            tags: tag_ids.to_vec(),
+        })
+    }
+
+    /// It validates the format of the metadata fields.
+    ///
+    /// It does not validate domain rules, like:
+    ///
+    /// - Duplicate titles.
+    /// - Non-existing categories.
+    /// - ...
     ///
     /// # Errors
     ///
-    /// This function will return an error if the any of the mandatory metadata fields are missing.
-    pub fn verify(&self) -> Result<(), ServiceError> {
-        if self.title.is_empty() || self.category.is_empty() {
-            Err(ServiceError::MissingMandatoryMetadataFields)
-        } else {
-            Ok(())
+    /// This function will return an error if any of the metadata fields does
+    /// not have a valid format.
+    fn validate_format(title: &str, _description: &str, category: &str, _tag_ids: &[TagId]) -> Result<(), MetadataError> {
+        if title.is_empty() {
+            return Err(MetadataError::MissingTorrentTitle);
         }
+
+        if category.is_empty() {
+            return Err(MetadataError::MissingTorrentCategoryName);
+        }
+
+        if title.len() < MIN_TORRENT_TITLE_LENGTH {
+            return Err(MetadataError::InvalidTorrentTitleLength);
+        }
+
+        Ok(())
     }
 }
diff --git a/src/models/torrent_file.rs b/src/models/torrent_file.rs
index aba633c5..7045c147 100644
--- a/src/models/torrent_file.rs
+++ b/src/models/torrent_file.rs
@@ -133,13 +133,13 @@ impl Torrent {
     }
 
     #[must_use]
-    pub fn info_hash_hex(&self) -> String {
-        from_bytes(&self.calculate_info_hash_as_bytes()).to_lowercase()
+    pub fn canonical_info_hash(&self) -> InfoHash {
+        self.calculate_info_hash_as_bytes().into()
     }
 
     #[must_use]
-    pub fn canonical_info_hash(&self) -> InfoHash {
-        self.calculate_info_hash_as_bytes().into()
+    pub fn canonical_info_hash_hex(&self) -> String {
+        self.canonical_info_hash().to_hex_string()
     }
 
     #[must_use]
@@ -389,7 +389,7 @@ mod tests {
             httpseeds: None,
         };
 
-        assert_eq!(torrent.info_hash_hex(), "79fa9e4a2927804fe4feab488a76c8c2d3d1cdca");
+        assert_eq!(torrent.canonical_info_hash_hex(), "79fa9e4a2927804fe4feab488a76c8c2d3d1cdca");
     }
 
     mod infohash_should_be_calculated_for {
@@ -430,7 +430,7 @@
                 httpseeds: None,
             };
 
-            assert_eq!(torrent.info_hash_hex(), "79fa9e4a2927804fe4feab488a76c8c2d3d1cdca");
+            assert_eq!(torrent.canonical_info_hash_hex(), "79fa9e4a2927804fe4feab488a76c8c2d3d1cdca");
         }
 
         #[test]
@@ -469,7 +469,7 @@
                 httpseeds: None,
             };
 
-            assert_eq!(torrent.info_hash_hex(), "aa2aca91ab650c4d249c475ca3fa604f2ccb0d2a");
+            assert_eq!(torrent.canonical_info_hash_hex(), "aa2aca91ab650c4d249c475ca3fa604f2ccb0d2a");
         }
 
         #[test]
@@ -504,7 +504,7 @@
                 httpseeds: None,
             };
 
-            assert_eq!(torrent.info_hash_hex(), "ccc1cf4feb59f3fa85c96c9be1ebbafcfe8a9cc8");
+            assert_eq!(torrent.canonical_info_hash_hex(), "ccc1cf4feb59f3fa85c96c9be1ebbafcfe8a9cc8");
         }
 
         #[test]
@@ -539,7 +539,7 @@
                 httpseeds: None,
             };
 
-            assert_eq!(torrent.info_hash_hex(), "d3a558d0a19aaa23ba6f9f430f40924d10fefa86");
+            assert_eq!(torrent.canonical_info_hash_hex(), "d3a558d0a19aaa23ba6f9f430f40924d10fefa86");
         }
     }
 }
diff --git a/src/services/torrent.rs b/src/services/torrent.rs
index 71c8fb48..f6864c06 100644
--- a/src/services/torrent.rs
+++ b/src/services/torrent.rs
@@ -20,8 +20,6 @@ use crate::tracker::statistics_importer::StatisticsImporter;
 use crate::utils::parse_torrent;
 use crate::{tracker, AsCSV};
 
-const MIN_TORRENT_TITLE_LENGTH: usize = 3;
-
 pub struct Index {
     configuration: Arc,
     tracker_statistics_importer: Arc,
@@ -128,22 +126,34 @@ impl Index {
     /// * Unable to parse the torrent info-hash.
     pub async fn add_torrent(
         &self,
-        add_torrent_form: AddTorrentRequest,
+        add_torrent_req: AddTorrentRequest,
         user_id: UserId,
     ) -> Result<AddTorrentResponse, ServiceError> {
-        let metadata = Metadata {
-            title: add_torrent_form.title,
-            description: add_torrent_form.description,
-            category: add_torrent_form.category,
-            tags: add_torrent_form.tags,
-        };
+        // Authorization: only authenticated users are allowed to upload torrents
+
+        let _user = self.user_repository.get_compact(&user_id).await?;
+
+        // Validate and build metadata
+
+        let metadata = Metadata::new(
+            &add_torrent_req.title,
+            &add_torrent_req.description,
+            &add_torrent_req.category,
+            &add_torrent_req.tags,
+        )?;
+
+        let category = self
+            .category_repository
+            .get_by_name(&metadata.category)
+            .await
+            .map_err(|_| ServiceError::InvalidCategory)?;
 
-        metadata.verify()?;
+        // Validate and build torrent file
 
-        let original_info_hash = parse_torrent::calculate_info_hash(&add_torrent_form.torrent_buffer);
+        let original_info_hash = parse_torrent::calculate_info_hash(&add_torrent_req.torrent_buffer);
         let mut torrent =
-            parse_torrent::decode_torrent(&add_torrent_form.torrent_buffer).map_err(|_| ServiceError::InvalidTorrentFile)?;
+            parse_torrent::decode_torrent(&add_torrent_req.torrent_buffer).map_err(|_| ServiceError::InvalidTorrentFile)?;
 
         // Make sure that the pieces key has a length that is a multiple of 20
         if let Some(pieces) = torrent.info.pieces.as_ref() {
@@ -152,22 +162,12 @@ impl Index {
             }
         }
 
-        let _user = self.user_repository.get_compact(&user_id).await?;
-
         torrent.set_announce_urls(&self.configuration).await;
 
-        if metadata.title.len() < MIN_TORRENT_TITLE_LENGTH {
-            return Err(ServiceError::InvalidTorrentTitleLength);
-        }
-
-        let category = self
-            .category_repository
-            .get_by_name(&metadata.category)
-            .await
-            .map_err(|_| ServiceError::InvalidCategory)?;
-
         let canonical_info_hash = torrent.canonical_info_hash();
 
+        // Canonical InfoHash Group checks
+
         let original_info_hashes = self
             .torrent_info_hash_repository
             .get_canonical_info_hash_group(&canonical_info_hash)
             .await?;
@@ -194,35 +194,44 @@
             return Err(ServiceError::CanonicalInfoHashAlreadyExists);
         }
 
-        // First time a torrent with this original infohash is uploaded.
+        // Store the torrent into the database
         let torrent_id = self
             .torrent_repository
             .add(&original_info_hash, &torrent, &metadata, user_id, category)
             .await?;
-        let info_hash = torrent.canonical_info_hash();
+
+        self.torrent_tag_repository
+            .link_torrent_to_tags(&torrent_id, &metadata.tags)
+            .await?;
+
+        // Secondary task: import torrent statistics from the tracker
 
         drop(
             self.tracker_statistics_importer
-                .import_torrent_statistics(torrent_id, &torrent.info_hash_hex())
+                .import_torrent_statistics(torrent_id, &torrent.canonical_info_hash_hex())
                 .await,
         );
 
+        // Secondary task: whitelist torrent on the tracker
+
         // We always whitelist the torrent on the tracker because even if the tracker mode is `public`
         // it could be changed to `private` later on.
-        if let Err(e) = self.tracker_service.whitelist_info_hash(torrent.info_hash_hex()).await {
+        if let Err(e) = self
+            .tracker_service
+            .whitelist_info_hash(torrent.canonical_info_hash_hex())
+            .await
+        {
             // If the torrent can't be whitelisted somehow, remove the torrent from database
             drop(self.torrent_repository.delete(&torrent_id).await);
             return Err(e);
         }
 
-        self.torrent_tag_repository
-            .link_torrent_to_tags(&torrent_id, &metadata.tags)
-            .await?;
+        // Build response
 
         Ok(AddTorrentResponse {
             torrent_id,
-            info_hash: info_hash.to_string(),
+            info_hash: torrent.canonical_info_hash_hex(),
             original_info_hash: original_info_hash.to_string(),
         })
     }
diff --git a/src/utils/parse_torrent.rs b/src/utils/parse_torrent.rs
index 21a219d5..5644cd8d 100644
--- a/src/utils/parse_torrent.rs
+++ b/src/utils/parse_torrent.rs
@@ -99,7 +99,7 @@ mod tests {
         // The infohash is not the original infohash of the torrent file,
         // but the infohash of the info dictionary without the custom keys.
         assert_eq!(
-            torrent.info_hash_hex(),
+            torrent.canonical_info_hash_hex(),
             "8aa01a4c816332045ffec83247ccbc654547fedf".to_string()
         );
     }
diff --git a/src/web/api/v1/contexts/torrent/handlers.rs b/src/web/api/v1/contexts/torrent/handlers.rs
index 6c0754f0..9ddf345d 100644
--- a/src/web/api/v1/contexts/torrent/handlers.rs
+++ b/src/web/api/v1/contexts/torrent/handlers.rs
@@ -100,7 +100,11 @@ pub async fn download_torrent_handler(
             return ServiceError::InternalServerError.into_response();
         };
 
-        torrent_file_response(bytes, &format!("{}.torrent", torrent.info.name), &torrent.info_hash_hex())
+        torrent_file_response(
+            bytes,
+            &format!("{}.torrent", torrent.info.name),
+            &torrent.canonical_info_hash_hex(),
+        )
     }
 }
 
@@ -307,7 +311,11 @@ pub async fn create_random_torrent_handler(State(_app_data): State>
         return ServiceError::InternalServerError.into_response();
     };
 
-    torrent_file_response(bytes, &format!("{}.torrent", torrent.info.name), &torrent.info_hash_hex())
+    torrent_file_response(
+        bytes,
+        &format!("{}.torrent", torrent.info.name),
+        &torrent.canonical_info_hash_hex(),
+    )
 }
 
 /// Extracts the [`TorrentRequest`] from the multipart form payload.
diff --git a/tests/e2e/web/api/v1/contexts/torrent/contract.rs b/tests/e2e/web/api/v1/contexts/torrent/contract.rs
index a300cbc1..5bd59d2f 100644
--- a/tests/e2e/web/api/v1/contexts/torrent/contract.rs
+++ b/tests/e2e/web/api/v1/contexts/torrent/contract.rs
@@ -396,7 +396,10 @@ mod for_guests {
         // The returned torrent info-hash should be the same as the first torrent
         assert_eq!(response.status, 200);
-        assert_eq!(torrent.info_hash_hex(), first_torrent_canonical_info_hash.to_hex_string());
+        assert_eq!(
+            torrent.canonical_info_hash_hex(),
+            first_torrent_canonical_info_hash.to_hex_string()
+        );
     }
 
     #[tokio::test]
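Reviewer note (illustrative sketch, not part of the patch): the snippet below shows how the new Metadata::new constructor and the From<MetadataError> for ServiceError conversion added above are expected to compose inside the service layer. The helper name build_metadata and the literal arguments are made up for the example; module paths are taken from the files touched in this diff.

use crate::errors::ServiceError;
use crate::models::torrent::Metadata;

// Hypothetical helper, only to illustrate the error-conversion path.
fn build_metadata() -> Result<Metadata, ServiceError> {
    // "ab" passes the empty-title and empty-category checks but is shorter
    // than MIN_TORRENT_TITLE_LENGTH (3), so Metadata::new returns
    // MetadataError::InvalidTorrentTitleLength, which the `?` operator
    // converts into ServiceError::InvalidTorrentTitleLength through the
    // new From<MetadataError> impl in src/errors.rs.
    let metadata = Metadata::new("ab", "description", "category", &[])?;
    Ok(metadata)
}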