diff --git a/src/api/routes.rs b/src/api/routes.rs index bb459ee9..4280cdb3 100644 --- a/src/api/routes.rs +++ b/src/api/routes.rs @@ -13,7 +13,7 @@ use super::{ActionStatus, TorrentInfoQuery}; use crate::protocol::info_hash::InfoHash; use crate::tracker; use crate::tracker::services::statistics::get_metrics; -use crate::tracker::services::torrent::{get_torrent_info, get_torrents}; +use crate::tracker::services::torrent::{get_torrent_info, get_torrents, Pagination}; fn authenticate(tokens: HashMap) -> impl Filter + Clone { #[derive(Deserialize)] @@ -65,7 +65,7 @@ pub fn routes(tracker: &Arc) -> impl Filter::Ok(reply::json(&ListItem::new_vec( - &get_torrents(tracker.clone(), offset, limit).await, + &get_torrents(tracker.clone(), &Pagination::new(offset, limit)).await, ))) }); diff --git a/src/apis/routes.rs b/src/apis/routes.rs index 9fedbc82..b86a468e 100644 --- a/src/apis/routes.rs +++ b/src/apis/routes.rs @@ -1,25 +1,27 @@ +use std::fmt; use std::str::FromStr; use std::sync::Arc; -use axum::extract::{Path, State}; +use axum::extract::{Path, Query, State}; use axum::response::{IntoResponse, Json, Response}; +use serde::{de, Deserialize, Deserializer}; use serde_json::json; use crate::api::resource::stats::Stats; -use crate::api::resource::torrent::Torrent; +use crate::api::resource::torrent::{ListItem, Torrent}; use crate::protocol::info_hash::InfoHash; use crate::tracker::services::statistics::get_metrics; -use crate::tracker::services::torrent::get_torrent_info; +use crate::tracker::services::torrent::{get_torrent_info, get_torrents, Pagination}; use crate::tracker::Tracker; -pub async fn get_stats(State(tracker): State>) -> Json { +pub async fn get_stats_handler(State(tracker): State>) -> Json { Json(Stats::from(get_metrics(tracker.clone()).await)) } /// # Panics /// /// Will panic if it can't parse the infohash in the request -pub async fn get_torrent(State(tracker): State>, Path(info_hash): Path) -> Response { +pub async fn get_torrent_handler(State(tracker): 
State>, Path(info_hash): Path) -> Response { let optional_torrent_info = get_torrent_info(tracker.clone(), &InfoHash::from_str(&info_hash).unwrap()).await; match optional_torrent_info { @@ -27,3 +29,37 @@ pub async fn get_torrent(State(tracker): State>, Path(info_hash): P None => Json(json!("torrent not known")).into_response(), } } + +#[derive(Deserialize)] +pub struct PaginationParams { + #[serde(default, deserialize_with = "empty_string_as_none")] + pub offset: Option, + pub limit: Option, +} + +pub async fn get_torrents_handler( + State(tracker): State>, + pagination: Query, +) -> Json> { + Json(ListItem::new_vec( + &get_torrents( + tracker.clone(), + &Pagination::new_with_options(pagination.0.offset, pagination.0.limit), + ) + .await, + )) +} + +/// Serde deserialization decorator to map empty Strings to None. +fn empty_string_as_none<'de, D, T>(de: D) -> Result, D::Error> +where + D: Deserializer<'de>, + T: FromStr, + T::Err: fmt::Display, +{ + let opt = Option::::deserialize(de)?; + match opt.as_deref() { + None | Some("") => Ok(None), + Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some), + } +} diff --git a/src/apis/server.rs b/src/apis/server.rs index dcd0924c..87916013 100644 --- a/src/apis/server.rs +++ b/src/apis/server.rs @@ -10,13 +10,14 @@ use log::info; use warp::hyper; use super::middlewares::auth::auth; -use super::routes::{get_stats, get_torrent}; +use super::routes::{get_stats_handler, get_torrent_handler, get_torrents_handler}; use crate::tracker; pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl Future> { let app = Router::new() - .route("/stats", get(get_stats).with_state(tracker.clone())) - .route("/torrent/:info_hash", get(get_torrent).with_state(tracker.clone())) + .route("/stats", get(get_stats_handler).with_state(tracker.clone())) + .route("/torrent/:info_hash", get(get_torrent_handler).with_state(tracker.clone())) + .route("/torrents", get(get_torrents_handler).with_state(tracker.clone())) 
.layer(middleware::from_fn_with_state(tracker.config.clone(), auth)); let server = axum::Server::bind(&socket_addr).serve(app.into_make_service()); @@ -33,7 +34,9 @@ pub fn start_tls( tracker: &Arc, ) -> impl Future> { let app = Router::new() - .route("/stats", get(get_stats).with_state(tracker.clone())) + .route("/stats", get(get_stats_handler).with_state(tracker.clone())) + .route("/torrent/:info_hash", get(get_torrent_handler).with_state(tracker.clone())) + .route("/torrents", get(get_torrents_handler).with_state(tracker.clone())) .layer(middleware::from_fn_with_state(tracker.config.clone(), auth)); let handle = Handle::new(); diff --git a/src/tracker/services/torrent.rs b/src/tracker/services/torrent.rs index 00cdfe13..a08fd54d 100644 --- a/src/tracker/services/torrent.rs +++ b/src/tracker/services/torrent.rs @@ -1,5 +1,7 @@ use std::sync::Arc; +use serde::Deserialize; + use crate::protocol::info_hash::InfoHash; use crate::tracker::peer::Peer; use crate::tracker::Tracker; @@ -21,6 +23,52 @@ pub struct BasicInfo { pub leechers: u64, } +#[derive(Deserialize)] +pub struct Pagination { + pub offset: u32, + pub limit: u32, +} + +impl Pagination { + #[must_use] + pub fn new(offset: u32, limit: u32) -> Self { + Self { offset, limit } + } + + #[must_use] + pub fn new_with_options(offset_option: Option, limit_option: Option) -> Self { + let offset = match offset_option { + Some(offset) => offset, + None => Pagination::default_offset(), + }; + let limit = match limit_option { + Some(limit) => limit, + None => Pagination::default_limit(), + }; + + Self { offset, limit } + } + + #[must_use] + pub fn default_offset() -> u32 { + 0 + } + + #[must_use] + pub fn default_limit() -> u32 { + 4000 + } +} + +impl Default for Pagination { + fn default() -> Self { + Self { + offset: Self::default_offset(), + limit: Self::default_limit(), + } + } +} + pub async fn get_torrent_info(tracker: Arc, info_hash: &InfoHash) -> Option { let db = tracker.get_torrents().await; @@ -48,7 +96,7 @@ 
pub async fn get_torrent_info(tracker: Arc, info_hash: &InfoHash) -> Op }) } -pub async fn get_torrents(tracker: Arc, offset: u32, limit: u32) -> Vec { +pub async fn get_torrents(tracker: Arc, pagination: &Pagination) -> Vec { let db = tracker.get_torrents().await; db.iter() @@ -61,8 +109,8 @@ pub async fn get_torrents(tracker: Arc, offset: u32, limit: u32) -> Vec leechers: u64::from(leechers), } }) - .skip(offset as usize) - .take(limit as usize) + .skip(pagination.offset as usize) + .take(pagination.limit as usize) .collect() } @@ -149,7 +197,7 @@ mod tests { use crate::protocol::info_hash::InfoHash; use crate::tracker::services::common::tracker_factory; use crate::tracker::services::torrent::tests::sample_peer; - use crate::tracker::services::torrent::{get_torrents, BasicInfo}; + use crate::tracker::services::torrent::{get_torrents, BasicInfo, Pagination}; pub fn tracker_configuration() -> Arc { Arc::new(ephemeral_configuration()) @@ -158,10 +206,8 @@ mod tests { #[tokio::test] async fn should_return_an_empty_result_if_the_tracker_does_not_have_any_torrent() { let tracker = Arc::new(tracker_factory(&tracker_configuration())); - let offset = 0; - let limit = 4000; - let torrents = get_torrents(tracker.clone(), offset, limit).await; + let torrents = get_torrents(tracker.clone(), &Pagination::default()).await; assert_eq!(torrents, vec![]); } @@ -169,8 +215,6 @@ mod tests { #[tokio::test] async fn should_return_a_summarized_info_for_all_torrents() { let tracker = Arc::new(tracker_factory(&tracker_configuration())); - let offset = 0; - let limit = 4000; let hash = "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_owned(); let info_hash = InfoHash::from_str(&hash).unwrap(); @@ -179,7 +223,7 @@ mod tests { .update_torrent_with_peer_and_get_stats(&info_hash, &sample_peer()) .await; - let torrents = get_torrents(tracker.clone(), offset, limit).await; + let torrents = get_torrents(tracker.clone(), &Pagination::default()).await; assert_eq!( torrents, @@ -211,7 +255,7 @@ mod 
tests { let offset = 0; let limit = 1; - let torrents = get_torrents(tracker.clone(), offset, limit).await; + let torrents = get_torrents(tracker.clone(), &Pagination::new(offset, limit)).await; assert_eq!(torrents.len(), 1); } @@ -235,7 +279,7 @@ mod tests { let offset = 1; let limit = 4000; - let torrents = get_torrents(tracker.clone(), offset, limit).await; + let torrents = get_torrents(tracker.clone(), &Pagination::new(offset, limit)).await; assert_eq!(torrents.len(), 1); assert_eq!( @@ -265,10 +309,7 @@ mod tests { .update_torrent_with_peer_and_get_stats(&info_hash2, &sample_peer()) .await; - let offset = 0; - let limit = 4000; - - let torrents = get_torrents(tracker.clone(), offset, limit).await; + let torrents = get_torrents(tracker.clone(), &Pagination::default()).await; assert_eq!( torrents, diff --git a/tests/tracker_api.rs b/tests/tracker_api.rs index bc5271c2..e8d1e71e 100644 --- a/tests/tracker_api.rs +++ b/tests/tracker_api.rs @@ -662,17 +662,115 @@ mod tracker_apis { mod for_torrent_resources { use std::str::FromStr; - use torrust_tracker::api::resource; use torrust_tracker::api::resource::torrent::Torrent; + use torrust_tracker::api::resource::{self, torrent}; use torrust_tracker::protocol::info_hash::InfoHash; use crate::api::asserts::{assert_token_not_valid, assert_torrent_not_known, assert_unauthorized}; - use crate::api::client::Client; + use crate::api::client::{Client, Query, QueryParam}; use crate::api::connection_info::{connection_with_invalid_token, connection_with_no_token}; use crate::api::fixtures::sample_peer; use crate::api::server::start_default_api; use crate::api::Version; + #[tokio::test] + async fn should_allow_getting_torrents() { + let api_server = start_default_api(&Version::Axum).await; + + let info_hash = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap(); + + api_server.add_torrent(&info_hash, &sample_peer()).await; + + let response = Client::new(api_server.get_connection_info(), &Version::Axum) + 
.get_torrents(Query::empty()) + .await; + + assert_eq!(response.status(), 200); + assert_eq!( + response.json::>().await.unwrap(), + vec![torrent::ListItem { + info_hash: "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_string(), + seeders: 1, + completed: 0, + leechers: 0, + peers: None // Torrent list does not include the peer list for each torrent + }] + ); + } + + #[tokio::test] + async fn should_allow_limiting_the_torrents_in_the_result() { + let api_server = start_default_api(&Version::Axum).await; + + // torrents are ordered alphabetically by infohashes + let info_hash_1 = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap(); + let info_hash_2 = InfoHash::from_str("0b3aea4adc213ce32295be85d3883a63bca25446").unwrap(); + + api_server.add_torrent(&info_hash_1, &sample_peer()).await; + api_server.add_torrent(&info_hash_2, &sample_peer()).await; + + let response = Client::new(api_server.get_connection_info(), &Version::Axum) + .get_torrents(Query::params([QueryParam::new("limit", "1")].to_vec())) + .await; + + assert_eq!(response.status(), 200); + assert_eq!( + response.json::>().await.unwrap(), + vec![torrent::ListItem { + info_hash: "0b3aea4adc213ce32295be85d3883a63bca25446".to_string(), + seeders: 1, + completed: 0, + leechers: 0, + peers: None // Torrent list does not include the peer list for each torrent + }] + ); + } + + #[tokio::test] + async fn should_allow_the_torrents_result_pagination() { + let api_server = start_default_api(&Version::Axum).await; + + // torrents are ordered alphabetically by infohashes + let info_hash_1 = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap(); + let info_hash_2 = InfoHash::from_str("0b3aea4adc213ce32295be85d3883a63bca25446").unwrap(); + + api_server.add_torrent(&info_hash_1, &sample_peer()).await; + api_server.add_torrent(&info_hash_2, &sample_peer()).await; + + let response = Client::new(api_server.get_connection_info(), &Version::Axum) + 
.get_torrents(Query::params([QueryParam::new("offset", "1")].to_vec())) + .await; + + assert_eq!(response.status(), 200); + assert_eq!( + response.json::>().await.unwrap(), + vec![torrent::ListItem { + info_hash: "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_string(), + seeders: 1, + completed: 0, + leechers: 0, + peers: None // Torrent list does not include the peer list for each torrent + }] + ); + } + + #[tokio::test] + async fn should_not_allow_getting_torrents_for_unauthenticated_users() { + let api_server = start_default_api(&Version::Axum).await; + + let response = Client::new(connection_with_invalid_token(&api_server.get_bind_address()), &Version::Axum) + .get_torrents(Query::empty()) + .await; + + assert_token_not_valid(response).await; + + let response = Client::new(connection_with_no_token(&api_server.get_bind_address()), &Version::Axum) + .get_torrents(Query::default()) + .await; + + assert_unauthorized(response).await; + } + #[tokio::test] async fn should_allow_getting_a_torrent_info() { let api_server = start_default_api(&Version::Axum).await;