feat(api): [#143] axum api. GET /api/torrents endpoint
josecelano committed Jan 10, 2023
1 parent c36b121 commit 1515753
Showing 5 changed files with 207 additions and 29 deletions.
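
For orientation before the diff: a minimal client-side sketch of how the new torrent-list endpoint could be queried once the tracker API is running. The host, port, and `token` query parameter are assumptions for illustration (they depend on the tracker configuration and auth middleware), not something defined by this commit; the `offset` and `limit` parameters and the returned fields come from the handler and tests below.

// Hypothetical client sketch (not part of this commit). Assumes the axum API
// listens on 127.0.0.1:1212, exposes the route at /torrents as wired in
// src/apis/server.rs, and accepts a `token` query parameter for auth.
// Requires the `reqwest` (with the "json" feature), `serde`, and `tokio` crates.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ListItem {
    info_hash: String,
    seeders: u64,
    completed: u64,
    leechers: u64,
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let torrents: Vec<ListItem> = reqwest::Client::new()
        .get("http://127.0.0.1:1212/torrents")
        .query(&[("token", "MyAccessToken"), ("offset", "0"), ("limit", "50")])
        .send()
        .await?
        .json()
        .await?;

    println!("{torrents:?}");
    Ok(())
}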
4 changes: 2 additions & 2 deletions src/api/routes.rs
@@ -13,7 +13,7 @@ use super::{ActionStatus, TorrentInfoQuery};
use crate::protocol::info_hash::InfoHash;
use crate::tracker;
use crate::tracker::services::statistics::get_metrics;
use crate::tracker::services::torrent::{get_torrent_info, get_torrents};
use crate::tracker::services::torrent::{get_torrent_info, get_torrents, Pagination};

fn authenticate(tokens: HashMap<String, String>) -> impl Filter<Extract = (), Error = warp::reject::Rejection> + Clone {
#[derive(Deserialize)]
@@ -65,7 +65,7 @@ pub fn routes(tracker: &Arc<tracker::Tracker>) -> impl Filter<Extract = impl war
let limit = min(limits.limit.unwrap_or(1000), 4000);

Result::<_, warp::reject::Rejection>::Ok(reply::json(&ListItem::new_vec(
&get_torrents(tracker.clone(), offset, limit).await,
&get_torrents(tracker.clone(), &Pagination::new(offset, limit)).await,
)))
});

46 changes: 41 additions & 5 deletions src/apis/routes.rs
@@ -1,29 +1,65 @@
use std::fmt;
use std::str::FromStr;
use std::sync::Arc;

use axum::extract::{Path, State};
use axum::extract::{Path, Query, State};
use axum::response::{IntoResponse, Json, Response};
use serde::{de, Deserialize, Deserializer};
use serde_json::json;

use crate::api::resource::stats::Stats;
use crate::api::resource::torrent::Torrent;
use crate::api::resource::torrent::{ListItem, Torrent};
use crate::protocol::info_hash::InfoHash;
use crate::tracker::services::statistics::get_metrics;
use crate::tracker::services::torrent::get_torrent_info;
use crate::tracker::services::torrent::{get_torrent_info, get_torrents, Pagination};
use crate::tracker::Tracker;

pub async fn get_stats(State(tracker): State<Arc<Tracker>>) -> Json<Stats> {
pub async fn get_stats_handler(State(tracker): State<Arc<Tracker>>) -> Json<Stats> {
Json(Stats::from(get_metrics(tracker.clone()).await))
}

/// # Panics
///
/// Will panic if it can't parse the infohash in the request
pub async fn get_torrent(State(tracker): State<Arc<Tracker>>, Path(info_hash): Path<String>) -> Response {
pub async fn get_torrent_handler(State(tracker): State<Arc<Tracker>>, Path(info_hash): Path<String>) -> Response {
let optional_torrent_info = get_torrent_info(tracker.clone(), &InfoHash::from_str(&info_hash).unwrap()).await;

match optional_torrent_info {
Some(info) => Json(Torrent::from(info)).into_response(),
None => Json(json!("torrent not known")).into_response(),
}
}

#[derive(Deserialize)]
pub struct PaginationParams {
#[serde(default, deserialize_with = "empty_string_as_none")]
pub offset: Option<u32>,
pub limit: Option<u32>,
}

pub async fn get_torrents_handler(
State(tracker): State<Arc<Tracker>>,
pagination: Query<PaginationParams>,
) -> Json<Vec<ListItem>> {
Json(ListItem::new_vec(
&get_torrents(
tracker.clone(),
&Pagination::new_with_options(pagination.0.offset, pagination.0.limit),
)
.await,
))
}

/// Serde deserialization decorator to map empty Strings to None,
fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
where
D: Deserializer<'de>,
T: FromStr,
T::Err: fmt::Display,
{
let opt = Option::<String>::deserialize(de)?;
match opt.as_deref() {
None | Some("") => Ok(None),
Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some),
}
}
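
The `empty_string_as_none` helper above exists so that a request such as `/torrents?offset=&limit=10` deserializes cleanly instead of failing on the empty value. A small standalone sketch of that behaviour, assuming the `serde` and `serde_urlencoded` crates (the latter is what axum's `Query` extractor builds on); it repeats the definitions from the diff so it can run on its own.

// Standalone sketch: checks how `empty_string_as_none` and `Option` fields
// behave when query parameters are empty or missing.
use std::fmt;
use std::str::FromStr;

use serde::{de, Deserialize, Deserializer};

#[derive(Deserialize)]
struct PaginationParams {
    #[serde(default, deserialize_with = "empty_string_as_none")]
    offset: Option<u32>,
    limit: Option<u32>,
}

fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
where
    D: Deserializer<'de>,
    T: FromStr,
    T::Err: fmt::Display,
{
    let opt = Option::<String>::deserialize(de)?;
    match opt.as_deref() {
        None | Some("") => Ok(None),
        Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some),
    }
}

fn main() {
    // An empty `offset` value maps to `None` instead of a parse error.
    let params: PaginationParams = serde_urlencoded::from_str("offset=&limit=10").unwrap();
    assert_eq!(params.offset, None);
    assert_eq!(params.limit, Some(10));

    // Omitting both parameters also yields `None` for each field.
    let params: PaginationParams = serde_urlencoded::from_str("").unwrap();
    assert_eq!(params.offset, None);
    assert_eq!(params.limit, None);
}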
11 changes: 7 additions & 4 deletions src/apis/server.rs
@@ -10,13 +10,14 @@ use log::info;
use warp::hyper;

use super::middlewares::auth::auth;
use super::routes::{get_stats, get_torrent};
use super::routes::{get_stats_handler, get_torrent_handler, get_torrents_handler};
use crate::tracker;

pub fn start(socket_addr: SocketAddr, tracker: &Arc<tracker::Tracker>) -> impl Future<Output = hyper::Result<()>> {
let app = Router::new()
.route("/stats", get(get_stats).with_state(tracker.clone()))
.route("/torrent/:info_hash", get(get_torrent).with_state(tracker.clone()))
.route("/stats", get(get_stats_handler).with_state(tracker.clone()))
.route("/torrent/:info_hash", get(get_torrent_handler).with_state(tracker.clone()))
.route("/torrents", get(get_torrents_handler).with_state(tracker.clone()))
.layer(middleware::from_fn_with_state(tracker.config.clone(), auth));

let server = axum::Server::bind(&socket_addr).serve(app.into_make_service());
@@ -33,7 +34,9 @@ pub fn start_tls(
tracker: &Arc<tracker::Tracker>,
) -> impl Future<Output = Result<(), std::io::Error>> {
let app = Router::new()
.route("/stats", get(get_stats).with_state(tracker.clone()))
.route("/stats", get(get_stats_handler).with_state(tracker.clone()))
.route("/torrent/:info_hash", get(get_torrent_handler).with_state(tracker.clone()))
.route("/torrents", get(get_torrents_handler).with_state(tracker.clone()))
.layer(middleware::from_fn_with_state(tracker.config.clone(), auth));

let handle = Handle::new();
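
As the diff shows, `start` and `start_tls` now each wire up the same three routes and the same auth layer. A possible follow-up, not part of this commit, would be to build the router in one shared helper so the two functions cannot drift apart; a sketch of that idea, assuming it lives in src/apis/server.rs and reusing only names that already appear in the file:

// Hypothetical helper (not in this commit): build the Router once and call it
// from both `start` and `start_tls`.
use std::sync::Arc;

use axum::routing::get;
use axum::{middleware, Router};

use super::middlewares::auth::auth;
use super::routes::{get_stats_handler, get_torrent_handler, get_torrents_handler};
use crate::tracker;

fn router(tracker: &Arc<tracker::Tracker>) -> Router {
    Router::new()
        .route("/stats", get(get_stats_handler).with_state(tracker.clone()))
        .route("/torrent/:info_hash", get(get_torrent_handler).with_state(tracker.clone()))
        .route("/torrents", get(get_torrents_handler).with_state(tracker.clone()))
        .layer(middleware::from_fn_with_state(tracker.config.clone(), auth))
}

Both server constructors would then call `router(tracker)` instead of repeating the route list.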
73 changes: 57 additions & 16 deletions src/tracker/services/torrent.rs
@@ -1,5 +1,7 @@
use std::sync::Arc;

use serde::Deserialize;

use crate::protocol::info_hash::InfoHash;
use crate::tracker::peer::Peer;
use crate::tracker::Tracker;
@@ -21,6 +23,52 @@ pub struct BasicInfo {
pub leechers: u64,
}

#[derive(Deserialize)]
pub struct Pagination {
pub offset: u32,
pub limit: u32,
}

impl Pagination {
#[must_use]
pub fn new(offset: u32, limit: u32) -> Self {
Self { offset, limit }
}

#[must_use]
pub fn new_with_options(offset_option: Option<u32>, limit_option: Option<u32>) -> Self {
let offset = match offset_option {
Some(offset) => offset,
None => Pagination::default_offset(),
};
let limit = match limit_option {
Some(offset) => offset,
None => Pagination::default_limit(),
};

Self { offset, limit }
}

#[must_use]
pub fn default_offset() -> u32 {
0
}

#[must_use]
pub fn default_limit() -> u32 {
4000
}
}

impl Default for Pagination {
fn default() -> Self {
Self {
offset: Self::default_offset(),
limit: Self::default_limit(),
}
}
}

pub async fn get_torrent_info(tracker: Arc<Tracker>, info_hash: &InfoHash) -> Option<Info> {
let db = tracker.get_torrents().await;

@@ -48,7 +96,7 @@ pub async fn get_torrent_info(tracker: Arc<Tracker>, info_hash: &InfoHash) -> Op
})
}

pub async fn get_torrents(tracker: Arc<Tracker>, offset: u32, limit: u32) -> Vec<BasicInfo> {
pub async fn get_torrents(tracker: Arc<Tracker>, pagination: &Pagination) -> Vec<BasicInfo> {
let db = tracker.get_torrents().await;

db.iter()
@@ -61,8 +109,8 @@ pub async fn get_torrents(tracker: Arc<Tracker>, offset: u32, limit: u32) -> Vec
leechers: u64::from(leechers),
}
})
.skip(offset as usize)
.take(limit as usize)
.skip(pagination.offset as usize)
.take(pagination.limit as usize)
.collect()
}

@@ -149,7 +197,7 @@ mod tests {
use crate::protocol::info_hash::InfoHash;
use crate::tracker::services::common::tracker_factory;
use crate::tracker::services::torrent::tests::sample_peer;
use crate::tracker::services::torrent::{get_torrents, BasicInfo};
use crate::tracker::services::torrent::{get_torrents, BasicInfo, Pagination};

pub fn tracker_configuration() -> Arc<Configuration> {
Arc::new(ephemeral_configuration())
@@ -158,19 +206,15 @@
#[tokio::test]
async fn should_return_an_empty_result_if_the_tracker_does_not_have_any_torrent() {
let tracker = Arc::new(tracker_factory(&tracker_configuration()));
let offset = 0;
let limit = 4000;

let torrents = get_torrents(tracker.clone(), offset, limit).await;
let torrents = get_torrents(tracker.clone(), &Pagination::default()).await;

assert_eq!(torrents, vec![]);
}

#[tokio::test]
async fn should_return_a_summarized_info_for_all_torrents() {
let tracker = Arc::new(tracker_factory(&tracker_configuration()));
let offset = 0;
let limit = 4000;

let hash = "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_owned();
let info_hash = InfoHash::from_str(&hash).unwrap();
@@ -179,7 +223,7 @@
.update_torrent_with_peer_and_get_stats(&info_hash, &sample_peer())
.await;

let torrents = get_torrents(tracker.clone(), offset, limit).await;
let torrents = get_torrents(tracker.clone(), &Pagination::default()).await;

assert_eq!(
torrents,
@@ -211,7 +255,7 @@
let offset = 0;
let limit = 1;

let torrents = get_torrents(tracker.clone(), offset, limit).await;
let torrents = get_torrents(tracker.clone(), &Pagination::new(offset, limit)).await;

assert_eq!(torrents.len(), 1);
}
@@ -235,7 +279,7 @@
let offset = 1;
let limit = 4000;

let torrents = get_torrents(tracker.clone(), offset, limit).await;
let torrents = get_torrents(tracker.clone(), &Pagination::new(offset, limit)).await;

assert_eq!(torrents.len(), 1);
assert_eq!(
@@ -265,10 +309,7 @@
.update_torrent_with_peer_and_get_stats(&info_hash2, &sample_peer())
.await;

let offset = 0;
let limit = 4000;

let torrents = get_torrents(tracker.clone(), offset, limit).await;
let torrents = get_torrents(tracker.clone(), &Pagination::default()).await;

assert_eq!(
torrents,
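
A short usage sketch of the new `Pagination` type, restating the defaults defined above (offset 0, limit 4000). The fully-qualified crate path is an assumption based on the imports used in tests/tracker_api.rs; adjust it if `tracker::services::torrent` is not public.

// Sketch only: exercises the constructors added in this commit.
use torrust_tracker::tracker::services::torrent::Pagination;

fn main() {
    // Explicit offset and limit.
    let page = Pagination::new(10, 50);
    assert_eq!((page.offset, page.limit), (10, 50));

    // Missing query parameters fall back to the defaults.
    let page = Pagination::new_with_options(None, Some(25));
    assert_eq!((page.offset, page.limit), (0, 25));

    // `Default` uses the same defaults: offset 0, limit 4000.
    let page = Pagination::default();
    assert_eq!((page.offset, page.limit), (0, 4000));
}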
102 changes: 100 additions & 2 deletions tests/tracker_api.rs
@@ -662,17 +662,115 @@ mod tracker_apis {
mod for_torrent_resources {
use std::str::FromStr;

use torrust_tracker::api::resource;
use torrust_tracker::api::resource::torrent::Torrent;
use torrust_tracker::api::resource::{self, torrent};
use torrust_tracker::protocol::info_hash::InfoHash;

use crate::api::asserts::{assert_token_not_valid, assert_torrent_not_known, assert_unauthorized};
use crate::api::client::Client;
use crate::api::client::{Client, Query, QueryParam};
use crate::api::connection_info::{connection_with_invalid_token, connection_with_no_token};
use crate::api::fixtures::sample_peer;
use crate::api::server::start_default_api;
use crate::api::Version;

#[tokio::test]
async fn should_allow_getting_torrents() {
let api_server = start_default_api(&Version::Axum).await;

let info_hash = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap();

api_server.add_torrent(&info_hash, &sample_peer()).await;

let response = Client::new(api_server.get_connection_info(), &Version::Axum)
.get_torrents(Query::empty())
.await;

assert_eq!(response.status(), 200);
assert_eq!(
response.json::<Vec<torrent::ListItem>>().await.unwrap(),
vec![torrent::ListItem {
info_hash: "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_string(),
seeders: 1,
completed: 0,
leechers: 0,
peers: None // Torrent list does not include the peer list for each torrent
}]
);
}

#[tokio::test]
async fn should_allow_limiting_the_torrents_in_the_result() {
let api_server = start_default_api(&Version::Axum).await;

// torrents are ordered alphabetically by infohashes
let info_hash_1 = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap();
let info_hash_2 = InfoHash::from_str("0b3aea4adc213ce32295be85d3883a63bca25446").unwrap();

api_server.add_torrent(&info_hash_1, &sample_peer()).await;
api_server.add_torrent(&info_hash_2, &sample_peer()).await;

let response = Client::new(api_server.get_connection_info(), &Version::Axum)
.get_torrents(Query::params([QueryParam::new("limit", "1")].to_vec()))
.await;

assert_eq!(response.status(), 200);
assert_eq!(
response.json::<Vec<torrent::ListItem>>().await.unwrap(),
vec![torrent::ListItem {
info_hash: "0b3aea4adc213ce32295be85d3883a63bca25446".to_string(),
seeders: 1,
completed: 0,
leechers: 0,
peers: None // Torrent list does not include the peer list for each torrent
}]
);
}

#[tokio::test]
async fn should_allow_the_torrents_result_pagination() {
let api_server = start_default_api(&Version::Axum).await;

// torrents are ordered alphabetically by infohashes
let info_hash_1 = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap();
let info_hash_2 = InfoHash::from_str("0b3aea4adc213ce32295be85d3883a63bca25446").unwrap();

api_server.add_torrent(&info_hash_1, &sample_peer()).await;
api_server.add_torrent(&info_hash_2, &sample_peer()).await;

let response = Client::new(api_server.get_connection_info(), &Version::Axum)
.get_torrents(Query::params([QueryParam::new("offset", "1")].to_vec()))
.await;

assert_eq!(response.status(), 200);
assert_eq!(
response.json::<Vec<torrent::ListItem>>().await.unwrap(),
vec![torrent::ListItem {
info_hash: "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_string(),
seeders: 1,
completed: 0,
leechers: 0,
peers: None // Torrent list does not include the peer list for each torrent
}]
);
}

#[tokio::test]
async fn should_not_allow_getting_torrents_for_unauthenticated_users() {
let api_server = start_default_api(&Version::Axum).await;

let response = Client::new(connection_with_invalid_token(&api_server.get_bind_address()), &Version::Axum)
.get_torrents(Query::empty())
.await;

assert_token_not_valid(response).await;

let response = Client::new(connection_with_no_token(&api_server.get_bind_address()), &Version::Axum)
.get_torrents(Query::default())
.await;

assert_unauthorized(response).await;
}

#[tokio::test]
async fn should_allow_getting_a_torrent_info() {
let api_server = start_default_api(&Version::Axum).await;