10 changes: 10 additions & 0 deletions .github/workflows/test_build_release.yml
@@ -36,6 +36,16 @@ jobs:
           toolchain: stable
           components: llvm-tools-preview
       - uses: Swatinem/rust-cache@v1
+      - name: Check Rust Code
+        uses: actions-rs/cargo@v1
+        with:
+          command: check
+          args: --all-targets
+      - name: Clippy Rust Code
+        uses: actions-rs/cargo@v1
+        with:
+          command: clippy
+          args: --all-targets
       - uses: taiki-e/install-action@cargo-llvm-cov
       - uses: taiki-e/install-action@nextest
       - name: Run Tests
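Note: `actions-rs/cargo@v1` simply invokes the named cargo subcommand, so these two steps are equivalent to running `cargo check --all-targets` and `cargo clippy --all-targets` locally. As written, the clippy step reports lints in the CI log but only fails the job on compile errors or denied lints, since no `-- -D warnings` argument is passed.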
2 changes: 1 addition & 1 deletion .vscode/extensions.json
@@ -1,6 +1,6 @@
 {
   "recommendations": [
     "streetsidesoftware.code-spell-checker",
-    "matklad.rust-analyzer"
+    "rust-lang.rust-analyzer"
   ]
 }
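`matklad.rust-analyzer` was the original personal-namespace listing for rust-analyzer; the extension now ships under the official `rust-lang` publisher, so the recommendation is updated to the ID that still receives updates.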
13 changes: 1 addition & 12 deletions .vscode/settings.json
@@ -1,17 +1,6 @@
 {
-  "cSpell.words": [
-    "byteorder",
-    "hasher",
-    "leechers",
-    "nanos",
-    "rngs",
-    "Seedable",
-    "thiserror",
-    "torrust",
-    "typenum"
-  ],
   "[rust]": {
     "editor.formatOnSave": true
   },
-  "rust-analyzer.checkOnSave.command": "clippy",
+  "rust-analyzer.checkOnSave.command": "clippy"
 }
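The spell-checker word list moves out of the editor settings and into the project-level cSpell.json added below, which the recommended Code Spell Checker extension picks up from the repository root. That leaves settings.json with only formatting and check-on-save behaviour, and lets the (much larger) dictionary of domain terms — BitTorrent client names, bencode keys like `completei`, crate names — live where non-VS-Code tooling can also read it.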
57 changes: 57 additions & 0 deletions cSpell.json
@@ -0,0 +1,57 @@
+{
+  "words": [
+    "AUTOINCREMENT",
+    "automock",
+    "Avicora",
+    "Azureus",
+    "bencode",
+    "binascii",
+    "Bitflu",
+    "bufs",
+    "byteorder",
+    "canonicalize",
+    "canonicalized",
+    "chrono",
+    "clippy",
+    "completei",
+    "downloadedi",
+    "filesd",
+    "Freebox",
+    "hasher",
+    "hexlify",
+    "Hydranode",
+    "incompletei",
+    "intervali",
+    "leecher",
+    "leechers",
+    "libtorrent",
+    "Lphant",
+    "mockall",
+    "nanos",
+    "nextest",
+    "nocapture",
+    "ostr",
+    "Pando",
+    "Rasterbar",
+    "repr",
+    "rngs",
+    "rusqlite",
+    "rustfmt",
+    "Seedable",
+    "Shareaza",
+    "sharktorrent",
+    "socketaddr",
+    "sqllite",
+    "Swatinem",
+    "Swiftbit",
+    "thiserror",
+    "Torrentstorm",
+    "torrust",
+    "typenum",
+    "Unamed",
+    "untuple",
+    "Vagaa",
+    "Xtorrent",
+    "Xunlei"
+  ]
+}
17 changes: 4 additions & 13 deletions src/api/server.rs
@@ -9,7 +9,7 @@ use warp::{filters, reply, serve, Filter};
 
 use crate::peer::TorrentPeer;
 use crate::protocol::common::*;
-use crate::tracker::tracker::TorrentTracker;
+use crate::tracker::TorrentTracker;
 
 #[derive(Deserialize, Debug)]
 struct TorrentInfoQuery {
@@ -129,10 +129,7 @@ pub fn start(socket_addr: SocketAddr, tracker: Arc<TorrentTracker>) -> impl warp
     let view_stats_list = filters::method::get()
         .and(filters::path::path("stats"))
         .and(filters::path::end())
-        .map(move || {
-            let tracker = api_stats.clone();
-            tracker
-        })
+        .map(move || api_stats.clone())
         .and_then(|tracker: Arc<TorrentTracker>| async move {
             let mut results = Stats {
                 torrents: 0,
@@ -304,10 +301,7 @@ pub fn start(socket_addr: SocketAddr, tracker: Arc<TorrentTracker>) -> impl warp
         .and(filters::path::path("whitelist"))
         .and(filters::path::path("reload"))
         .and(filters::path::end())
-        .map(move || {
-            let tracker = t7.clone();
-            tracker
-        })
+        .map(move || t7.clone())
         .and_then(|tracker: Arc<TorrentTracker>| async move {
             match tracker.load_whitelist().await {
                 Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)),
@@ -324,10 +318,7 @@ pub fn start(socket_addr: SocketAddr, tracker: Arc<TorrentTracker>) -> impl warp
         .and(filters::path::path("keys"))
        .and(filters::path::path("reload"))
         .and(filters::path::end())
-        .map(move || {
-            let tracker = t8.clone();
-            tracker
-        })
+        .map(move || t8.clone())
         .and_then(|tracker: Arc<TorrentTracker>| async move {
             match tracker.load_keys().await {
                 Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)),
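Both edits in this file are clippy-driven. The import loses the stuttering `tracker::tracker::` segment (the type is presumably now re-exported from the `tracker` module root, which also silences clippy's `module_inception` complaint), and the three `.map(...)` closures collapse a bind-then-return into a single expression, clippy's `let_and_return` lint. A minimal, self-contained sketch of the closure cleanup (names are illustrative, not from the crate):

    use std::sync::Arc;

    // clippy::let_and_return: the binding adds nothing over returning the expression
    fn handle(shared: &Arc<Vec<u64>>) -> Arc<Vec<u64>> {
        let tracker = shared.clone();
        tracker
    }

    // preferred form, as in the diff above
    fn handle_fixed(shared: &Arc<Vec<u64>>) -> Arc<Vec<u64>> {
        shared.clone()
    }

    fn main() {
        let data = Arc::new(vec![1, 2, 3]);
        assert_eq!(handle(&data), handle_fixed(&data));
    }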
18 changes: 9 additions & 9 deletions src/config.rs
@@ -12,14 +12,14 @@ use {std, toml};
 use crate::databases::database::DatabaseDrivers;
 use crate::mode::TrackerMode;
 
-#[derive(Serialize, Deserialize, PartialEq, Debug)]
+#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
 pub struct UdpTrackerConfig {
     pub enabled: bool,
     pub bind_address: String,
 }
 
 #[serde_as]
-#[derive(Serialize, Deserialize, PartialEq, Debug)]
+#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
 pub struct HttpTrackerConfig {
     pub enabled: bool,
     pub bind_address: String,
@@ -30,14 +30,14 @@ pub struct HttpTrackerConfig {
     pub ssl_key_path: Option<String>,
 }
 
-#[derive(Serialize, Deserialize, PartialEq, Debug)]
+#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
 pub struct HttpApiConfig {
     pub enabled: bool,
     pub bind_address: String,
     pub access_tokens: HashMap<String, String>,
 }
 
-#[derive(Serialize, Deserialize, PartialEq, Debug)]
+#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
 pub struct Configuration {
     pub log_level: Option<String>,
     pub mode: TrackerMode,
@@ -140,9 +140,9 @@ impl Configuration {
             eprintln!("Creating config file..");
             let config = Configuration::default();
            let _ = config.save_to_file(path);
-            return Err(ConfigError::Message(format!(
-                "Please edit the config.TOML in the root folder and restart the tracker."
-            )));
+            return Err(ConfigError::Message(
+                "Please edit the config.TOML in the root folder and restart the tracker.".to_string(),
+            ));
         }
 
         let torrust_config: Configuration = config
@@ -152,7 +152,7 @@ impl Configuration {
         Ok(torrust_config)
     }
 
-    pub fn save_to_file(&self, path: &str) -> Result<(), ()> {
+    pub fn save_to_file(&self, path: &str) -> Result<(), ConfigurationError> {
         let toml_string = toml::to_string(self).expect("Could not encode TOML value");
         fs::write(path, toml_string).expect("Could not write to file!");
         Ok(())
@@ -236,7 +236,7 @@ mod tests {
         let temp_file = temp_directory.join(format!("test_config_{}.toml", Uuid::new_v4()));
 
         // Convert to argument type for Configuration::save_to_file
-        let config_file_path = temp_file.clone();
+        let config_file_path = temp_file;
        let path = config_file_path.to_string_lossy().to_string();
 
         let default_configuration = Configuration::default();
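Four separate lints are addressed in this file: `derive_partial_eq_without_eq` (every `PartialEq` derive gains `Eq`, since all the fields support full equivalence), `useless_format` (a `format!` with no interpolation becomes `.to_string()`), the `Result<(), ()>` return of `save_to_file` gains a typed error (note the body still `expect`s on failure, so the error variant is not yet actually produced), and `redundant_clone` in the test (`temp_file` is moved rather than cloned because it is not used again). A small sketch of the first two, with an illustrative struct that is not from the crate:

    // clippy::derive_partial_eq_without_eq: when every field is Eq, derive Eq as well
    #[derive(PartialEq, Eq, Debug)]
    struct BindAddress {
        host: String,
        port: u16,
    }

    fn main() {
        // clippy::useless_format: format!("literal") is just "literal".to_string()
        let msg = "Please edit the config and restart.".to_string();
        let a = BindAddress { host: "0.0.0.0".into(), port: 6969 };
        let b = BindAddress { host: "0.0.0.0".into(), port: 6969 };
        assert_eq!(a, b);
        println!("{msg}");
    }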
4 changes: 2 additions & 2 deletions src/databases/database.rs
@@ -7,7 +7,7 @@ use crate::databases::sqlite::SqliteDatabase;
 use crate::tracker::key::AuthKey;
 use crate::InfoHash;
 
-#[derive(Serialize, Deserialize, PartialEq, Debug)]
+#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
 pub enum DatabaseDrivers {
     Sqlite3,
     MySQL,
@@ -55,7 +55,7 @@ pub trait Database: Sync + Send {
     async fn remove_key_from_keys(&self, key: &str) -> Result<usize, Error>;
 }
 
-#[derive(Debug, Display, PartialEq, Error)]
+#[derive(Debug, Display, PartialEq, Eq, Error)]
 #[allow(dead_code)]
 pub enum Error {
     #[display(fmt = "Query returned no rows.")]
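Same `derive_partial_eq_without_eq` cleanup as in src/config.rs, applied here to the driver selection enum and the database error enum.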
2 changes: 1 addition & 1 deletion src/databases/mysql.rs
@@ -19,7 +19,7 @@ pub struct MysqlDatabase {
 
 impl MysqlDatabase {
     pub fn new(db_path: &str) -> Result<Self, r2d2::Error> {
-        let opts = Opts::from_url(&db_path).expect("Failed to connect to MySQL database.");
+        let opts = Opts::from_url(db_path).expect("Failed to connect to MySQL database.");
         let builder = OptsBuilder::from_opts(opts);
         let manager = MysqlConnectionManager::new(builder);
         let pool = r2d2::Pool::builder()
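`db_path` is already a `&str`, so borrowing it again only produces a `&&str` that is immediately auto-dereferenced; clippy reports this as `needless_borrow`. A standalone illustration with a made-up function:

    fn parse_url(url: &str) -> usize {
        url.len()
    }

    fn main() {
        let db_path: &str = "mysql://localhost:3306/torrust";
        let a = parse_url(&db_path); // clippy::needless_borrow: the extra & is noise
        let b = parse_url(db_path);  // preferred, as in the diff above
        assert_eq!(a, b);
    }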
4 changes: 2 additions & 2 deletions src/databases/sqlite.rs
@@ -135,7 +135,7 @@ impl Database for SqliteDatabase {
         let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?;
 
         let mut stmt = conn.prepare("SELECT info_hash FROM whitelist WHERE info_hash = ?")?;
-        let mut rows = stmt.query(&[info_hash])?;
+        let mut rows = stmt.query([info_hash])?;
 
         if let Some(row) = rows.next()? {
             let info_hash: String = row.get(0).unwrap();
@@ -223,7 +223,7 @@ impl Database for SqliteDatabase {
     async fn remove_key_from_keys(&self, key: &str) -> Result<usize, database::Error> {
         let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?;
 
-        match conn.execute("DELETE FROM keys WHERE key = ?", &[key]) {
+        match conn.execute("DELETE FROM keys WHERE key = ?", [key]) {
             Ok(updated) => {
                 if updated > 0 {
                     return Ok(updated);
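Both query sites switch from `&[param]` to `[param]`: recent rusqlite releases implement the `Params` trait for fixed-size arrays by value, so the extra borrow is unnecessary and gets flagged (assuming the crate is on a rusqlite version with array support, which the compiling diff implies).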
14 changes: 7 additions & 7 deletions src/http/filters.rs
@@ -7,7 +7,7 @@ use warp::{reject, Filter, Rejection};
 
 use crate::http::{AnnounceRequest, AnnounceRequestQuery, ScrapeRequest, ServerError, WebResult};
 use crate::tracker::key::AuthKey;
-use crate::tracker::tracker::TorrentTracker;
+use crate::tracker::TorrentTracker;
 use crate::{InfoHash, PeerId, MAX_SCRAPE_TORRENTS};
 
 /// Pass Arc<TorrentTracker> along
@@ -61,12 +61,12 @@ pub fn with_scrape_request(on_reverse_proxy: bool) -> impl Filter<Extract = (Scr
 
 /// Parse InfoHash from raw query string
 async fn info_hashes(raw_query: String) -> WebResult<Vec<InfoHash>> {
-    let split_raw_query: Vec<&str> = raw_query.split("&").collect();
+    let split_raw_query: Vec<&str> = raw_query.split('&').collect();
     let mut info_hashes: Vec<InfoHash> = Vec::new();
 
     for v in split_raw_query {
         if v.contains("info_hash") {
-            let raw_info_hash = v.split("=").collect::<Vec<&str>>()[1];
+            let raw_info_hash = v.split('=').collect::<Vec<&str>>()[1];
             let info_hash_bytes = percent_encoding::percent_decode_str(raw_info_hash).collect::<Vec<u8>>();
             let info_hash = InfoHash::from_str(&hex::encode(info_hash_bytes));
             if let Ok(ih) = info_hash {
@@ -77,7 +77,7 @@ async fn info_hashes(raw_query: String) -> WebResult<Vec<InfoHash>> {
 
     if info_hashes.len() > MAX_SCRAPE_TORRENTS as usize {
         Err(reject::custom(ServerError::ExceededInfoHashLimit))
-    } else if info_hashes.len() < 1 {
+    } else if info_hashes.is_empty() {
         Err(reject::custom(ServerError::InvalidInfoHash))
     } else {
         Ok(info_hashes)
@@ -87,15 +87,15 @@ async fn info_hashes(raw_query: String) -> WebResult<Vec<InfoHash>> {
 /// Parse PeerId from raw query string
 async fn peer_id(raw_query: String) -> WebResult<PeerId> {
     // put all query params in a vec
-    let split_raw_query: Vec<&str> = raw_query.split("&").collect();
+    let split_raw_query: Vec<&str> = raw_query.split('&').collect();
 
     let mut peer_id: Option<PeerId> = None;
 
     for v in split_raw_query {
         // look for the peer_id param
         if v.contains("peer_id") {
             // get raw percent_encoded peer_id
-            let raw_peer_id = v.split("=").collect::<Vec<&str>>()[1];
+            let raw_peer_id = v.split('=').collect::<Vec<&str>>()[1];
 
             // decode peer_id
             let peer_id_bytes = percent_encoding::percent_decode_str(raw_peer_id).collect::<Vec<u8>>();
@@ -143,7 +143,7 @@ async fn peer_addr(
             // set client ip to last forwarded ip
             let x_forwarded_ip = *x_forwarded_ips.last().unwrap();
 
-            IpAddr::from_str(x_forwarded_ip).or_else(|_| Err(reject::custom(ServerError::AddressNotFound)))
+            IpAddr::from_str(x_forwarded_ip).map_err(|_| reject::custom(ServerError::AddressNotFound))
         }
         false => Ok(remote_addr.unwrap().ip()),
     }
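The changes in this file are all mechanical clippy fixes: `single_char_pattern` (a `char` pattern avoids a string comparison where one byte will do), `is_empty()` in place of `len() < 1` (`clippy::len_zero`), and `map_err` in place of `or_else(|_| Err(...))` (`clippy::bind_instead_of_map`). A compact sketch of all three, with made-up names:

    fn first_param(raw_query: &str) -> Result<usize, String> {
        // clippy::single_char_pattern: prefer '&' (a char) over "&" (a one-byte &str)
        let params: Vec<&str> = raw_query.split('&').collect();

        // clippy::len_zero: prefer is_empty() over len() < 1
        if params.is_empty() {
            return Err("empty query".to_string());
        }

        // clippy::bind_instead_of_map: or_else(|_| Err(e)) is exactly map_err(|_| e)
        params[0].parse::<usize>().map_err(|_| "not a number".to_string())
    }

    fn main() {
        assert_eq!(first_param("42&x=1"), Ok(42));
        assert!(first_param("abc").is_err());
    }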
6 changes: 3 additions & 3 deletions src/http/handlers.rs
@@ -15,7 +15,7 @@ use crate::peer::TorrentPeer;
 use crate::tracker::key::AuthKey;
 use crate::tracker::statistics::TrackerStatisticsEvent;
 use crate::tracker::torrent::{TorrentError, TorrentStats};
-use crate::tracker::tracker::TorrentTracker;
+use crate::tracker::TorrentTracker;
 use crate::InfoHash;
 
 /// Authenticate InfoHash using optional AuthKey
@@ -93,7 +93,7 @@ pub async fn handle_scrape(
     let db = tracker.get_torrents().await;
 
     for info_hash in scrape_request.info_hashes.iter() {
-        let scrape_entry = match db.get(&info_hash) {
+        let scrape_entry = match db.get(info_hash) {
             Some(torrent_info) => {
                 if authenticate(info_hash, &auth_key, tracker.clone()).await.is_ok() {
                     let (seeders, completed, leechers) = torrent_info.get_stats();
@@ -117,7 +117,7 @@ pub async fn handle_scrape(
             },
         };
 
-        files.insert(info_hash.clone(), scrape_entry);
+        files.insert(*info_hash, scrape_entry);
     }
 
     // send stats event
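Two more borrow/copy cleanups close out the diff: `db.get(&info_hash)` took a reference to something `.iter()` already yields by reference (`needless_borrow` again), and `info_hash.clone()` cloned a type that implements `Copy`, which clippy flags as `clone_on_copy`; dereferencing is the idiomatic spelling. A sketch with a stand-in hash type (the real `InfoHash` lives in the crate):

    use std::collections::HashMap;

    // Stand-in for the crate's info-hash type, which the diff shows to be Copy.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct InfoHash([u8; 20]);

    fn main() {
        let requested = vec![InfoHash([0u8; 20]), InfoHash([1u8; 20])];
        let mut files: HashMap<InfoHash, u32> = HashMap::new();

        for info_hash in requested.iter() {
            // clippy::clone_on_copy: write *info_hash, not info_hash.clone()
            files.insert(*info_hash, 0);
        }
        assert_eq!(files.len(), 2);
    }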