From 5af28a291c6f1d9b6197649d8ec6394d1582f3fe Mon Sep 17 00:00:00 2001 From: Jose Celano Date: Fri, 16 Dec 2022 09:39:51 +0000 Subject: [PATCH 1/2] fix: the pedantic clippy warnings --- src/config.rs | 4 ++-- src/protocol/info_hash.rs | 2 +- src/tracker/auth.rs | 2 +- src/udp/handlers.rs | 6 ++++-- tests/api.rs | 6 ++++-- tests/udp.rs | 8 +++++--- 6 files changed, 17 insertions(+), 11 deletions(-) diff --git a/src/config.rs b/src/config.rs index a7e7e9df6..ba99e0f45 100644 --- a/src/config.rs +++ b/src/config.rs @@ -74,7 +74,7 @@ impl std::fmt::Display for Error { Error::ConfigError(e) => e.fmt(f), Error::IOError(e) => e.fmt(f), Error::ParseError(e) => e.fmt(f), - Error::TrackerModeIncompatible => write!(f, "{:?}", self), + Error::TrackerModeIncompatible => write!(f, "{self:?}"), } } } @@ -296,6 +296,6 @@ mod tests { fn configuration_error_could_be_displayed() { let error = Error::TrackerModeIncompatible; - assert_eq!(format!("{}", error), "TrackerModeIncompatible"); + assert_eq!(format!("{error}"), "TrackerModeIncompatible"); } } diff --git a/src/protocol/info_hash.rs b/src/protocol/info_hash.rs index 3b9b2fa35..9a0900063 100644 --- a/src/protocol/info_hash.rs +++ b/src/protocol/info_hash.rs @@ -133,7 +133,7 @@ mod tests { fn an_info_hash_should_by_displayed_like_a_40_utf8_lowercased_char_hex_string() { let info_hash = InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap(); - let output = format!("{}", info_hash); + let output = format!("{info_hash}"); assert_eq!(output, "ffffffffffffffffffffffffffffffffffffffff"); } diff --git a/src/tracker/auth.rs b/src/tracker/auth.rs index 7ac6d6939..02450dc82 100644 --- a/src/tracker/auth.rs +++ b/src/tracker/auth.rs @@ -90,7 +90,7 @@ pub enum Error { impl From for Error { fn from(e: r2d2_sqlite::rusqlite::Error) -> Self { - eprintln!("{}", e); + eprintln!("{e}"); Error::KeyVerificationError } } diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index d167b3e6d..001fb2380 100644 --- a/src/udp/handlers.rs 
+++ b/src/udp/handlers.rs @@ -255,8 +255,10 @@ mod tests { } fn default_testing_tracker_configuration() -> Configuration { - let mut config = Configuration::default(); - config.log_level = Some("off".to_owned()); + let mut config = Configuration { + log_level: Some("off".to_owned()), + ..Default::default() + }; // Ephemeral socket address let port = ephemeral_random_port(); diff --git a/tests/api.rs b/tests/api.rs index 706cd0b8d..84ddac573 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -203,8 +203,10 @@ mod tracker_api { } fn tracker_configuration() -> Arc { - let mut config = Configuration::default(); - config.log_level = Some("off".to_owned()); + let mut config = Configuration { + log_level: Some("off".to_owned()), + ..Default::default() + }; // Ephemeral socket address let port = ephemeral_random_port(); diff --git a/tests/udp.rs b/tests/udp.rs index 5f7a66856..55384db05 100644 --- a/tests/udp.rs +++ b/tests/udp.rs @@ -28,8 +28,10 @@ mod udp_tracker_server { use crate::common::ephemeral_random_port; fn tracker_configuration() -> Arc { - let mut config = Configuration::default(); - config.log_level = Some("off".to_owned()); + let mut config = Configuration { + log_level: Some("off".to_owned()), + ..Default::default() + }; // Ephemeral socket address let port = ephemeral_random_port(); @@ -181,7 +183,7 @@ mod udp_tracker_server { /// Generates the source address for the UDP client fn source_address(port: u16) -> String { - format!("127.0.0.1:{}", port) + format!("127.0.0.1:{port}") } fn is_error_response(response: &Response, error_message: &str) -> bool { From b23d64b9c3d58a6f4f7dab8a60775fc234aaadbd Mon Sep 17 00:00:00 2001 From: Jose Celano Date: Fri, 16 Dec 2022 08:49:56 +0000 Subject: [PATCH 2/2] feat: add ssl support for the API New config options have been added to support HTTPS connection to the API: ``` [http_api] ssl_enabled = false ssl_cert_path = "./storage/ssl_certificates/localhost.crt" ssl_key_path = "./storage/ssl_certificates/localhost.key" ```
--- src/api/mod.rs | 18 +++ src/api/routes.rs | 307 ++++++++++++++++++++++++++++++++++++ src/api/server.rs | 333 +++------------------------------------ src/config.rs | 41 +++-- src/jobs/http_tracker.rs | 4 +- src/jobs/tracker_api.rs | 26 +-- src/jobs/udp_tracker.rs | 4 +- src/setup.rs | 2 +- tests/api.rs | 2 +- 9 files changed, 393 insertions(+), 344 deletions(-) create mode 100644 src/api/routes.rs diff --git a/src/api/mod.rs b/src/api/mod.rs index 16abb8e27..d254c91ac 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -1,2 +1,20 @@ pub mod resource; +pub mod routes; pub mod server; + +use serde::{Deserialize, Serialize}; + +#[derive(Deserialize, Debug)] +pub struct TorrentInfoQuery { + offset: Option, + limit: Option, +} + +#[derive(Serialize, Debug)] +#[serde(tag = "status", rename_all = "snake_case")] +enum ActionStatus<'a> { + Ok, + Err { reason: std::borrow::Cow<'a, str> }, +} + +impl warp::reject::Reject for ActionStatus<'static> {} diff --git a/src/api/routes.rs b/src/api/routes.rs new file mode 100644 index 000000000..76b449e9b --- /dev/null +++ b/src/api/routes.rs @@ -0,0 +1,307 @@ +use std::cmp::min; +use std::collections::{HashMap, HashSet}; +use std::sync::Arc; +use std::time::Duration; + +use serde::Deserialize; +use warp::{filters, reply, Filter}; + +use super::resource::auth_key::AuthKey; +use super::resource::peer; +use super::resource::stats::Stats; +use super::resource::torrent::{ListItem, Torrent}; +use super::{ActionStatus, TorrentInfoQuery}; +use crate::protocol::info_hash::InfoHash; +use crate::tracker; + +fn authenticate(tokens: HashMap) -> impl Filter + Clone { + #[derive(Deserialize)] + struct AuthToken { + token: Option, + } + + let tokens: HashSet = tokens.into_values().collect(); + + let tokens = Arc::new(tokens); + warp::filters::any::any() + .map(move || tokens.clone()) + .and(filters::query::query::()) + .and_then(|tokens: Arc>, token: AuthToken| async move { + match token.token { + Some(token) => { + if !tokens.contains(&token) { 
+ return Err(warp::reject::custom(ActionStatus::Err { + reason: "token not valid".into(), + })); + } + + Ok(()) + } + None => Err(warp::reject::custom(ActionStatus::Err { + reason: "unauthorized".into(), + })), + } + }) + .untuple_one() +} + +#[allow(clippy::too_many_lines)] +#[must_use] +pub fn routes(tracker: &Arc) -> impl Filter + Clone { + // GET /api/torrents?offset=:u32&limit=:u32 + // View torrent list + let api_torrents = tracker.clone(); + let view_torrent_list = filters::method::get() + .and(filters::path::path("torrents")) + .and(filters::path::end()) + .and(filters::query::query()) + .map(move |limits| { + let tracker = api_torrents.clone(); + (limits, tracker) + }) + .and_then(|(limits, tracker): (TorrentInfoQuery, Arc)| async move { + let offset = limits.offset.unwrap_or(0); + let limit = min(limits.limit.unwrap_or(1000), 4000); + + let db = tracker.get_torrents().await; + let results: Vec<_> = db + .iter() + .map(|(info_hash, torrent_entry)| { + let (seeders, completed, leechers) = torrent_entry.get_stats(); + ListItem { + info_hash: info_hash.to_string(), + seeders, + completed, + leechers, + peers: None, + } + }) + .skip(offset as usize) + .take(limit as usize) + .collect(); + + Result::<_, warp::reject::Rejection>::Ok(reply::json(&results)) + }); + + // GET /api/stats + // View tracker status + let api_stats = tracker.clone(); + let view_stats_list = filters::method::get() + .and(filters::path::path("stats")) + .and(filters::path::end()) + .map(move || api_stats.clone()) + .and_then(|tracker: Arc| async move { + let mut results = Stats { + torrents: 0, + seeders: 0, + completed: 0, + leechers: 0, + tcp4_connections_handled: 0, + tcp4_announces_handled: 0, + tcp4_scrapes_handled: 0, + tcp6_connections_handled: 0, + tcp6_announces_handled: 0, + tcp6_scrapes_handled: 0, + udp4_connections_handled: 0, + udp4_announces_handled: 0, + udp4_scrapes_handled: 0, + udp6_connections_handled: 0, + udp6_announces_handled: 0, + udp6_scrapes_handled: 0, + }; + + 
let db = tracker.get_torrents().await; + + db.values().for_each(|torrent_entry| { + let (seeders, completed, leechers) = torrent_entry.get_stats(); + results.seeders += seeders; + results.completed += completed; + results.leechers += leechers; + results.torrents += 1; + }); + + let stats = tracker.get_stats().await; + + #[allow(clippy::cast_possible_truncation)] + { + results.tcp4_connections_handled = stats.tcp4_connections_handled as u32; + results.tcp4_announces_handled = stats.tcp4_announces_handled as u32; + results.tcp4_scrapes_handled = stats.tcp4_scrapes_handled as u32; + results.tcp6_connections_handled = stats.tcp6_connections_handled as u32; + results.tcp6_announces_handled = stats.tcp6_announces_handled as u32; + results.tcp6_scrapes_handled = stats.tcp6_scrapes_handled as u32; + results.udp4_connections_handled = stats.udp4_connections_handled as u32; + results.udp4_announces_handled = stats.udp4_announces_handled as u32; + results.udp4_scrapes_handled = stats.udp4_scrapes_handled as u32; + results.udp6_connections_handled = stats.udp6_connections_handled as u32; + results.udp6_announces_handled = stats.udp6_announces_handled as u32; + results.udp6_scrapes_handled = stats.udp6_scrapes_handled as u32; + } + + Result::<_, warp::reject::Rejection>::Ok(reply::json(&results)) + }); + + // GET /api/torrent/:info_hash + // View torrent info + let t2 = tracker.clone(); + let view_torrent_info = filters::method::get() + .and(filters::path::path("torrent")) + .and(filters::path::param()) + .and(filters::path::end()) + .map(move |info_hash: InfoHash| { + let tracker = t2.clone(); + (info_hash, tracker) + }) + .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { + let db = tracker.get_torrents().await; + let torrent_entry_option = db.get(&info_hash); + + let torrent_entry = match torrent_entry_option { + Some(torrent_entry) => torrent_entry, + None => { + return Result::<_, warp::reject::Rejection>::Ok(reply::json(&"torrent not known")); + } + }; + let 
(seeders, completed, leechers) = torrent_entry.get_stats(); + + let peers = torrent_entry.get_peers(None); + + let peer_resources = peers.iter().map(|peer| peer::Peer::from(**peer)).collect(); + + Ok(reply::json(&Torrent { + info_hash: info_hash.to_string(), + seeders, + completed, + leechers, + peers: Some(peer_resources), + })) + }); + + // DELETE /api/whitelist/:info_hash + // Delete info hash from whitelist + let t3 = tracker.clone(); + let delete_torrent = filters::method::delete() + .and(filters::path::path("whitelist")) + .and(filters::path::param()) + .and(filters::path::end()) + .map(move |info_hash: InfoHash| { + let tracker = t3.clone(); + (info_hash, tracker) + }) + .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { + match tracker.remove_torrent_from_whitelist(&info_hash).await { + Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), + Err(_) => Err(warp::reject::custom(ActionStatus::Err { + reason: "failed to remove torrent from whitelist".into(), + })), + } + }); + + // POST /api/whitelist/:info_hash + // Add info hash to whitelist + let t4 = tracker.clone(); + let add_torrent = filters::method::post() + .and(filters::path::path("whitelist")) + .and(filters::path::param()) + .and(filters::path::end()) + .map(move |info_hash: InfoHash| { + let tracker = t4.clone(); + (info_hash, tracker) + }) + .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { + match tracker.add_torrent_to_whitelist(&info_hash).await { + Ok(..) => Ok(warp::reply::json(&ActionStatus::Ok)), + Err(..) 
=> Err(warp::reject::custom(ActionStatus::Err { + reason: "failed to whitelist torrent".into(), + })), + } + }); + + // POST /api/key/:seconds_valid + // Generate new key + let t5 = tracker.clone(); + let create_key = filters::method::post() + .and(filters::path::path("key")) + .and(filters::path::param()) + .and(filters::path::end()) + .map(move |seconds_valid: u64| { + let tracker = t5.clone(); + (seconds_valid, tracker) + }) + .and_then(|(seconds_valid, tracker): (u64, Arc)| async move { + match tracker.generate_auth_key(Duration::from_secs(seconds_valid)).await { + Ok(auth_key) => Ok(warp::reply::json(&AuthKey::from(auth_key))), + Err(..) => Err(warp::reject::custom(ActionStatus::Err { + reason: "failed to generate key".into(), + })), + } + }); + + // DELETE /api/key/:key + // Delete key + let t6 = tracker.clone(); + let delete_key = filters::method::delete() + .and(filters::path::path("key")) + .and(filters::path::param()) + .and(filters::path::end()) + .map(move |key: String| { + let tracker = t6.clone(); + (key, tracker) + }) + .and_then(|(key, tracker): (String, Arc)| async move { + match tracker.remove_auth_key(&key).await { + Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), + Err(_) => Err(warp::reject::custom(ActionStatus::Err { + reason: "failed to delete key".into(), + })), + } + }); + + // GET /api/whitelist/reload + // Reload whitelist + let t7 = tracker.clone(); + let reload_whitelist = filters::method::get() + .and(filters::path::path("whitelist")) + .and(filters::path::path("reload")) + .and(filters::path::end()) + .map(move || t7.clone()) + .and_then(|tracker: Arc| async move { + match tracker.load_whitelist().await { + Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), + Err(_) => Err(warp::reject::custom(ActionStatus::Err { + reason: "failed to reload whitelist".into(), + })), + } + }); + + // GET /api/keys/reload + // Reload whitelist + let t8 = tracker.clone(); + let reload_keys = filters::method::get() + .and(filters::path::path("keys")) + 
.and(filters::path::path("reload")) + .and(filters::path::end()) + .map(move || t8.clone()) + .and_then(|tracker: Arc| async move { + match tracker.load_keys().await { + Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), + Err(_) => Err(warp::reject::custom(ActionStatus::Err { + reason: "failed to reload keys".into(), + })), + } + }); + + let api_routes = filters::path::path("api").and( + view_torrent_list + .or(delete_torrent) + .or(view_torrent_info) + .or(view_stats_list) + .or(add_torrent) + .or(create_key) + .or(delete_key) + .or(reload_whitelist) + .or(reload_keys), + ); + + api_routes.and(authenticate(tracker.config.http_api.access_tokens.clone())) +} diff --git a/src/api/server.rs b/src/api/server.rs index 5967a8be4..5d6a3cdfd 100644 --- a/src/api/server.rs +++ b/src/api/server.rs @@ -1,327 +1,32 @@ -use std::cmp::min; -use std::collections::{HashMap, HashSet}; use std::net::SocketAddr; use std::sync::Arc; -use std::time::Duration; -use serde::{Deserialize, Serialize}; -use warp::{filters, reply, serve, Filter}; +use warp::serve; -use super::resource::auth_key::AuthKey; -use super::resource::peer; -use super::resource::stats::Stats; -use super::resource::torrent::{ListItem, Torrent}; -use crate::protocol::info_hash::InfoHash; +use super::routes::routes; use crate::tracker; -#[derive(Deserialize, Debug)] -struct TorrentInfoQuery { - offset: Option, - limit: Option, -} - -#[derive(Serialize, Debug)] -#[serde(tag = "status", rename_all = "snake_case")] -enum ActionStatus<'a> { - Ok, - Err { reason: std::borrow::Cow<'a, str> }, -} - -impl warp::reject::Reject for ActionStatus<'static> {} - -fn authenticate(tokens: HashMap) -> impl Filter + Clone { - #[derive(Deserialize)] - struct AuthToken { - token: Option, - } - - let tokens: HashSet = tokens.into_iter().map(|(_, v)| v).collect(); - - let tokens = Arc::new(tokens); - warp::filters::any::any() - .map(move || tokens.clone()) - .and(filters::query::query::()) - .and_then(|tokens: Arc>, token: AuthToken| async 
move { - match token.token { - Some(token) => { - if !tokens.contains(&token) { - return Err(warp::reject::custom(ActionStatus::Err { - reason: "token not valid".into(), - })); - } - - Ok(()) - } - None => Err(warp::reject::custom(ActionStatus::Err { - reason: "unauthorized".into(), - })), - } - }) - .untuple_one() -} - -#[allow(clippy::too_many_lines)] pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl warp::Future { - // GET /api/torrents?offset=:u32&limit=:u32 - // View torrent list - let api_torrents = tracker.clone(); - let view_torrent_list = filters::method::get() - .and(filters::path::path("torrents")) - .and(filters::path::end()) - .and(filters::query::query()) - .map(move |limits| { - let tracker = api_torrents.clone(); - (limits, tracker) - }) - .and_then(|(limits, tracker): (TorrentInfoQuery, Arc)| async move { - let offset = limits.offset.unwrap_or(0); - let limit = min(limits.limit.unwrap_or(1000), 4000); - - let db = tracker.get_torrents().await; - let results: Vec<_> = db - .iter() - .map(|(info_hash, torrent_entry)| { - let (seeders, completed, leechers) = torrent_entry.get_stats(); - ListItem { - info_hash: info_hash.to_string(), - seeders, - completed, - leechers, - peers: None, - } - }) - .skip(offset as usize) - .take(limit as usize) - .collect(); - - Result::<_, warp::reject::Rejection>::Ok(reply::json(&results)) - }); - - // GET /api/stats - // View tracker status - let api_stats = tracker.clone(); - let view_stats_list = filters::method::get() - .and(filters::path::path("stats")) - .and(filters::path::end()) - .map(move || api_stats.clone()) - .and_then(|tracker: Arc| async move { - let mut results = Stats { - torrents: 0, - seeders: 0, - completed: 0, - leechers: 0, - tcp4_connections_handled: 0, - tcp4_announces_handled: 0, - tcp4_scrapes_handled: 0, - tcp6_connections_handled: 0, - tcp6_announces_handled: 0, - tcp6_scrapes_handled: 0, - udp4_connections_handled: 0, - udp4_announces_handled: 0, - udp4_scrapes_handled: 0, - 
udp6_connections_handled: 0, - udp6_announces_handled: 0, - udp6_scrapes_handled: 0, - }; - - let db = tracker.get_torrents().await; - - db.values().for_each(|torrent_entry| { - let (seeders, completed, leechers) = torrent_entry.get_stats(); - results.seeders += seeders; - results.completed += completed; - results.leechers += leechers; - results.torrents += 1; - }); - - let stats = tracker.get_stats().await; - - #[allow(clippy::cast_possible_truncation)] - { - results.tcp4_connections_handled = stats.tcp4_connections_handled as u32; - results.tcp4_announces_handled = stats.tcp4_announces_handled as u32; - results.tcp4_scrapes_handled = stats.tcp4_scrapes_handled as u32; - results.tcp6_connections_handled = stats.tcp6_connections_handled as u32; - results.tcp6_announces_handled = stats.tcp6_announces_handled as u32; - results.tcp6_scrapes_handled = stats.tcp6_scrapes_handled as u32; - results.udp4_connections_handled = stats.udp4_connections_handled as u32; - results.udp4_announces_handled = stats.udp4_announces_handled as u32; - results.udp4_scrapes_handled = stats.udp4_scrapes_handled as u32; - results.udp6_connections_handled = stats.udp6_connections_handled as u32; - results.udp6_announces_handled = stats.udp6_announces_handled as u32; - results.udp6_scrapes_handled = stats.udp6_scrapes_handled as u32; - } - - Result::<_, warp::reject::Rejection>::Ok(reply::json(&results)) - }); - - // GET /api/torrent/:info_hash - // View torrent info - let t2 = tracker.clone(); - let view_torrent_info = filters::method::get() - .and(filters::path::path("torrent")) - .and(filters::path::param()) - .and(filters::path::end()) - .map(move |info_hash: InfoHash| { - let tracker = t2.clone(); - (info_hash, tracker) - }) - .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { - let db = tracker.get_torrents().await; - let torrent_entry_option = db.get(&info_hash); - - let torrent_entry = match torrent_entry_option { - Some(torrent_entry) => torrent_entry, - None => { - return 
Result::<_, warp::reject::Rejection>::Ok(reply::json(&"torrent not known")); - } - }; - let (seeders, completed, leechers) = torrent_entry.get_stats(); - - let peers = torrent_entry.get_peers(None); - - let peer_resources = peers.iter().map(|peer| peer::Peer::from(**peer)).collect(); - - Ok(reply::json(&Torrent { - info_hash: info_hash.to_string(), - seeders, - completed, - leechers, - peers: Some(peer_resources), - })) - }); - - // DELETE /api/whitelist/:info_hash - // Delete info hash from whitelist - let t3 = tracker.clone(); - let delete_torrent = filters::method::delete() - .and(filters::path::path("whitelist")) - .and(filters::path::param()) - .and(filters::path::end()) - .map(move |info_hash: InfoHash| { - let tracker = t3.clone(); - (info_hash, tracker) - }) - .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { - match tracker.remove_torrent_from_whitelist(&info_hash).await { - Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), - Err(_) => Err(warp::reject::custom(ActionStatus::Err { - reason: "failed to remove torrent from whitelist".into(), - })), - } - }); - - // POST /api/whitelist/:info_hash - // Add info hash to whitelist - let t4 = tracker.clone(); - let add_torrent = filters::method::post() - .and(filters::path::path("whitelist")) - .and(filters::path::param()) - .and(filters::path::end()) - .map(move |info_hash: InfoHash| { - let tracker = t4.clone(); - (info_hash, tracker) - }) - .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { - match tracker.add_torrent_to_whitelist(&info_hash).await { - Ok(..) => Ok(warp::reply::json(&ActionStatus::Ok)), - Err(..) 
=> Err(warp::reject::custom(ActionStatus::Err { - reason: "failed to whitelist torrent".into(), - })), - } - }); - - // POST /api/key/:seconds_valid - // Generate new key - let t5 = tracker.clone(); - let create_key = filters::method::post() - .and(filters::path::path("key")) - .and(filters::path::param()) - .and(filters::path::end()) - .map(move |seconds_valid: u64| { - let tracker = t5.clone(); - (seconds_valid, tracker) - }) - .and_then(|(seconds_valid, tracker): (u64, Arc)| async move { - match tracker.generate_auth_key(Duration::from_secs(seconds_valid)).await { - Ok(auth_key) => Ok(warp::reply::json(&AuthKey::from(auth_key))), - Err(..) => Err(warp::reject::custom(ActionStatus::Err { - reason: "failed to generate key".into(), - })), - } - }); - - // DELETE /api/key/:key - // Delete key - let t6 = tracker.clone(); - let delete_key = filters::method::delete() - .and(filters::path::path("key")) - .and(filters::path::param()) - .and(filters::path::end()) - .map(move |key: String| { - let tracker = t6.clone(); - (key, tracker) - }) - .and_then(|(key, tracker): (String, Arc)| async move { - match tracker.remove_auth_key(&key).await { - Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), - Err(_) => Err(warp::reject::custom(ActionStatus::Err { - reason: "failed to delete key".into(), - })), - } - }); + let (_addr, api_server) = serve(routes(tracker)).bind_with_graceful_shutdown(socket_addr, async move { + tokio::signal::ctrl_c().await.expect("Failed to listen to shutdown signal."); + }); - // GET /api/whitelist/reload - // Reload whitelist - let t7 = tracker.clone(); - let reload_whitelist = filters::method::get() - .and(filters::path::path("whitelist")) - .and(filters::path::path("reload")) - .and(filters::path::end()) - .map(move || t7.clone()) - .and_then(|tracker: Arc| async move { - match tracker.load_whitelist().await { - Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), - Err(_) => Err(warp::reject::custom(ActionStatus::Err { - reason: "failed to reload 
whitelist".into(), - })), - } - }); + api_server +} - // GET /api/keys/reload - // Reload whitelist - let t8 = tracker.clone(); - let reload_keys = filters::method::get() - .and(filters::path::path("keys")) - .and(filters::path::path("reload")) - .and(filters::path::end()) - .map(move || t8.clone()) - .and_then(|tracker: Arc| async move { - match tracker.load_keys().await { - Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), - Err(_) => Err(warp::reject::custom(ActionStatus::Err { - reason: "failed to reload keys".into(), - })), - } +pub fn start_tls( + socket_addr: SocketAddr, + ssl_cert_path: String, + ssl_key_path: String, + tracker: &Arc, +) -> impl warp::Future { + let (_addr, api_server) = serve(routes(tracker)) + .tls() + .cert_path(ssl_cert_path) + .key_path(ssl_key_path) + .bind_with_graceful_shutdown(socket_addr, async move { + tokio::signal::ctrl_c().await.expect("Failed to listen to shutdown signal."); }); - let api_routes = filters::path::path("api").and( - view_torrent_list - .or(delete_torrent) - .or(view_torrent_info) - .or(view_stats_list) - .or(add_torrent) - .or(create_key) - .or(delete_key) - .or(reload_whitelist) - .or(reload_keys), - ); - - let server = api_routes.and(authenticate(tracker.config.http_api.access_tokens.clone())); - - let (_addr, api_server) = serve(server).bind_with_graceful_shutdown(socket_addr, async move { - tokio::signal::ctrl_c().await.expect("Failed to listen to shutdown signal."); - }); - api_server } diff --git a/src/config.rs b/src/config.rs index ba99e0f45..66def17cd 100644 --- a/src/config.rs +++ b/src/config.rs @@ -30,10 +30,16 @@ pub struct HttpTracker { pub ssl_key_path: Option, } +#[serde_as] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] pub struct HttpApi { pub enabled: bool, pub bind_address: String, + pub ssl_enabled: bool, + #[serde_as(as = "NoneAsEmptyString")] + pub ssl_cert_path: Option, + #[serde_as(as = "NoneAsEmptyString")] + pub ssl_key_path: Option, pub access_tokens: HashMap, } @@ -81,20 
+87,8 @@ impl std::fmt::Display for Error { impl std::error::Error for Error {} -impl Configuration { - #[must_use] - pub fn get_ext_ip(&self) -> Option { - match &self.external_ip { - None => None, - Some(external_ip) => match IpAddr::from_str(external_ip) { - Ok(external_ip) => Some(external_ip), - Err(_) => None, - }, - } - } - - #[must_use] - pub fn default() -> Configuration { +impl Default for Configuration { + fn default() -> Self { let mut configuration = Configuration { log_level: Option::from(String::from("info")), mode: mode::Mode::Public, @@ -114,6 +108,9 @@ impl Configuration { http_api: HttpApi { enabled: true, bind_address: String::from("127.0.0.1:1212"), + ssl_enabled: false, + ssl_cert_path: None, + ssl_key_path: None, access_tokens: [(String::from("admin"), String::from("MyAccessToken"))] .iter() .cloned() @@ -133,6 +130,19 @@ impl Configuration { }); configuration } +} + +impl Configuration { + #[must_use] + pub fn get_ext_ip(&self) -> Option { + match &self.external_ip { + None => None, + Some(external_ip) => match IpAddr::from_str(external_ip) { + Ok(external_ip) => Some(external_ip), + Err(_) => None, + }, + } + } /// # Errors /// @@ -208,6 +218,9 @@ mod tests { [http_api] enabled = true bind_address = "127.0.0.1:1212" + ssl_enabled = false + ssl_cert_path = "" + ssl_key_path = "" [http_api.access_tokens] admin = "MyAccessToken" diff --git a/src/jobs/http_tracker.rs b/src/jobs/http_tracker.rs index b8f031f5a..c62bc5cc9 100644 --- a/src/jobs/http_tracker.rs +++ b/src/jobs/http_tracker.rs @@ -22,10 +22,10 @@ pub fn start_job(config: &HttpTracker, tracker: Arc) -> JoinHa let http_tracker = Http::new(tracker); if !ssl_enabled { - info!("Starting HTTP server on: {}", bind_addr); + info!("Starting HTTP server on: http://{}", bind_addr); http_tracker.start(bind_addr).await; } else if ssl_enabled && ssl_cert_path.is_some() && ssl_key_path.is_some() { - info!("Starting HTTPS server on: {} (TLS)", bind_addr); + info!("Starting HTTPS server on: 
https://{} (TLS)", bind_addr); http_tracker .start_tls(bind_addr, ssl_cert_path.unwrap(), ssl_key_path.unwrap()) .await; diff --git a/src/jobs/tracker_api.rs b/src/jobs/tracker_api.rs index 2c00aa453..211174f35 100644 --- a/src/jobs/tracker_api.rs +++ b/src/jobs/tracker_api.rs @@ -5,7 +5,7 @@ use tokio::sync::oneshot; use tokio::task::JoinHandle; use crate::api::server; -use crate::config::Configuration; +use crate::config::HttpApi; use crate::tracker; #[derive(Debug)] @@ -14,24 +14,30 @@ pub struct ApiServerJobStarted(); /// # Panics /// /// It would panic if unable to send the `ApiServerJobStarted` notice. -pub async fn start_job(config: &Configuration, tracker: Arc) -> JoinHandle<()> { +pub async fn start_job(config: &HttpApi, tracker: Arc) -> JoinHandle<()> { let bind_addr = config - .http_api .bind_address .parse::() .expect("Tracker API bind_address invalid."); - - info!("Starting Torrust API server on: {}", bind_addr); + let ssl_enabled = config.ssl_enabled; + let ssl_cert_path = config.ssl_cert_path.clone(); + let ssl_key_path = config.ssl_key_path.clone(); let (tx, rx) = oneshot::channel::(); // Run the API server let join_handle = tokio::spawn(async move { - let handel = server::start(bind_addr, &tracker); - - tx.send(ApiServerJobStarted()).expect("the start job dropped"); - - handel.await; + if !ssl_enabled { + info!("Starting Torrust API server on: http://{}", bind_addr); + let handle = server::start(bind_addr, &tracker); + tx.send(ApiServerJobStarted()).expect("the start job dropped"); + handle.await; + } else if ssl_enabled && ssl_cert_path.is_some() && ssl_key_path.is_some() { + info!("Starting Torrust API server on: https://{}", bind_addr); + let handle = server::start_tls(bind_addr, ssl_cert_path.unwrap(), ssl_key_path.unwrap(), &tracker); + tx.send(ApiServerJobStarted()).expect("the start job dropped"); + handle.await; + } }); // Wait until the API server job is running diff --git a/src/jobs/udp_tracker.rs b/src/jobs/udp_tracker.rs index 
57369f660..d0907c976 100644 --- a/src/jobs/udp_tracker.rs +++ b/src/jobs/udp_tracker.rs @@ -14,11 +14,11 @@ pub fn start_job(config: &UdpTracker, tracker: Arc) -> JoinHan tokio::spawn(async move { match Udp::new(tracker, &bind_addr).await { Ok(udp_server) => { - info!("Starting UDP server on: {}", bind_addr); + info!("Starting UDP server on: udp://{}", bind_addr); udp_server.start().await; } Err(e) => { - warn!("Could not start UDP tracker on: {}", bind_addr); + warn!("Could not start UDP tracker on: udp://{}", bind_addr); error!("{}", e); } } diff --git a/src/setup.rs b/src/setup.rs index a7b7c5a82..c045310bb 100644 --- a/src/setup.rs +++ b/src/setup.rs @@ -49,7 +49,7 @@ pub async fn setup(config: &Configuration, tracker: Arc) -> Ve // Start HTTP API server if config.http_api.enabled { - jobs.push(tracker_api::start_job(config, tracker.clone()).await); + jobs.push(tracker_api::start_job(&config.http_api, tracker.clone()).await); } // Remove torrents without peers, every interval diff --git a/tests/api.rs b/tests/api.rs index 84ddac573..dfb8d81b3 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -292,7 +292,7 @@ mod tracker_api { logging::setup(&configuration); // Start the HTTP API job - self.job = Some(tracker_api::start_job(&configuration, tracker).await); + self.job = Some(tracker_api::start_job(&configuration.http_api, tracker).await); self.started.store(true, Ordering::Relaxed); }