From 7eb25a0d2c4524efdd6f6207a82d360607d4d92f Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 17:58:51 +0100 Subject: [PATCH 01/45] refactor: remove internal mod exports --- src/api/resources/auth_key_resource.rs | 6 ++-- src/api/resources/stats_resource.rs | 2 +- src/api/resources/torrent_resource.rs | 13 +++---- src/config.rs | 13 ++----- src/databases/database.rs | 2 +- src/databases/mysql.rs | 2 +- src/databases/sqlite.rs | 2 +- src/http/filters.rs | 6 ++-- src/http/handlers.rs | 13 +++---- src/http/mod.rs | 8 ----- src/http/request.rs | 2 +- src/http/response.rs | 2 +- src/http/routes.rs | 5 ++- src/http/server.rs | 11 +++--- src/jobs/http_tracker.rs | 3 +- src/jobs/torrent_cleanup.rs | 2 +- src/jobs/tracker_api.rs | 2 +- src/jobs/udp_tracker.rs | 3 +- src/lib.rs | 8 ----- src/logging.rs | 2 +- src/main.rs | 3 +- src/protocol/common.rs | 3 +- src/setup.rs | 2 +- src/stats.rs | 2 +- src/tracker/key.rs | 2 +- src/tracker/mod.rs | 8 ++--- src/tracker/peer.rs | 22 ++++++------ src/tracker/statistics.rs | 4 +-- src/tracker/torrent.rs | 10 +++--- src/udp/connection_cookie.rs | 2 +- src/udp/handlers.rs | 49 +++++++++++++------------- src/udp/mod.rs | 5 --- src/udp/request.rs | 2 +- src/udp/server.rs | 3 +- tests/api.rs | 6 ++-- tests/udp.rs | 3 +- 36 files changed, 108 insertions(+), 125 deletions(-) diff --git a/src/api/resources/auth_key_resource.rs b/src/api/resources/auth_key_resource.rs index c38b7cc18..4fc5d0cf9 100644 --- a/src/api/resources/auth_key_resource.rs +++ b/src/api/resources/auth_key_resource.rs @@ -2,10 +2,10 @@ use std::convert::From; use serde::{Deserialize, Serialize}; -use crate::key::AuthKey; use crate::protocol::clock::DurationSinceUnixEpoch; +use crate::tracker::key::AuthKey; -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct AuthKeyResource { pub key: String, pub valid_until: Option, @@ -36,8 +36,8 @@ mod tests { use std::time::Duration; use 
super::AuthKeyResource; - use crate::key::AuthKey; use crate::protocol::clock::{DefaultClock, TimeNow}; + use crate::tracker::key::AuthKey; #[test] fn it_should_be_convertible_into_an_auth_key() { diff --git a/src/api/resources/stats_resource.rs b/src/api/resources/stats_resource.rs index 2fbaf42c1..e6f184897 100644 --- a/src/api/resources/stats_resource.rs +++ b/src/api/resources/stats_resource.rs @@ -1,6 +1,6 @@ use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct StatsResource { pub torrents: u32, pub seeders: u32, diff --git a/src/api/resources/torrent_resource.rs b/src/api/resources/torrent_resource.rs index 11e9d7196..784ffcb05 100644 --- a/src/api/resources/torrent_resource.rs +++ b/src/api/resources/torrent_resource.rs @@ -1,9 +1,9 @@ use serde::{Deserialize, Serialize}; -use crate::peer::TorrentPeer; -use crate::PeerId; +use crate::protocol::common::PeerId; +use crate::tracker::peer::TorrentPeer; -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct TorrentResource { pub info_hash: String, pub seeders: u32, @@ -13,7 +13,7 @@ pub struct TorrentResource { pub peers: Option>, } -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct TorrentListItemResource { pub info_hash: String, pub seeders: u32, @@ -23,7 +23,7 @@ pub struct TorrentListItemResource { pub peers: Option>, } -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct TorrentPeerResource { pub peer_id: PeerIdResource, pub peer_addr: String, @@ -36,7 +36,7 @@ pub struct TorrentPeerResource { pub event: String, } -#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct PeerIdResource { pub id: Option, pub client: Option, @@ -52,6 +52,7 @@ 
impl From for PeerIdResource { } impl From for TorrentPeerResource { + #[allow(deprecated)] fn from(peer: TorrentPeer) -> Self { TorrentPeerResource { peer_id: PeerIdResource::from(peer.peer_id), diff --git a/src/config.rs b/src/config.rs index 8c17070d2..1afc55e54 100644 --- a/src/config.rs +++ b/src/config.rs @@ -10,7 +10,7 @@ use serde_with::{serde_as, NoneAsEmptyString}; use {std, toml}; use crate::databases::database::DatabaseDrivers; -use crate::mode::TrackerMode; +use crate::tracker::mode::TrackerMode; #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] pub struct UdpTrackerConfig { @@ -161,6 +161,7 @@ impl Configuration { #[cfg(test)] mod tests { + use crate::config::{Configuration, ConfigurationError}; #[cfg(test)] fn default_config_toml() -> String { @@ -205,8 +206,6 @@ mod tests { #[test] fn configuration_should_have_default_values() { - use crate::Configuration; - let configuration = Configuration::default(); let toml = toml::to_string(&configuration).expect("Could not encode TOML value"); @@ -216,8 +215,6 @@ mod tests { #[test] fn configuration_should_contain_the_external_ip() { - use crate::Configuration; - let configuration = Configuration::default(); assert_eq!(configuration.external_ip, Option::Some(String::from("0.0.0.0"))); @@ -229,8 +226,6 @@ mod tests { use uuid::Uuid; - use crate::Configuration; - // Build temp config file path let temp_directory = env::temp_dir(); let temp_file = temp_directory.join(format!("test_config_{}.toml", Uuid::new_v4())); @@ -275,8 +270,6 @@ mod tests { #[test] fn configuration_should_be_loaded_from_a_toml_config_file() { - use crate::Configuration; - let config_file_path = create_temp_config_file_with_default_config(); let configuration = Configuration::load_from_file(&config_file_path).expect("Could not load configuration from file"); @@ -286,8 +279,6 @@ mod tests { #[test] fn configuration_error_could_be_displayed() { - use crate::ConfigurationError; - let error = ConfigurationError::TrackerModeIncompatible; 
assert_eq!(format!("{}", error), "TrackerModeIncompatible"); diff --git a/src/databases/database.rs b/src/databases/database.rs index 795be0d45..52ca68291 100644 --- a/src/databases/database.rs +++ b/src/databases/database.rs @@ -4,8 +4,8 @@ use serde::{Deserialize, Serialize}; use crate::databases::mysql::MysqlDatabase; use crate::databases::sqlite::SqliteDatabase; +use crate::protocol::common::InfoHash; use crate::tracker::key::AuthKey; -use crate::InfoHash; #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] pub enum DatabaseDrivers { diff --git a/src/databases/mysql.rs b/src/databases/mysql.rs index fc6ff5098..5e7410ac2 100644 --- a/src/databases/mysql.rs +++ b/src/databases/mysql.rs @@ -10,8 +10,8 @@ use r2d2_mysql::MysqlConnectionManager; use crate::databases::database; use crate::databases::database::{Database, Error}; +use crate::protocol::common::{InfoHash, AUTH_KEY_LENGTH}; use crate::tracker::key::AuthKey; -use crate::{InfoHash, AUTH_KEY_LENGTH}; pub struct MysqlDatabase { pool: Pool, diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs index 7a567b07e..cf710a7e1 100644 --- a/src/databases/sqlite.rs +++ b/src/databases/sqlite.rs @@ -8,8 +8,8 @@ use r2d2_sqlite::SqliteConnectionManager; use crate::databases::database; use crate::databases::database::{Database, Error}; use crate::protocol::clock::DurationSinceUnixEpoch; +use crate::protocol::common::InfoHash; use crate::tracker::key::AuthKey; -use crate::InfoHash; pub struct SqliteDatabase { pool: Pool, diff --git a/src/http/filters.rs b/src/http/filters.rs index 42d1592ff..d8f5a81f8 100644 --- a/src/http/filters.rs +++ b/src/http/filters.rs @@ -5,10 +5,12 @@ use std::sync::Arc; use warp::{reject, Filter, Rejection}; -use crate::http::{AnnounceRequest, AnnounceRequestQuery, ScrapeRequest, ServerError, WebResult}; +use super::errors::ServerError; +use super::request::{AnnounceRequest, AnnounceRequestQuery, ScrapeRequest}; +use super::WebResult; +use crate::protocol::common::{InfoHash, PeerId, 
MAX_SCRAPE_TORRENTS}; use crate::tracker::key::AuthKey; use crate::tracker::TorrentTracker; -use crate::{InfoHash, PeerId, MAX_SCRAPE_TORRENTS}; /// Pass Arc along pub fn with_tracker(tracker: Arc) -> impl Filter,), Error = Infallible> + Clone { diff --git a/src/http/handlers.rs b/src/http/handlers.rs index 87d2d51f6..c8b33c6d0 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -7,16 +7,17 @@ use log::debug; use warp::http::Response; use warp::{reject, Rejection, Reply}; -use crate::http::{ - AnnounceRequest, AnnounceResponse, ErrorResponse, Peer, ScrapeRequest, ScrapeResponse, ScrapeResponseEntry, ServerError, - WebResult, -}; -use crate::peer::TorrentPeer; +use super::errors::ServerError; +use super::request::{AnnounceRequest, ScrapeRequest}; +use super::response::{AnnounceResponse, Peer, ScrapeResponse, ScrapeResponseEntry}; +use crate::http::response::ErrorResponse; +use crate::http::WebResult; +use crate::protocol::common::InfoHash; use crate::tracker::key::AuthKey; +use crate::tracker::peer::TorrentPeer; use crate::tracker::statistics::TrackerStatisticsEvent; use crate::tracker::torrent::{TorrentError, TorrentStats}; use crate::tracker::TorrentTracker; -use crate::InfoHash; /// Authenticate InfoHash using optional AuthKey pub async fn authenticate( diff --git a/src/http/mod.rs b/src/http/mod.rs index 4842c0a25..6e3ce7111 100644 --- a/src/http/mod.rs +++ b/src/http/mod.rs @@ -1,11 +1,3 @@ -pub use self::errors::*; -pub use self::filters::*; -pub use self::handlers::*; -pub use self::request::*; -pub use self::response::*; -pub use self::routes::*; -pub use self::server::*; - pub mod errors; pub mod filters; pub mod handlers; diff --git a/src/http/request.rs b/src/http/request.rs index 6dd025e8c..2d72a1a3c 100644 --- a/src/http/request.rs +++ b/src/http/request.rs @@ -3,7 +3,7 @@ use std::net::IpAddr; use serde::Deserialize; use crate::http::Bytes; -use crate::{InfoHash, PeerId}; +use crate::protocol::common::{InfoHash, PeerId}; 
#[derive(Deserialize)] pub struct AnnounceRequestQuery { diff --git a/src/http/response.rs b/src/http/response.rs index c87b5e0e8..44387a9f3 100644 --- a/src/http/response.rs +++ b/src/http/response.rs @@ -6,7 +6,7 @@ use std::net::IpAddr; use serde; use serde::Serialize; -use crate::InfoHash; +use crate::protocol::common::InfoHash; #[derive(Serialize)] pub struct Peer { diff --git a/src/http/routes.rs b/src/http/routes.rs index 8bfaf5ed9..f82bf45bc 100644 --- a/src/http/routes.rs +++ b/src/http/routes.rs @@ -3,9 +3,8 @@ use std::sync::Arc; use warp::{Filter, Rejection}; -use crate::http::{ - handle_announce, handle_scrape, send_error, with_announce_request, with_auth_key, with_scrape_request, with_tracker, -}; +use super::filters::{with_announce_request, with_auth_key, with_scrape_request, with_tracker}; +use super::handlers::{handle_announce, handle_scrape, send_error}; use crate::tracker::TorrentTracker; /// All routes diff --git a/src/http/server.rs b/src/http/server.rs index 4e48f97e3..d60387346 100644 --- a/src/http/server.rs +++ b/src/http/server.rs @@ -1,7 +1,7 @@ use std::net::SocketAddr; use std::sync::Arc; -use crate::http::routes; +use super::routes; use crate::tracker::TorrentTracker; /// Server that listens on HTTP, needs a TorrentTracker @@ -17,9 +17,10 @@ impl HttpServer { /// Start the HttpServer pub fn start(&self, socket_addr: SocketAddr) -> impl warp::Future { - let (_addr, server) = warp::serve(routes(self.tracker.clone())).bind_with_graceful_shutdown(socket_addr, async move { - tokio::signal::ctrl_c().await.expect("Failed to listen to shutdown signal."); - }); + let (_addr, server) = + warp::serve(routes::routes(self.tracker.clone())).bind_with_graceful_shutdown(socket_addr, async move { + tokio::signal::ctrl_c().await.expect("Failed to listen to shutdown signal."); + }); server } @@ -31,7 +32,7 @@ impl HttpServer { ssl_cert_path: String, ssl_key_path: String, ) -> impl warp::Future { - let (_addr, server) = 
warp::serve(routes(self.tracker.clone())) + let (_addr, server) = warp::serve(routes::routes(self.tracker.clone())) .tls() .cert_path(ssl_cert_path) .key_path(ssl_key_path) diff --git a/src/jobs/http_tracker.rs b/src/jobs/http_tracker.rs index 2d8f307b4..8ae9eb3f5 100644 --- a/src/jobs/http_tracker.rs +++ b/src/jobs/http_tracker.rs @@ -4,8 +4,9 @@ use std::sync::Arc; use log::{info, warn}; use tokio::task::JoinHandle; +use crate::config::HttpTrackerConfig; +use crate::http::server::HttpServer; use crate::tracker::TorrentTracker; -use crate::{HttpServer, HttpTrackerConfig}; pub fn start_job(config: &HttpTrackerConfig, tracker: Arc) -> JoinHandle<()> { let bind_addr = config.bind_address.parse::().unwrap(); diff --git a/src/jobs/torrent_cleanup.rs b/src/jobs/torrent_cleanup.rs index 04b064043..3b572d780 100644 --- a/src/jobs/torrent_cleanup.rs +++ b/src/jobs/torrent_cleanup.rs @@ -4,8 +4,8 @@ use chrono::Utc; use log::info; use tokio::task::JoinHandle; +use crate::config::Configuration; use crate::tracker::TorrentTracker; -use crate::Configuration; pub fn start_job(config: &Configuration, tracker: Arc) -> JoinHandle<()> { let weak_tracker = std::sync::Arc::downgrade(&tracker); diff --git a/src/jobs/tracker_api.rs b/src/jobs/tracker_api.rs index ba5b8a1fb..b0b315f44 100644 --- a/src/jobs/tracker_api.rs +++ b/src/jobs/tracker_api.rs @@ -5,8 +5,8 @@ use tokio::sync::oneshot; use tokio::task::JoinHandle; use crate::api::server; +use crate::config::Configuration; use crate::tracker::TorrentTracker; -use crate::Configuration; #[derive(Debug)] pub struct ApiServerJobStarted(); diff --git a/src/jobs/udp_tracker.rs b/src/jobs/udp_tracker.rs index 00fdaddbe..90986455c 100644 --- a/src/jobs/udp_tracker.rs +++ b/src/jobs/udp_tracker.rs @@ -3,8 +3,9 @@ use std::sync::Arc; use log::{error, info, warn}; use tokio::task::JoinHandle; +use crate::config::UdpTrackerConfig; use crate::tracker::TorrentTracker; -use crate::{UdpServer, UdpTrackerConfig}; +use crate::udp::server::UdpServer; 
pub fn start_job(config: &UdpTrackerConfig, tracker: Arc) -> JoinHandle<()> { let bind_addr = config.bind_address.clone(); diff --git a/src/lib.rs b/src/lib.rs index cf830f108..7e4fe13a7 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,11 +1,3 @@ -pub use api::server::*; -pub use http::server::*; -pub use protocol::common::*; -pub use udp::server::*; - -pub use self::config::*; -pub use self::tracker::*; - pub mod api; pub mod config; pub mod databases; diff --git a/src/logging.rs b/src/logging.rs index 5d0efa8a4..7682bace1 100644 --- a/src/logging.rs +++ b/src/logging.rs @@ -3,7 +3,7 @@ use std::sync::Once; use log::{info, LevelFilter}; -use crate::Configuration; +use crate::config::Configuration; static INIT: Once = Once::new(); diff --git a/src/main.rs b/src/main.rs index bf832dbf4..f64354fcf 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,9 +1,10 @@ use std::sync::Arc; use log::info; +use torrust_tracker::config::Configuration; use torrust_tracker::stats::setup_statistics; use torrust_tracker::tracker::TorrentTracker; -use torrust_tracker::{ephemeral_instance_keys, logging, setup, static_time, Configuration}; +use torrust_tracker::{ephemeral_instance_keys, logging, setup, static_time}; #[tokio::main] async fn main() { diff --git a/src/protocol/common.rs b/src/protocol/common.rs index ce1cbf253..efeb328c9 100644 --- a/src/protocol/common.rs +++ b/src/protocol/common.rs @@ -94,12 +94,13 @@ impl<'de> serde::de::Deserialize<'de> for InfoHash { #[cfg(test)] mod tests { + use std::str::FromStr; use serde::{Deserialize, Serialize}; use serde_json::json; - use crate::InfoHash; + use super::InfoHash; #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] struct ContainingInfoHash { diff --git a/src/setup.rs b/src/setup.rs index 9906a2d03..736f448b6 100644 --- a/src/setup.rs +++ b/src/setup.rs @@ -3,9 +3,9 @@ use std::sync::Arc; use log::warn; use tokio::task::JoinHandle; +use crate::config::Configuration; use crate::jobs::{http_tracker, torrent_cleanup, 
tracker_api, udp_tracker}; use crate::tracker::TorrentTracker; -use crate::Configuration; pub async fn setup(config: &Configuration, tracker: Arc) -> Vec> { let mut jobs: Vec> = Vec::new(); diff --git a/src/stats.rs b/src/stats.rs index 1f387a084..22b74c8d3 100644 --- a/src/stats.rs +++ b/src/stats.rs @@ -1,4 +1,4 @@ -use crate::statistics::{StatsRepository, StatsTracker, TrackerStatisticsEventSender}; +use crate::tracker::statistics::{StatsRepository, StatsTracker, TrackerStatisticsEventSender}; pub fn setup_statistics(tracker_usage_statistics: bool) -> (Option>, StatsRepository) { let mut stats_event_sender = None; diff --git a/src/tracker/key.rs b/src/tracker/key.rs index 1bf0557a1..6d3f3c320 100644 --- a/src/tracker/key.rs +++ b/src/tracker/key.rs @@ -7,7 +7,7 @@ use rand::{thread_rng, Rng}; use serde::Serialize; use crate::protocol::clock::{DefaultClock, DurationSinceUnixEpoch, Time, TimeNow}; -use crate::AUTH_KEY_LENGTH; +use crate::protocol::common::AUTH_KEY_LENGTH; pub fn generate_auth_key(lifetime: Duration) -> AuthKey { let key: String = thread_rng() diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index a3eecd427..f31347e3e 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -13,15 +13,15 @@ use std::time::Duration; use tokio::sync::mpsc::error::SendError; use tokio::sync::{RwLock, RwLockReadGuard}; +use self::mode::TrackerMode; +use self::peer::TorrentPeer; +use self::statistics::{StatsRepository, TrackerStatistics, TrackerStatisticsEvent, TrackerStatisticsEventSender}; +use crate::config::Configuration; use crate::databases::database; use crate::databases::database::Database; -use crate::mode::TrackerMode; -use crate::peer::TorrentPeer; use crate::protocol::common::InfoHash; -use crate::statistics::{StatsRepository, TrackerStatistics, TrackerStatisticsEvent, TrackerStatisticsEventSender}; use crate::tracker::key::AuthKey; use crate::tracker::torrent::{TorrentEntry, TorrentError, TorrentStats}; -use crate::Configuration; pub struct 
TorrentTracker { pub config: Arc, diff --git a/src/tracker/peer.rs b/src/tracker/peer.rs index 42ef6a60b..77613e080 100644 --- a/src/tracker/peer.rs +++ b/src/tracker/peer.rs @@ -4,7 +4,7 @@ use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; use serde; use serde::Serialize; -use crate::http::AnnounceRequest; +use crate::http::request::AnnounceRequest; use crate::protocol::clock::{DefaultClock, DurationSinceUnixEpoch, Time}; use crate::protocol::common::{AnnounceEventDef, NumberOfBytesDef, PeerId}; use crate::protocol::utils::ser_unix_time_value; @@ -95,9 +95,9 @@ mod test { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; - use crate::peer::TorrentPeer; use crate::protocol::clock::{DefaultClock, Time}; - use crate::PeerId; + use crate::protocol::common::PeerId; + use crate::tracker::peer::TorrentPeer; #[test] fn it_should_be_serializable() { @@ -129,7 +129,7 @@ mod test { AnnounceEvent, AnnounceRequest, NumberOfBytes, NumberOfPeers, PeerId as AquaticPeerId, PeerKey, Port, TransactionId, }; - use crate::peer::TorrentPeer; + use crate::tracker::peer::TorrentPeer; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; // todo: duplicate functions is PR 82. Remove duplication once both PR are merged. 
@@ -200,8 +200,8 @@ mod test { use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr}; use std::str::FromStr; - use crate::peer::test::torrent_peer_constructor_from_udp_requests::AnnounceRequestBuilder; - use crate::peer::TorrentPeer; + use crate::tracker::peer::test::torrent_peer_constructor_from_udp_requests::AnnounceRequestBuilder; + use crate::tracker::peer::TorrentPeer; #[test] fn it_should_use_the_loopback_ip_if_the_server_does_not_have_the_external_ip_configuration() { @@ -241,8 +241,8 @@ mod test { use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr}; use std::str::FromStr; - use crate::peer::test::torrent_peer_constructor_from_udp_requests::AnnounceRequestBuilder; - use crate::peer::TorrentPeer; + use crate::tracker::peer::test::torrent_peer_constructor_from_udp_requests::AnnounceRequestBuilder; + use crate::tracker::peer::TorrentPeer; #[test] fn it_should_use_the_loopback_ip_if_the_server_does_not_have_the_external_ip_configuration() { @@ -281,9 +281,9 @@ mod test { mod torrent_peer_constructor_from_for_http_requests { use std::net::{IpAddr, Ipv4Addr}; - use crate::http::AnnounceRequest; - use crate::peer::TorrentPeer; - use crate::{InfoHash, PeerId}; + use crate::http::request::AnnounceRequest; + use crate::protocol::common::{InfoHash, PeerId}; + use crate::tracker::peer::TorrentPeer; fn sample_http_announce_request(peer_addr: IpAddr, port: u16) -> AnnounceRequest { AnnounceRequest { diff --git a/src/tracker/statistics.rs b/src/tracker/statistics.rs index ac3889270..50804a5f4 100644 --- a/src/tracker/statistics.rs +++ b/src/tracker/statistics.rs @@ -271,7 +271,7 @@ impl StatsRepository { mod tests { mod stats_tracker { - use crate::statistics::{StatsTracker, TrackerStatistics, TrackerStatisticsEvent}; + use crate::tracker::statistics::{StatsTracker, TrackerStatistics, TrackerStatisticsEvent}; #[tokio::test] async fn should_contain_the_tracker_statistics() { @@ -295,7 +295,7 @@ mod tests { } mod event_handler { - use crate::statistics::{event_handler, 
StatsRepository, TrackerStatisticsEvent}; + use crate::tracker::statistics::{event_handler, StatsRepository, TrackerStatisticsEvent}; #[tokio::test] async fn should_increase_the_tcp4_announces_counter_when_it_receives_a_tcp4_announce_event() { diff --git a/src/tracker/torrent.rs b/src/tracker/torrent.rs index 335554006..f23858949 100644 --- a/src/tracker/torrent.rs +++ b/src/tracker/torrent.rs @@ -4,9 +4,9 @@ use std::time::Duration; use aquatic_udp_protocol::AnnounceEvent; use serde::{Deserialize, Serialize}; -use crate::peer::TorrentPeer; +use super::peer::TorrentPeer; use crate::protocol::clock::{DefaultClock, TimeNow}; -use crate::{PeerId, MAX_SCRAPE_TORRENTS}; +use crate::protocol::common::{PeerId, MAX_SCRAPE_TORRENTS}; #[derive(Serialize, Deserialize, Clone, Debug)] pub struct TorrentEntry { @@ -113,10 +113,10 @@ mod tests { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; - use crate::peer::TorrentPeer; use crate::protocol::clock::{DefaultClock, DurationSinceUnixEpoch, StoppedClock, StoppedTime, Time, WorkingClock}; - use crate::torrent::TorrentEntry; - use crate::PeerId; + use crate::protocol::common::PeerId; + use crate::tracker::peer::TorrentPeer; + use crate::tracker::torrent::TorrentEntry; struct TorrentPeerBuilder { peer: TorrentPeer, diff --git a/src/udp/connection_cookie.rs b/src/udp/connection_cookie.rs index c40a56959..ef241245a 100644 --- a/src/udp/connection_cookie.rs +++ b/src/udp/connection_cookie.rs @@ -2,8 +2,8 @@ use std::net::SocketAddr; use aquatic_udp_protocol::ConnectionId; +use super::errors::ServerError; use crate::protocol::clock::time_extent::{Extent, TimeExtent}; -use crate::udp::ServerError; pub type Cookie = [u8; 8]; diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index 5514bc1eb..30b33225c 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -7,13 +7,13 @@ use aquatic_udp_protocol::{ }; use super::connection_cookie::{check_connection_cookie, from_connection_id, into_connection_id, make_connection_cookie}; 
-use crate::peer::TorrentPeer; +use crate::protocol::common::{InfoHash, MAX_SCRAPE_TORRENTS}; +use crate::tracker::peer::TorrentPeer; use crate::tracker::statistics::TrackerStatisticsEvent; use crate::tracker::torrent::TorrentError; use crate::tracker::TorrentTracker; use crate::udp::errors::ServerError; use crate::udp::request::AnnounceRequestWrapper; -use crate::{InfoHash, MAX_SCRAPE_TORRENTS}; pub async fn authenticate(info_hash: &InfoHash, tracker: Arc) -> Result<(), ServerError> { match tracker.authenticate_request(info_hash, &None).await { @@ -252,12 +252,13 @@ mod tests { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; - use crate::mode::TrackerMode; - use crate::peer::TorrentPeer; + use crate::config::Configuration; use crate::protocol::clock::{DefaultClock, Time}; - use crate::statistics::StatsTracker; + use crate::protocol::common::PeerId; + use crate::tracker::mode::TrackerMode; + use crate::tracker::peer::TorrentPeer; + use crate::tracker::statistics::StatsTracker; use crate::tracker::TorrentTracker; - use crate::{Configuration, PeerId}; fn default_tracker_config() -> Arc { Arc::new(Configuration::default()) @@ -373,10 +374,10 @@ mod tests { use mockall::predicate::eq; use super::{default_tracker_config, sample_ipv4_socket_address, sample_ipv6_remote_addr}; - use crate::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; + use crate::tracker::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; use crate::tracker::TorrentTracker; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; - use crate::udp::handle_connect; + use crate::udp::handlers::handle_connect; use crate::udp::handlers::tests::{initialized_public_tracker, sample_ipv4_remote_addr}; fn sample_connect_request() -> ConnectRequest { @@ -545,15 +546,15 @@ mod tests { }; use mockall::predicate::eq; - use crate::statistics::{MockTrackerStatisticsEventSender, StatsRepository, 
TrackerStatisticsEvent}; + use crate::protocol::common::PeerId; + use crate::tracker::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; use crate::tracker::TorrentTracker; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; - use crate::udp::handle_announce; + use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; use crate::udp::handlers::tests::{ default_tracker_config, initialized_public_tracker, sample_ipv4_socket_address, TorrentPeerBuilder, }; - use crate::PeerId; #[tokio::test] async fn an_announced_peer_should_be_added_to_the_tracker() { @@ -716,11 +717,11 @@ mod tests { use aquatic_udp_protocol::{InfoHash as AquaticInfoHash, PeerId as AquaticPeerId}; + use crate::protocol::common::PeerId; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; - use crate::udp::handle_announce; + use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; use crate::udp::handlers::tests::{initialized_public_tracker, TorrentPeerBuilder}; - use crate::PeerId; #[tokio::test] async fn the_peer_ip_should_be_changed_to_the_external_ip_in_the_tracker_configuration_if_defined() { @@ -770,15 +771,15 @@ mod tests { }; use mockall::predicate::eq; - use crate::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; + use crate::protocol::common::PeerId; + use crate::tracker::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; use crate::tracker::TorrentTracker; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; - use crate::udp::handle_announce; + use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; use crate::udp::handlers::tests::{ default_tracker_config, initialized_public_tracker, sample_ipv6_remote_addr, 
TorrentPeerBuilder, }; - use crate::PeerId; #[tokio::test] async fn an_announced_peer_should_be_added_to_the_tracker() { @@ -951,10 +952,10 @@ mod tests { use aquatic_udp_protocol::{InfoHash as AquaticInfoHash, PeerId as AquaticPeerId}; - use crate::statistics::StatsTracker; + use crate::tracker::statistics::StatsTracker; use crate::tracker::TorrentTracker; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; - use crate::udp::handle_announce; + use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; use crate::udp::handlers::tests::TrackerConfigurationBuilder; @@ -1013,11 +1014,11 @@ mod tests { }; use super::TorrentPeerBuilder; + use crate::protocol::common::PeerId; use crate::tracker::TorrentTracker; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; - use crate::udp::handle_scrape; + use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::{initialized_public_tracker, sample_ipv4_remote_addr}; - use crate::PeerId; fn zeroed_torrent_statistics() -> TorrentScrapeStatistics { TorrentScrapeStatistics { @@ -1123,7 +1124,7 @@ mod tests { use aquatic_udp_protocol::InfoHash; - use crate::udp::handle_scrape; + use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::scrape_request::{ add_a_sample_seeder_and_scrape, build_scrape_request, match_scrape_response, zeroed_torrent_statistics, }; @@ -1162,7 +1163,7 @@ mod tests { mod with_a_whitelisted_tracker { use aquatic_udp_protocol::{InfoHash, NumberOfDownloads, NumberOfPeers, TorrentScrapeStatistics}; - use crate::udp::handle_scrape; + use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::scrape_request::{ add_a_seeder, build_scrape_request, match_scrape_response, zeroed_torrent_statistics, }; @@ -1231,7 +1232,7 @@ mod tests { use mockall::predicate::eq; use super::sample_scrape_request; - use crate::statistics::{MockTrackerStatisticsEventSender, 
StatsRepository, TrackerStatisticsEvent}; + use crate::tracker::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; use crate::tracker::TorrentTracker; use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::{default_tracker_config, sample_ipv4_remote_addr}; @@ -1264,7 +1265,7 @@ mod tests { use mockall::predicate::eq; use super::sample_scrape_request; - use crate::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; + use crate::tracker::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; use crate::tracker::TorrentTracker; use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::{default_tracker_config, sample_ipv6_remote_addr}; diff --git a/src/udp/mod.rs b/src/udp/mod.rs index 4c98875c5..327f03eed 100644 --- a/src/udp/mod.rs +++ b/src/udp/mod.rs @@ -1,8 +1,3 @@ -pub use self::errors::*; -pub use self::handlers::*; -pub use self::request::*; -pub use self::server::*; - pub mod connection_cookie; pub mod errors; pub mod handlers; diff --git a/src/udp/request.rs b/src/udp/request.rs index 6531f54b9..67aaeb57f 100644 --- a/src/udp/request.rs +++ b/src/udp/request.rs @@ -1,6 +1,6 @@ use aquatic_udp_protocol::AnnounceRequest; -use crate::InfoHash; +use crate::protocol::common::InfoHash; // struct AnnounceRequest { // pub connection_id: i64, diff --git a/src/udp/server.rs b/src/udp/server.rs index 2f41c3c4d..5c215f9ec 100644 --- a/src/udp/server.rs +++ b/src/udp/server.rs @@ -7,7 +7,8 @@ use log::{debug, info}; use tokio::net::UdpSocket; use crate::tracker::TorrentTracker; -use crate::udp::{handle_packet, MAX_PACKET_SIZE}; +use crate::udp::handlers::handle_packet; +use crate::udp::MAX_PACKET_SIZE; pub struct UdpServer { socket: Arc, diff --git a/tests/api.rs b/tests/api.rs index 475da9a24..a5ae79621 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -19,13 +19,15 @@ mod tracker_api { use 
torrust_tracker::api::resources::auth_key_resource::AuthKeyResource; use torrust_tracker::api::resources::stats_resource::StatsResource; use torrust_tracker::api::resources::torrent_resource::{TorrentListItemResource, TorrentPeerResource, TorrentResource}; + use torrust_tracker::config::Configuration; use torrust_tracker::jobs::tracker_api; - use torrust_tracker::peer::TorrentPeer; use torrust_tracker::protocol::clock::DurationSinceUnixEpoch; + use torrust_tracker::protocol::common::{InfoHash, PeerId}; use torrust_tracker::tracker::key::AuthKey; + use torrust_tracker::tracker::peer::TorrentPeer; use torrust_tracker::tracker::statistics::StatsTracker; use torrust_tracker::tracker::TorrentTracker; - use torrust_tracker::{ephemeral_instance_keys, logging, static_time, Configuration, InfoHash, PeerId}; + use torrust_tracker::{ephemeral_instance_keys, logging, static_time}; use crate::common::ephemeral_random_port; diff --git a/tests/udp.rs index ab96259c5..7a0d883a5 100644 --- a/tests/udp.rs +++ b/tests/udp.rs @@ -18,11 +18,12 @@ mod udp_tracker_server { }; use tokio::net::UdpSocket; use tokio::task::JoinHandle; + use torrust_tracker::config::Configuration; use torrust_tracker::jobs::udp_tracker; use torrust_tracker::tracker::statistics::StatsTracker; use torrust_tracker::tracker::TorrentTracker; use torrust_tracker::udp::MAX_PACKET_SIZE; - use torrust_tracker::{ephemeral_instance_keys, logging, static_time, Configuration}; + use torrust_tracker::{ephemeral_instance_keys, logging, static_time}; use crate::common::ephemeral_random_port; From 81c41293cb6c029085efcced0ff6bcb40c586d90 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 17:19:34 +0100 Subject: [PATCH 02/45] vscode: clippy pedantic warnings --- .vscode/settings.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.vscode/settings.json index f1027e9bd..94f199bd6 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -3,4 +3,6 @@
"editor.formatOnSave": true }, "rust-analyzer.checkOnSave.command": "clippy", + "rust-analyzer.checkOnSave.allTargets": true, + "rust-analyzer.checkOnSave.extraArgs": ["--","-W","clippy::pedantic"], } \ No newline at end of file From 2b88ce50070ae8593e9d06b1788cbffce016139e Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 18:11:12 +0100 Subject: [PATCH 03/45] clippy: auto fix --- src/api/resources/mod.rs | 6 ++--- src/api/resources/torrent_resource.rs | 2 +- src/api/server.rs | 2 +- src/config.rs | 4 +++- src/databases/database.rs | 2 +- src/databases/sqlite.rs | 6 ++--- src/http/filters.rs | 20 +++++++++-------- src/http/handlers.rs | 4 ++-- src/http/response.rs | 4 +++- src/http/server.rs | 7 +++--- src/jobs/http_tracker.rs | 1 + src/jobs/torrent_cleanup.rs | 1 + src/jobs/tracker_api.rs | 4 +--- src/jobs/udp_tracker.rs | 1 + src/protocol/clock/mod.rs | 4 +++- src/protocol/clock/time_extent.rs | 32 +++++++++++++++------------ src/protocol/common.rs | 4 +++- src/protocol/crypto.rs | 4 ++-- src/stats.rs | 1 + src/tracker/key.rs | 3 +++ src/tracker/mod.rs | 6 ++--- src/tracker/peer.rs | 6 ++++- src/tracker/statistics.rs | 6 ++++- src/tracker/torrent.rs | 7 ++++-- src/udp/connection_cookie.rs | 3 +++ src/udp/handlers.rs | 2 +- src/udp/mod.rs | 2 +- src/udp/request.rs | 1 + tests/api.rs | 4 ++-- tests/udp.rs | 6 ++--- 30 files changed, 95 insertions(+), 60 deletions(-) diff --git a/src/api/resources/mod.rs b/src/api/resources/mod.rs index d214d8a59..2b3e4b886 100644 --- a/src/api/resources/mod.rs +++ b/src/api/resources/mod.rs @@ -2,9 +2,9 @@ //! //! WIP. Not all endpoints have their resource structs. //! -//! - [x] AuthKeys -//! - [ ] TorrentResource, TorrentListItemResource, TorrentPeerResource, PeerIdResource -//! - [ ] StatsResource +//! - [x] `AuthKeys` +//! - [ ] `TorrentResource`, `TorrentListItemResource`, `TorrentPeerResource`, `PeerIdResource` +//! - [ ] `StatsResource` //! - [ ] ... 
pub mod auth_key_resource; pub mod stats_resource; diff --git a/src/api/resources/torrent_resource.rs b/src/api/resources/torrent_resource.rs index 784ffcb05..eb9620d23 100644 --- a/src/api/resources/torrent_resource.rs +++ b/src/api/resources/torrent_resource.rs @@ -46,7 +46,7 @@ impl From for PeerIdResource { fn from(peer_id: PeerId) -> Self { PeerIdResource { id: peer_id.get_id(), - client: peer_id.get_client_name().map(|client_name| client_name.to_string()), + client: peer_id.get_client_name().map(std::string::ToString::to_string), } } } diff --git a/src/api/server.rs b/src/api/server.rs index 41e6f7074..ce272b3ac 100644 --- a/src/api/server.rs +++ b/src/api/server.rs @@ -10,7 +10,7 @@ use warp::{filters, reply, serve, Filter}; use super::resources::auth_key_resource::AuthKeyResource; use super::resources::stats_resource::StatsResource; use super::resources::torrent_resource::{TorrentListItemResource, TorrentPeerResource, TorrentResource}; -use crate::protocol::common::*; +use crate::protocol::common::InfoHash; use crate::tracker::TorrentTracker; #[derive(Deserialize, Debug)] diff --git a/src/config.rs b/src/config.rs index 1afc55e54..1199c7fe7 100644 --- a/src/config.rs +++ b/src/config.rs @@ -77,6 +77,7 @@ impl std::fmt::Display for ConfigurationError { impl std::error::Error for ConfigurationError {} impl Configuration { + #[must_use] pub fn get_ext_ip(&self) -> Option { match &self.external_ip { None => None, @@ -87,6 +88,7 @@ impl Configuration { } } + #[must_use] pub fn default() -> Configuration { let mut configuration = Configuration { log_level: Option::from(String::from("info")), @@ -198,7 +200,7 @@ mod tests { admin = "MyAccessToken" "# .lines() - .map(|line| line.trim_start()) + .map(str::trim_start) .collect::>() .join("\n"); config diff --git a/src/databases/database.rs b/src/databases/database.rs index 52ca68291..87a91ddeb 100644 --- a/src/databases/database.rs +++ b/src/databases/database.rs @@ -55,7 +55,7 @@ pub trait Database: Sync + Send { 
async fn remove_key_from_keys(&self, key: &str) -> Result; async fn is_info_hash_whitelisted(&self, info_hash: &InfoHash) -> Result { - if let Err(e) = self.get_info_hash_from_whitelist(&info_hash.to_owned().to_string()).await { + if let Err(e) = self.get_info_hash_from_whitelist(&info_hash.clone().to_string()).await { if let Error::QueryReturnedNoRows = e { return Ok(false); } else { diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs index cf710a7e1..19849f297 100644 --- a/src/databases/sqlite.rs +++ b/src/databases/sqlite.rs @@ -70,7 +70,7 @@ impl Database for SqliteDatabase { Ok((info_hash, completed)) })?; - let torrents: Vec<(InfoHash, u32)> = torrent_iter.filter_map(|x| x.ok()).collect(); + let torrents: Vec<(InfoHash, u32)> = torrent_iter.filter_map(std::result::Result::ok).collect(); Ok(torrents) } @@ -90,7 +90,7 @@ impl Database for SqliteDatabase { }) })?; - let keys: Vec = keys_iter.filter_map(|x| x.ok()).collect(); + let keys: Vec = keys_iter.filter_map(std::result::Result::ok).collect(); Ok(keys) } @@ -106,7 +106,7 @@ impl Database for SqliteDatabase { Ok(InfoHash::from_str(&info_hash).unwrap()) })?; - let info_hashes: Vec = info_hash_iter.filter_map(|x| x.ok()).collect(); + let info_hashes: Vec = info_hash_iter.filter_map(std::result::Result::ok).collect(); Ok(info_hashes) } diff --git a/src/http/filters.rs b/src/http/filters.rs index d8f5a81f8..d33acbcfa 100644 --- a/src/http/filters.rs +++ b/src/http/filters.rs @@ -13,6 +13,7 @@ use crate::tracker::key::AuthKey; use crate::tracker::TorrentTracker; /// Pass Arc along +#[must_use] pub fn with_tracker(tracker: Arc) -> impl Filter,), Error = Infallible> + Clone { warp::any().map(move || tracker.clone()) } @@ -22,19 +23,20 @@ pub fn with_info_hash() -> impl Filter,), Error = Rejec warp::filters::query::raw().and_then(info_hashes) } -/// Check for PeerId +/// Check for `PeerId` pub fn with_peer_id() -> impl Filter + Clone { warp::filters::query::raw().and_then(peer_id) } /// Pass Arc along 
+#[must_use] pub fn with_auth_key() -> impl Filter,), Error = Infallible> + Clone { warp::path::param::() .map(|key: String| AuthKey::from_string(&key)) .or_else(|_| async { Ok::<(Option,), Infallible>((None,)) }) } -/// Check for PeerAddress +/// Check for `PeerAddress` pub fn with_peer_addr(on_reverse_proxy: bool) -> impl Filter + Clone { warp::addr::remote() .and(warp::header::optional::("X-Forwarded-For")) @@ -44,7 +46,7 @@ pub fn with_peer_addr(on_reverse_proxy: bool) -> impl Filter impl Filter + Clone { warp::filters::query::query::() .and(with_info_hash()) @@ -53,7 +55,7 @@ pub fn with_announce_request(on_reverse_proxy: bool) -> impl Filter impl Filter + Clone { warp::any() .and(with_info_hash()) @@ -61,7 +63,7 @@ pub fn with_scrape_request(on_reverse_proxy: bool) -> impl Filter WebResult> { let split_raw_query: Vec<&str> = raw_query.split('&').collect(); let mut info_hashes: Vec = Vec::new(); @@ -86,7 +88,7 @@ async fn info_hashes(raw_query: String) -> WebResult> { } } -/// Parse PeerId from raw query string +/// Parse `PeerId` from raw query string async fn peer_id(raw_query: String) -> WebResult { // put all query params in a vec let split_raw_query: Vec<&str> = raw_query.split('&').collect(); @@ -123,7 +125,7 @@ async fn peer_id(raw_query: String) -> WebResult { } } -/// Get PeerAddress from RemoteAddress or Forwarded +/// Get `PeerAddress` from `RemoteAddress` or Forwarded async fn peer_addr( (on_reverse_proxy, remote_addr, x_forwarded_for): (bool, Option, Option), ) -> WebResult { @@ -151,7 +153,7 @@ async fn peer_addr( } } -/// Parse AnnounceRequest from raw AnnounceRequestQuery, InfoHash and Option +/// Parse `AnnounceRequest` from raw `AnnounceRequestQuery`, `InfoHash` and Option async fn announce_request( announce_request_query: AnnounceRequestQuery, info_hashes: Vec, @@ -171,7 +173,7 @@ async fn announce_request( }) } -/// Parse ScrapeRequest from InfoHash +/// Parse `ScrapeRequest` from `InfoHash` async fn scrape_request(info_hashes: Vec, 
peer_addr: IpAddr) -> WebResult { Ok(ScrapeRequest { info_hashes, peer_addr }) } diff --git a/src/http/handlers.rs b/src/http/handlers.rs index c8b33c6d0..cd521b43b 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -19,7 +19,7 @@ use crate::tracker::statistics::TrackerStatisticsEvent; use crate::tracker::torrent::{TorrentError, TorrentStats}; use crate::tracker::TorrentTracker; -/// Authenticate InfoHash using optional AuthKey +/// Authenticate `InfoHash` using optional `AuthKey` pub async fn authenticate( info_hash: &InfoHash, auth_key: &Option, @@ -93,7 +93,7 @@ pub async fn handle_scrape( let mut files: HashMap = HashMap::new(); let db = tracker.get_torrents().await; - for info_hash in scrape_request.info_hashes.iter() { + for info_hash in &scrape_request.info_hashes { let scrape_entry = match db.get(info_hash) { Some(torrent_info) => { if authenticate(info_hash, &auth_key, tracker.clone()).await.is_ok() { diff --git a/src/http/response.rs b/src/http/response.rs index 44387a9f3..cb01068fa 100644 --- a/src/http/response.rs +++ b/src/http/response.rs @@ -27,6 +27,7 @@ pub struct AnnounceResponse { } impl AnnounceResponse { + #[must_use] pub fn write(&self) -> String { serde_bencode::to_string(&self).unwrap() } @@ -89,7 +90,7 @@ impl ScrapeResponse { bytes.write_all(b"d5:filesd")?; - for (info_hash, scrape_response_entry) in self.files.iter() { + for (info_hash, scrape_response_entry) in &self.files { bytes.write_all(b"20:")?; bytes.write_all(&info_hash.0)?; bytes.write_all(b"d8:completei")?; @@ -114,6 +115,7 @@ pub struct ErrorResponse { } impl ErrorResponse { + #[must_use] pub fn write(&self) -> String { serde_bencode::to_string(&self).unwrap() } diff --git a/src/http/server.rs b/src/http/server.rs index d60387346..97ec30aa0 100644 --- a/src/http/server.rs +++ b/src/http/server.rs @@ -4,18 +4,19 @@ use std::sync::Arc; use super::routes; use crate::tracker::TorrentTracker; -/// Server that listens on HTTP, needs a TorrentTracker +/// Server that 
listens on HTTP, needs a `TorrentTracker` #[derive(Clone)] pub struct HttpServer { tracker: Arc, } impl HttpServer { + #[must_use] pub fn new(tracker: Arc) -> HttpServer { HttpServer { tracker } } - /// Start the HttpServer + /// Start the `HttpServer` pub fn start(&self, socket_addr: SocketAddr) -> impl warp::Future { let (_addr, server) = warp::serve(routes::routes(self.tracker.clone())).bind_with_graceful_shutdown(socket_addr, async move { @@ -25,7 +26,7 @@ impl HttpServer { server } - /// Start the HttpServer in TLS mode + /// Start the `HttpServer` in TLS mode pub fn start_tls( &self, socket_addr: SocketAddr, diff --git a/src/jobs/http_tracker.rs b/src/jobs/http_tracker.rs index 8ae9eb3f5..6070e0d27 100644 --- a/src/jobs/http_tracker.rs +++ b/src/jobs/http_tracker.rs @@ -8,6 +8,7 @@ use crate::config::HttpTrackerConfig; use crate::http::server::HttpServer; use crate::tracker::TorrentTracker; +#[must_use] pub fn start_job(config: &HttpTrackerConfig, tracker: Arc) -> JoinHandle<()> { let bind_addr = config.bind_address.parse::().unwrap(); let ssl_enabled = config.ssl_enabled; diff --git a/src/jobs/torrent_cleanup.rs b/src/jobs/torrent_cleanup.rs index 3b572d780..3d7b49d6b 100644 --- a/src/jobs/torrent_cleanup.rs +++ b/src/jobs/torrent_cleanup.rs @@ -7,6 +7,7 @@ use tokio::task::JoinHandle; use crate::config::Configuration; use crate::tracker::TorrentTracker; +#[must_use] pub fn start_job(config: &Configuration, tracker: Arc) -> JoinHandle<()> { let weak_tracker = std::sync::Arc::downgrade(&tracker); let interval = config.inactive_peer_cleanup_interval; diff --git a/src/jobs/tracker_api.rs b/src/jobs/tracker_api.rs index b0b315f44..ac7657858 100644 --- a/src/jobs/tracker_api.rs +++ b/src/jobs/tracker_api.rs @@ -26,9 +26,7 @@ pub async fn start_job(config: &Configuration, tracker: Arc) -> let join_handle = tokio::spawn(async move { let handel = server::start(bind_addr, tracker); - if tx.send(ApiServerJobStarted()).is_err() { - panic!("the start job dropped"); - } 
+ assert!(tx.send(ApiServerJobStarted()).is_ok(), "the start job dropped"); handel.await; }); diff --git a/src/jobs/udp_tracker.rs b/src/jobs/udp_tracker.rs index 90986455c..8bf839380 100644 --- a/src/jobs/udp_tracker.rs +++ b/src/jobs/udp_tracker.rs @@ -7,6 +7,7 @@ use crate::config::UdpTrackerConfig; use crate::tracker::TorrentTracker; use crate::udp::server::UdpServer; +#[must_use] pub fn start_job(config: &UdpTrackerConfig, tracker: Arc) -> JoinHandle<()> { let bind_addr = config.bind_address.clone(); diff --git a/src/protocol/clock/mod.rs b/src/protocol/clock/mod.rs index 4e15950e6..51197dba6 100644 --- a/src/protocol/clock/mod.rs +++ b/src/protocol/clock/mod.rs @@ -26,9 +26,11 @@ pub trait Time: Sized { } pub trait TimeNow: Time { + #[must_use] fn add(add_time: &Duration) -> Option { Self::now().checked_add(*add_time) } + #[must_use] fn sub(sub_time: &Duration) -> Option { Self::now().checked_sub(*sub_time) } @@ -240,7 +242,7 @@ mod stopped_clock { #[test] fn it_should_get_app_start_time() { - const TIME_AT_WRITING_THIS_TEST: Duration = Duration::new(1662983731, 22312); + const TIME_AT_WRITING_THIS_TEST: Duration = Duration::new(1_662_983_731, 22312); assert!(get_app_start_time() > TIME_AT_WRITING_THIS_TEST); } } diff --git a/src/protocol/clock/time_extent.rs b/src/protocol/clock/time_extent.rs index 3fa60de82..f975e9a04 100644 --- a/src/protocol/clock/time_extent.rs +++ b/src/protocol/clock/time_extent.rs @@ -37,6 +37,7 @@ pub const MAX: TimeExtent = TimeExtent { }; impl TimeExtent { + #[must_use] pub const fn from_sec(seconds: u64, amount: &TimeExtentMultiplier) -> Self { Self { increment: TimeExtentBase::from_secs(seconds), @@ -48,10 +49,10 @@ impl TimeExtent { fn checked_duration_from_nanos(time: u128) -> Result { const NANOS_PER_SEC: u32 = 1_000_000_000; - let secs = time.div_euclid(NANOS_PER_SEC as u128); - let nanos = time.rem_euclid(NANOS_PER_SEC as u128); + let secs = time.div_euclid(u128::from(NANOS_PER_SEC)); + let nanos = 
time.rem_euclid(u128::from(NANOS_PER_SEC)); - assert!(nanos < NANOS_PER_SEC as u128); + assert!(nanos < u128::from(NANOS_PER_SEC)); match u64::try_from(secs) { Err(error) => Err(error), @@ -94,14 +95,14 @@ impl Extent for TimeExtent { fn total(&self) -> Option> { self.increment .as_nanos() - .checked_mul(self.amount as u128) + .checked_mul(u128::from(self.amount)) .map(checked_duration_from_nanos) } fn total_next(&self) -> Option> { self.increment .as_nanos() - .checked_mul((self.amount as u128) + 1) + .checked_mul(u128::from(self.amount) + 1) .map(checked_duration_from_nanos) } } @@ -110,6 +111,7 @@ pub trait MakeTimeExtent: Sized where Clock: TimeNow, { + #[must_use] fn now(increment: &TimeExtentBase) -> Option> { Clock::now() .as_nanos() @@ -120,6 +122,7 @@ where }) } + #[must_use] fn now_after(increment: &TimeExtentBase, add_time: &Duration) -> Option> { match Clock::add(add_time) { None => None, @@ -134,6 +137,7 @@ where } } + #[must_use] fn now_before(increment: &TimeExtentBase, sub_time: &Duration) -> Option> { match Clock::sub(sub_time) { None => None, @@ -173,7 +177,7 @@ mod test { }; use crate::protocol::clock::{DefaultClock, DurationSinceUnixEpoch, StoppedTime}; - const TIME_EXTENT_VAL: TimeExtent = TimeExtent::from_sec(2, &239812388723); + const TIME_EXTENT_VAL: TimeExtent = TimeExtent::from_sec(2, &239_812_388_723); mod fn_checked_duration_from_nanos { use std::time::Duration; @@ -190,11 +194,11 @@ mod test { #[test] fn it_should_be_the_same_as_duration_implementation_for_u64_numbers() { assert_eq!( - checked_duration_from_nanos(1232143214343432).unwrap(), - Duration::from_nanos(1232143214343432) + checked_duration_from_nanos(1_232_143_214_343_432).unwrap(), + Duration::from_nanos(1_232_143_214_343_432) ); assert_eq!( - checked_duration_from_nanos(u64::MAX as u128).unwrap(), + checked_duration_from_nanos(u128::from(u64::MAX)).unwrap(), Duration::from_nanos(u64::MAX) ); } @@ -202,7 +206,7 @@ mod test { #[test] fn 
it_should_work_for_some_numbers_larger_than_u64() { assert_eq!( - checked_duration_from_nanos(TIME_EXTENT_VAL.amount as u128 * NANOS_PER_SEC as u128).unwrap(), + checked_duration_from_nanos(u128::from(TIME_EXTENT_VAL.amount) * u128::from(NANOS_PER_SEC)).unwrap(), Duration::from_secs(TIME_EXTENT_VAL.amount) ); } @@ -515,14 +519,14 @@ mod test { assert_eq!( DefaultTimeExtentMaker::now_before( - &TimeExtentBase::from_secs(u32::MAX as u64), - &Duration::from_secs(u32::MAX as u64) + &TimeExtentBase::from_secs(u64::from(u32::MAX)), + &Duration::from_secs(u64::from(u32::MAX)) ) .unwrap() .unwrap(), TimeExtent { - increment: TimeExtentBase::from_secs(u32::MAX as u64), - amount: 4294967296 + increment: TimeExtentBase::from_secs(u64::from(u32::MAX)), + amount: 4_294_967_296 } ); } diff --git a/src/protocol/common.rs b/src/protocol/common.rs index efeb328c9..c5c9b4578 100644 --- a/src/protocol/common.rs +++ b/src/protocol/common.rs @@ -233,14 +233,16 @@ impl std::fmt::Display for PeerId { } impl PeerId { + #[must_use] pub fn get_id(&self) -> Option { let buff_size = self.0.len() * 2; let mut tmp: Vec = vec![0; buff_size]; binascii::bin2hex(&self.0, &mut tmp).unwrap(); - std::str::from_utf8(&tmp).ok().map(|id| id.to_string()) + std::str::from_utf8(&tmp).ok().map(std::string::ToString::to_string) } + #[must_use] pub fn get_client_name(&self) -> Option<&'static str> { if self.0[0] == b'M' { return Some("BitTorrent"); diff --git a/src/protocol/crypto.rs b/src/protocol/crypto.rs index 18cfaf5e6..6e1517ef8 100644 --- a/src/protocol/crypto.rs +++ b/src/protocol/crypto.rs @@ -89,8 +89,8 @@ pub mod keys { #[test] fn it_should_have_a_large_random_seed() { - assert!(u128::from_ne_bytes((*RANDOM_SEED)[..16].try_into().unwrap()) > u64::MAX as u128); - assert!(u128::from_ne_bytes((*RANDOM_SEED)[16..].try_into().unwrap()) > u64::MAX as u128); + assert!(u128::from_ne_bytes((*RANDOM_SEED)[..16].try_into().unwrap()) > u128::from(u64::MAX)); + 
assert!(u128::from_ne_bytes((*RANDOM_SEED)[16..].try_into().unwrap()) > u128::from(u64::MAX)); } } } diff --git a/src/stats.rs b/src/stats.rs index 22b74c8d3..738909934 100644 --- a/src/stats.rs +++ b/src/stats.rs @@ -1,5 +1,6 @@ use crate::tracker::statistics::{StatsRepository, StatsTracker, TrackerStatisticsEventSender}; +#[must_use] pub fn setup_statistics(tracker_usage_statistics: bool) -> (Option>, StatsRepository) { let mut stats_event_sender = None; diff --git a/src/tracker/key.rs b/src/tracker/key.rs index 6d3f3c320..881dac877 100644 --- a/src/tracker/key.rs +++ b/src/tracker/key.rs @@ -9,6 +9,7 @@ use serde::Serialize; use crate::protocol::clock::{DefaultClock, DurationSinceUnixEpoch, Time, TimeNow}; use crate::protocol::common::AUTH_KEY_LENGTH; +#[must_use] pub fn generate_auth_key(lifetime: Duration) -> AuthKey { let key: String = thread_rng() .sample_iter(&Alphanumeric) @@ -43,6 +44,7 @@ pub struct AuthKey { } impl AuthKey { + #[must_use] pub fn from_buffer(key_buffer: [u8; AUTH_KEY_LENGTH]) -> Option { if let Ok(key) = String::from_utf8(Vec::from(key_buffer)) { Some(AuthKey { key, valid_until: None }) @@ -51,6 +53,7 @@ impl AuthKey { } } + #[must_use] pub fn from_string(key: &str) -> Option { if key.len() != AUTH_KEY_LENGTH { None diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index f31347e3e..6aae06a4b 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -201,7 +201,7 @@ impl TorrentTracker { match read_lock.get(info_hash) { None => vec![], - Some(entry) => entry.get_peers(Some(client_addr)).into_iter().cloned().collect(), + Some(entry) => entry.get_peers(Some(client_addr)).into_iter().copied().collect(), } } @@ -211,7 +211,7 @@ impl TorrentTracker { match read_lock.get(info_hash) { None => vec![], - Some(entry) => entry.get_peers(None).into_iter().cloned().collect(), + Some(entry) => entry.get_peers(None).into_iter().copied().collect(), } } @@ -236,9 +236,9 @@ impl TorrentTracker { let (seeders, completed, leechers) = 
torrent_entry.get_stats(); TorrentStats { + completed, seeders, leechers, - completed, } } diff --git a/src/tracker/peer.rs b/src/tracker/peer.rs index 77613e080..a5f000eca 100644 --- a/src/tracker/peer.rs +++ b/src/tracker/peer.rs @@ -26,6 +26,7 @@ pub struct TorrentPeer { } impl TorrentPeer { + #[must_use] pub fn from_udp_announce_request( announce_request: &aquatic_udp_protocol::AnnounceRequest, remote_ip: IpAddr, @@ -44,6 +45,7 @@ impl TorrentPeer { } } + #[must_use] pub fn from_http_announce_request( announce_request: &AnnounceRequest, remote_ip: IpAddr, @@ -63,7 +65,7 @@ impl TorrentPeer { }; TorrentPeer { - peer_id: announce_request.peer_id.clone(), + peer_id: announce_request.peer_id, peer_addr, updated: DefaultClock::now(), uploaded: NumberOfBytes(announce_request.uploaded as i64), @@ -74,6 +76,7 @@ impl TorrentPeer { } // potentially substitute localhost ip with external ip + #[must_use] pub fn peer_addr_from_ip_and_port_and_opt_host_ip(remote_ip: IpAddr, host_opt_ip: Option, port: u16) -> SocketAddr { if let Some(host_ip) = host_opt_ip.filter(|_| remote_ip.is_loopback()) { SocketAddr::new(host_ip, port) @@ -82,6 +85,7 @@ impl TorrentPeer { } } + #[must_use] pub fn is_seeder(&self) -> bool { self.left.0 <= 0 && self.event != AnnounceEvent::Stopped } diff --git a/src/tracker/statistics.rs b/src/tracker/statistics.rs index 50804a5f4..609f036aa 100644 --- a/src/tracker/statistics.rs +++ b/src/tracker/statistics.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use async_trait::async_trait; use log::debug; #[cfg(test)] -use mockall::{automock, predicate::*}; +use mockall::{automock, predicate::str}; use tokio::sync::mpsc::error::SendError; use tokio::sync::mpsc::{Receiver, Sender}; use tokio::sync::{mpsc, RwLock, RwLockReadGuard}; @@ -47,6 +47,7 @@ impl Default for TrackerStatistics { } impl TrackerStatistics { + #[must_use] pub fn new() -> Self { Self { tcp4_connections_handled: 0, @@ -76,12 +77,14 @@ impl Default for StatsTracker { } impl StatsTracker { + #[must_use] 
pub fn new() -> Self { Self { stats_repository: StatsRepository::new(), } } + #[must_use] pub fn new_active_instance() -> (Box, StatsRepository) { let mut stats_tracker = Self::new(); @@ -184,6 +187,7 @@ impl Default for StatsRepository { } impl StatsRepository { + #[must_use] pub fn new() -> Self { Self { stats: Arc::new(RwLock::new(TrackerStatistics::new())), diff --git a/src/tracker/torrent.rs b/src/tracker/torrent.rs index f23858949..46608643d 100644 --- a/src/tracker/torrent.rs +++ b/src/tracker/torrent.rs @@ -16,6 +16,7 @@ pub struct TorrentEntry { } impl TorrentEntry { + #[must_use] pub fn new() -> TorrentEntry { TorrentEntry { peers: std::collections::BTreeMap::new(), @@ -47,6 +48,7 @@ impl TorrentEntry { did_torrent_stats_change } + #[must_use] pub fn get_peers(&self, client_addr: Option<&SocketAddr>) -> Vec<&TorrentPeer> { self.peers .values() @@ -70,6 +72,7 @@ impl TorrentEntry { .collect() } + #[must_use] pub fn get_stats(&self) -> (u32, u32, u32) { let seeders: u32 = self.peers.values().filter(|peer| peer.is_seeder()).count() as u32; let leechers: u32 = self.peers.len() as u32 - seeders; @@ -77,7 +80,7 @@ impl TorrentEntry { } pub fn remove_inactive_peers(&mut self, max_peer_timeout: u32) { - let current_cutoff = DefaultClock::sub(&Duration::from_secs(max_peer_timeout as u64)).unwrap_or_default(); + let current_cutoff = DefaultClock::sub(&Duration::from_secs(u64::from(max_peer_timeout))).unwrap_or_default(); self.peers.retain(|_, peer| peer.updated > current_cutoff); } } @@ -358,7 +361,7 @@ mod tests { let now = WorkingClock::now(); StoppedClock::local_set(&now); - let timeout_seconds_before_now = now.sub(Duration::from_secs(timeout as u64)); + let timeout_seconds_before_now = now.sub(Duration::from_secs(u64::from(timeout))); let inactive_peer = TorrentPeerBuilder::default() .updated_at(timeout_seconds_before_now.sub(Duration::from_secs(1))) .into(); diff --git a/src/udp/connection_cookie.rs b/src/udp/connection_cookie.rs index ef241245a..b18940dfc 
100644 --- a/src/udp/connection_cookie.rs +++ b/src/udp/connection_cookie.rs @@ -11,14 +11,17 @@ pub type SinceUnixEpochTimeExtent = TimeExtent; pub const COOKIE_LIFETIME: TimeExtent = TimeExtent::from_sec(2, &60); +#[must_use] pub fn from_connection_id(connection_id: &ConnectionId) -> Cookie { connection_id.0.to_le_bytes() } +#[must_use] pub fn into_connection_id(connection_cookie: &Cookie) -> ConnectionId { ConnectionId(i64::from_le_bytes(*connection_cookie)) } +#[must_use] pub fn make_connection_cookie(remote_address: &SocketAddr) -> Cookie { let time_extent = cookie_builder::get_last_time_extent(); diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index 30b33225c..81578e9c3 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -188,7 +188,7 @@ pub async fn handle_scrape( let mut torrent_stats: Vec = Vec::new(); - for info_hash in request.info_hashes.iter() { + for info_hash in &request.info_hashes { let info_hash = InfoHash(info_hash.0); let scrape_entry = match db.get(&info_hash) { diff --git a/src/udp/mod.rs b/src/udp/mod.rs index 327f03eed..2a8d42d9f 100644 --- a/src/udp/mod.rs +++ b/src/udp/mod.rs @@ -9,4 +9,4 @@ pub type Port = u16; pub type TransactionId = i64; pub const MAX_PACKET_SIZE: usize = 1496; -pub const PROTOCOL_ID: i64 = 0x41727101980; +pub const PROTOCOL_ID: i64 = 0x0417_2710_1980; diff --git a/src/udp/request.rs b/src/udp/request.rs index 67aaeb57f..53d646f1a 100644 --- a/src/udp/request.rs +++ b/src/udp/request.rs @@ -23,6 +23,7 @@ pub struct AnnounceRequestWrapper { } impl AnnounceRequestWrapper { + #[must_use] pub fn new(announce_request: AnnounceRequest) -> Self { AnnounceRequestWrapper { announce_request: announce_request.clone(), diff --git a/tests/api.rs b/tests/api.rs index a5ae79621..14fefa50e 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -1,6 +1,6 @@ /// Integration tests for the tracker API /// -/// cargo test tracker_api -- --nocapture +/// cargo test `tracker_api` -- --nocapture extern crate rand; mod common; @@ 
-192,7 +192,7 @@ mod tracker_api { let torrent_peer = TorrentPeer { peer_id: PeerId(*b"-qB00000000000000000"), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), - updated: DurationSinceUnixEpoch::new(1669397478934, 0), + updated: DurationSinceUnixEpoch::new(1_669_397_478_934, 0), uploaded: NumberOfBytes(0), downloaded: NumberOfBytes(0), left: NumberOfBytes(0), diff --git a/tests/udp.rs b/tests/udp.rs index 7a0d883a5..54caeaa68 100644 --- a/tests/udp.rs +++ b/tests/udp.rs @@ -1,6 +1,6 @@ /// Integration tests for UDP tracker server /// -/// cargo test udp_tracker_server -- --nocapture +/// cargo test `udp_tracker_server` -- --nocapture extern crate rand; mod common; @@ -116,7 +116,7 @@ mod udp_tracker_server { } } - /// Creates a new UdpClient connected to a Udp server + /// Creates a new `UdpClient` connected to a Udp server async fn new_connected_udp_client(remote_address: &str) -> UdpClient { let client = UdpClient::bind(&source_address(ephemeral_random_port())).await; client.connect(remote_address).await; @@ -155,7 +155,7 @@ mod udp_tracker_server { } } - /// Creates a new UdpTrackerClient connected to a Udp Tracker server + /// Creates a new `UdpTrackerClient` connected to a Udp Tracker server async fn new_connected_udp_tracker_client(remote_address: &str) -> UdpTrackerClient { let udp_client = new_connected_udp_client(remote_address).await; UdpTrackerClient { udp_client } From f74c93346b2d6aba776867a3db2777101c40a20f Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 18:24:56 +0100 Subject: [PATCH 04/45] clippy: fix src/http/response.rs --- src/http/handlers.rs | 14 +++++++------- src/http/response.rs | 29 ++++++++++++++++++++--------- 2 files changed, 27 insertions(+), 16 deletions(-) diff --git a/src/http/handlers.rs b/src/http/handlers.rs index cd521b43b..fc55c7c5b 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -9,9 +9,9 @@ use warp::{reject, Rejection, Reply}; use super::errors::ServerError; use 
super::request::{AnnounceRequest, ScrapeRequest}; -use super::response::{AnnounceResponse, Peer, ScrapeResponse, ScrapeResponseEntry}; -use crate::http::response::ErrorResponse; -use crate::http::WebResult; +use super::response::{Announce, Peer, Scrape, ScrapeResponseEntry}; +use super::WebResult; +use crate::http::response::Error; use crate::protocol::common::InfoHash; use crate::tracker::key::AuthKey; use crate::tracker::peer::TorrentPeer; @@ -151,7 +151,7 @@ fn send_announce_response( }) .collect(); - let res = AnnounceResponse { + let res = Announce { interval, interval_min, complete: torrent_stats.seeders, @@ -172,7 +172,7 @@ fn send_announce_response( /// Send scrape response fn send_scrape_response(files: HashMap) -> WebResult { - let res = ScrapeResponse { files }; + let res = Scrape { files }; match res.write() { Ok(body) => Ok(Response::new(body)), @@ -184,12 +184,12 @@ fn send_scrape_response(files: HashMap) -> WebRes pub async fn send_error(r: Rejection) -> std::result::Result { let body = if let Some(server_error) = r.find::() { debug!("{:?}", server_error); - ErrorResponse { + Error { failure_reason: server_error.to_string(), } .write() } else { - ErrorResponse { + Error { failure_reason: ServerError::InternalServerError.to_string(), } .write() diff --git a/src/http/response.rs b/src/http/response.rs index cb01068fa..98ea6fe73 100644 --- a/src/http/response.rs +++ b/src/http/response.rs @@ -1,5 +1,4 @@ use std::collections::HashMap; -use std::error::Error; use std::io::Write; use std::net::IpAddr; @@ -16,7 +15,7 @@ pub struct Peer { } #[derive(Serialize)] -pub struct AnnounceResponse { +pub struct Announce { pub interval: u32, #[serde(rename = "min interval")] pub interval_min: u32, @@ -26,13 +25,19 @@ pub struct AnnounceResponse { pub peers: Vec, } -impl AnnounceResponse { +impl Announce { + /// # Panics + /// + /// It would panic if the `Announce` struct would contain an inappropriate type. 
#[must_use] pub fn write(&self) -> String { serde_bencode::to_string(&self).unwrap() } - pub fn write_compact(&self) -> Result, Box> { + /// # Errors + /// + /// Will return `Err` if internally interrupted. + pub fn write_compact(&self) -> Result, Box> { let mut peers_v4: Vec = Vec::new(); let mut peers_v6: Vec = Vec::new(); @@ -80,12 +85,15 @@ pub struct ScrapeResponseEntry { } #[derive(Serialize)] -pub struct ScrapeResponse { +pub struct Scrape { pub files: HashMap, } -impl ScrapeResponse { - pub fn write(&self) -> Result, Box> { +impl Scrape { + /// # Errors + /// + /// Will return `Err` if internally interrupted. + pub fn write(&self) -> Result, Box> { let mut bytes: Vec = Vec::new(); bytes.write_all(b"d5:filesd")?; @@ -109,12 +117,15 @@ impl ScrapeResponse { } #[derive(Serialize)] -pub struct ErrorResponse { +pub struct Error { #[serde(rename = "failure reason")] pub failure_reason: String, } -impl ErrorResponse { +impl Error { + /// # Panics + /// + /// It would panic if the `Error` struct would contain an inappropriate type. 
#[must_use] pub fn write(&self) -> String { serde_bencode::to_string(&self).unwrap() From a9f760b8ac8ab9562473347335e10a99db53571d Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 18:25:56 +0100 Subject: [PATCH 05/45] clippy: fix src/http/request.rs --- src/http/filters.rs | 14 +++++++------- src/http/handlers.rs | 15 +++++++-------- src/http/request.rs | 4 ++-- src/tracker/peer.rs | 14 +++++--------- 4 files changed, 21 insertions(+), 26 deletions(-) diff --git a/src/http/filters.rs b/src/http/filters.rs index d33acbcfa..f28909c7f 100644 --- a/src/http/filters.rs +++ b/src/http/filters.rs @@ -6,7 +6,7 @@ use std::sync::Arc; use warp::{reject, Filter, Rejection}; use super::errors::ServerError; -use super::request::{AnnounceRequest, AnnounceRequestQuery, ScrapeRequest}; +use super::request::{Announce, AnnounceRequestQuery, Scrape}; use super::WebResult; use crate::protocol::common::{InfoHash, PeerId, MAX_SCRAPE_TORRENTS}; use crate::tracker::key::AuthKey; @@ -47,7 +47,7 @@ pub fn with_peer_addr(on_reverse_proxy: bool) -> impl Filter impl Filter + Clone { +pub fn with_announce_request(on_reverse_proxy: bool) -> impl Filter + Clone { warp::filters::query::query::() .and(with_info_hash()) .and(with_peer_id()) @@ -56,7 +56,7 @@ pub fn with_announce_request(on_reverse_proxy: bool) -> impl Filter impl Filter + Clone { +pub fn with_scrape_request(on_reverse_proxy: bool) -> impl Filter + Clone { warp::any() .and(with_info_hash()) .and(with_peer_addr(on_reverse_proxy)) @@ -159,8 +159,8 @@ async fn announce_request( info_hashes: Vec, peer_id: PeerId, peer_addr: IpAddr, -) -> WebResult { - Ok(AnnounceRequest { +) -> WebResult { + Ok(Announce { info_hash: info_hashes[0], peer_addr, downloaded: announce_request_query.downloaded.unwrap_or(0), @@ -174,6 +174,6 @@ async fn announce_request( } /// Parse `ScrapeRequest` from `InfoHash` -async fn scrape_request(info_hashes: Vec, peer_addr: IpAddr) -> WebResult { - Ok(ScrapeRequest { info_hashes, peer_addr }) 
+async fn scrape_request(info_hashes: Vec, peer_addr: IpAddr) -> WebResult { + Ok(Scrape { info_hashes, peer_addr }) } diff --git a/src/http/handlers.rs b/src/http/handlers.rs index fc55c7c5b..a312ff105 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -8,9 +8,8 @@ use warp::http::Response; use warp::{reject, Rejection, Reply}; use super::errors::ServerError; -use super::request::{AnnounceRequest, ScrapeRequest}; -use super::response::{Announce, Peer, Scrape, ScrapeResponseEntry}; -use super::WebResult; +use super::response::{self, Peer, ScrapeResponseEntry}; +use super::{request, WebResult}; use crate::http::response::Error; use crate::protocol::common::InfoHash; use crate::tracker::key::AuthKey; @@ -44,7 +43,7 @@ pub async fn authenticate( /// Handle announce request pub async fn handle_announce( - announce_request: AnnounceRequest, + announce_request: request::Announce, auth_key: Option, tracker: Arc, ) -> WebResult { @@ -86,7 +85,7 @@ pub async fn handle_announce( /// Handle scrape request pub async fn handle_scrape( - scrape_request: ScrapeRequest, + scrape_request: request::Scrape, auth_key: Option, tracker: Arc, ) -> WebResult { @@ -136,7 +135,7 @@ pub async fn handle_scrape( /// Send announce response fn send_announce_response( - announce_request: &AnnounceRequest, + announce_request: &request::Announce, torrent_stats: TorrentStats, peers: Vec, interval: u32, @@ -151,7 +150,7 @@ fn send_announce_response( }) .collect(); - let res = Announce { + let res = response::Announce { interval, interval_min, complete: torrent_stats.seeders, @@ -172,7 +171,7 @@ fn send_announce_response( /// Send scrape response fn send_scrape_response(files: HashMap) -> WebResult { - let res = Scrape { files }; + let res = response::Scrape { files }; match res.write() { Ok(body) => Ok(Response::new(body)), diff --git a/src/http/request.rs b/src/http/request.rs index 2d72a1a3c..b812e1173 100644 --- a/src/http/request.rs +++ b/src/http/request.rs @@ -17,7 +17,7 @@ pub 
struct AnnounceRequestQuery { } #[derive(Debug)] -pub struct AnnounceRequest { +pub struct Announce { pub info_hash: InfoHash, pub peer_addr: IpAddr, pub downloaded: Bytes, @@ -29,7 +29,7 @@ pub struct AnnounceRequest { pub compact: Option, } -pub struct ScrapeRequest { +pub struct Scrape { pub info_hashes: Vec, pub peer_addr: IpAddr, } diff --git a/src/tracker/peer.rs b/src/tracker/peer.rs index a5f000eca..a30723d00 100644 --- a/src/tracker/peer.rs +++ b/src/tracker/peer.rs @@ -4,7 +4,7 @@ use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; use serde; use serde::Serialize; -use crate::http::request::AnnounceRequest; +use crate::http::request::Announce; use crate::protocol::clock::{DefaultClock, DurationSinceUnixEpoch, Time}; use crate::protocol::common::{AnnounceEventDef, NumberOfBytesDef, PeerId}; use crate::protocol::utils::ser_unix_time_value; @@ -46,11 +46,7 @@ impl TorrentPeer { } #[must_use] - pub fn from_http_announce_request( - announce_request: &AnnounceRequest, - remote_ip: IpAddr, - host_opt_ip: Option, - ) -> Self { + pub fn from_http_announce_request(announce_request: &Announce, remote_ip: IpAddr, host_opt_ip: Option) -> Self { let peer_addr = TorrentPeer::peer_addr_from_ip_and_port_and_opt_host_ip(remote_ip, host_opt_ip, announce_request.port); let event: AnnounceEvent = if let Some(event) = &announce_request.event { @@ -285,12 +281,12 @@ mod test { mod torrent_peer_constructor_from_for_http_requests { use std::net::{IpAddr, Ipv4Addr}; - use crate::http::request::AnnounceRequest; + use crate::http::request::Announce; use crate::protocol::common::{InfoHash, PeerId}; use crate::tracker::peer::TorrentPeer; - fn sample_http_announce_request(peer_addr: IpAddr, port: u16) -> AnnounceRequest { - AnnounceRequest { + fn sample_http_announce_request(peer_addr: IpAddr, port: u16) -> Announce { + Announce { info_hash: InfoHash([0u8; 20]), peer_addr, downloaded: 0u64, From 21b6e777375d2007c27cdc0d9cd9820857809f97 Mon Sep 17 00:00:00 2001 From: Cameron 
Garnham Date: Wed, 23 Nov 2022 18:33:09 +0100 Subject: [PATCH 06/45] clippy: fix (ignore) src/config.rs --- cSpell.json | 1 + src/config.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/cSpell.json b/cSpell.json index a2c4235c4..cc3359d58 100644 --- a/cSpell.json +++ b/cSpell.json @@ -7,6 +7,7 @@ "bencode", "binascii", "Bitflu", + "bools", "bufs", "byteorder", "canonicalize", diff --git a/src/config.rs b/src/config.rs index 1199c7fe7..dbfb4a140 100644 --- a/src/config.rs +++ b/src/config.rs @@ -37,6 +37,7 @@ pub struct HttpApiConfig { pub access_tokens: HashMap, } +#[allow(clippy::struct_excessive_bools)] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] pub struct Configuration { pub log_level: Option, From 941e9825dcf0c1360212be87b90234e82199d983 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 18:44:35 +0100 Subject: [PATCH 07/45] clippy: fix src/api/resources/auth_key_resource.rs --- src/api/resources/auth_key_resource.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/api/resources/auth_key_resource.rs b/src/api/resources/auth_key_resource.rs index 4fc5d0cf9..3bc0cefb7 100644 --- a/src/api/resources/auth_key_resource.rs +++ b/src/api/resources/auth_key_resource.rs @@ -54,7 +54,7 @@ mod tests { key: "IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line valid_until: Some(DefaultClock::add(&Duration::new(duration_in_secs, 0)).unwrap()) } - ) + ); } #[test] @@ -72,7 +72,7 @@ mod tests { key: "IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line valid_until: Some(duration_in_secs) } - ) + ); } #[test] From a433c825efa2c4df59fd9e8375da623de5be3bf1 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 19:13:54 +0100 Subject: [PATCH 08/45] clippy: fix src/api/server.rs --- src/api/server.rs | 58 +++++++++++++++++++++-------------------- src/jobs/tracker_api.rs | 2 +- 2 files changed, 31 insertions(+), 29 deletions(-) diff --git a/src/api/server.rs 
b/src/api/server.rs index ce272b3ac..f9e5bc368 100644 --- a/src/api/server.rs +++ b/src/api/server.rs @@ -59,7 +59,8 @@ fn authenticate(tokens: HashMap) -> impl Filter) -> impl warp::Future { +#[allow(clippy::too_many_lines)] +pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl warp::Future { // GET /api/torrents?offset=:u32&limit=:u32 // View torrent list let api_torrents = tracker.clone(); @@ -124,31 +125,31 @@ pub fn start(socket_addr: SocketAddr, tracker: Arc) -> impl warp let db = tracker.get_torrents().await; - let _: Vec<_> = db - .iter() - .map(|(_info_hash, torrent_entry)| { - let (seeders, completed, leechers) = torrent_entry.get_stats(); - results.seeders += seeders; - results.completed += completed; - results.leechers += leechers; - results.torrents += 1; - }) - .collect(); + db.values().for_each(|torrent_entry| { + let (seeders, completed, leechers) = torrent_entry.get_stats(); + results.seeders += seeders; + results.completed += completed; + results.leechers += leechers; + results.torrents += 1; + }); let stats = tracker.get_stats().await; - results.tcp4_connections_handled = stats.tcp4_connections_handled as u32; - results.tcp4_announces_handled = stats.tcp4_announces_handled as u32; - results.tcp4_scrapes_handled = stats.tcp4_scrapes_handled as u32; - results.tcp6_connections_handled = stats.tcp6_connections_handled as u32; - results.tcp6_announces_handled = stats.tcp6_announces_handled as u32; - results.tcp6_scrapes_handled = stats.tcp6_scrapes_handled as u32; - results.udp4_connections_handled = stats.udp4_connections_handled as u32; - results.udp4_announces_handled = stats.udp4_announces_handled as u32; - results.udp4_scrapes_handled = stats.udp4_scrapes_handled as u32; - results.udp6_connections_handled = stats.udp6_connections_handled as u32; - results.udp6_announces_handled = stats.udp6_announces_handled as u32; - results.udp6_scrapes_handled = stats.udp6_scrapes_handled as u32; + #[allow(clippy::cast_possible_truncation)] + { + 
results.tcp4_connections_handled = stats.tcp4_connections_handled as u32; + results.tcp4_announces_handled = stats.tcp4_announces_handled as u32; + results.tcp4_scrapes_handled = stats.tcp4_scrapes_handled as u32; + results.tcp6_connections_handled = stats.tcp6_connections_handled as u32; + results.tcp6_announces_handled = stats.tcp6_announces_handled as u32; + results.tcp6_scrapes_handled = stats.tcp6_scrapes_handled as u32; + results.udp4_connections_handled = stats.udp4_connections_handled as u32; + results.udp4_announces_handled = stats.udp4_announces_handled as u32; + results.udp4_scrapes_handled = stats.udp4_scrapes_handled as u32; + results.udp6_connections_handled = stats.udp6_connections_handled as u32; + results.udp6_announces_handled = stats.udp6_announces_handled as u32; + results.udp6_scrapes_handled = stats.udp6_scrapes_handled as u32; + } Result::<_, warp::reject::Rejection>::Ok(reply::json(&results)) }); @@ -168,11 +169,12 @@ pub fn start(socket_addr: SocketAddr, tracker: Arc) -> impl warp let db = tracker.get_torrents().await; let torrent_entry_option = db.get(&info_hash); - if torrent_entry_option.is_none() { - return Result::<_, warp::reject::Rejection>::Ok(reply::json(&"torrent not known")); - } - - let torrent_entry = torrent_entry_option.unwrap(); + let torrent_entry = match torrent_entry_option { + Some(torrent_entry) => torrent_entry, + None => { + return Result::<_, warp::reject::Rejection>::Ok(reply::json(&"torrent not known")); + } + }; let (seeders, completed, leechers) = torrent_entry.get_stats(); let peers = torrent_entry.get_peers(None); diff --git a/src/jobs/tracker_api.rs b/src/jobs/tracker_api.rs index ac7657858..4e2dcd0c9 100644 --- a/src/jobs/tracker_api.rs +++ b/src/jobs/tracker_api.rs @@ -24,7 +24,7 @@ pub async fn start_job(config: &Configuration, tracker: Arc) -> // Run the API server let join_handle = tokio::spawn(async move { - let handel = server::start(bind_addr, tracker); + let handel = server::start(bind_addr, 
&tracker); assert!(tx.send(ApiServerJobStarted()).is_ok(), "the start job dropped"); From 2ba748925cb748259ed92c6de692624c9bc68cdc Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 19:32:22 +0100 Subject: [PATCH 09/45] clippy: fix src/protocol/common.rs --- src/protocol/common.rs | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/protocol/common.rs b/src/protocol/common.rs index c5c9b4578..d6a98cf03 100644 --- a/src/protocol/common.rs +++ b/src/protocol/common.rs @@ -212,9 +212,8 @@ impl<'v> serde::de::Visitor<'v> for InfoHashVisitor { serde::de::Unexpected::Str(v), &"expected a hexadecimal string", )); - } else { - Ok(res) - } + }; + Ok(res) } } @@ -249,8 +248,7 @@ impl PeerId { } if self.0[0] == b'-' { let name = match &self.0[1..3] { - b"AG" => "Ares", - b"A~" => "Ares", + b"AG" | b"A~" => "Ares", b"AR" => "Arctic", b"AV" => "Avicora", b"AX" => "BitPump", @@ -333,6 +331,11 @@ impl Serialize for PeerId { client: Option<&'a str>, } + let buff_size = self.0.len() * 2; + let mut tmp: Vec = vec![0; buff_size]; + binascii::bin2hex(&self.0, &mut tmp).unwrap(); + let id = std::str::from_utf8(&tmp).ok(); + let obj = PeerIdInfo { id: self.get_id(), client: self.get_client_name(), From 8e3115f80d7f98b70204caf83c253337d640da03 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 19:45:29 +0100 Subject: [PATCH 10/45] clippy: fix src/config.rs --- src/config.rs | 46 +++++++++++++++++++++++++--------------- src/jobs/http_tracker.rs | 4 ++-- src/jobs/udp_tracker.rs | 4 ++-- 3 files changed, 33 insertions(+), 21 deletions(-) diff --git a/src/config.rs b/src/config.rs index dbfb4a140..ac15f96b3 100644 --- a/src/config.rs +++ b/src/config.rs @@ -13,14 +13,14 @@ use crate::databases::database::DatabaseDrivers; use crate::tracker::mode::TrackerMode; #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] -pub struct UdpTrackerConfig { +pub struct UdpTracker { pub enabled: bool, pub bind_address: String, } 
#[serde_as] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] -pub struct HttpTrackerConfig { +pub struct HttpTracker { pub enabled: bool, pub bind_address: String, pub ssl_enabled: bool, @@ -31,7 +31,7 @@ pub struct HttpTrackerConfig { } #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] -pub struct HttpApiConfig { +pub struct HttpApi { pub enabled: bool, pub bind_address: String, pub access_tokens: HashMap, @@ -53,13 +53,15 @@ pub struct Configuration { pub persistent_torrent_completed_stat: bool, pub inactive_peer_cleanup_interval: u64, pub remove_peerless_torrents: bool, - pub udp_trackers: Vec, - pub http_trackers: Vec, - pub http_api: HttpApiConfig, + pub udp_trackers: Vec, + pub http_trackers: Vec, + pub http_api: HttpApi, } #[derive(Debug)] pub enum ConfigurationError { + Message(String), + ConfigError(ConfigError), IOError(std::io::Error), ParseError(toml::de::Error), TrackerModeIncompatible, @@ -68,9 +70,11 @@ pub enum ConfigurationError { impl std::fmt::Display for ConfigurationError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { + ConfigurationError::Message(e) => e.fmt(f), + ConfigurationError::ConfigError(e) => e.fmt(f), ConfigurationError::IOError(e) => e.fmt(f), ConfigurationError::ParseError(e) => e.fmt(f), - _ => write!(f, "{:?}", self), + ConfigurationError::TrackerModeIncompatible => write!(f, "{:?}", self), } } } @@ -107,7 +111,7 @@ impl Configuration { remove_peerless_torrents: true, udp_trackers: Vec::new(), http_trackers: Vec::new(), - http_api: HttpApiConfig { + http_api: HttpApi { enabled: true, bind_address: String::from("127.0.0.1:1212"), access_tokens: [(String::from("admin"), String::from("MyAccessToken"))] @@ -116,11 +120,11 @@ impl Configuration { .collect(), }, }; - configuration.udp_trackers.push(UdpTrackerConfig { + configuration.udp_trackers.push(UdpTracker { enabled: false, bind_address: String::from("0.0.0.0:6969"), }); - configuration.http_trackers.push(HttpTrackerConfig { + 
configuration.http_trackers.push(HttpTracker { enabled: false, bind_address: String::from("0.0.0.0:6969"), ssl_enabled: false, @@ -130,31 +134,39 @@ impl Configuration { configuration } - pub fn load_from_file(path: &str) -> Result { + /// # Errors + /// + /// Will return `Err` if `path` does not exist or has a bad configuration. + pub fn load_from_file(path: &str) -> Result { let config_builder = Config::builder(); #[allow(unused_assignments)] let mut config = Config::default(); if Path::new(path).exists() { - config = config_builder.add_source(File::with_name(path)).build()?; + config = config_builder + .add_source(File::with_name(path)) + .build() + .map_err(ConfigurationError::ConfigError)?; } else { eprintln!("No config file found."); eprintln!("Creating config file.."); let config = Configuration::default(); - let _ = config.save_to_file(path); - return Err(ConfigError::Message( + config.save_to_file(path)?; + return Err(ConfigurationError::Message( "Please edit the config.TOML in the root folder and restart the tracker.".to_string(), )); } - let torrust_config: Configuration = config - .try_deserialize() - .map_err(|e| ConfigError::Message(format!("Errors while processing config: {}.", e)))?; + let torrust_config: Configuration = config.try_deserialize().map_err(ConfigurationError::ConfigError)?; Ok(torrust_config) } + /// # Errors + /// + /// Will return `Err` if `filename` does not exist or the user does not have + /// permission to read it. 
pub fn save_to_file(&self, path: &str) -> Result<(), ConfigurationError> { let toml_string = toml::to_string(self).expect("Could not encode TOML value"); fs::write(path, toml_string).expect("Could not write to file!"); diff --git a/src/jobs/http_tracker.rs b/src/jobs/http_tracker.rs index 6070e0d27..f6023a4e0 100644 --- a/src/jobs/http_tracker.rs +++ b/src/jobs/http_tracker.rs @@ -4,12 +4,12 @@ use std::sync::Arc; use log::{info, warn}; use tokio::task::JoinHandle; -use crate::config::HttpTrackerConfig; +use crate::config::HttpTracker; use crate::http::server::HttpServer; use crate::tracker::TorrentTracker; #[must_use] -pub fn start_job(config: &HttpTrackerConfig, tracker: Arc) -> JoinHandle<()> { +pub fn start_job(config: &HttpTracker, tracker: Arc) -> JoinHandle<()> { let bind_addr = config.bind_address.parse::().unwrap(); let ssl_enabled = config.ssl_enabled; let ssl_cert_path = config.ssl_cert_path.clone(); diff --git a/src/jobs/udp_tracker.rs b/src/jobs/udp_tracker.rs index 8bf839380..1b4bc745c 100644 --- a/src/jobs/udp_tracker.rs +++ b/src/jobs/udp_tracker.rs @@ -3,12 +3,12 @@ use std::sync::Arc; use log::{error, info, warn}; use tokio::task::JoinHandle; -use crate::config::UdpTrackerConfig; +use crate::config::UdpTracker; use crate::tracker::TorrentTracker; use crate::udp::server::UdpServer; #[must_use] -pub fn start_job(config: &UdpTrackerConfig, tracker: Arc) -> JoinHandle<()> { +pub fn start_job(config: &UdpTracker, tracker: Arc) -> JoinHandle<()> { let bind_addr = config.bind_address.clone(); tokio::spawn(async move { From 87160bdf2ffbfc91853037afda231c08bb2491bb Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 19:49:28 +0100 Subject: [PATCH 11/45] clippy: fix src/databases/database.rs --- src/config.rs | 6 +++--- src/databases/database.rs | 14 ++++++++++---- src/tracker/mod.rs | 2 +- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/src/config.rs b/src/config.rs index ac15f96b3..6eb83ad16 100644 --- a/src/config.rs +++ 
b/src/config.rs @@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize}; use serde_with::{serde_as, NoneAsEmptyString}; use {std, toml}; -use crate::databases::database::DatabaseDrivers; +use crate::databases::database::Drivers; use crate::tracker::mode::TrackerMode; #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] @@ -42,7 +42,7 @@ pub struct HttpApi { pub struct Configuration { pub log_level: Option, pub mode: TrackerMode, - pub db_driver: DatabaseDrivers, + pub db_driver: Drivers, pub db_path: String, pub announce_interval: u32, pub min_announce_interval: u32, @@ -98,7 +98,7 @@ impl Configuration { let mut configuration = Configuration { log_level: Option::from(String::from("info")), mode: TrackerMode::Public, - db_driver: DatabaseDrivers::Sqlite3, + db_driver: Drivers::Sqlite3, db_path: String::from("data.db"), announce_interval: 120, min_announce_interval: 120, diff --git a/src/databases/database.rs b/src/databases/database.rs index 87a91ddeb..212224b25 100644 --- a/src/databases/database.rs +++ b/src/databases/database.rs @@ -8,18 +8,21 @@ use crate::protocol::common::InfoHash; use crate::tracker::key::AuthKey; #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] -pub enum DatabaseDrivers { +pub enum Drivers { Sqlite3, MySQL, } -pub fn connect_database(db_driver: &DatabaseDrivers, db_path: &str) -> Result, r2d2::Error> { +/// # Errors +/// +/// Will return `r2d2::Error` if `db_path` is not able to create a database. 
+pub fn connect(db_driver: &Drivers, db_path: &str) -> Result, r2d2::Error> { let database: Box = match db_driver { - DatabaseDrivers::Sqlite3 => { + Drivers::Sqlite3 => { let db = SqliteDatabase::new(db_path)?; Box::new(db) } - DatabaseDrivers::MySQL => { + Drivers::MySQL => { let db = MysqlDatabase::new(db_path)?; Box::new(db) } @@ -32,6 +35,9 @@ pub fn connect_database(db_driver: &DatabaseDrivers, db_path: &str) -> Result Result<(), Error>; async fn load_persistent_torrents(&self) -> Result, Error>; diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index 6aae06a4b..680f2635d 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -40,7 +40,7 @@ impl TorrentTracker { stats_event_sender: Option>, stats_repository: StatsRepository, ) -> Result { - let database = database::connect_database(&config.db_driver, &config.db_path)?; + let database = database::connect(&config.db_driver, &config.db_path)?; Ok(TorrentTracker { config: config.clone(), From 38eabc4ae5647b6d885bcff8dfb32adc04c62b3b Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 19:56:53 +0100 Subject: [PATCH 12/45] clippy: fix src/databases/mysql.rs --- src/databases/database.rs | 4 ++-- src/databases/mysql.rs | 15 +++++++++------ 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/databases/database.rs b/src/databases/database.rs index 212224b25..7344010d8 100644 --- a/src/databases/database.rs +++ b/src/databases/database.rs @@ -2,7 +2,7 @@ use async_trait::async_trait; use derive_more::{Display, Error}; use serde::{Deserialize, Serialize}; -use crate::databases::mysql::MysqlDatabase; +use crate::databases::mysql::Mysql; use crate::databases::sqlite::SqliteDatabase; use crate::protocol::common::InfoHash; use crate::tracker::key::AuthKey; @@ -23,7 +23,7 @@ pub fn connect(db_driver: &Drivers, db_path: &str) -> Result, Box::new(db) } Drivers::MySQL => { - let db = MysqlDatabase::new(db_path)?; + let db = Mysql::new(db_path)?; Box::new(db) } }; diff --git 
a/src/databases/mysql.rs b/src/databases/mysql.rs index 5e7410ac2..5db358d5a 100644 --- a/src/databases/mysql.rs +++ b/src/databases/mysql.rs @@ -13,11 +13,14 @@ use crate::databases::database::{Database, Error}; use crate::protocol::common::{InfoHash, AUTH_KEY_LENGTH}; use crate::tracker::key::AuthKey; -pub struct MysqlDatabase { +pub struct Mysql { pool: Pool, } -impl MysqlDatabase { +impl Mysql { + /// # Errors + /// + /// Will return `r2d2::Error` if `db_path` is not able to create `MySQL` database. pub fn new(db_path: &str) -> Result { let opts = Opts::from_url(db_path).expect("Failed to connect to MySQL database."); let builder = OptsBuilder::from_opts(opts); @@ -31,7 +34,7 @@ impl MysqlDatabase { } #[async_trait] -impl Database for MysqlDatabase { +impl Database for Mysql { fn create_database_tables(&self) -> Result<(), database::Error> { let create_whitelist_table = " CREATE TABLE IF NOT EXISTS whitelist ( @@ -57,7 +60,7 @@ impl Database for MysqlDatabase { PRIMARY KEY (`id`), UNIQUE (`key`) );", - AUTH_KEY_LENGTH as i8 + i8::try_from(AUTH_KEY_LENGTH).expect("Auth Key Length Should fit within a i8!") ); let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; @@ -95,7 +98,7 @@ impl Database for MysqlDatabase { "SELECT `key`, valid_until FROM `keys`", |(key, valid_until): (String, i64)| AuthKey { key, - valid_until: Some(Duration::from_secs(valid_until as u64)), + valid_until: Some(Duration::from_secs(valid_until.unsigned_abs())), }, ) .map_err(|_| database::Error::QueryReturnedNoRows)?; @@ -188,7 +191,7 @@ impl Database for MysqlDatabase { { Some((key, valid_until)) => Ok(AuthKey { key, - valid_until: Some(Duration::from_secs(valid_until as u64)), + valid_until: Some(Duration::from_secs(valid_until.unsigned_abs())), }), None => Err(database::Error::InvalidQuery), } From be6676a6315022ce4a3d7d2a02482c2a40a67798 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 19:59:28 +0100 Subject: [PATCH 13/45] clippy: fix 
src/databases/sqlite.rs --- src/databases/database.rs | 4 ++-- src/databases/sqlite.rs | 19 +++++++++++-------- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/databases/database.rs b/src/databases/database.rs index 7344010d8..62105dee5 100644 --- a/src/databases/database.rs +++ b/src/databases/database.rs @@ -3,7 +3,7 @@ use derive_more::{Display, Error}; use serde::{Deserialize, Serialize}; use crate::databases::mysql::Mysql; -use crate::databases::sqlite::SqliteDatabase; +use crate::databases::sqlite::Sqlite; use crate::protocol::common::InfoHash; use crate::tracker::key::AuthKey; @@ -19,7 +19,7 @@ pub enum Drivers { pub fn connect(db_driver: &Drivers, db_path: &str) -> Result, r2d2::Error> { let database: Box = match db_driver { Drivers::Sqlite3 => { - let db = SqliteDatabase::new(db_path)?; + let db = Sqlite::new(db_path)?; Box::new(db) } Drivers::MySQL => { diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs index 19849f297..ee637049b 100644 --- a/src/databases/sqlite.rs +++ b/src/databases/sqlite.rs @@ -11,20 +11,23 @@ use crate::protocol::clock::DurationSinceUnixEpoch; use crate::protocol::common::InfoHash; use crate::tracker::key::AuthKey; -pub struct SqliteDatabase { +pub struct Sqlite { pool: Pool, } -impl SqliteDatabase { - pub fn new(db_path: &str) -> Result { +impl Sqlite { + /// # Errors + /// + /// Will return `r2d2::Error` if `db_path` is not able to create `SqLite` database. 
+ pub fn new(db_path: &str) -> Result { let cm = SqliteConnectionManager::file(db_path); let pool = Pool::new(cm).expect("Failed to create r2d2 SQLite connection pool."); - Ok(SqliteDatabase { pool }) + Ok(Sqlite { pool }) } } #[async_trait] -impl Database for SqliteDatabase { +impl Database for Sqlite { fn create_database_tables(&self) -> Result<(), database::Error> { let create_whitelist_table = " CREATE TABLE IF NOT EXISTS whitelist ( @@ -86,7 +89,7 @@ impl Database for SqliteDatabase { Ok(AuthKey { key, - valid_until: Some(DurationSinceUnixEpoch::from_secs(valid_until as u64)), + valid_until: Some(DurationSinceUnixEpoch::from_secs(valid_until.unsigned_abs())), }) })?; @@ -191,11 +194,11 @@ impl Database for SqliteDatabase { if let Some(row) = rows.next()? { let key: String = row.get(0).unwrap(); - let valid_until_i64: i64 = row.get(1).unwrap(); + let valid_until: i64 = row.get(1).unwrap(); Ok(AuthKey { key, - valid_until: Some(DurationSinceUnixEpoch::from_secs(valid_until_i64 as u64)), + valid_until: Some(DurationSinceUnixEpoch::from_secs(valid_until.unsigned_abs())), }) } else { Err(database::Error::QueryReturnedNoRows) From b5ce7e9f0cfc6f11a83e781f4e85b3c6c5e93a0d Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 21:36:53 +0100 Subject: [PATCH 14/45] clippy: fix src/http/filters.rs --- src/http/filters.rs | 66 ++++++++++++++++++++++++--------------------- 1 file changed, 36 insertions(+), 30 deletions(-) diff --git a/src/http/filters.rs b/src/http/filters.rs index f28909c7f..f2e214e87 100644 --- a/src/http/filters.rs +++ b/src/http/filters.rs @@ -19,13 +19,15 @@ pub fn with_tracker(tracker: Arc) -> impl Filter impl Filter,), Error = Rejection> + Clone { - warp::filters::query::raw().and_then(info_hashes) + warp::filters::query::raw().and_then(|q| async move { info_hashes(&q) }) } /// Check for `PeerId` +#[must_use] pub fn with_peer_id() -> impl Filter + Clone { - warp::filters::query::raw().and_then(peer_id) + 
warp::filters::query::raw().and_then(|q| async move { peer_id(&q) }) } /// Pass Arc along @@ -37,34 +39,38 @@ pub fn with_auth_key() -> impl Filter,), Error = Infa } /// Check for `PeerAddress` +#[must_use] pub fn with_peer_addr(on_reverse_proxy: bool) -> impl Filter + Clone { warp::addr::remote() .and(warp::header::optional::("X-Forwarded-For")) .map(move |remote_addr: Option, x_forwarded_for: Option| { (on_reverse_proxy, remote_addr, x_forwarded_for) }) - .and_then(peer_addr) + .and_then(|q| async move { peer_addr(q) }) } /// Check for `AnnounceRequest` +#[must_use] pub fn with_announce_request(on_reverse_proxy: bool) -> impl Filter + Clone { warp::filters::query::query::() .and(with_info_hash()) .and(with_peer_id()) .and(with_peer_addr(on_reverse_proxy)) - .and_then(announce_request) + .and_then(|q, r, s, t| async move { announce_request(q, &r, s, t) }) } /// Check for `ScrapeRequest` +#[must_use] pub fn with_scrape_request(on_reverse_proxy: bool) -> impl Filter + Clone { warp::any() .and(with_info_hash()) .and(with_peer_addr(on_reverse_proxy)) - .and_then(scrape_request) + .and_then(|q, r| async move { scrape_request(q, r) }) } /// Parse `InfoHash` from raw query string -async fn info_hashes(raw_query: String) -> WebResult> { +#[allow(clippy::ptr_arg)] +fn info_hashes(raw_query: &String) -> WebResult> { let split_raw_query: Vec<&str> = raw_query.split('&').collect(); let mut info_hashes: Vec = Vec::new(); @@ -89,7 +95,8 @@ async fn info_hashes(raw_query: String) -> WebResult> { } /// Parse `PeerId` from raw query string -async fn peer_id(raw_query: String) -> WebResult { +#[allow(clippy::ptr_arg)] +fn peer_id(raw_query: &String) -> WebResult { // put all query params in a vec let split_raw_query: Vec<&str> = raw_query.split('&').collect(); @@ -118,17 +125,14 @@ async fn peer_id(raw_query: String) -> WebResult { } } - if peer_id.is_none() { - Err(reject::custom(ServerError::InvalidPeerId)) - } else { - Ok(peer_id.unwrap()) + match peer_id { + Some(id) => Ok(id), 
+ None => Err(reject::custom(ServerError::InvalidPeerId)), } } /// Get `PeerAddress` from `RemoteAddress` or Forwarded -async fn peer_addr( - (on_reverse_proxy, remote_addr, x_forwarded_for): (bool, Option, Option), -) -> WebResult { +fn peer_addr((on_reverse_proxy, remote_addr, x_forwarded_for): (bool, Option, Option)) -> WebResult { if !on_reverse_proxy && remote_addr.is_none() { return Err(reject::custom(ServerError::AddressNotFound)); } @@ -137,26 +141,27 @@ async fn peer_addr( return Err(reject::custom(ServerError::AddressNotFound)); } - match on_reverse_proxy { - true => { - let mut x_forwarded_for_raw = x_forwarded_for.unwrap(); - // remove whitespace chars - x_forwarded_for_raw.retain(|c| !c.is_whitespace()); - // get all forwarded ip's in a vec - let x_forwarded_ips: Vec<&str> = x_forwarded_for_raw.split(',').collect(); - // set client ip to last forwarded ip - let x_forwarded_ip = *x_forwarded_ips.last().unwrap(); - - IpAddr::from_str(x_forwarded_ip).map_err(|_| reject::custom(ServerError::AddressNotFound)) - } - false => Ok(remote_addr.unwrap().ip()), + if on_reverse_proxy { + let mut x_forwarded_for_raw = x_forwarded_for.unwrap(); + // remove whitespace chars + x_forwarded_for_raw.retain(|c| !c.is_whitespace()); + // get all forwarded ip's in a vec + let x_forwarded_ips: Vec<&str> = x_forwarded_for_raw.split(',').collect(); + // set client ip to last forwarded ip + let x_forwarded_ip = *x_forwarded_ips.last().unwrap(); + + IpAddr::from_str(x_forwarded_ip).map_err(|_| reject::custom(ServerError::AddressNotFound)) + } else { + Ok(remote_addr.unwrap().ip()) } } /// Parse `AnnounceRequest` from raw `AnnounceRequestQuery`, `InfoHash` and Option -async fn announce_request( +#[allow(clippy::unnecessary_wraps)] +#[allow(clippy::ptr_arg)] +fn announce_request( announce_request_query: AnnounceRequestQuery, - info_hashes: Vec, + info_hashes: &Vec, peer_id: PeerId, peer_addr: IpAddr, ) -> WebResult { @@ -174,6 +179,7 @@ async fn announce_request( } /// Parse 
`ScrapeRequest` from `InfoHash` -async fn scrape_request(info_hashes: Vec, peer_addr: IpAddr) -> WebResult { +#[allow(clippy::unnecessary_wraps)] +fn scrape_request(info_hashes: Vec, peer_addr: IpAddr) -> WebResult { Ok(Scrape { info_hashes, peer_addr }) } From 75bef77799f46c281eb3a8adae947705c9d1186f Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 22:04:00 +0100 Subject: [PATCH 15/45] clippy: fix src/http/handlers.rs --- src/http/handlers.rs | 56 ++++++++++++++++++++++++++------------------ src/http/routes.rs | 5 +++- 2 files changed, 37 insertions(+), 24 deletions(-) diff --git a/src/http/handlers.rs b/src/http/handlers.rs index a312ff105..064047ba0 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -19,37 +19,38 @@ use crate::tracker::torrent::{TorrentError, TorrentStats}; use crate::tracker::TorrentTracker; /// Authenticate `InfoHash` using optional `AuthKey` +/// +/// # Errors +/// +/// Will return `ServerError` that wraps the `TorrentError` if unable to `authenticate_request`. 
pub async fn authenticate( info_hash: &InfoHash, auth_key: &Option, tracker: Arc, ) -> Result<(), ServerError> { - match tracker.authenticate_request(info_hash, auth_key).await { - Ok(_) => Ok(()), - Err(e) => { - let err = match e { - TorrentError::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, - TorrentError::PeerNotAuthenticated => ServerError::PeerNotAuthenticated, - TorrentError::PeerKeyNotValid => ServerError::PeerKeyNotValid, - TorrentError::NoPeersFound => ServerError::NoPeersFound, - TorrentError::CouldNotSendResponse => ServerError::InternalServerError, - TorrentError::InvalidInfoHash => ServerError::InvalidInfoHash, - }; - - Err(err) - } - } + tracker.authenticate_request(info_hash, auth_key).await.map_err(|e| match e { + TorrentError::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, + TorrentError::PeerNotAuthenticated => ServerError::PeerNotAuthenticated, + TorrentError::PeerKeyNotValid => ServerError::PeerKeyNotValid, + TorrentError::NoPeersFound => ServerError::NoPeersFound, + TorrentError::CouldNotSendResponse => ServerError::InternalServerError, + TorrentError::InvalidInfoHash => ServerError::InvalidInfoHash, + }) } /// Handle announce request +/// +/// # Errors +/// +/// Will return `warp::Rejection` that wraps the `ServerError` if unable to `send_scrape_response`. 
pub async fn handle_announce( announce_request: request::Announce, auth_key: Option, tracker: Arc, ) -> WebResult { - if let Err(e) = authenticate(&announce_request.info_hash, &auth_key, tracker.clone()).await { - return Err(reject::custom(e)); - } + authenticate(&announce_request.info_hash, &auth_key, tracker.clone()) + .await + .map_err(reject::custom)?; debug!("{:?}", announce_request); @@ -76,14 +77,18 @@ pub async fn handle_announce( send_announce_response( &announce_request, - torrent_stats, - peers, + &torrent_stats, + &peers, announce_interval, tracker.config.min_announce_interval, ) } /// Handle scrape request +/// +/// # Errors +/// +/// Will return `warp::Rejection` that wraps the `ServerError` if unable to `send_scrape_response`. pub async fn handle_scrape( scrape_request: request::Scrape, auth_key: Option, @@ -134,10 +139,11 @@ pub async fn handle_scrape( } /// Send announce response +#[allow(clippy::ptr_arg)] fn send_announce_response( announce_request: &request::Announce, - torrent_stats: TorrentStats, - peers: Vec, + torrent_stats: &TorrentStats, + peers: &Vec, interval: u32, interval_min: u32, ) -> WebResult { @@ -180,7 +186,11 @@ fn send_scrape_response(files: HashMap) -> WebRes } /// Handle all server errors and send error reply -pub async fn send_error(r: Rejection) -> std::result::Result { +/// +/// # Errors +/// +/// Will not return a error, `Infallible`, but instead convert the `ServerError` into a `Response`. 
+pub fn send_error(r: &Rejection) -> std::result::Result { let body = if let Some(server_error) = r.find::() { debug!("{:?}", server_error); Error { diff --git a/src/http/routes.rs b/src/http/routes.rs index f82bf45bc..992febc2c 100644 --- a/src/http/routes.rs +++ b/src/http/routes.rs @@ -8,8 +8,11 @@ use super::handlers::{handle_announce, handle_scrape, send_error}; use crate::tracker::TorrentTracker; /// All routes +#[must_use] pub fn routes(tracker: Arc) -> impl Filter + Clone { - announce(tracker.clone()).or(scrape(tracker)).recover(send_error) + announce(tracker.clone()) + .or(scrape(tracker)) + .recover(|q| async move { send_error(&q) }) } /// GET /announce or /announce/ From 208b10eaf1da30627c6503a6854f914c7de4eb6f Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 22:05:30 +0100 Subject: [PATCH 16/45] clippy: fix src/http/server.rs --- src/http/server.rs | 8 ++++---- src/jobs/http_tracker.rs | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/http/server.rs b/src/http/server.rs index 97ec30aa0..755fdc73a 100644 --- a/src/http/server.rs +++ b/src/http/server.rs @@ -6,14 +6,14 @@ use crate::tracker::TorrentTracker; /// Server that listens on HTTP, needs a `TorrentTracker` #[derive(Clone)] -pub struct HttpServer { +pub struct Http { tracker: Arc, } -impl HttpServer { +impl Http { #[must_use] - pub fn new(tracker: Arc) -> HttpServer { - HttpServer { tracker } + pub fn new(tracker: Arc) -> Http { + Http { tracker } } /// Start the `HttpServer` diff --git a/src/jobs/http_tracker.rs b/src/jobs/http_tracker.rs index f6023a4e0..d0c289e81 100644 --- a/src/jobs/http_tracker.rs +++ b/src/jobs/http_tracker.rs @@ -5,7 +5,7 @@ use log::{info, warn}; use tokio::task::JoinHandle; use crate::config::HttpTracker; -use crate::http::server::HttpServer; +use crate::http::server::Http; use crate::tracker::TorrentTracker; #[must_use] @@ -16,7 +16,7 @@ pub fn start_job(config: &HttpTracker, tracker: Arc) -> JoinHand let ssl_key_path = 
config.ssl_key_path.clone(); tokio::spawn(async move { - let http_tracker = HttpServer::new(tracker); + let http_tracker = Http::new(tracker); if !ssl_enabled { info!("Starting HTTP server on: {}", bind_addr); From 577ddb97b0b25eb766bcdd99f222850b9375e013 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 22:07:52 +0100 Subject: [PATCH 17/45] clippy: fix src/jobs/http_tracker.rs --- src/jobs/http_tracker.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/jobs/http_tracker.rs b/src/jobs/http_tracker.rs index d0c289e81..276da8099 100644 --- a/src/jobs/http_tracker.rs +++ b/src/jobs/http_tracker.rs @@ -8,6 +8,9 @@ use crate::config::HttpTracker; use crate::http::server::Http; use crate::tracker::TorrentTracker; +/// # Panics +/// +/// It would panic if the `config::HttpTracker` struct contains inappropriate values. #[must_use] pub fn start_job(config: &HttpTracker, tracker: Arc) -> JoinHandle<()> { let bind_addr = config.bind_address.parse::().unwrap(); From d092580db3bd53206b44d98df820b0c3f7de391c Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 22:12:33 +0100 Subject: [PATCH 18/45] clippy: fix src/jobs/torrent_cleanup.rs --- src/jobs/torrent_cleanup.rs | 6 +++--- src/setup.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/jobs/torrent_cleanup.rs b/src/jobs/torrent_cleanup.rs index 3d7b49d6b..7bdfc1677 100644 --- a/src/jobs/torrent_cleanup.rs +++ b/src/jobs/torrent_cleanup.rs @@ -8,8 +8,8 @@ use crate::config::Configuration; use crate::tracker::TorrentTracker; #[must_use] -pub fn start_job(config: &Configuration, tracker: Arc) -> JoinHandle<()> { - let weak_tracker = std::sync::Arc::downgrade(&tracker); +pub fn start_job(config: &Configuration, tracker: &Arc) -> JoinHandle<()> { + let weak_tracker = std::sync::Arc::downgrade(tracker); let interval = config.inactive_peer_cleanup_interval; tokio::spawn(async move { @@ -28,7 +28,7 @@ pub fn start_job(config: &Configuration, tracker: Arc) 
-> JoinHa let start_time = Utc::now().time(); info!("Cleaning up torrents.."); tracker.cleanup_torrents().await; - info!("Cleaned up torrents in: {}ms", (Utc::now().time() - start_time).num_milliseconds()) + info!("Cleaned up torrents in: {}ms", (Utc::now().time() - start_time).num_milliseconds()); } else { break; } diff --git a/src/setup.rs b/src/setup.rs index 736f448b6..804b6258a 100644 --- a/src/setup.rs +++ b/src/setup.rs @@ -54,7 +54,7 @@ pub async fn setup(config: &Configuration, tracker: Arc) -> Vec< // Remove torrents without peers, every interval if config.inactive_peer_cleanup_interval > 0 { - jobs.push(torrent_cleanup::start_job(config, tracker.clone())); + jobs.push(torrent_cleanup::start_job(config, &tracker)); } jobs From 9adbfd137fa83d90e4e3073ad9adc240afccbc04 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 22:13:58 +0100 Subject: [PATCH 19/45] clippy: fix src/logging.rs --- src/logging.rs | 4 ++-- src/main.rs | 2 +- tests/api.rs | 2 +- tests/udp.rs | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/logging.rs b/src/logging.rs index 7682bace1..4d16f7670 100644 --- a/src/logging.rs +++ b/src/logging.rs @@ -7,7 +7,7 @@ use crate::config::Configuration; static INIT: Once = Once::new(); -pub fn setup_logging(cfg: &Configuration) { +pub fn setup(cfg: &Configuration) { let level = config_level_or_default(&cfg.log_level); if level == log::LevelFilter::Off { @@ -35,7 +35,7 @@ fn stdout_config(level: LevelFilter) { record.target(), record.level(), message - )) + )); }) .level(level) .chain(std::io::stdout()) diff --git a/src/main.rs b/src/main.rs index f64354fcf..baffc6fa5 100644 --- a/src/main.rs +++ b/src/main.rs @@ -36,7 +36,7 @@ async fn main() { }; // Initialize logging - logging::setup_logging(&config); + logging::setup(&config); // Run jobs let jobs = setup::setup(&config, tracker.clone()).await; diff --git a/tests/api.rs b/tests/api.rs index 14fefa50e..6cfcbc092 100644 --- a/tests/api.rs +++ b/tests/api.rs 
@@ -288,7 +288,7 @@ mod tracker_api { self.tracker = Some(tracker.clone()); // Initialize logging - logging::setup_logging(&configuration); + logging::setup(&configuration); // Start the HTTP API job self.job = Some(tracker_api::start_job(&configuration, tracker).await); diff --git a/tests/udp.rs b/tests/udp.rs index 54caeaa68..b365c4fc6 100644 --- a/tests/udp.rs +++ b/tests/udp.rs @@ -69,7 +69,7 @@ mod udp_tracker_server { }; // Initialize logging - logging::setup_logging(&configuration); + logging::setup(&configuration); let udp_tracker_config = &configuration.udp_trackers[0]; From c78404ff33915057cc2cbc70a041e324fb30ea43 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 22:27:38 +0100 Subject: [PATCH 20/45] clippy: fix src/protocol/clock/mod.rs --- src/api/resources/auth_key_resource.rs | 6 +- src/protocol/clock/mod.rs | 94 ++++++++++++++------------ src/protocol/clock/time_extent.rs | 24 +++---- src/tracker/key.rs | 14 ++-- src/tracker/peer.rs | 10 +-- src/tracker/torrent.rs | 12 ++-- src/udp/connection_cookie.rs | 10 +-- src/udp/handlers.rs | 4 +- 8 files changed, 91 insertions(+), 83 deletions(-) diff --git a/src/api/resources/auth_key_resource.rs b/src/api/resources/auth_key_resource.rs index 3bc0cefb7..9bcfca596 100644 --- a/src/api/resources/auth_key_resource.rs +++ b/src/api/resources/auth_key_resource.rs @@ -36,7 +36,7 @@ mod tests { use std::time::Duration; use super::AuthKeyResource; - use crate::protocol::clock::{DefaultClock, TimeNow}; + use crate::protocol::clock::{Current, TimeNow}; use crate::tracker::key::AuthKey; #[test] @@ -52,7 +52,7 @@ mod tests { AuthKey::from(auth_key_resource), AuthKey { key: "IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line - valid_until: Some(DefaultClock::add(&Duration::new(duration_in_secs, 0)).unwrap()) + valid_until: Some(Current::add(&Duration::new(duration_in_secs, 0)).unwrap()) } ); } @@ -63,7 +63,7 @@ mod tests { let auth_key = AuthKey { key: 
"IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line - valid_until: Some(DefaultClock::add(&Duration::new(duration_in_secs, 0)).unwrap()), + valid_until: Some(Current::add(&Duration::new(duration_in_secs, 0)).unwrap()), }; assert_eq!( diff --git a/src/protocol/clock/mod.rs b/src/protocol/clock/mod.rs index 51197dba6..7868d4c5e 100644 --- a/src/protocol/clock/mod.rs +++ b/src/protocol/clock/mod.rs @@ -4,7 +4,7 @@ use std::time::Duration; pub type DurationSinceUnixEpoch = Duration; #[derive(Debug)] -pub enum ClockType { +pub enum Type { WorkingClock, StoppedClock, } @@ -12,14 +12,14 @@ pub enum ClockType { #[derive(Debug)] pub struct Clock; -pub type WorkingClock = Clock<{ ClockType::WorkingClock as usize }>; -pub type StoppedClock = Clock<{ ClockType::StoppedClock as usize }>; +pub type Working = Clock<{ Type::WorkingClock as usize }>; +pub type Stopped = Clock<{ Type::StoppedClock as usize }>; #[cfg(not(test))] -pub type DefaultClock = WorkingClock; +pub type Current = Working; #[cfg(test)] -pub type DefaultClock = StoppedClock; +pub type Current = Stopped; pub trait Time: Sized { fn now() -> DurationSinceUnixEpoch; @@ -40,44 +40,52 @@ pub trait TimeNow: Time { mod tests { use std::any::TypeId; - use crate::protocol::clock::{DefaultClock, StoppedClock, Time, WorkingClock}; + use crate::protocol::clock::{Current, Stopped, Time, Working}; #[test] fn it_should_be_the_stopped_clock_as_default_when_testing() { // We are testing, so we should default to the fixed time. 
- assert_eq!(TypeId::of::(), TypeId::of::()); - assert_eq!(StoppedClock::now(), DefaultClock::now()) + assert_eq!(TypeId::of::(), TypeId::of::()); + assert_eq!(Stopped::now(), Current::now()); } #[test] fn it_should_have_different_times() { - assert_ne!(TypeId::of::(), TypeId::of::()); - assert_ne!(StoppedClock::now(), WorkingClock::now()) + assert_ne!(TypeId::of::(), TypeId::of::()); + assert_ne!(Stopped::now(), Working::now()); } } mod working_clock { use std::time::SystemTime; - use super::{DurationSinceUnixEpoch, Time, TimeNow, WorkingClock}; + use super::{DurationSinceUnixEpoch, Time, TimeNow, Working}; - impl Time for WorkingClock { + impl Time for Working { fn now() -> DurationSinceUnixEpoch { SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap() } } - impl TimeNow for WorkingClock {} + impl TimeNow for Working {} } pub trait StoppedTime: TimeNow { fn local_set(unix_time: &DurationSinceUnixEpoch); fn local_set_to_unix_epoch() { - Self::local_set(&DurationSinceUnixEpoch::ZERO) + Self::local_set(&DurationSinceUnixEpoch::ZERO); } fn local_set_to_app_start_time(); fn local_set_to_system_time_now(); + + /// # Errors + /// + /// Will return `IntErrorKind` if `duration` would overflow the internal `Duration`. fn local_add(duration: &Duration) -> Result<(), IntErrorKind>; + + /// # Errors + /// + /// Will return `IntErrorKind` if `duration` would underflow the internal `Duration`. 
fn local_sub(duration: &Duration) -> Result<(), IntErrorKind>; fn local_reset(); } @@ -86,9 +94,9 @@ mod stopped_clock { use std::num::IntErrorKind; use std::time::Duration; - use super::{DurationSinceUnixEpoch, StoppedClock, StoppedTime, Time, TimeNow}; + use super::{DurationSinceUnixEpoch, Stopped, StoppedTime, Time, TimeNow}; - impl Time for StoppedClock { + impl Time for Stopped { fn now() -> DurationSinceUnixEpoch { detail::FIXED_TIME.with(|time| { return *time.borrow(); @@ -96,21 +104,21 @@ mod stopped_clock { } } - impl TimeNow for StoppedClock {} + impl TimeNow for Stopped {} - impl StoppedTime for StoppedClock { + impl StoppedTime for Stopped { fn local_set(unix_time: &DurationSinceUnixEpoch) { detail::FIXED_TIME.with(|time| { *time.borrow_mut() = *unix_time; - }) + }); } fn local_set_to_app_start_time() { - Self::local_set(&detail::get_app_start_time()) + Self::local_set(&detail::get_app_start_time()); } fn local_set_to_system_time_now() { - Self::local_set(&detail::get_app_start_time()) + Self::local_set(&detail::get_app_start_time()); } fn local_add(duration: &Duration) -> Result<(), IntErrorKind> { @@ -140,7 +148,7 @@ mod stopped_clock { } fn local_reset() { - Self::local_set(&detail::get_default_fixed_time()) + Self::local_set(&detail::get_default_fixed_time()); } } @@ -149,58 +157,58 @@ mod stopped_clock { use std::thread; use std::time::Duration; - use crate::protocol::clock::{DurationSinceUnixEpoch, StoppedClock, StoppedTime, Time, TimeNow, WorkingClock}; + use crate::protocol::clock::{DurationSinceUnixEpoch, Stopped, StoppedTime, Time, TimeNow, Working}; #[test] fn it_should_default_to_zero_when_testing() { - assert_eq!(StoppedClock::now(), DurationSinceUnixEpoch::ZERO) + assert_eq!(Stopped::now(), DurationSinceUnixEpoch::ZERO); } #[test] fn it_should_possible_to_set_the_time() { // Check we start with ZERO. 
- assert_eq!(StoppedClock::now(), Duration::ZERO); + assert_eq!(Stopped::now(), Duration::ZERO); // Set to Current Time and Check - let timestamp = WorkingClock::now(); - StoppedClock::local_set(×tamp); - assert_eq!(StoppedClock::now(), timestamp); + let timestamp = Working::now(); + Stopped::local_set(×tamp); + assert_eq!(Stopped::now(), timestamp); // Elapse the Current Time and Check - StoppedClock::local_add(×tamp).unwrap(); - assert_eq!(StoppedClock::now(), timestamp + timestamp); + Stopped::local_add(×tamp).unwrap(); + assert_eq!(Stopped::now(), timestamp + timestamp); // Reset to ZERO and Check - StoppedClock::local_reset(); - assert_eq!(StoppedClock::now(), Duration::ZERO); + Stopped::local_reset(); + assert_eq!(Stopped::now(), Duration::ZERO); } #[test] fn it_should_default_to_zero_on_thread_exit() { - assert_eq!(StoppedClock::now(), Duration::ZERO); - let after5 = WorkingClock::add(&Duration::from_secs(5)).unwrap(); - StoppedClock::local_set(&after5); - assert_eq!(StoppedClock::now(), after5); + assert_eq!(Stopped::now(), Duration::ZERO); + let after5 = Working::add(&Duration::from_secs(5)).unwrap(); + Stopped::local_set(&after5); + assert_eq!(Stopped::now(), after5); let t = thread::spawn(move || { // each thread starts out with the initial value of ZERO - assert_eq!(StoppedClock::now(), Duration::ZERO); + assert_eq!(Stopped::now(), Duration::ZERO); // and gets set to the current time. 
- let timestamp = WorkingClock::now(); - StoppedClock::local_set(×tamp); - assert_eq!(StoppedClock::now(), timestamp); + let timestamp = Working::now(); + Stopped::local_set(×tamp); + assert_eq!(Stopped::now(), timestamp); }); // wait for the thread to complete and bail out on panic t.join().unwrap(); // we retain our original value of current time + 5sec despite the child thread - assert_eq!(StoppedClock::now(), after5); + assert_eq!(Stopped::now(), after5); // Reset to ZERO and Check - StoppedClock::local_reset(); - assert_eq!(StoppedClock::now(), Duration::ZERO); + Stopped::local_reset(); + assert_eq!(Stopped::now(), Duration::ZERO); } } diff --git a/src/protocol/clock/time_extent.rs b/src/protocol/clock/time_extent.rs index f975e9a04..0ff74400b 100644 --- a/src/protocol/clock/time_extent.rs +++ b/src/protocol/clock/time_extent.rs @@ -1,7 +1,7 @@ use std::num::{IntErrorKind, TryFromIntError}; use std::time::Duration; -use super::{ClockType, StoppedClock, TimeNow, WorkingClock}; +use super::{Stopped, TimeNow, Type, Working}; pub trait Extent: Sized + Default { type Base; @@ -156,11 +156,11 @@ where #[derive(Debug)] pub struct TimeExtentMaker {} -pub type WorkingTimeExtentMaker = TimeExtentMaker<{ ClockType::WorkingClock as usize }>; -pub type StoppedTimeExtentMaker = TimeExtentMaker<{ ClockType::StoppedClock as usize }>; +pub type WorkingTimeExtentMaker = TimeExtentMaker<{ Type::WorkingClock as usize }>; +pub type StoppedTimeExtentMaker = TimeExtentMaker<{ Type::StoppedClock as usize }>; -impl MakeTimeExtent for WorkingTimeExtentMaker {} -impl MakeTimeExtent for StoppedTimeExtentMaker {} +impl MakeTimeExtent for WorkingTimeExtentMaker {} +impl MakeTimeExtent for StoppedTimeExtentMaker {} #[cfg(not(test))] pub type DefaultTimeExtentMaker = WorkingTimeExtentMaker; @@ -175,7 +175,7 @@ mod test { checked_duration_from_nanos, DefaultTimeExtentMaker, Extent, MakeTimeExtent, TimeExtent, TimeExtentBase, TimeExtentMultiplier, TimeExtentProduct, MAX, ZERO, }; - use 
crate::protocol::clock::{DefaultClock, DurationSinceUnixEpoch, StoppedTime}; + use crate::protocol::clock::{Current, DurationSinceUnixEpoch, StoppedTime}; const TIME_EXTENT_VAL: TimeExtent = TimeExtent::from_sec(2, &239_812_388_723); @@ -443,7 +443,7 @@ mod test { } ); - DefaultClock::local_set(&DurationSinceUnixEpoch::from_secs(TIME_EXTENT_VAL.amount * 2)); + Current::local_set(&DurationSinceUnixEpoch::from_secs(TIME_EXTENT_VAL.amount * 2)); assert_eq!( DefaultTimeExtentMaker::now(&TIME_EXTENT_VAL.increment).unwrap().unwrap(), @@ -458,7 +458,7 @@ mod test { #[test] fn it_should_fail_if_amount_exceeds_bounds() { - DefaultClock::local_set(&DurationSinceUnixEpoch::MAX); + Current::local_set(&DurationSinceUnixEpoch::MAX); assert_eq!( DefaultTimeExtentMaker::now(&TimeExtentBase::from_millis(1)) .unwrap() @@ -493,13 +493,13 @@ mod test { None ); - DefaultClock::local_set(&DurationSinceUnixEpoch::MAX); + Current::local_set(&DurationSinceUnixEpoch::MAX); assert_eq!(DefaultTimeExtentMaker::now_after(&TimeExtentBase::ZERO, &Duration::MAX), None); } #[test] fn it_should_fail_if_amount_exceeds_bounds() { - DefaultClock::local_set(&DurationSinceUnixEpoch::MAX); + Current::local_set(&DurationSinceUnixEpoch::MAX); assert_eq!( DefaultTimeExtentMaker::now_after(&TimeExtentBase::from_millis(1), &Duration::ZERO) .unwrap() @@ -515,7 +515,7 @@ mod test { #[test] fn it_should_give_a_time_extent() { - DefaultClock::local_set(&DurationSinceUnixEpoch::MAX); + Current::local_set(&DurationSinceUnixEpoch::MAX); assert_eq!( DefaultTimeExtentMaker::now_before( @@ -546,7 +546,7 @@ mod test { #[test] fn it_should_fail_if_amount_exceeds_bounds() { - DefaultClock::local_set(&DurationSinceUnixEpoch::MAX); + Current::local_set(&DurationSinceUnixEpoch::MAX); assert_eq!( DefaultTimeExtentMaker::now_before(&TimeExtentBase::from_millis(1), &Duration::ZERO) .unwrap() diff --git a/src/tracker/key.rs b/src/tracker/key.rs index 881dac877..2b6e71223 100644 --- a/src/tracker/key.rs +++ b/src/tracker/key.rs @@ 
-6,7 +6,7 @@ use rand::distributions::Alphanumeric; use rand::{thread_rng, Rng}; use serde::Serialize; -use crate::protocol::clock::{DefaultClock, DurationSinceUnixEpoch, Time, TimeNow}; +use crate::protocol::clock::{Current, DurationSinceUnixEpoch, Time, TimeNow}; use crate::protocol::common::AUTH_KEY_LENGTH; #[must_use] @@ -21,12 +21,12 @@ pub fn generate_auth_key(lifetime: Duration) -> AuthKey { AuthKey { key, - valid_until: Some(DefaultClock::add(&lifetime).unwrap()), + valid_until: Some(Current::add(&lifetime).unwrap()), } } pub fn verify_auth_key(auth_key: &AuthKey) -> Result<(), Error> { - let current_time: DurationSinceUnixEpoch = DefaultClock::now(); + let current_time: DurationSinceUnixEpoch = Current::now(); if auth_key.valid_until.is_none() { return Err(Error::KeyInvalid); } @@ -88,7 +88,7 @@ impl From for Error { mod tests { use std::time::Duration; - use crate::protocol::clock::{DefaultClock, StoppedTime}; + use crate::protocol::clock::{Current, StoppedTime}; use crate::tracker::key; #[test] @@ -121,18 +121,18 @@ mod tests { #[test] fn generate_and_check_expired_auth_key() { // Set the time to the current time. - DefaultClock::local_set_to_system_time_now(); + Current::local_set_to_system_time_now(); // Make key that is valid for 19 seconds. let auth_key = key::generate_auth_key(Duration::from_secs(19)); // Mock the time has passed 10 sec. - DefaultClock::local_add(&Duration::from_secs(10)).unwrap(); + Current::local_add(&Duration::from_secs(10)).unwrap(); assert!(key::verify_auth_key(&auth_key).is_ok()); // Mock the time has passed another 10 sec. 
- DefaultClock::local_add(&Duration::from_secs(10)).unwrap(); + Current::local_add(&Duration::from_secs(10)).unwrap(); assert!(key::verify_auth_key(&auth_key).is_err()); } diff --git a/src/tracker/peer.rs b/src/tracker/peer.rs index a30723d00..115a2bfb9 100644 --- a/src/tracker/peer.rs +++ b/src/tracker/peer.rs @@ -5,7 +5,7 @@ use serde; use serde::Serialize; use crate::http::request::Announce; -use crate::protocol::clock::{DefaultClock, DurationSinceUnixEpoch, Time}; +use crate::protocol::clock::{Current, DurationSinceUnixEpoch, Time}; use crate::protocol::common::{AnnounceEventDef, NumberOfBytesDef, PeerId}; use crate::protocol::utils::ser_unix_time_value; @@ -37,7 +37,7 @@ impl TorrentPeer { TorrentPeer { peer_id: PeerId(announce_request.peer_id.0), peer_addr, - updated: DefaultClock::now(), + updated: Current::now(), uploaded: announce_request.bytes_uploaded, downloaded: announce_request.bytes_downloaded, left: announce_request.bytes_left, @@ -63,7 +63,7 @@ impl TorrentPeer { TorrentPeer { peer_id: announce_request.peer_id, peer_addr, - updated: DefaultClock::now(), + updated: Current::now(), uploaded: NumberOfBytes(announce_request.uploaded as i64), downloaded: NumberOfBytes(announce_request.downloaded as i64), left: NumberOfBytes(announce_request.left as i64), @@ -95,7 +95,7 @@ mod test { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; - use crate::protocol::clock::{DefaultClock, Time}; + use crate::protocol::clock::{Current, Time}; use crate::protocol::common::PeerId; use crate::tracker::peer::TorrentPeer; @@ -104,7 +104,7 @@ mod test { let torrent_peer = TorrentPeer { peer_id: PeerId(*b"-qB00000000000000000"), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), - updated: DefaultClock::now(), + updated: Current::now(), uploaded: NumberOfBytes(0), downloaded: NumberOfBytes(0), left: NumberOfBytes(0), diff --git a/src/tracker/torrent.rs b/src/tracker/torrent.rs index 46608643d..4007976c9 100644 --- a/src/tracker/torrent.rs +++ 
b/src/tracker/torrent.rs @@ -5,7 +5,7 @@ use aquatic_udp_protocol::AnnounceEvent; use serde::{Deserialize, Serialize}; use super::peer::TorrentPeer; -use crate::protocol::clock::{DefaultClock, TimeNow}; +use crate::protocol::clock::{Current, TimeNow}; use crate::protocol::common::{PeerId, MAX_SCRAPE_TORRENTS}; #[derive(Serialize, Deserialize, Clone, Debug)] @@ -80,7 +80,7 @@ impl TorrentEntry { } pub fn remove_inactive_peers(&mut self, max_peer_timeout: u32) { - let current_cutoff = DefaultClock::sub(&Duration::from_secs(u64::from(max_peer_timeout))).unwrap_or_default(); + let current_cutoff = Current::sub(&Duration::from_secs(u64::from(max_peer_timeout))).unwrap_or_default(); self.peers.retain(|_, peer| peer.updated > current_cutoff); } } @@ -116,7 +116,7 @@ mod tests { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; - use crate::protocol::clock::{DefaultClock, DurationSinceUnixEpoch, StoppedClock, StoppedTime, Time, WorkingClock}; + use crate::protocol::clock::{Current, DurationSinceUnixEpoch, Stopped, StoppedTime, Time, Working}; use crate::protocol::common::PeerId; use crate::tracker::peer::TorrentPeer; use crate::tracker::torrent::TorrentEntry; @@ -130,7 +130,7 @@ mod tests { let default_peer = TorrentPeer { peer_id: PeerId([0u8; 20]), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8080), - updated: DefaultClock::now(), + updated: Current::now(), uploaded: NumberOfBytes(0), downloaded: NumberOfBytes(0), left: NumberOfBytes(0), @@ -358,8 +358,8 @@ mod tests { let timeout = 120u32; - let now = WorkingClock::now(); - StoppedClock::local_set(&now); + let now = Working::now(); + Stopped::local_set(&now); let timeout_seconds_before_now = now.sub(Duration::from_secs(u64::from(timeout))); let inactive_peer = TorrentPeerBuilder::default() diff --git a/src/udp/connection_cookie.rs b/src/udp/connection_cookie.rs index b18940dfc..1b77d47e2 100644 --- a/src/udp/connection_cookie.rs +++ b/src/udp/connection_cookie.rs @@ -84,7 +84,7 @@ mod 
tests { use super::cookie_builder::{self}; use crate::protocol::clock::time_extent::{self, Extent}; - use crate::protocol::clock::{StoppedClock, StoppedTime}; + use crate::protocol::clock::{Stopped, StoppedTime}; use crate::udp::connection_cookie::{check_connection_cookie, make_connection_cookie, Cookie, COOKIE_LIFETIME}; // #![feature(const_socketaddr)] @@ -195,7 +195,7 @@ mod tests { let cookie = make_connection_cookie(&remote_address); - StoppedClock::local_add(&COOKIE_LIFETIME.increment).unwrap(); + Stopped::local_add(&COOKIE_LIFETIME.increment).unwrap(); let cookie_next = make_connection_cookie(&remote_address); @@ -217,7 +217,7 @@ mod tests { let cookie = make_connection_cookie(&remote_address); - StoppedClock::local_add(&COOKIE_LIFETIME.increment).unwrap(); + Stopped::local_add(&COOKIE_LIFETIME.increment).unwrap(); check_connection_cookie(&remote_address, &cookie).unwrap(); } @@ -228,7 +228,7 @@ mod tests { let cookie = make_connection_cookie(&remote_address); - StoppedClock::local_set(&COOKIE_LIFETIME.total().unwrap().unwrap()); + Stopped::local_set(&COOKIE_LIFETIME.total().unwrap().unwrap()); check_connection_cookie(&remote_address, &cookie).unwrap(); } @@ -240,7 +240,7 @@ mod tests { let cookie = make_connection_cookie(&remote_address); - StoppedClock::local_set(&COOKIE_LIFETIME.total_next().unwrap().unwrap()); + Stopped::local_set(&COOKIE_LIFETIME.total_next().unwrap().unwrap()); check_connection_cookie(&remote_address, &cookie).unwrap(); } diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index 81578e9c3..679a11ffc 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -253,7 +253,7 @@ mod tests { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; use crate::config::Configuration; - use crate::protocol::clock::{DefaultClock, Time}; + use crate::protocol::clock::{Current, Time}; use crate::protocol::common::PeerId; use crate::tracker::mode::TrackerMode; use crate::tracker::peer::TorrentPeer; @@ -309,7 +309,7 @@ mod tests { let 
default_peer = TorrentPeer { peer_id: PeerId([255u8; 20]), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), - updated: DefaultClock::now(), + updated: Current::now(), uploaded: NumberOfBytes(0), downloaded: NumberOfBytes(0), left: NumberOfBytes(0), From 5ea7c0d8d047316a90235c945607f16ec7eb77fe Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 22:30:17 +0100 Subject: [PATCH 21/45] clippy: fix src/protocol/clock/time_extent.rs --- src/protocol/clock/time_extent.rs | 156 ++++++++++++++---------------- src/udp/connection_cookie.rs | 2 +- 2 files changed, 74 insertions(+), 84 deletions(-) diff --git a/src/protocol/clock/time_extent.rs b/src/protocol/clock/time_extent.rs index 0ff74400b..b4c20cd70 100644 --- a/src/protocol/clock/time_extent.rs +++ b/src/protocol/clock/time_extent.rs @@ -10,37 +10,44 @@ pub trait Extent: Sized + Default { fn new(unit: &Self::Base, count: &Self::Multiplier) -> Self; + /// # Errors + /// + /// Will return `IntErrorKind` if `add` would overflow the internal `Duration`. fn increase(&self, add: Self::Multiplier) -> Result; + + /// # Errors + /// + /// Will return `IntErrorKind` if `sub` would underflow the internal `Duration`. 
fn decrease(&self, sub: Self::Multiplier) -> Result; fn total(&self) -> Option>; fn total_next(&self) -> Option>; } -pub type TimeExtentBase = Duration; -pub type TimeExtentMultiplier = u64; -pub type TimeExtentProduct = TimeExtentBase; +pub type Base = Duration; +pub type Multiplier = u64; +pub type Product = Base; #[derive(Debug, Default, Hash, PartialEq, Eq)] pub struct TimeExtent { - pub increment: TimeExtentBase, - pub amount: TimeExtentMultiplier, + pub increment: Base, + pub amount: Multiplier, } pub const ZERO: TimeExtent = TimeExtent { - increment: TimeExtentBase::ZERO, - amount: TimeExtentMultiplier::MIN, + increment: Base::ZERO, + amount: Multiplier::MIN, }; pub const MAX: TimeExtent = TimeExtent { - increment: TimeExtentBase::MAX, - amount: TimeExtentMultiplier::MAX, + increment: Base::MAX, + amount: Multiplier::MAX, }; impl TimeExtent { #[must_use] - pub const fn from_sec(seconds: u64, amount: &TimeExtentMultiplier) -> Self { + pub const fn from_sec(seconds: u64, amount: &Multiplier) -> Self { Self { - increment: TimeExtentBase::from_secs(seconds), + increment: Base::from_secs(seconds), amount: *amount, } } @@ -61,9 +68,9 @@ fn checked_duration_from_nanos(time: u128) -> Result } impl Extent for TimeExtent { - type Base = TimeExtentBase; - type Multiplier = TimeExtentMultiplier; - type Product = TimeExtentProduct; + type Base = Base; + type Multiplier = Multiplier; + type Product = Product; fn new(increment: &Self::Base, amount: &Self::Multiplier) -> Self { Self { @@ -107,60 +114,58 @@ impl Extent for TimeExtent { } } -pub trait MakeTimeExtent: Sized +pub trait Make: Sized where Clock: TimeNow, { #[must_use] - fn now(increment: &TimeExtentBase) -> Option> { + fn now(increment: &Base) -> Option> { Clock::now() .as_nanos() .checked_div((*increment).as_nanos()) - .map(|amount| match TimeExtentMultiplier::try_from(amount) { + .map(|amount| match Multiplier::try_from(amount) { Err(error) => Err(error), Ok(amount) => Ok(TimeExtent::new(increment, &amount)), 
}) } #[must_use] - fn now_after(increment: &TimeExtentBase, add_time: &Duration) -> Option> { + fn now_after(increment: &Base, add_time: &Duration) -> Option> { match Clock::add(add_time) { None => None, - Some(time) => { - time.as_nanos() - .checked_div(increment.as_nanos()) - .map(|amount| match TimeExtentMultiplier::try_from(amount) { - Err(error) => Err(error), - Ok(amount) => Ok(TimeExtent::new(increment, &amount)), - }) - } + Some(time) => time + .as_nanos() + .checked_div(increment.as_nanos()) + .map(|amount| match Multiplier::try_from(amount) { + Err(error) => Err(error), + Ok(amount) => Ok(TimeExtent::new(increment, &amount)), + }), } } #[must_use] - fn now_before(increment: &TimeExtentBase, sub_time: &Duration) -> Option> { + fn now_before(increment: &Base, sub_time: &Duration) -> Option> { match Clock::sub(sub_time) { None => None, - Some(time) => { - time.as_nanos() - .checked_div(increment.as_nanos()) - .map(|amount| match TimeExtentMultiplier::try_from(amount) { - Err(error) => Err(error), - Ok(amount) => Ok(TimeExtent::new(increment, &amount)), - }) - } + Some(time) => time + .as_nanos() + .checked_div(increment.as_nanos()) + .map(|amount| match Multiplier::try_from(amount) { + Err(error) => Err(error), + Ok(amount) => Ok(TimeExtent::new(increment, &amount)), + }), } } } #[derive(Debug)] -pub struct TimeExtentMaker {} +pub struct Maker {} -pub type WorkingTimeExtentMaker = TimeExtentMaker<{ Type::WorkingClock as usize }>; -pub type StoppedTimeExtentMaker = TimeExtentMaker<{ Type::StoppedClock as usize }>; +pub type WorkingTimeExtentMaker = Maker<{ Type::WorkingClock as usize }>; +pub type StoppedTimeExtentMaker = Maker<{ Type::StoppedClock as usize }>; -impl MakeTimeExtent for WorkingTimeExtentMaker {} -impl MakeTimeExtent for StoppedTimeExtentMaker {} +impl Make for WorkingTimeExtentMaker {} +impl Make for StoppedTimeExtentMaker {} #[cfg(not(test))] pub type DefaultTimeExtentMaker = WorkingTimeExtentMaker; @@ -172,8 +177,7 @@ pub type 
DefaultTimeExtentMaker = StoppedTimeExtentMaker; mod test { use crate::protocol::clock::time_extent::{ - checked_duration_from_nanos, DefaultTimeExtentMaker, Extent, MakeTimeExtent, TimeExtent, TimeExtentBase, - TimeExtentMultiplier, TimeExtentProduct, MAX, ZERO, + checked_duration_from_nanos, Base, DefaultTimeExtentMaker, Extent, Make, Multiplier, Product, TimeExtent, MAX, ZERO, }; use crate::protocol::clock::{Current, DurationSinceUnixEpoch, StoppedTime}; @@ -238,7 +242,7 @@ mod test { #[test] fn it_should_make_empty_for_zero() { - assert_eq!(TimeExtent::from_sec(u64::MIN, &TimeExtentMultiplier::MIN), ZERO); + assert_eq!(TimeExtent::from_sec(u64::MIN, &Multiplier::MIN), ZERO); } #[test] fn it_should_make_from_seconds() { @@ -254,15 +258,15 @@ mod test { #[test] fn it_should_make_empty_for_zero() { - assert_eq!(TimeExtent::new(&TimeExtentBase::ZERO, &TimeExtentMultiplier::MIN), ZERO); + assert_eq!(TimeExtent::new(&Base::ZERO, &Multiplier::MIN), ZERO); } #[test] fn it_should_make_new() { assert_eq!( - TimeExtent::new(&TimeExtentBase::from_millis(2), &TIME_EXTENT_VAL.amount), + TimeExtent::new(&Base::from_millis(2), &TIME_EXTENT_VAL.amount), TimeExtent { - increment: TimeExtentBase::from_millis(2), + increment: Base::from_millis(2), amount: TIME_EXTENT_VAL.amount } ); @@ -328,30 +332,27 @@ mod test { #[test] fn it_should_be_zero_for_zero() { - assert_eq!(ZERO.total().unwrap().unwrap(), TimeExtentProduct::ZERO); + assert_eq!(ZERO.total().unwrap().unwrap(), Product::ZERO); } #[test] fn it_should_give_a_total() { assert_eq!( TIME_EXTENT_VAL.total().unwrap().unwrap(), - TimeExtentProduct::from_secs(TIME_EXTENT_VAL.increment.as_secs() * TIME_EXTENT_VAL.amount) + Product::from_secs(TIME_EXTENT_VAL.increment.as_secs() * TIME_EXTENT_VAL.amount) ); assert_eq!( - TimeExtent::new(&TimeExtentBase::from_millis(2), &(TIME_EXTENT_VAL.amount * 1000)) + TimeExtent::new(&Base::from_millis(2), &(TIME_EXTENT_VAL.amount * 1000)) .total() .unwrap() .unwrap(), - 
TimeExtentProduct::from_secs(TIME_EXTENT_VAL.increment.as_secs() * TIME_EXTENT_VAL.amount) + Product::from_secs(TIME_EXTENT_VAL.increment.as_secs() * TIME_EXTENT_VAL.amount) ); assert_eq!( - TimeExtent::new(&TimeExtentBase::from_secs(1), &(u64::MAX)) - .total() - .unwrap() - .unwrap(), - TimeExtentProduct::from_secs(u64::MAX) + TimeExtent::new(&Base::from_secs(1), &(u64::MAX)).total().unwrap().unwrap(), + Product::from_secs(u64::MAX) ); } @@ -378,33 +379,33 @@ mod test { #[test] fn it_should_be_zero_for_zero() { - assert_eq!(ZERO.total_next().unwrap().unwrap(), TimeExtentProduct::ZERO); + assert_eq!(ZERO.total_next().unwrap().unwrap(), Product::ZERO); } #[test] fn it_should_give_a_total() { assert_eq!( TIME_EXTENT_VAL.total_next().unwrap().unwrap(), - TimeExtentProduct::from_secs(TIME_EXTENT_VAL.increment.as_secs() * (TIME_EXTENT_VAL.amount + 1)) + Product::from_secs(TIME_EXTENT_VAL.increment.as_secs() * (TIME_EXTENT_VAL.amount + 1)) ); assert_eq!( - TimeExtent::new(&TimeExtentBase::from_millis(2), &(TIME_EXTENT_VAL.amount * 1000)) + TimeExtent::new(&Base::from_millis(2), &(TIME_EXTENT_VAL.amount * 1000)) .total_next() .unwrap() .unwrap(), - TimeExtentProduct::new( + Product::new( TIME_EXTENT_VAL.increment.as_secs() * (TIME_EXTENT_VAL.amount), - TimeExtentBase::from_millis(2).as_nanos().try_into().unwrap() + Base::from_millis(2).as_nanos().try_into().unwrap() ) ); assert_eq!( - TimeExtent::new(&TimeExtentBase::from_secs(1), &(u64::MAX - 1)) + TimeExtent::new(&Base::from_secs(1), &(u64::MAX - 1)) .total_next() .unwrap() .unwrap(), - TimeExtentProduct::from_secs(u64::MAX) + Product::from_secs(u64::MAX) ); } @@ -453,16 +454,14 @@ mod test { #[test] fn it_should_fail_for_zero() { - assert_eq!(DefaultTimeExtentMaker::now(&TimeExtentBase::ZERO), None); + assert_eq!(DefaultTimeExtentMaker::now(&Base::ZERO), None); } #[test] fn it_should_fail_if_amount_exceeds_bounds() { Current::local_set(&DurationSinceUnixEpoch::MAX); assert_eq!( - 
DefaultTimeExtentMaker::now(&TimeExtentBase::from_millis(1)) - .unwrap() - .unwrap_err(), + DefaultTimeExtentMaker::now(&Base::from_millis(1)).unwrap().unwrap_err(), u64::try_from(u128::MAX).unwrap_err() ); } @@ -488,20 +487,17 @@ mod test { #[test] fn it_should_fail_for_zero() { - assert_eq!( - DefaultTimeExtentMaker::now_after(&TimeExtentBase::ZERO, &Duration::ZERO), - None - ); + assert_eq!(DefaultTimeExtentMaker::now_after(&Base::ZERO, &Duration::ZERO), None); Current::local_set(&DurationSinceUnixEpoch::MAX); - assert_eq!(DefaultTimeExtentMaker::now_after(&TimeExtentBase::ZERO, &Duration::MAX), None); + assert_eq!(DefaultTimeExtentMaker::now_after(&Base::ZERO, &Duration::MAX), None); } #[test] fn it_should_fail_if_amount_exceeds_bounds() { Current::local_set(&DurationSinceUnixEpoch::MAX); assert_eq!( - DefaultTimeExtentMaker::now_after(&TimeExtentBase::from_millis(1), &Duration::ZERO) + DefaultTimeExtentMaker::now_after(&Base::from_millis(1), &Duration::ZERO) .unwrap() .unwrap_err(), u64::try_from(u128::MAX).unwrap_err() @@ -519,13 +515,13 @@ mod test { assert_eq!( DefaultTimeExtentMaker::now_before( - &TimeExtentBase::from_secs(u64::from(u32::MAX)), + &Base::from_secs(u64::from(u32::MAX)), &Duration::from_secs(u64::from(u32::MAX)) ) .unwrap() .unwrap(), TimeExtent { - increment: TimeExtentBase::from_secs(u64::from(u32::MAX)), + increment: Base::from_secs(u64::from(u32::MAX)), amount: 4_294_967_296 } ); @@ -533,22 +529,16 @@ mod test { #[test] fn it_should_fail_for_zero() { - assert_eq!( - DefaultTimeExtentMaker::now_before(&TimeExtentBase::ZERO, &Duration::ZERO), - None - ); + assert_eq!(DefaultTimeExtentMaker::now_before(&Base::ZERO, &Duration::ZERO), None); - assert_eq!( - DefaultTimeExtentMaker::now_before(&TimeExtentBase::ZERO, &Duration::MAX), - None - ); + assert_eq!(DefaultTimeExtentMaker::now_before(&Base::ZERO, &Duration::MAX), None); } #[test] fn it_should_fail_if_amount_exceeds_bounds() { Current::local_set(&DurationSinceUnixEpoch::MAX); assert_eq!( 
- DefaultTimeExtentMaker::now_before(&TimeExtentBase::from_millis(1), &Duration::ZERO) + DefaultTimeExtentMaker::now_before(&Base::from_millis(1), &Duration::ZERO) .unwrap() .unwrap_err(), u64::try_from(u128::MAX).unwrap_err() diff --git a/src/udp/connection_cookie.rs b/src/udp/connection_cookie.rs index 1b77d47e2..5a1e564dd 100644 --- a/src/udp/connection_cookie.rs +++ b/src/udp/connection_cookie.rs @@ -54,7 +54,7 @@ mod cookie_builder { use std::net::SocketAddr; use super::{Cookie, SinceUnixEpochTimeExtent, COOKIE_LIFETIME}; - use crate::protocol::clock::time_extent::{DefaultTimeExtentMaker, Extent, MakeTimeExtent, TimeExtent}; + use crate::protocol::clock::time_extent::{DefaultTimeExtentMaker, Extent, Make, TimeExtent}; use crate::protocol::crypto::keys::seeds::{DefaultSeed, SeedKeeper}; pub(super) fn get_last_time_extent() -> SinceUnixEpochTimeExtent { From d03269ad9d48a776c7390f18c2a71efa784f1538 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 22:34:11 +0100 Subject: [PATCH 22/45] clippy: fix src/protocol/utils.rs --- src/protocol/utils.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/protocol/utils.rs b/src/protocol/utils.rs index ac20aa41e..cec02ceaf 100644 --- a/src/protocol/utils.rs +++ b/src/protocol/utils.rs @@ -1,5 +1,9 @@ use super::clock::DurationSinceUnixEpoch; +/// # Errors +/// +/// Will return `serde::Serializer::Error` if unable to serialize the `unix_time_value`. 
pub fn ser_unix_time_value(unix_time_value: &DurationSinceUnixEpoch, ser: S) -> Result { + #[allow(clippy::cast_possible_truncation)] ser.serialize_u64(unix_time_value.as_millis() as u64) } From efed1bc2c9729c1fcf434db0804570510047359a Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Thu, 24 Nov 2022 19:37:14 +0100 Subject: [PATCH 23/45] clippy: fix src/setup.rs --- src/setup.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/setup.rs b/src/setup.rs index 804b6258a..cfca5eb9e 100644 --- a/src/setup.rs +++ b/src/setup.rs @@ -35,7 +35,7 @@ pub async fn setup(config: &Configuration, tracker: Arc) -> Vec< udp_tracker_config.bind_address, config.mode ); } else { - jobs.push(udp_tracker::start_job(udp_tracker_config, tracker.clone())) + jobs.push(udp_tracker::start_job(udp_tracker_config, tracker.clone())); } } From 58e5909379203f32ee5d628414f68c143649ffea Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Thu, 24 Nov 2022 19:51:36 +0100 Subject: [PATCH 24/45] clippy: fix src/tracker/key.rs --- src/api/resources/auth_key_resource.rs | 18 ++++---- src/databases/database.rs | 8 ++-- src/databases/mysql.rs | 14 +++--- src/databases/sqlite.rs | 14 +++--- src/http/filters.rs | 8 ++-- src/http/handlers.rs | 8 ++-- src/tracker/key.rs | 63 +++++++++++++++----------- src/tracker/mod.rs | 14 +++--- tests/api.rs | 4 +- 9 files changed, 81 insertions(+), 70 deletions(-) diff --git a/src/api/resources/auth_key_resource.rs b/src/api/resources/auth_key_resource.rs index 9bcfca596..9b3cc9646 100644 --- a/src/api/resources/auth_key_resource.rs +++ b/src/api/resources/auth_key_resource.rs @@ -3,7 +3,7 @@ use std::convert::From; use serde::{Deserialize, Serialize}; use crate::protocol::clock::DurationSinceUnixEpoch; -use crate::tracker::key::AuthKey; +use crate::tracker::key::Auth; #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct AuthKeyResource { @@ -11,9 +11,9 @@ pub struct AuthKeyResource { pub valid_until: Option, } -impl From for AuthKey { 
+impl From for Auth { fn from(auth_key_resource: AuthKeyResource) -> Self { - AuthKey { + Auth { key: auth_key_resource.key, valid_until: auth_key_resource .valid_until @@ -22,8 +22,8 @@ impl From for AuthKey { } } -impl From for AuthKeyResource { - fn from(auth_key: AuthKey) -> Self { +impl From for AuthKeyResource { + fn from(auth_key: Auth) -> Self { AuthKeyResource { key: auth_key.key, valid_until: auth_key.valid_until.map(|valid_until| valid_until.as_secs()), @@ -37,7 +37,7 @@ mod tests { use super::AuthKeyResource; use crate::protocol::clock::{Current, TimeNow}; - use crate::tracker::key::AuthKey; + use crate::tracker::key::Auth; #[test] fn it_should_be_convertible_into_an_auth_key() { @@ -49,8 +49,8 @@ mod tests { }; assert_eq!( - AuthKey::from(auth_key_resource), - AuthKey { + Auth::from(auth_key_resource), + Auth { key: "IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line valid_until: Some(Current::add(&Duration::new(duration_in_secs, 0)).unwrap()) } @@ -61,7 +61,7 @@ mod tests { fn it_should_be_convertible_from_an_auth_key() { let duration_in_secs = 60; - let auth_key = AuthKey { + let auth_key = Auth { key: "IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line valid_until: Some(Current::add(&Duration::new(duration_in_secs, 0)).unwrap()), }; diff --git a/src/databases/database.rs b/src/databases/database.rs index 62105dee5..5186f96b3 100644 --- a/src/databases/database.rs +++ b/src/databases/database.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; use crate::databases::mysql::Mysql; use crate::databases::sqlite::Sqlite; use crate::protocol::common::InfoHash; -use crate::tracker::key::AuthKey; +use crate::tracker::key::Auth; #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] pub enum Drivers { @@ -42,7 +42,7 @@ pub trait Database: Sync + Send { async fn load_persistent_torrents(&self) -> Result, Error>; - async fn load_keys(&self) -> Result, Error>; + async fn load_keys(&self) -> Result, Error>; async fn 
load_whitelist(&self) -> Result, Error>; @@ -54,9 +54,9 @@ pub trait Database: Sync + Send { async fn remove_info_hash_from_whitelist(&self, info_hash: InfoHash) -> Result; - async fn get_key_from_keys(&self, key: &str) -> Result; + async fn get_key_from_keys(&self, key: &str) -> Result; - async fn add_key_to_keys(&self, auth_key: &AuthKey) -> Result; + async fn add_key_to_keys(&self, auth_key: &Auth) -> Result; async fn remove_key_from_keys(&self, key: &str) -> Result; diff --git a/src/databases/mysql.rs b/src/databases/mysql.rs index 5db358d5a..4fd00e31e 100644 --- a/src/databases/mysql.rs +++ b/src/databases/mysql.rs @@ -11,7 +11,7 @@ use r2d2_mysql::MysqlConnectionManager; use crate::databases::database; use crate::databases::database::{Database, Error}; use crate::protocol::common::{InfoHash, AUTH_KEY_LENGTH}; -use crate::tracker::key::AuthKey; +use crate::tracker::key::Auth; pub struct Mysql { pool: Pool, @@ -90,13 +90,13 @@ impl Database for Mysql { Ok(torrents) } - async fn load_keys(&self) -> Result, Error> { + async fn load_keys(&self) -> Result, Error> { let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; - let keys: Vec = conn + let keys: Vec = conn .query_map( "SELECT `key`, valid_until FROM `keys`", - |(key, valid_until): (String, i64)| AuthKey { + |(key, valid_until): (String, i64)| Auth { key, valid_until: Some(Duration::from_secs(valid_until.unsigned_abs())), }, @@ -182,14 +182,14 @@ impl Database for Mysql { } } - async fn get_key_from_keys(&self, key: &str) -> Result { + async fn get_key_from_keys(&self, key: &str) -> Result { let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; match conn .exec_first::<(String, i64), _, _>("SELECT `key`, valid_until FROM `keys` WHERE `key` = :key", params! { key }) .map_err(|_| database::Error::QueryReturnedNoRows)? 
{ - Some((key, valid_until)) => Ok(AuthKey { + Some((key, valid_until)) => Ok(Auth { key, valid_until: Some(Duration::from_secs(valid_until.unsigned_abs())), }), @@ -197,7 +197,7 @@ impl Database for Mysql { } } - async fn add_key_to_keys(&self, auth_key: &AuthKey) -> Result { + async fn add_key_to_keys(&self, auth_key: &Auth) -> Result { let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; let key = auth_key.key.to_string(); diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs index ee637049b..159da9922 100644 --- a/src/databases/sqlite.rs +++ b/src/databases/sqlite.rs @@ -9,7 +9,7 @@ use crate::databases::database; use crate::databases::database::{Database, Error}; use crate::protocol::clock::DurationSinceUnixEpoch; use crate::protocol::common::InfoHash; -use crate::tracker::key::AuthKey; +use crate::tracker::key::Auth; pub struct Sqlite { pool: Pool, @@ -78,7 +78,7 @@ impl Database for Sqlite { Ok(torrents) } - async fn load_keys(&self) -> Result, Error> { + async fn load_keys(&self) -> Result, Error> { let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; let mut stmt = conn.prepare("SELECT key, valid_until FROM keys")?; @@ -87,13 +87,13 @@ impl Database for Sqlite { let key = row.get(0)?; let valid_until: i64 = row.get(1)?; - Ok(AuthKey { + Ok(Auth { key, valid_until: Some(DurationSinceUnixEpoch::from_secs(valid_until.unsigned_abs())), }) })?; - let keys: Vec = keys_iter.filter_map(std::result::Result::ok).collect(); + let keys: Vec = keys_iter.filter_map(std::result::Result::ok).collect(); Ok(keys) } @@ -186,7 +186,7 @@ impl Database for Sqlite { } } - async fn get_key_from_keys(&self, key: &str) -> Result { + async fn get_key_from_keys(&self, key: &str) -> Result { let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; let mut stmt = conn.prepare("SELECT key, valid_until FROM keys WHERE key = ?")?; @@ -196,7 +196,7 @@ impl Database for Sqlite { let key: String = row.get(0).unwrap(); let 
valid_until: i64 = row.get(1).unwrap(); - Ok(AuthKey { + Ok(Auth { key, valid_until: Some(DurationSinceUnixEpoch::from_secs(valid_until.unsigned_abs())), }) @@ -205,7 +205,7 @@ impl Database for Sqlite { } } - async fn add_key_to_keys(&self, auth_key: &AuthKey) -> Result { + async fn add_key_to_keys(&self, auth_key: &Auth) -> Result { let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; match conn.execute( diff --git a/src/http/filters.rs b/src/http/filters.rs index f2e214e87..3375c781f 100644 --- a/src/http/filters.rs +++ b/src/http/filters.rs @@ -9,7 +9,7 @@ use super::errors::ServerError; use super::request::{Announce, AnnounceRequestQuery, Scrape}; use super::WebResult; use crate::protocol::common::{InfoHash, PeerId, MAX_SCRAPE_TORRENTS}; -use crate::tracker::key::AuthKey; +use crate::tracker::key::Auth; use crate::tracker::TorrentTracker; /// Pass Arc along @@ -32,10 +32,10 @@ pub fn with_peer_id() -> impl Filter + C /// Pass Arc along #[must_use] -pub fn with_auth_key() -> impl Filter,), Error = Infallible> + Clone { +pub fn with_auth_key() -> impl Filter,), Error = Infallible> + Clone { warp::path::param::() - .map(|key: String| AuthKey::from_string(&key)) - .or_else(|_| async { Ok::<(Option,), Infallible>((None,)) }) + .map(|key: String| Auth::from_string(&key)) + .or_else(|_| async { Ok::<(Option,), Infallible>((None,)) }) } /// Check for `PeerAddress` diff --git a/src/http/handlers.rs b/src/http/handlers.rs index 064047ba0..793de9ef5 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -12,7 +12,7 @@ use super::response::{self, Peer, ScrapeResponseEntry}; use super::{request, WebResult}; use crate::http::response::Error; use crate::protocol::common::InfoHash; -use crate::tracker::key::AuthKey; +use crate::tracker::key::Auth; use crate::tracker::peer::TorrentPeer; use crate::tracker::statistics::TrackerStatisticsEvent; use crate::tracker::torrent::{TorrentError, TorrentStats}; @@ -25,7 +25,7 @@ use 
crate::tracker::TorrentTracker; /// Will return `ServerError` that wraps the `TorrentError` if unable to `authenticate_request`. pub async fn authenticate( info_hash: &InfoHash, - auth_key: &Option, + auth_key: &Option, tracker: Arc, ) -> Result<(), ServerError> { tracker.authenticate_request(info_hash, auth_key).await.map_err(|e| match e { @@ -45,7 +45,7 @@ pub async fn authenticate( /// Will return `warp::Rejection` that wraps the `ServerError` if unable to `send_scrape_response`. pub async fn handle_announce( announce_request: request::Announce, - auth_key: Option, + auth_key: Option, tracker: Arc, ) -> WebResult { authenticate(&announce_request.info_hash, &auth_key, tracker.clone()) @@ -91,7 +91,7 @@ pub async fn handle_announce( /// Will return `warp::Rejection` that wraps the `ServerError` if unable to `send_scrape_response`. pub async fn handle_scrape( scrape_request: request::Scrape, - auth_key: Option, + auth_key: Option, tracker: Arc, ) -> WebResult { let mut files: HashMap = HashMap::new(); diff --git a/src/tracker/key.rs b/src/tracker/key.rs index 2b6e71223..673780ad0 100644 --- a/src/tracker/key.rs +++ b/src/tracker/key.rs @@ -10,7 +10,10 @@ use crate::protocol::clock::{Current, DurationSinceUnixEpoch, Time, TimeNow}; use crate::protocol::common::AUTH_KEY_LENGTH; #[must_use] -pub fn generate_auth_key(lifetime: Duration) -> AuthKey { +/// # Panics +/// +/// It would panic if the `lifetime: Duration` + Duration is more than `Duration::MAX`. 
+pub fn generate(lifetime: Duration) -> Auth { let key: String = thread_rng() .sample_iter(&Alphanumeric) .take(AUTH_KEY_LENGTH) @@ -19,49 +22,57 @@ pub fn generate_auth_key(lifetime: Duration) -> AuthKey { debug!("Generated key: {}, valid for: {:?} seconds", key, lifetime); - AuthKey { + Auth { key, valid_until: Some(Current::add(&lifetime).unwrap()), } } -pub fn verify_auth_key(auth_key: &AuthKey) -> Result<(), Error> { +/// # Errors +/// +/// Will return `Error::KeyExpired` if `auth_key.valid_until` is past the `current_time`. +/// +/// Will return `Error::KeyInvalid` if `auth_key.valid_until` is past the `None`. +pub fn verify(auth_key: &Auth) -> Result<(), Error> { let current_time: DurationSinceUnixEpoch = Current::now(); - if auth_key.valid_until.is_none() { - return Err(Error::KeyInvalid); - } - if auth_key.valid_until.unwrap() < current_time { - return Err(Error::KeyExpired); - } - Ok(()) + match auth_key.valid_until { + Some(valid_untill) => { + if valid_untill < current_time { + Err(Error::KeyExpired) + } else { + Ok(()) + } + } + None => Err(Error::KeyInvalid), + } } #[derive(Serialize, Debug, Eq, PartialEq, Clone)] -pub struct AuthKey { +pub struct Auth { pub key: String, pub valid_until: Option, } -impl AuthKey { +impl Auth { #[must_use] - pub fn from_buffer(key_buffer: [u8; AUTH_KEY_LENGTH]) -> Option { + pub fn from_buffer(key_buffer: [u8; AUTH_KEY_LENGTH]) -> Option { if let Ok(key) = String::from_utf8(Vec::from(key_buffer)) { - Some(AuthKey { key, valid_until: None }) + Some(Auth { key, valid_until: None }) } else { None } } #[must_use] - pub fn from_string(key: &str) -> Option { - if key.len() != AUTH_KEY_LENGTH { - None - } else { - Some(AuthKey { + pub fn from_string(key: &str) -> Option { + if key.len() == AUTH_KEY_LENGTH { + Some(Auth { key: key.to_string(), valid_until: None, }) + } else { + None } } } @@ -93,7 +104,7 @@ mod tests { #[test] fn auth_key_from_buffer() { - let auth_key = key::AuthKey::from_buffer([ + let auth_key = 
key::Auth::from_buffer([ 89, 90, 83, 108, 52, 108, 77, 90, 117, 112, 82, 117, 79, 112, 83, 82, 67, 51, 107, 114, 73, 75, 82, 53, 66, 80, 66, 49, 52, 110, 114, 74, ]); @@ -105,7 +116,7 @@ mod tests { #[test] fn auth_key_from_string() { let key_string = "YZSl4lMZupRuOpSRC3krIKR5BPB14nrJ"; - let auth_key = key::AuthKey::from_string(key_string); + let auth_key = key::Auth::from_string(key_string); assert!(auth_key.is_some()); assert_eq!(auth_key.unwrap().key, key_string); @@ -113,9 +124,9 @@ mod tests { #[test] fn generate_valid_auth_key() { - let auth_key = key::generate_auth_key(Duration::new(9999, 0)); + let auth_key = key::generate(Duration::new(9999, 0)); - assert!(key::verify_auth_key(&auth_key).is_ok()); + assert!(key::verify(&auth_key).is_ok()); } #[test] @@ -124,16 +135,16 @@ mod tests { Current::local_set_to_system_time_now(); // Make key that is valid for 19 seconds. - let auth_key = key::generate_auth_key(Duration::from_secs(19)); + let auth_key = key::generate(Duration::from_secs(19)); // Mock the time has passed 10 sec. Current::local_add(&Duration::from_secs(10)).unwrap(); - assert!(key::verify_auth_key(&auth_key).is_ok()); + assert!(key::verify(&auth_key).is_ok()); // Mock the time has passed another 10 sec. 
Current::local_add(&Duration::from_secs(10)).unwrap(); - assert!(key::verify_auth_key(&auth_key).is_err()); + assert!(key::verify(&auth_key).is_err()); } } diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index 680f2635d..1e24326da 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -20,13 +20,13 @@ use crate::config::Configuration; use crate::databases::database; use crate::databases::database::Database; use crate::protocol::common::InfoHash; -use crate::tracker::key::AuthKey; +use crate::tracker::key::Auth; use crate::tracker::torrent::{TorrentEntry, TorrentError, TorrentStats}; pub struct TorrentTracker { pub config: Arc, mode: TrackerMode, - keys: RwLock>, + keys: RwLock>, whitelist: RwLock>, torrents: RwLock>, stats_event_sender: Option>, @@ -66,8 +66,8 @@ impl TorrentTracker { self.mode == TrackerMode::Listed || self.mode == TrackerMode::PrivateListed } - pub async fn generate_auth_key(&self, lifetime: Duration) -> Result { - let auth_key = key::generate_auth_key(lifetime); + pub async fn generate_auth_key(&self, lifetime: Duration) -> Result { + let auth_key = key::generate(lifetime); self.database.add_key_to_keys(&auth_key).await?; self.keys.write().await.insert(auth_key.key.clone(), auth_key.clone()); Ok(auth_key) @@ -79,10 +79,10 @@ impl TorrentTracker { Ok(()) } - pub async fn verify_auth_key(&self, auth_key: &AuthKey) -> Result<(), key::Error> { + pub async fn verify_auth_key(&self, auth_key: &Auth) -> Result<(), key::Error> { match self.keys.read().await.get(&auth_key.key) { None => Err(key::Error::KeyInvalid), - Some(key) => key::verify_auth_key(key), + Some(key) => key::verify(key), } } @@ -145,7 +145,7 @@ impl TorrentTracker { Ok(()) } - pub async fn authenticate_request(&self, info_hash: &InfoHash, key: &Option) -> Result<(), TorrentError> { + pub async fn authenticate_request(&self, info_hash: &InfoHash, key: &Option) -> Result<(), TorrentError> { // no authentication needed in public mode if self.is_public() { return Ok(()); diff 
--git a/tests/api.rs b/tests/api.rs index 6cfcbc092..380ab90ca 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -23,7 +23,7 @@ mod tracker_api { use torrust_tracker::jobs::tracker_api; use torrust_tracker::protocol::clock::DurationSinceUnixEpoch; use torrust_tracker::protocol::common::{InfoHash, PeerId}; - use torrust_tracker::tracker::key::AuthKey; + use torrust_tracker::tracker::key::Auth; use torrust_tracker::tracker::peer::TorrentPeer; use torrust_tracker::tracker::statistics::StatsTracker; use torrust_tracker::tracker::TorrentTracker; @@ -45,7 +45,7 @@ mod tracker_api { assert!(api_server .tracker .unwrap() - .verify_auth_key(&AuthKey::from(auth_key)) + .verify_auth_key(&Auth::from(auth_key)) .await .is_ok()); } From 363b21a19814762321f4401886cc0744e7573eda Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Thu, 24 Nov 2022 19:52:30 +0100 Subject: [PATCH 25/45] clippy: fix src/tracker/mode.rs --- src/config.rs | 6 +++--- src/tracker/mod.rs | 9 ++++----- src/tracker/mode.rs | 2 +- src/udp/handlers.rs | 15 +++++++++------ 4 files changed, 17 insertions(+), 15 deletions(-) diff --git a/src/config.rs b/src/config.rs index 6eb83ad16..9f6ca7092 100644 --- a/src/config.rs +++ b/src/config.rs @@ -10,7 +10,7 @@ use serde_with::{serde_as, NoneAsEmptyString}; use {std, toml}; use crate::databases::database::Drivers; -use crate::tracker::mode::TrackerMode; +use crate::tracker::mode; #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] pub struct UdpTracker { @@ -41,7 +41,7 @@ pub struct HttpApi { #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] pub struct Configuration { pub log_level: Option, - pub mode: TrackerMode, + pub mode: mode::Tracker, pub db_driver: Drivers, pub db_path: String, pub announce_interval: u32, @@ -97,7 +97,7 @@ impl Configuration { pub fn default() -> Configuration { let mut configuration = Configuration { log_level: Option::from(String::from("info")), - mode: TrackerMode::Public, + mode: mode::Tracker::Public, db_driver: 
Drivers::Sqlite3, db_path: String::from("data.db"), announce_interval: 120, diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index 1e24326da..0312ac3e2 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -13,7 +13,6 @@ use std::time::Duration; use tokio::sync::mpsc::error::SendError; use tokio::sync::{RwLock, RwLockReadGuard}; -use self::mode::TrackerMode; use self::peer::TorrentPeer; use self::statistics::{StatsRepository, TrackerStatistics, TrackerStatisticsEvent, TrackerStatisticsEventSender}; use crate::config::Configuration; @@ -25,7 +24,7 @@ use crate::tracker::torrent::{TorrentEntry, TorrentError, TorrentStats}; pub struct TorrentTracker { pub config: Arc, - mode: TrackerMode, + mode: mode::Tracker, keys: RwLock>, whitelist: RwLock>, torrents: RwLock>, @@ -55,15 +54,15 @@ impl TorrentTracker { } pub fn is_public(&self) -> bool { - self.mode == TrackerMode::Public + self.mode == mode::Tracker::Public } pub fn is_private(&self) -> bool { - self.mode == TrackerMode::Private || self.mode == TrackerMode::PrivateListed + self.mode == mode::Tracker::Private || self.mode == mode::Tracker::PrivateListed } pub fn is_whitelisted(&self) -> bool { - self.mode == TrackerMode::Listed || self.mode == TrackerMode::PrivateListed + self.mode == mode::Tracker::Listed || self.mode == mode::Tracker::PrivateListed } pub async fn generate_auth_key(&self, lifetime: Duration) -> Result { diff --git a/src/tracker/mode.rs b/src/tracker/mode.rs index f444b4523..f1fff169e 100644 --- a/src/tracker/mode.rs +++ b/src/tracker/mode.rs @@ -2,7 +2,7 @@ use serde; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Copy, Clone, PartialEq, Eq, Debug)] -pub enum TrackerMode { +pub enum Tracker { // Will track every new info hash and serve every peer. 
#[serde(rename = "public")] Public, diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index 679a11ffc..ecf1beae0 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -255,27 +255,30 @@ mod tests { use crate::config::Configuration; use crate::protocol::clock::{Current, Time}; use crate::protocol::common::PeerId; - use crate::tracker::mode::TrackerMode; use crate::tracker::peer::TorrentPeer; use crate::tracker::statistics::StatsTracker; - use crate::tracker::TorrentTracker; + use crate::tracker::{mode, TorrentTracker}; fn default_tracker_config() -> Arc { Arc::new(Configuration::default()) } fn initialized_public_tracker() -> Arc { - let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(TrackerMode::Public).into()); + let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Tracker::Public).into()); initialized_tracker(configuration) } fn initialized_private_tracker() -> Arc { - let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(TrackerMode::Private).into()); + let configuration = Arc::new( + TrackerConfigurationBuilder::default() + .with_mode(mode::Tracker::Private) + .into(), + ); initialized_tracker(configuration) } fn initialized_whitelisted_tracker() -> Arc { - let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(TrackerMode::Listed).into()); + let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Tracker::Listed).into()); initialized_tracker(configuration) } @@ -355,7 +358,7 @@ mod tests { self } - pub fn with_mode(mut self, mode: TrackerMode) -> Self { + pub fn with_mode(mut self, mode: mode::Tracker) -> Self { self.configuration.mode = mode; self } From 0f281c3ed336ee17446d9ec56a167d82480d8c79 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Thu, 24 Nov 2022 20:07:55 +0100 Subject: [PATCH 26/45] clippy: fix src/tracker/peer.rs --- src/http/handlers.rs | 7 +++---- src/tracker/mod.rs | 7 +++---- 
src/tracker/peer.rs | 9 +++++---- src/tracker/torrent.rs | 20 ++++++++++---------- src/udp/handlers.rs | 14 ++++++-------- 5 files changed, 27 insertions(+), 30 deletions(-) diff --git a/src/http/handlers.rs b/src/http/handlers.rs index 793de9ef5..5dab842e2 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -13,10 +13,9 @@ use super::{request, WebResult}; use crate::http::response::Error; use crate::protocol::common::InfoHash; use crate::tracker::key::Auth; -use crate::tracker::peer::TorrentPeer; use crate::tracker::statistics::TrackerStatisticsEvent; use crate::tracker::torrent::{TorrentError, TorrentStats}; -use crate::tracker::TorrentTracker; +use crate::tracker::{peer, TorrentTracker}; /// Authenticate `InfoHash` using optional `AuthKey` /// @@ -55,7 +54,7 @@ pub async fn handle_announce( debug!("{:?}", announce_request); let peer = - TorrentPeer::from_http_announce_request(&announce_request, announce_request.peer_addr, tracker.config.get_ext_ip()); + peer::TorrentPeer::from_http_announce_request(&announce_request, announce_request.peer_addr, tracker.config.get_ext_ip()); let torrent_stats = tracker .update_torrent_with_peer_and_get_stats(&announce_request.info_hash, &peer) .await; @@ -143,7 +142,7 @@ pub async fn handle_scrape( fn send_announce_response( announce_request: &request::Announce, torrent_stats: &TorrentStats, - peers: &Vec, + peers: &Vec, interval: u32, interval_min: u32, ) -> WebResult { diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index 0312ac3e2..fab254663 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -13,7 +13,6 @@ use std::time::Duration; use tokio::sync::mpsc::error::SendError; use tokio::sync::{RwLock, RwLockReadGuard}; -use self::peer::TorrentPeer; use self::statistics::{StatsRepository, TrackerStatistics, TrackerStatisticsEvent, TrackerStatisticsEventSender}; use crate::config::Configuration; use crate::databases::database; @@ -195,7 +194,7 @@ impl TorrentTracker { } /// Get all torrent peers for a given 
torrent filtering out the peer with the client address - pub async fn get_torrent_peers(&self, info_hash: &InfoHash, client_addr: &SocketAddr) -> Vec { + pub async fn get_torrent_peers(&self, info_hash: &InfoHash, client_addr: &SocketAddr) -> Vec { let read_lock = self.torrents.read().await; match read_lock.get(info_hash) { @@ -205,7 +204,7 @@ impl TorrentTracker { } /// Get all torrent peers for a given torrent - pub async fn get_all_torrent_peers(&self, info_hash: &InfoHash) -> Vec { + pub async fn get_all_torrent_peers(&self, info_hash: &InfoHash) -> Vec { let read_lock = self.torrents.read().await; match read_lock.get(info_hash) { @@ -214,7 +213,7 @@ impl TorrentTracker { } } - pub async fn update_torrent_with_peer_and_get_stats(&self, info_hash: &InfoHash, peer: &TorrentPeer) -> TorrentStats { + pub async fn update_torrent_with_peer_and_get_stats(&self, info_hash: &InfoHash, peer: &peer::TorrentPeer) -> TorrentStats { let mut torrents = self.torrents.write().await; let torrent_entry = match torrents.entry(*info_hash) { diff --git a/src/tracker/peer.rs b/src/tracker/peer.rs index 115a2bfb9..d590b590d 100644 --- a/src/tracker/peer.rs +++ b/src/tracker/peer.rs @@ -60,13 +60,14 @@ impl TorrentPeer { AnnounceEvent::None }; + #[allow(clippy::cast_possible_truncation)] TorrentPeer { - peer_id: announce_request.peer_id, + peer_id: announce_request.peer_id.clone(), peer_addr, updated: Current::now(), - uploaded: NumberOfBytes(announce_request.uploaded as i64), - downloaded: NumberOfBytes(announce_request.downloaded as i64), - left: NumberOfBytes(announce_request.left as i64), + uploaded: NumberOfBytes(i128::from(announce_request.uploaded) as i64), + downloaded: NumberOfBytes(i128::from(announce_request.downloaded) as i64), + left: NumberOfBytes(i128::from(announce_request.left) as i64), event, } } diff --git a/src/tracker/torrent.rs b/src/tracker/torrent.rs index 4007976c9..734e7a66c 100644 --- a/src/tracker/torrent.rs +++ b/src/tracker/torrent.rs @@ -4,14 +4,14 @@ use 
std::time::Duration; use aquatic_udp_protocol::AnnounceEvent; use serde::{Deserialize, Serialize}; -use super::peer::TorrentPeer; +use super::peer; use crate::protocol::clock::{Current, TimeNow}; use crate::protocol::common::{PeerId, MAX_SCRAPE_TORRENTS}; #[derive(Serialize, Deserialize, Clone, Debug)] pub struct TorrentEntry { #[serde(skip)] - pub peers: std::collections::BTreeMap, + pub peers: std::collections::BTreeMap, pub completed: u32, } @@ -25,7 +25,7 @@ impl TorrentEntry { } // Update peer and return completed (times torrent has been downloaded) - pub fn update_peer(&mut self, peer: &TorrentPeer) -> bool { + pub fn update_peer(&mut self, peer: &peer::TorrentPeer) -> bool { let mut did_torrent_stats_change: bool = false; match peer.event { @@ -49,7 +49,7 @@ impl TorrentEntry { } #[must_use] - pub fn get_peers(&self, client_addr: Option<&SocketAddr>) -> Vec<&TorrentPeer> { + pub fn get_peers(&self, client_addr: Option<&SocketAddr>) -> Vec<&peer::TorrentPeer> { self.peers .values() .filter(|peer| match client_addr { @@ -118,16 +118,16 @@ mod tests { use crate::protocol::clock::{Current, DurationSinceUnixEpoch, Stopped, StoppedTime, Time, Working}; use crate::protocol::common::PeerId; - use crate::tracker::peer::TorrentPeer; + use crate::tracker::peer; use crate::tracker::torrent::TorrentEntry; struct TorrentPeerBuilder { - peer: TorrentPeer, + peer: peer::TorrentPeer, } impl TorrentPeerBuilder { pub fn default() -> TorrentPeerBuilder { - let default_peer = TorrentPeer { + let default_peer = peer::TorrentPeer { peer_id: PeerId([0u8; 20]), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8080), updated: Current::now(), @@ -164,14 +164,14 @@ mod tests { self } - pub fn into(self) -> TorrentPeer { + pub fn into(self) -> peer::TorrentPeer { self.peer } } /// A torrent seeder is a peer with 0 bytes left to download which /// has not announced it has stopped - fn a_torrent_seeder() -> TorrentPeer { + fn a_torrent_seeder() -> peer::TorrentPeer { 
TorrentPeerBuilder::default() .with_number_of_bytes_left(0) .with_event_completed() @@ -180,7 +180,7 @@ mod tests { /// A torrent leecher is a peer that is not a seeder. /// Leecher: left > 0 OR event = Stopped - fn a_torrent_leecher() -> TorrentPeer { + fn a_torrent_leecher() -> peer::TorrentPeer { TorrentPeerBuilder::default() .with_number_of_bytes_left(1) .with_event_completed() diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index ecf1beae0..d1ae72924 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -8,10 +8,9 @@ use aquatic_udp_protocol::{ use super::connection_cookie::{check_connection_cookie, from_connection_id, into_connection_id, make_connection_cookie}; use crate::protocol::common::{InfoHash, MAX_SCRAPE_TORRENTS}; -use crate::tracker::peer::TorrentPeer; use crate::tracker::statistics::TrackerStatisticsEvent; use crate::tracker::torrent::TorrentError; -use crate::tracker::TorrentTracker; +use crate::tracker::{peer, TorrentTracker}; use crate::udp::errors::ServerError; use crate::udp::request::AnnounceRequestWrapper; @@ -106,7 +105,7 @@ pub async fn handle_announce( authenticate(&wrapped_announce_request.info_hash, tracker.clone()).await?; - let peer = TorrentPeer::from_udp_announce_request( + let peer = peer::TorrentPeer::from_udp_announce_request( &wrapped_announce_request.announce_request, remote_addr.ip(), tracker.config.get_ext_ip(), @@ -255,9 +254,8 @@ mod tests { use crate::config::Configuration; use crate::protocol::clock::{Current, Time}; use crate::protocol::common::PeerId; - use crate::tracker::peer::TorrentPeer; use crate::tracker::statistics::StatsTracker; - use crate::tracker::{mode, TorrentTracker}; + use crate::tracker::{mode, peer, TorrentTracker}; fn default_tracker_config() -> Arc { Arc::new(Configuration::default()) @@ -304,12 +302,12 @@ mod tests { } struct TorrentPeerBuilder { - peer: TorrentPeer, + peer: peer::TorrentPeer, } impl TorrentPeerBuilder { pub fn default() -> TorrentPeerBuilder { - let default_peer = 
TorrentPeer { + let default_peer = peer::TorrentPeer { peer_id: PeerId([255u8; 20]), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), updated: Current::now(), @@ -336,7 +334,7 @@ mod tests { self } - pub fn into(self) -> TorrentPeer { + pub fn into(self) -> peer::TorrentPeer { self.peer } } From 3c2232388fa0bd79fe6b2e9068e6c2375202e5ed Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Thu, 24 Nov 2022 21:49:03 +0100 Subject: [PATCH 27/45] clippy: fix src/tracker/statistics.rs --- src/tracker/mod.rs | 4 ++-- src/tracker/statistics.rs | 16 ++++++++-------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index fab254663..5877c7f21 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -13,7 +13,7 @@ use std::time::Duration; use tokio::sync::mpsc::error::SendError; use tokio::sync::{RwLock, RwLockReadGuard}; -use self::statistics::{StatsRepository, TrackerStatistics, TrackerStatisticsEvent, TrackerStatisticsEventSender}; +use self::statistics::{Metrics, StatsRepository, TrackerStatisticsEvent, TrackerStatisticsEventSender}; use crate::config::Configuration; use crate::databases::database; use crate::databases::database::Database; @@ -244,7 +244,7 @@ impl TorrentTracker { self.torrents.read().await } - pub async fn get_stats(&self) -> RwLockReadGuard<'_, TrackerStatistics> { + pub async fn get_stats(&self) -> RwLockReadGuard<'_, Metrics> { self.stats_repository.get_stats().await } diff --git a/src/tracker/statistics.rs b/src/tracker/statistics.rs index 609f036aa..fd830fa88 100644 --- a/src/tracker/statistics.rs +++ b/src/tracker/statistics.rs @@ -25,7 +25,7 @@ pub enum TrackerStatisticsEvent { } #[derive(Debug)] -pub struct TrackerStatistics { +pub struct Metrics { pub tcp4_connections_handled: u64, pub tcp4_announces_handled: u64, pub tcp4_scrapes_handled: u64, @@ -40,13 +40,13 @@ pub struct TrackerStatistics { pub udp6_scrapes_handled: u64, } -impl Default for TrackerStatistics { 
+impl Default for Metrics { fn default() -> Self { Self::new() } } -impl TrackerStatistics { +impl Metrics { #[must_use] pub fn new() -> Self { Self { @@ -177,7 +177,7 @@ impl TrackerStatisticsEventSender for StatsEventSender { #[derive(Clone)] pub struct StatsRepository { - pub stats: Arc>, + pub stats: Arc>, } impl Default for StatsRepository { @@ -190,11 +190,11 @@ impl StatsRepository { #[must_use] pub fn new() -> Self { Self { - stats: Arc::new(RwLock::new(TrackerStatistics::new())), + stats: Arc::new(RwLock::new(Metrics::new())), } } - pub async fn get_stats(&self) -> RwLockReadGuard<'_, TrackerStatistics> { + pub async fn get_stats(&self) -> RwLockReadGuard<'_, Metrics> { self.stats.read().await } @@ -275,7 +275,7 @@ impl StatsRepository { mod tests { mod stats_tracker { - use crate::tracker::statistics::{StatsTracker, TrackerStatistics, TrackerStatisticsEvent}; + use crate::tracker::statistics::{Metrics, StatsTracker, TrackerStatisticsEvent}; #[tokio::test] async fn should_contain_the_tracker_statistics() { @@ -283,7 +283,7 @@ mod tests { let stats = stats_tracker.stats_repository.get_stats().await; - assert_eq!(stats.tcp4_announces_handled, TrackerStatistics::new().tcp4_announces_handled); + assert_eq!(stats.tcp4_announces_handled, Metrics::new().tcp4_announces_handled); } #[tokio::test] From 143a11e18b5970420a391f848449f19dab7f82da Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Thu, 24 Nov 2022 22:18:48 +0100 Subject: [PATCH 28/45] refactor: cleanup src/tracker/statistics.rs naming --- src/http/handlers.rs | 11 ++- src/stats.rs | 8 +-- src/tracker/mod.rs | 13 ++-- src/tracker/statistics.rs | 137 +++++++++++++++++++------------------- src/udp/handlers.rs | 75 ++++++++++----------- tests/api.rs | 4 +- tests/udp.rs | 4 +- 7 files changed, 120 insertions(+), 132 deletions(-) diff --git a/src/http/handlers.rs b/src/http/handlers.rs index 5dab842e2..5256ef291 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -13,9 +13,8 @@ use 
super::{request, WebResult}; use crate::http::response::Error; use crate::protocol::common::InfoHash; use crate::tracker::key::Auth; -use crate::tracker::statistics::TrackerStatisticsEvent; use crate::tracker::torrent::{TorrentError, TorrentStats}; -use crate::tracker::{peer, TorrentTracker}; +use crate::tracker::{peer, statistics, TorrentTracker}; /// Authenticate `InfoHash` using optional `AuthKey` /// @@ -67,10 +66,10 @@ pub async fn handle_announce( // send stats event match announce_request.peer_addr { IpAddr::V4(_) => { - tracker.send_stats_event(TrackerStatisticsEvent::Tcp4Announce).await; + tracker.send_stats_event(statistics::Event::Tcp4Announce).await; } IpAddr::V6(_) => { - tracker.send_stats_event(TrackerStatisticsEvent::Tcp6Announce).await; + tracker.send_stats_event(statistics::Event::Tcp6Announce).await; } } @@ -127,10 +126,10 @@ pub async fn handle_scrape( // send stats event match scrape_request.peer_addr { IpAddr::V4(_) => { - tracker.send_stats_event(TrackerStatisticsEvent::Tcp4Scrape).await; + tracker.send_stats_event(statistics::Event::Tcp4Scrape).await; } IpAddr::V6(_) => { - tracker.send_stats_event(TrackerStatisticsEvent::Tcp6Scrape).await; + tracker.send_stats_event(statistics::Event::Tcp6Scrape).await; } } diff --git a/src/stats.rs b/src/stats.rs index 738909934..8f87c01a3 100644 --- a/src/stats.rs +++ b/src/stats.rs @@ -1,16 +1,16 @@ -use crate::tracker::statistics::{StatsRepository, StatsTracker, TrackerStatisticsEventSender}; +use crate::tracker::statistics; #[must_use] -pub fn setup_statistics(tracker_usage_statistics: bool) -> (Option>, StatsRepository) { +pub fn setup_statistics(tracker_usage_statistics: bool) -> (Option>, statistics::Repo) { let mut stats_event_sender = None; - let mut stats_tracker = StatsTracker::new(); + let mut stats_tracker = statistics::Keeper::new(); if tracker_usage_statistics { stats_event_sender = Some(stats_tracker.run_event_listener()); } - (stats_event_sender, stats_tracker.stats_repository) + 
(stats_event_sender, stats_tracker.repository) } #[cfg(test)] diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index 5877c7f21..d0ab3e514 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -13,7 +13,6 @@ use std::time::Duration; use tokio::sync::mpsc::error::SendError; use tokio::sync::{RwLock, RwLockReadGuard}; -use self::statistics::{Metrics, StatsRepository, TrackerStatisticsEvent, TrackerStatisticsEventSender}; use crate::config::Configuration; use crate::databases::database; use crate::databases::database::Database; @@ -27,16 +26,16 @@ pub struct TorrentTracker { keys: RwLock>, whitelist: RwLock>, torrents: RwLock>, - stats_event_sender: Option>, - stats_repository: StatsRepository, + stats_event_sender: Option>, + stats_repository: statistics::Repo, database: Box, } impl TorrentTracker { pub fn new( config: Arc, - stats_event_sender: Option>, - stats_repository: StatsRepository, + stats_event_sender: Option>, + stats_repository: statistics::Repo, ) -> Result { let database = database::connect(&config.db_driver, &config.db_path)?; @@ -244,11 +243,11 @@ impl TorrentTracker { self.torrents.read().await } - pub async fn get_stats(&self) -> RwLockReadGuard<'_, Metrics> { + pub async fn get_stats(&self) -> RwLockReadGuard<'_, statistics::Metrics> { self.stats_repository.get_stats().await } - pub async fn send_stats_event(&self, event: TrackerStatisticsEvent) -> Option>> { + pub async fn send_stats_event(&self, event: statistics::Event) -> Option>> { match &self.stats_event_sender { None => None, Some(stats_event_sender) => stats_event_sender.send_event(event).await, diff --git a/src/tracker/statistics.rs b/src/tracker/statistics.rs index fd830fa88..b787e1267 100644 --- a/src/tracker/statistics.rs +++ b/src/tracker/statistics.rs @@ -5,13 +5,12 @@ use log::debug; #[cfg(test)] use mockall::{automock, predicate::str}; use tokio::sync::mpsc::error::SendError; -use tokio::sync::mpsc::{Receiver, Sender}; use tokio::sync::{mpsc, RwLock, RwLockReadGuard}; const 
CHANNEL_BUFFER_SIZE: usize = 65_535; #[derive(Debug, PartialEq, Eq)] -pub enum TrackerStatisticsEvent { +pub enum Event { Tcp4Announce, Tcp4Scrape, Tcp6Announce, @@ -66,91 +65,89 @@ impl Metrics { } } -pub struct StatsTracker { - pub stats_repository: StatsRepository, +pub struct Keeper { + pub repository: Repo, } -impl Default for StatsTracker { +impl Default for Keeper { fn default() -> Self { Self::new() } } -impl StatsTracker { +impl Keeper { #[must_use] pub fn new() -> Self { - Self { - stats_repository: StatsRepository::new(), - } + Self { repository: Repo::new() } } #[must_use] - pub fn new_active_instance() -> (Box, StatsRepository) { + pub fn new_active_instance() -> (Box, Repo) { let mut stats_tracker = Self::new(); let stats_event_sender = stats_tracker.run_event_listener(); - (stats_event_sender, stats_tracker.stats_repository) + (stats_event_sender, stats_tracker.repository) } - pub fn run_event_listener(&mut self) -> Box { - let (sender, receiver) = mpsc::channel::(CHANNEL_BUFFER_SIZE); + pub fn run_event_listener(&mut self) -> Box { + let (sender, receiver) = mpsc::channel::(CHANNEL_BUFFER_SIZE); - let stats_repository = self.stats_repository.clone(); + let stats_repository = self.repository.clone(); tokio::spawn(async move { event_listener(receiver, stats_repository).await }); - Box::new(StatsEventSender { sender }) + Box::new(Sender { sender }) } } -async fn event_listener(mut receiver: Receiver, stats_repository: StatsRepository) { +async fn event_listener(mut receiver: mpsc::Receiver, stats_repository: Repo) { while let Some(event) = receiver.recv().await { event_handler(event, &stats_repository).await; } } -async fn event_handler(event: TrackerStatisticsEvent, stats_repository: &StatsRepository) { +async fn event_handler(event: Event, stats_repository: &Repo) { match event { // TCP4 - TrackerStatisticsEvent::Tcp4Announce => { + Event::Tcp4Announce => { stats_repository.increase_tcp4_announces().await; 
stats_repository.increase_tcp4_connections().await; } - TrackerStatisticsEvent::Tcp4Scrape => { + Event::Tcp4Scrape => { stats_repository.increase_tcp4_scrapes().await; stats_repository.increase_tcp4_connections().await; } // TCP6 - TrackerStatisticsEvent::Tcp6Announce => { + Event::Tcp6Announce => { stats_repository.increase_tcp6_announces().await; stats_repository.increase_tcp6_connections().await; } - TrackerStatisticsEvent::Tcp6Scrape => { + Event::Tcp6Scrape => { stats_repository.increase_tcp6_scrapes().await; stats_repository.increase_tcp6_connections().await; } // UDP4 - TrackerStatisticsEvent::Udp4Connect => { + Event::Udp4Connect => { stats_repository.increase_udp4_connections().await; } - TrackerStatisticsEvent::Udp4Announce => { + Event::Udp4Announce => { stats_repository.increase_udp4_announces().await; } - TrackerStatisticsEvent::Udp4Scrape => { + Event::Udp4Scrape => { stats_repository.increase_udp4_scrapes().await; } // UDP6 - TrackerStatisticsEvent::Udp6Connect => { + Event::Udp6Connect => { stats_repository.increase_udp6_connections().await; } - TrackerStatisticsEvent::Udp6Announce => { + Event::Udp6Announce => { stats_repository.increase_udp6_announces().await; } - TrackerStatisticsEvent::Udp6Scrape => { + Event::Udp6Scrape => { stats_repository.increase_udp6_scrapes().await; } } @@ -160,33 +157,33 @@ async fn event_handler(event: TrackerStatisticsEvent, stats_repository: &StatsRe #[async_trait] #[cfg_attr(test, automock)] -pub trait TrackerStatisticsEventSender: Sync + Send { - async fn send_event(&self, event: TrackerStatisticsEvent) -> Option>>; +pub trait EventSender: Sync + Send { + async fn send_event(&self, event: Event) -> Option>>; } -pub struct StatsEventSender { - sender: Sender, +pub struct Sender { + sender: mpsc::Sender, } #[async_trait] -impl TrackerStatisticsEventSender for StatsEventSender { - async fn send_event(&self, event: TrackerStatisticsEvent) -> Option>> { +impl EventSender for Sender { + async fn send_event(&self, event: 
Event) -> Option>> { Some(self.sender.send(event).await) } } #[derive(Clone)] -pub struct StatsRepository { +pub struct Repo { pub stats: Arc>, } -impl Default for StatsRepository { +impl Default for Repo { fn default() -> Self { Self::new() } } -impl StatsRepository { +impl Repo { #[must_use] pub fn new() -> Self { Self { @@ -275,37 +272,37 @@ impl StatsRepository { mod tests { mod stats_tracker { - use crate::tracker::statistics::{Metrics, StatsTracker, TrackerStatisticsEvent}; + use crate::tracker::statistics::{Event, Keeper, Metrics}; #[tokio::test] async fn should_contain_the_tracker_statistics() { - let stats_tracker = StatsTracker::new(); + let stats_tracker = Keeper::new(); - let stats = stats_tracker.stats_repository.get_stats().await; + let stats = stats_tracker.repository.get_stats().await; assert_eq!(stats.tcp4_announces_handled, Metrics::new().tcp4_announces_handled); } #[tokio::test] async fn should_create_an_event_sender_to_send_statistical_events() { - let mut stats_tracker = StatsTracker::new(); + let mut stats_tracker = Keeper::new(); let event_sender = stats_tracker.run_event_listener(); - let result = event_sender.send_event(TrackerStatisticsEvent::Udp4Connect).await; + let result = event_sender.send_event(Event::Udp4Connect).await; assert!(result.is_some()); } } mod event_handler { - use crate::tracker::statistics::{event_handler, StatsRepository, TrackerStatisticsEvent}; + use crate::tracker::statistics::{event_handler, Event, Repo}; #[tokio::test] async fn should_increase_the_tcp4_announces_counter_when_it_receives_a_tcp4_announce_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Tcp4Announce, &stats_repository).await; + event_handler(Event::Tcp4Announce, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -314,9 +311,9 @@ mod tests { #[tokio::test] async fn 
should_increase_the_tcp4_connections_counter_when_it_receives_a_tcp4_announce_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Tcp4Announce, &stats_repository).await; + event_handler(Event::Tcp4Announce, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -325,9 +322,9 @@ mod tests { #[tokio::test] async fn should_increase_the_tcp4_scrapes_counter_when_it_receives_a_tcp4_scrape_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Tcp4Scrape, &stats_repository).await; + event_handler(Event::Tcp4Scrape, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -336,9 +333,9 @@ mod tests { #[tokio::test] async fn should_increase_the_tcp4_connections_counter_when_it_receives_a_tcp4_scrape_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Tcp4Scrape, &stats_repository).await; + event_handler(Event::Tcp4Scrape, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -347,9 +344,9 @@ mod tests { #[tokio::test] async fn should_increase_the_tcp6_announces_counter_when_it_receives_a_tcp6_announce_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Tcp6Announce, &stats_repository).await; + event_handler(Event::Tcp6Announce, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -358,9 +355,9 @@ mod tests { #[tokio::test] async fn should_increase_the_tcp6_connections_counter_when_it_receives_a_tcp6_announce_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Tcp6Announce, &stats_repository).await; + event_handler(Event::Tcp6Announce, &stats_repository).await; let stats = 
stats_repository.get_stats().await; @@ -369,9 +366,9 @@ mod tests { #[tokio::test] async fn should_increase_the_tcp6_scrapes_counter_when_it_receives_a_tcp6_scrape_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Tcp6Scrape, &stats_repository).await; + event_handler(Event::Tcp6Scrape, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -380,9 +377,9 @@ mod tests { #[tokio::test] async fn should_increase_the_tcp6_connections_counter_when_it_receives_a_tcp6_scrape_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Tcp6Scrape, &stats_repository).await; + event_handler(Event::Tcp6Scrape, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -391,9 +388,9 @@ mod tests { #[tokio::test] async fn should_increase_the_udp4_connections_counter_when_it_receives_a_udp4_connect_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Udp4Connect, &stats_repository).await; + event_handler(Event::Udp4Connect, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -402,9 +399,9 @@ mod tests { #[tokio::test] async fn should_increase_the_udp4_announces_counter_when_it_receives_a_udp4_announce_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Udp4Announce, &stats_repository).await; + event_handler(Event::Udp4Announce, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -413,9 +410,9 @@ mod tests { #[tokio::test] async fn should_increase_the_udp4_scrapes_counter_when_it_receives_a_udp4_scrape_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Udp4Scrape, 
&stats_repository).await; + event_handler(Event::Udp4Scrape, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -424,9 +421,9 @@ mod tests { #[tokio::test] async fn should_increase_the_udp6_connections_counter_when_it_receives_a_udp6_connect_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Udp6Connect, &stats_repository).await; + event_handler(Event::Udp6Connect, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -435,9 +432,9 @@ mod tests { #[tokio::test] async fn should_increase_the_udp6_announces_counter_when_it_receives_a_udp6_announce_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Udp6Announce, &stats_repository).await; + event_handler(Event::Udp6Announce, &stats_repository).await; let stats = stats_repository.get_stats().await; @@ -446,9 +443,9 @@ mod tests { #[tokio::test] async fn should_increase_the_udp6_scrapes_counter_when_it_receives_a_udp6_scrape_event() { - let stats_repository = StatsRepository::new(); + let stats_repository = Repo::new(); - event_handler(TrackerStatisticsEvent::Udp6Scrape, &stats_repository).await; + event_handler(Event::Udp6Scrape, &stats_repository).await; let stats = stats_repository.get_stats().await; diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index d1ae72924..f460c1b7e 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -8,9 +8,8 @@ use aquatic_udp_protocol::{ use super::connection_cookie::{check_connection_cookie, from_connection_id, into_connection_id, make_connection_cookie}; use crate::protocol::common::{InfoHash, MAX_SCRAPE_TORRENTS}; -use crate::tracker::statistics::TrackerStatisticsEvent; use crate::tracker::torrent::TorrentError; -use crate::tracker::{peer, TorrentTracker}; +use crate::tracker::{peer, statistics, TorrentTracker}; use crate::udp::errors::ServerError; use 
crate::udp::request::AnnounceRequestWrapper; @@ -79,10 +78,10 @@ pub async fn handle_connect( // send stats event match remote_addr { SocketAddr::V4(_) => { - tracker.send_stats_event(TrackerStatisticsEvent::Udp4Connect).await; + tracker.send_stats_event(statistics::Event::Udp4Connect).await; } SocketAddr::V6(_) => { - tracker.send_stats_event(TrackerStatisticsEvent::Udp6Connect).await; + tracker.send_stats_event(statistics::Event::Udp6Connect).await; } } @@ -167,10 +166,10 @@ pub async fn handle_announce( // send stats event match remote_addr { SocketAddr::V4(_) => { - tracker.send_stats_event(TrackerStatisticsEvent::Udp4Announce).await; + tracker.send_stats_event(statistics::Event::Udp4Announce).await; } SocketAddr::V6(_) => { - tracker.send_stats_event(TrackerStatisticsEvent::Udp6Announce).await; + tracker.send_stats_event(statistics::Event::Udp6Announce).await; } } @@ -223,10 +222,10 @@ pub async fn handle_scrape( // send stats event match remote_addr { SocketAddr::V4(_) => { - tracker.send_stats_event(TrackerStatisticsEvent::Udp4Scrape).await; + tracker.send_stats_event(statistics::Event::Udp4Scrape).await; } SocketAddr::V6(_) => { - tracker.send_stats_event(TrackerStatisticsEvent::Udp6Scrape).await; + tracker.send_stats_event(statistics::Event::Udp6Scrape).await; } } @@ -254,8 +253,7 @@ mod tests { use crate::config::Configuration; use crate::protocol::clock::{Current, Time}; use crate::protocol::common::PeerId; - use crate::tracker::statistics::StatsTracker; - use crate::tracker::{mode, peer, TorrentTracker}; + use crate::tracker::{mode, peer, statistics, TorrentTracker}; fn default_tracker_config() -> Arc { Arc::new(Configuration::default()) @@ -281,7 +279,7 @@ mod tests { } fn initialized_tracker(configuration: Arc) -> Arc { - let (stats_event_sender, stats_repository) = StatsTracker::new_active_instance(); + let (stats_event_sender, stats_repository) = statistics::Keeper::new_active_instance(); Arc::new(TorrentTracker::new(configuration, 
Some(stats_event_sender), stats_repository).unwrap()) } @@ -375,8 +373,7 @@ mod tests { use mockall::predicate::eq; use super::{default_tracker_config, sample_ipv4_socket_address, sample_ipv6_remote_addr}; - use crate::tracker::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; - use crate::tracker::TorrentTracker; + use crate::tracker::{statistics, TorrentTracker}; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; use crate::udp::handlers::handle_connect; use crate::udp::handlers::tests::{initialized_public_tracker, sample_ipv4_remote_addr}; @@ -427,10 +424,10 @@ mod tests { #[tokio::test] async fn it_should_send_the_upd4_connect_event_when_a_client_tries_to_connect_using_a_ip4_socket_address() { - let mut stats_event_sender_mock = MockTrackerStatisticsEventSender::new(); + let mut stats_event_sender_mock = statistics::MockEventSender::new(); stats_event_sender_mock .expect_send_event() - .with(eq(TrackerStatisticsEvent::Udp4Connect)) + .with(eq(statistics::Event::Udp4Connect)) .times(1) .returning(|_| Box::pin(future::ready(Some(Ok(()))))); let stats_event_sender = Box::new(stats_event_sender_mock); @@ -438,7 +435,7 @@ mod tests { let client_socket_address = sample_ipv4_socket_address(); let torrent_tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), StatsRepository::new()).unwrap(), + TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); handle_connect(client_socket_address, &sample_connect_request(), torrent_tracker) .await @@ -447,16 +444,16 @@ mod tests { #[tokio::test] async fn it_should_send_the_upd6_connect_event_when_a_client_tries_to_connect_using_a_ip6_socket_address() { - let mut stats_event_sender_mock = MockTrackerStatisticsEventSender::new(); + let mut stats_event_sender_mock = statistics::MockEventSender::new(); stats_event_sender_mock .expect_send_event() - 
.with(eq(TrackerStatisticsEvent::Udp6Connect)) + .with(eq(statistics::Event::Udp6Connect)) .times(1) .returning(|_| Box::pin(future::ready(Some(Ok(()))))); let stats_event_sender = Box::new(stats_event_sender_mock); let torrent_tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), StatsRepository::new()).unwrap(), + TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); handle_connect(sample_ipv6_remote_addr(), &sample_connect_request(), torrent_tracker) .await @@ -548,8 +545,7 @@ mod tests { use mockall::predicate::eq; use crate::protocol::common::PeerId; - use crate::tracker::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; - use crate::tracker::TorrentTracker; + use crate::tracker::{statistics, TorrentTracker}; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; @@ -692,16 +688,16 @@ mod tests { #[tokio::test] async fn should_send_the_upd4_announce_event() { - let mut stats_event_sender_mock = MockTrackerStatisticsEventSender::new(); + let mut stats_event_sender_mock = statistics::MockEventSender::new(); stats_event_sender_mock .expect_send_event() - .with(eq(TrackerStatisticsEvent::Udp4Announce)) + .with(eq(statistics::Event::Udp4Announce)) .times(1) .returning(|_| Box::pin(future::ready(Some(Ok(()))))); let stats_event_sender = Box::new(stats_event_sender_mock); let tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), StatsRepository::new()).unwrap(), + TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); handle_announce( @@ -773,8 +769,7 @@ mod tests { use mockall::predicate::eq; use crate::protocol::common::PeerId; - use crate::tracker::statistics::{MockTrackerStatisticsEventSender, 
StatsRepository, TrackerStatisticsEvent}; - use crate::tracker::TorrentTracker; + use crate::tracker::{statistics, TorrentTracker}; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; @@ -924,16 +919,16 @@ mod tests { #[tokio::test] async fn should_send_the_upd6_announce_event() { - let mut stats_event_sender_mock = MockTrackerStatisticsEventSender::new(); + let mut stats_event_sender_mock = statistics::MockEventSender::new(); stats_event_sender_mock .expect_send_event() - .with(eq(TrackerStatisticsEvent::Udp6Announce)) + .with(eq(statistics::Event::Udp6Announce)) .times(1) .returning(|_| Box::pin(future::ready(Some(Ok(()))))); let stats_event_sender = Box::new(stats_event_sender_mock); let tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), StatsRepository::new()).unwrap(), + TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); let remote_addr = sample_ipv6_remote_addr(); @@ -953,7 +948,7 @@ mod tests { use aquatic_udp_protocol::{InfoHash as AquaticInfoHash, PeerId as AquaticPeerId}; - use crate::tracker::statistics::StatsTracker; + use crate::tracker::statistics::Keeper; use crate::tracker::TorrentTracker; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; use crate::udp::handlers::handle_announce; @@ -963,7 +958,7 @@ mod tests { #[tokio::test] async fn the_peer_ip_should_be_changed_to_the_external_ip_in_the_tracker_configuration() { let configuration = Arc::new(TrackerConfigurationBuilder::default().with_external_ip("::126.0.0.1").into()); - let (stats_event_sender, stats_repository) = StatsTracker::new_active_instance(); + let (stats_event_sender, stats_repository) = Keeper::new_active_instance(); let tracker = Arc::new(TorrentTracker::new(configuration, Some(stats_event_sender), 
stats_repository).unwrap()); @@ -1233,24 +1228,23 @@ mod tests { use mockall::predicate::eq; use super::sample_scrape_request; - use crate::tracker::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; - use crate::tracker::TorrentTracker; + use crate::tracker::{statistics, TorrentTracker}; use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::{default_tracker_config, sample_ipv4_remote_addr}; #[tokio::test] async fn should_send_the_upd4_scrape_event() { - let mut stats_event_sender_mock = MockTrackerStatisticsEventSender::new(); + let mut stats_event_sender_mock = statistics::MockEventSender::new(); stats_event_sender_mock .expect_send_event() - .with(eq(TrackerStatisticsEvent::Udp4Scrape)) + .with(eq(statistics::Event::Udp4Scrape)) .times(1) .returning(|_| Box::pin(future::ready(Some(Ok(()))))); let stats_event_sender = Box::new(stats_event_sender_mock); let remote_addr = sample_ipv4_remote_addr(); let tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), StatsRepository::new()).unwrap(), + TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); handle_scrape(remote_addr, &sample_scrape_request(&remote_addr), tracker.clone()) @@ -1266,24 +1260,23 @@ mod tests { use mockall::predicate::eq; use super::sample_scrape_request; - use crate::tracker::statistics::{MockTrackerStatisticsEventSender, StatsRepository, TrackerStatisticsEvent}; - use crate::tracker::TorrentTracker; + use crate::tracker::{statistics, TorrentTracker}; use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::{default_tracker_config, sample_ipv6_remote_addr}; #[tokio::test] async fn should_send_the_upd6_scrape_event() { - let mut stats_event_sender_mock = MockTrackerStatisticsEventSender::new(); + let mut stats_event_sender_mock = statistics::MockEventSender::new(); stats_event_sender_mock .expect_send_event() - 
.with(eq(TrackerStatisticsEvent::Udp6Scrape)) + .with(eq(statistics::Event::Udp6Scrape)) .times(1) .returning(|_| Box::pin(future::ready(Some(Ok(()))))); let stats_event_sender = Box::new(stats_event_sender_mock); let remote_addr = sample_ipv6_remote_addr(); let tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), StatsRepository::new()).unwrap(), + TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); handle_scrape(remote_addr, &sample_scrape_request(&remote_addr), tracker.clone()) diff --git a/tests/api.rs b/tests/api.rs index 380ab90ca..a4043fe7c 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -25,7 +25,7 @@ mod tracker_api { use torrust_tracker::protocol::common::{InfoHash, PeerId}; use torrust_tracker::tracker::key::Auth; use torrust_tracker::tracker::peer::TorrentPeer; - use torrust_tracker::tracker::statistics::StatsTracker; + use torrust_tracker::tracker::statistics::Keeper; use torrust_tracker::tracker::TorrentTracker; use torrust_tracker::{ephemeral_instance_keys, logging, static_time}; @@ -276,7 +276,7 @@ mod tracker_api { lazy_static::initialize(&ephemeral_instance_keys::RANDOM_SEED); // Initialize stats tracker - let (stats_event_sender, stats_repository) = StatsTracker::new_active_instance(); + let (stats_event_sender, stats_repository) = Keeper::new_active_instance(); // Initialize Torrust tracker let tracker = match TorrentTracker::new(configuration.clone(), Some(stats_event_sender), stats_repository) { diff --git a/tests/udp.rs b/tests/udp.rs index b365c4fc6..fabca137a 100644 --- a/tests/udp.rs +++ b/tests/udp.rs @@ -20,7 +20,7 @@ mod udp_tracker_server { use tokio::task::JoinHandle; use torrust_tracker::config::Configuration; use torrust_tracker::jobs::udp_tracker; - use torrust_tracker::tracker::statistics::StatsTracker; + use torrust_tracker::tracker::statistics::Keeper; use torrust_tracker::tracker::TorrentTracker; use 
torrust_tracker::udp::MAX_PACKET_SIZE; use torrust_tracker::{ephemeral_instance_keys, logging, static_time}; @@ -58,7 +58,7 @@ mod udp_tracker_server { lazy_static::initialize(&ephemeral_instance_keys::RANDOM_SEED); // Initialize stats tracker - let (stats_event_sender, stats_repository) = StatsTracker::new_active_instance(); + let (stats_event_sender, stats_repository) = Keeper::new_active_instance(); // Initialize Torrust tracker let tracker = match TorrentTracker::new(configuration.clone(), Some(stats_event_sender), stats_repository) { From 81e72da07fe4359384c058540c3c33c6353b1ad2 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Thu, 24 Nov 2022 22:26:32 +0100 Subject: [PATCH 29/45] clippy: fix src/tracker/torrent.rs --- src/http/handlers.rs | 19 ++++++++--------- src/tracker/mod.rs | 21 +++++++++---------- src/tracker/torrent.rs | 47 +++++++++++++++++++++--------------------- src/udp/handlers.rs | 15 +++++++------- 4 files changed, 50 insertions(+), 52 deletions(-) diff --git a/src/http/handlers.rs b/src/http/handlers.rs index 5256ef291..ace20ada9 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -13,26 +13,25 @@ use super::{request, WebResult}; use crate::http::response::Error; use crate::protocol::common::InfoHash; use crate::tracker::key::Auth; -use crate::tracker::torrent::{TorrentError, TorrentStats}; -use crate::tracker::{peer, statistics, TorrentTracker}; +use crate::tracker::{peer, statistics, torrent, TorrentTracker}; /// Authenticate `InfoHash` using optional `AuthKey` /// /// # Errors /// -/// Will return `ServerError` that wraps the `TorrentError` if unable to `authenticate_request`. +/// Will return `ServerError` that wraps the `Error` if unable to `authenticate_request`. 
pub async fn authenticate( info_hash: &InfoHash, auth_key: &Option, tracker: Arc, ) -> Result<(), ServerError> { tracker.authenticate_request(info_hash, auth_key).await.map_err(|e| match e { - TorrentError::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, - TorrentError::PeerNotAuthenticated => ServerError::PeerNotAuthenticated, - TorrentError::PeerKeyNotValid => ServerError::PeerKeyNotValid, - TorrentError::NoPeersFound => ServerError::NoPeersFound, - TorrentError::CouldNotSendResponse => ServerError::InternalServerError, - TorrentError::InvalidInfoHash => ServerError::InvalidInfoHash, + torrent::Error::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, + torrent::Error::PeerNotAuthenticated => ServerError::PeerNotAuthenticated, + torrent::Error::PeerKeyNotValid => ServerError::PeerKeyNotValid, + torrent::Error::NoPeersFound => ServerError::NoPeersFound, + torrent::Error::CouldNotSendResponse => ServerError::InternalServerError, + torrent::Error::InvalidInfoHash => ServerError::InvalidInfoHash, }) } @@ -140,7 +139,7 @@ pub async fn handle_scrape( #[allow(clippy::ptr_arg)] fn send_announce_response( announce_request: &request::Announce, - torrent_stats: &TorrentStats, + torrent_stats: &torrent::Stats, peers: &Vec, interval: u32, interval_min: u32, diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index d0ab3e514..b3a7ab6d6 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -18,14 +18,13 @@ use crate::databases::database; use crate::databases::database::Database; use crate::protocol::common::InfoHash; use crate::tracker::key::Auth; -use crate::tracker::torrent::{TorrentEntry, TorrentError, TorrentStats}; pub struct TorrentTracker { pub config: Arc, mode: mode::Tracker, keys: RwLock>, whitelist: RwLock>, - torrents: RwLock>, + torrents: RwLock>, stats_event_sender: Option>, stats_repository: statistics::Repo, database: Box, @@ -142,7 +141,7 @@ impl TorrentTracker { Ok(()) } - pub async fn authenticate_request(&self, info_hash: 
&InfoHash, key: &Option) -> Result<(), TorrentError> { + pub async fn authenticate_request(&self, info_hash: &InfoHash, key: &Option) -> Result<(), torrent::Error> { // no authentication needed in public mode if self.is_public() { return Ok(()); @@ -153,18 +152,18 @@ impl TorrentTracker { match key { Some(key) => { if self.verify_auth_key(key).await.is_err() { - return Err(TorrentError::PeerKeyNotValid); + return Err(torrent::Error::PeerKeyNotValid); } } None => { - return Err(TorrentError::PeerNotAuthenticated); + return Err(torrent::Error::PeerNotAuthenticated); } } } // check if info_hash is whitelisted if self.is_whitelisted() && !self.is_info_hash_whitelisted(info_hash).await { - return Err(TorrentError::TorrentNotWhitelisted); + return Err(torrent::Error::TorrentNotWhitelisted); } Ok(()) @@ -181,7 +180,7 @@ impl TorrentTracker { continue; } - let torrent_entry = TorrentEntry { + let torrent_entry = torrent::Entry { peers: Default::default(), completed, }; @@ -212,11 +211,11 @@ impl TorrentTracker { } } - pub async fn update_torrent_with_peer_and_get_stats(&self, info_hash: &InfoHash, peer: &peer::TorrentPeer) -> TorrentStats { + pub async fn update_torrent_with_peer_and_get_stats(&self, info_hash: &InfoHash, peer: &peer::TorrentPeer) -> torrent::Stats { let mut torrents = self.torrents.write().await; let torrent_entry = match torrents.entry(*info_hash) { - Entry::Vacant(vacant) => vacant.insert(TorrentEntry::new()), + Entry::Vacant(vacant) => vacant.insert(torrent::Entry::new()), Entry::Occupied(entry) => entry.into_mut(), }; @@ -232,14 +231,14 @@ impl TorrentTracker { let (seeders, completed, leechers) = torrent_entry.get_stats(); - TorrentStats { + torrent::Stats { completed, seeders, leechers, } } - pub async fn get_torrents(&self) -> RwLockReadGuard<'_, BTreeMap> { + pub async fn get_torrents(&self) -> RwLockReadGuard<'_, BTreeMap> { self.torrents.read().await } diff --git a/src/tracker/torrent.rs b/src/tracker/torrent.rs index 734e7a66c..21bcfc513 100644 
--- a/src/tracker/torrent.rs +++ b/src/tracker/torrent.rs @@ -9,16 +9,16 @@ use crate::protocol::clock::{Current, TimeNow}; use crate::protocol::common::{PeerId, MAX_SCRAPE_TORRENTS}; #[derive(Serialize, Deserialize, Clone, Debug)] -pub struct TorrentEntry { +pub struct Entry { #[serde(skip)] pub peers: std::collections::BTreeMap, pub completed: u32, } -impl TorrentEntry { +impl Entry { #[must_use] - pub fn new() -> TorrentEntry { - TorrentEntry { + pub fn new() -> Entry { + Entry { peers: std::collections::BTreeMap::new(), completed: 0, } @@ -72,6 +72,7 @@ impl TorrentEntry { .collect() } + #[allow(clippy::cast_possible_truncation)] #[must_use] pub fn get_stats(&self) -> (u32, u32, u32) { let seeders: u32 = self.peers.values().filter(|peer| peer.is_seeder()).count() as u32; @@ -85,21 +86,21 @@ impl TorrentEntry { } } -impl Default for TorrentEntry { +impl Default for Entry { fn default() -> Self { Self::new() } } #[derive(Debug)] -pub struct TorrentStats { +pub struct Stats { pub completed: u32, pub seeders: u32, pub leechers: u32, } #[derive(Debug)] -pub enum TorrentError { +pub enum Error { TorrentNotWhitelisted, PeerNotAuthenticated, PeerKeyNotValid, @@ -119,7 +120,7 @@ mod tests { use crate::protocol::clock::{Current, DurationSinceUnixEpoch, Stopped, StoppedTime, Time, Working}; use crate::protocol::common::PeerId; use crate::tracker::peer; - use crate::tracker::torrent::TorrentEntry; + use crate::tracker::torrent::Entry; struct TorrentPeerBuilder { peer: peer::TorrentPeer, @@ -189,14 +190,14 @@ mod tests { #[test] fn the_default_torrent_entry_should_contain_an_empty_list_of_peers() { - let torrent_entry = TorrentEntry::new(); + let torrent_entry = Entry::new(); assert_eq!(torrent_entry.get_peers(None).len(), 0); } #[test] fn a_new_peer_can_be_added_to_a_torrent_entry() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let torrent_peer = TorrentPeerBuilder::default().into(); torrent_entry.update_peer(&torrent_peer); // 
Add the peer @@ -207,7 +208,7 @@ mod tests { #[test] fn a_torrent_entry_should_contain_the_list_of_peers_that_were_added_to_the_torrent() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let torrent_peer = TorrentPeerBuilder::default().into(); torrent_entry.update_peer(&torrent_peer); // Add the peer @@ -217,7 +218,7 @@ mod tests { #[test] fn a_peer_can_be_updated_in_a_torrent_entry() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let mut torrent_peer = TorrentPeerBuilder::default().into(); torrent_entry.update_peer(&torrent_peer); // Add the peer @@ -229,7 +230,7 @@ mod tests { #[test] fn a_peer_should_be_removed_from_a_torrent_entry_when_the_peer_announces_it_has_stopped() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let mut torrent_peer = TorrentPeerBuilder::default().into(); torrent_entry.update_peer(&torrent_peer); // Add the peer @@ -241,7 +242,7 @@ mod tests { #[test] fn torrent_stats_change_when_a_previously_known_peer_announces_it_has_completed_the_torrent() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let mut torrent_peer = TorrentPeerBuilder::default().into(); torrent_entry.update_peer(&torrent_peer); // Add the peer @@ -255,7 +256,7 @@ mod tests { #[test] fn torrent_stats_should_not_change_when_a_peer_announces_it_has_completed_the_torrent_if_it_is_the_first_announce_from_the_peer( ) { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let torrent_peer_announcing_complete_event = TorrentPeerBuilder::default().with_event_completed().into(); // Add a peer that did not exist before in the entry @@ -266,7 +267,7 @@ mod tests { #[test] fn a_torrent_entry_could_filter_out_peers_with_a_given_socket_address() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let peer_socket_address = 
SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8080); let torrent_peer = TorrentPeerBuilder::default().with_peer_address(peer_socket_address).into(); torrent_entry.update_peer(&torrent_peer); // Add peer @@ -287,7 +288,7 @@ mod tests { #[test] fn the_tracker_should_limit_the_list_of_peers_to_74_when_clients_scrape_torrents() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); // We add one more peer than the scrape limit for peer_number in 1..=74 + 1 { @@ -299,12 +300,12 @@ mod tests { let peers = torrent_entry.get_peers(None); - assert_eq!(peers.len(), 74) + assert_eq!(peers.len(), 74); } #[test] fn torrent_stats_should_have_the_number_of_seeders_for_a_torrent() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let torrent_seeder = a_torrent_seeder(); torrent_entry.update_peer(&torrent_seeder); // Add seeder @@ -314,7 +315,7 @@ mod tests { #[test] fn torrent_stats_should_have_the_number_of_leechers_for_a_torrent() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let torrent_leecher = a_torrent_leecher(); torrent_entry.update_peer(&torrent_leecher); // Add leecher @@ -325,7 +326,7 @@ mod tests { #[test] fn torrent_stats_should_have_the_number_of_peers_that_having_announced_at_least_two_events_the_latest_one_is_the_completed_event( ) { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let mut torrent_peer = TorrentPeerBuilder::default().into(); torrent_entry.update_peer(&torrent_peer); // Add the peer @@ -340,7 +341,7 @@ mod tests { #[test] fn torrent_stats_should_not_include_a_peer_in_the_completed_counter_if_the_peer_has_announced_only_one_event() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let torrent_peer_announcing_complete_event = TorrentPeerBuilder::default().with_event_completed().into(); // Announce "Completed" torrent download event. 
@@ -354,7 +355,7 @@ mod tests { #[test] fn a_torrent_entry_should_remove_a_peer_not_updated_after_a_timeout_in_seconds() { - let mut torrent_entry = TorrentEntry::new(); + let mut torrent_entry = Entry::new(); let timeout = 120u32; diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index f460c1b7e..632180a92 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -8,8 +8,7 @@ use aquatic_udp_protocol::{ use super::connection_cookie::{check_connection_cookie, from_connection_id, into_connection_id, make_connection_cookie}; use crate::protocol::common::{InfoHash, MAX_SCRAPE_TORRENTS}; -use crate::tracker::torrent::TorrentError; -use crate::tracker::{peer, statistics, TorrentTracker}; +use crate::tracker::{peer, statistics, torrent, TorrentTracker}; use crate::udp::errors::ServerError; use crate::udp::request::AnnounceRequestWrapper; @@ -18,12 +17,12 @@ pub async fn authenticate(info_hash: &InfoHash, tracker: Arc) -> Ok(_) => Ok(()), Err(e) => { let err = match e { - TorrentError::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, - TorrentError::PeerNotAuthenticated => ServerError::PeerNotAuthenticated, - TorrentError::PeerKeyNotValid => ServerError::PeerKeyNotValid, - TorrentError::NoPeersFound => ServerError::NoPeersFound, - TorrentError::CouldNotSendResponse => ServerError::InternalServerError, - TorrentError::InvalidInfoHash => ServerError::InvalidInfoHash, + torrent::Error::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, + torrent::Error::PeerNotAuthenticated => ServerError::PeerNotAuthenticated, + torrent::Error::PeerKeyNotValid => ServerError::PeerKeyNotValid, + torrent::Error::NoPeersFound => ServerError::NoPeersFound, + torrent::Error::CouldNotSendResponse => ServerError::InternalServerError, + torrent::Error::InvalidInfoHash => ServerError::InvalidInfoHash, }; Err(err) From 78221b6174a435ea815f9aefa1f84aa6d9ea4f8e Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Thu, 24 Nov 2022 22:41:46 +0100 Subject: [PATCH 30/45] 
clippy: fix src/tracker/mod.rs --- src/api/server.rs | 22 ++++++------- src/http/filters.rs | 10 +++--- src/http/handlers.rs | 8 ++--- src/http/routes.rs | 8 ++--- src/http/server.rs | 8 ++--- src/jobs/http_tracker.rs | 4 +-- src/jobs/torrent_cleanup.rs | 4 +-- src/jobs/tracker_api.rs | 4 +-- src/jobs/udp_tracker.rs | 4 +-- src/main.rs | 5 ++- src/setup.rs | 4 +-- src/tracker/mod.rs | 64 +++++++++++++++++++++++++++-------- src/udp/handlers.rs | 66 ++++++++++++++++++------------------- src/udp/server.rs | 6 ++-- tests/api.rs | 7 ++-- tests/udp.rs | 5 ++- 16 files changed, 133 insertions(+), 96 deletions(-) diff --git a/src/api/server.rs b/src/api/server.rs index f9e5bc368..fac25e297 100644 --- a/src/api/server.rs +++ b/src/api/server.rs @@ -11,7 +11,7 @@ use super::resources::auth_key_resource::AuthKeyResource; use super::resources::stats_resource::StatsResource; use super::resources::torrent_resource::{TorrentListItemResource, TorrentPeerResource, TorrentResource}; use crate::protocol::common::InfoHash; -use crate::tracker::TorrentTracker; +use crate::tracker; #[derive(Deserialize, Debug)] struct TorrentInfoQuery { @@ -60,7 +60,7 @@ fn authenticate(tokens: HashMap) -> impl Filter) -> impl warp::Future { +pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl warp::Future { // GET /api/torrents?offset=:u32&limit=:u32 // View torrent list let api_torrents = tracker.clone(); @@ -72,7 +72,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl war let tracker = api_torrents.clone(); (limits, tracker) }) - .and_then(|(limits, tracker): (TorrentInfoQuery, Arc)| async move { + .and_then(|(limits, tracker): (TorrentInfoQuery, Arc)| async move { let offset = limits.offset.unwrap_or(0); let limit = min(limits.limit.unwrap_or(1000), 4000); @@ -103,7 +103,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl war .and(filters::path::path("stats")) .and(filters::path::end()) .map(move || api_stats.clone()) - .and_then(|tracker: Arc| async move { + 
.and_then(|tracker: Arc| async move { let mut results = StatsResource { torrents: 0, seeders: 0, @@ -165,7 +165,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl war let tracker = t2.clone(); (info_hash, tracker) }) - .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { + .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { let db = tracker.get_torrents().await; let torrent_entry_option = db.get(&info_hash); @@ -201,7 +201,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl war let tracker = t3.clone(); (info_hash, tracker) }) - .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { + .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { match tracker.remove_torrent_from_whitelist(&info_hash).await { Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), Err(_) => Err(warp::reject::custom(ActionStatus::Err { @@ -221,7 +221,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl war let tracker = t4.clone(); (info_hash, tracker) }) - .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { + .and_then(|(info_hash, tracker): (InfoHash, Arc)| async move { match tracker.add_torrent_to_whitelist(&info_hash).await { Ok(..) => Ok(warp::reply::json(&ActionStatus::Ok)), Err(..) => Err(warp::reject::custom(ActionStatus::Err { @@ -241,7 +241,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl war let tracker = t5.clone(); (seconds_valid, tracker) }) - .and_then(|(seconds_valid, tracker): (u64, Arc)| async move { + .and_then(|(seconds_valid, tracker): (u64, Arc)| async move { match tracker.generate_auth_key(Duration::from_secs(seconds_valid)).await { Ok(auth_key) => Ok(warp::reply::json(&AuthKeyResource::from(auth_key))), Err(..) 
=> Err(warp::reject::custom(ActionStatus::Err { @@ -261,7 +261,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl war let tracker = t6.clone(); (key, tracker) }) - .and_then(|(key, tracker): (String, Arc)| async move { + .and_then(|(key, tracker): (String, Arc)| async move { match tracker.remove_auth_key(&key).await { Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), Err(_) => Err(warp::reject::custom(ActionStatus::Err { @@ -278,7 +278,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl war .and(filters::path::path("reload")) .and(filters::path::end()) .map(move || t7.clone()) - .and_then(|tracker: Arc| async move { + .and_then(|tracker: Arc| async move { match tracker.load_whitelist().await { Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), Err(_) => Err(warp::reject::custom(ActionStatus::Err { @@ -295,7 +295,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl war .and(filters::path::path("reload")) .and(filters::path::end()) .map(move || t8.clone()) - .and_then(|tracker: Arc| async move { + .and_then(|tracker: Arc| async move { match tracker.load_keys().await { Ok(_) => Ok(warp::reply::json(&ActionStatus::Ok)), Err(_) => Err(warp::reject::custom(ActionStatus::Err { diff --git a/src/http/filters.rs b/src/http/filters.rs index 3375c781f..2c3ab626d 100644 --- a/src/http/filters.rs +++ b/src/http/filters.rs @@ -9,12 +9,14 @@ use super::errors::ServerError; use super::request::{Announce, AnnounceRequestQuery, Scrape}; use super::WebResult; use crate::protocol::common::{InfoHash, PeerId, MAX_SCRAPE_TORRENTS}; +use crate::tracker; use crate::tracker::key::Auth; -use crate::tracker::TorrentTracker; -/// Pass Arc along +/// Pass Arc along #[must_use] -pub fn with_tracker(tracker: Arc) -> impl Filter,), Error = Infallible> + Clone { +pub fn with_tracker( + tracker: Arc, +) -> impl Filter,), Error = Infallible> + Clone { warp::any().map(move || tracker.clone()) } @@ -30,7 +32,7 @@ pub fn with_peer_id() -> impl Filter + C 
warp::filters::query::raw().and_then(|q| async move { peer_id(&q) }) } -/// Pass Arc along +/// Pass Arc along #[must_use] pub fn with_auth_key() -> impl Filter,), Error = Infallible> + Clone { warp::path::param::() diff --git a/src/http/handlers.rs b/src/http/handlers.rs index ace20ada9..d4ae76e65 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -13,7 +13,7 @@ use super::{request, WebResult}; use crate::http::response::Error; use crate::protocol::common::InfoHash; use crate::tracker::key::Auth; -use crate::tracker::{peer, statistics, torrent, TorrentTracker}; +use crate::tracker::{self, peer, statistics, torrent}; /// Authenticate `InfoHash` using optional `AuthKey` /// @@ -23,7 +23,7 @@ use crate::tracker::{peer, statistics, torrent, TorrentTracker}; pub async fn authenticate( info_hash: &InfoHash, auth_key: &Option, - tracker: Arc, + tracker: Arc, ) -> Result<(), ServerError> { tracker.authenticate_request(info_hash, auth_key).await.map_err(|e| match e { torrent::Error::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, @@ -43,7 +43,7 @@ pub async fn authenticate( pub async fn handle_announce( announce_request: request::Announce, auth_key: Option, - tracker: Arc, + tracker: Arc, ) -> WebResult { authenticate(&announce_request.info_hash, &auth_key, tracker.clone()) .await @@ -89,7 +89,7 @@ pub async fn handle_announce( pub async fn handle_scrape( scrape_request: request::Scrape, auth_key: Option, - tracker: Arc, + tracker: Arc, ) -> WebResult { let mut files: HashMap = HashMap::new(); let db = tracker.get_torrents().await; diff --git a/src/http/routes.rs b/src/http/routes.rs index 992febc2c..c46c502e4 100644 --- a/src/http/routes.rs +++ b/src/http/routes.rs @@ -5,18 +5,18 @@ use warp::{Filter, Rejection}; use super::filters::{with_announce_request, with_auth_key, with_scrape_request, with_tracker}; use super::handlers::{handle_announce, handle_scrape, send_error}; -use crate::tracker::TorrentTracker; +use crate::tracker; /// All routes 
#[must_use] -pub fn routes(tracker: Arc) -> impl Filter + Clone { +pub fn routes(tracker: Arc) -> impl Filter + Clone { announce(tracker.clone()) .or(scrape(tracker)) .recover(|q| async move { send_error(&q) }) } /// GET /announce or /announce/ -fn announce(tracker: Arc) -> impl Filter + Clone { +fn announce(tracker: Arc) -> impl Filter + Clone { warp::path::path("announce") .and(warp::filters::method::get()) .and(with_announce_request(tracker.config.on_reverse_proxy)) @@ -26,7 +26,7 @@ fn announce(tracker: Arc) -> impl Filter -fn scrape(tracker: Arc) -> impl Filter + Clone { +fn scrape(tracker: Arc) -> impl Filter + Clone { warp::path::path("scrape") .and(warp::filters::method::get()) .and(with_scrape_request(tracker.config.on_reverse_proxy)) diff --git a/src/http/server.rs b/src/http/server.rs index 755fdc73a..894d3e911 100644 --- a/src/http/server.rs +++ b/src/http/server.rs @@ -2,17 +2,17 @@ use std::net::SocketAddr; use std::sync::Arc; use super::routes; -use crate::tracker::TorrentTracker; +use crate::tracker; -/// Server that listens on HTTP, needs a `TorrentTracker` +/// Server that listens on HTTP, needs a `tracker::TorrentTracker` #[derive(Clone)] pub struct Http { - tracker: Arc, + tracker: Arc, } impl Http { #[must_use] - pub fn new(tracker: Arc) -> Http { + pub fn new(tracker: Arc) -> Http { Http { tracker } } diff --git a/src/jobs/http_tracker.rs b/src/jobs/http_tracker.rs index 276da8099..b8f031f5a 100644 --- a/src/jobs/http_tracker.rs +++ b/src/jobs/http_tracker.rs @@ -6,13 +6,13 @@ use tokio::task::JoinHandle; use crate::config::HttpTracker; use crate::http::server::Http; -use crate::tracker::TorrentTracker; +use crate::tracker; /// # Panics /// /// It would panic if the `config::HttpTracker` struct would contain an inappropriate values. 
#[must_use] -pub fn start_job(config: &HttpTracker, tracker: Arc) -> JoinHandle<()> { +pub fn start_job(config: &HttpTracker, tracker: Arc) -> JoinHandle<()> { let bind_addr = config.bind_address.parse::().unwrap(); let ssl_enabled = config.ssl_enabled; let ssl_cert_path = config.ssl_cert_path.clone(); diff --git a/src/jobs/torrent_cleanup.rs b/src/jobs/torrent_cleanup.rs index 7bdfc1677..073ceda61 100644 --- a/src/jobs/torrent_cleanup.rs +++ b/src/jobs/torrent_cleanup.rs @@ -5,10 +5,10 @@ use log::info; use tokio::task::JoinHandle; use crate::config::Configuration; -use crate::tracker::TorrentTracker; +use crate::tracker; #[must_use] -pub fn start_job(config: &Configuration, tracker: &Arc) -> JoinHandle<()> { +pub fn start_job(config: &Configuration, tracker: &Arc) -> JoinHandle<()> { let weak_tracker = std::sync::Arc::downgrade(tracker); let interval = config.inactive_peer_cleanup_interval; diff --git a/src/jobs/tracker_api.rs b/src/jobs/tracker_api.rs index 4e2dcd0c9..7787ea3f4 100644 --- a/src/jobs/tracker_api.rs +++ b/src/jobs/tracker_api.rs @@ -6,12 +6,12 @@ use tokio::task::JoinHandle; use crate::api::server; use crate::config::Configuration; -use crate::tracker::TorrentTracker; +use crate::tracker; #[derive(Debug)] pub struct ApiServerJobStarted(); -pub async fn start_job(config: &Configuration, tracker: Arc) -> JoinHandle<()> { +pub async fn start_job(config: &Configuration, tracker: Arc) -> JoinHandle<()> { let bind_addr = config .http_api .bind_address diff --git a/src/jobs/udp_tracker.rs b/src/jobs/udp_tracker.rs index 1b4bc745c..d5fdae4c1 100644 --- a/src/jobs/udp_tracker.rs +++ b/src/jobs/udp_tracker.rs @@ -4,11 +4,11 @@ use log::{error, info, warn}; use tokio::task::JoinHandle; use crate::config::UdpTracker; -use crate::tracker::TorrentTracker; +use crate::tracker; use crate::udp::server::UdpServer; #[must_use] -pub fn start_job(config: &UdpTracker, tracker: Arc) -> JoinHandle<()> { +pub fn start_job(config: &UdpTracker, tracker: Arc) -> 
JoinHandle<()> { let bind_addr = config.bind_address.clone(); tokio::spawn(async move { diff --git a/src/main.rs b/src/main.rs index baffc6fa5..a7316cef2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,8 +3,7 @@ use std::sync::Arc; use log::info; use torrust_tracker::config::Configuration; use torrust_tracker::stats::setup_statistics; -use torrust_tracker::tracker::TorrentTracker; -use torrust_tracker::{ephemeral_instance_keys, logging, setup, static_time}; +use torrust_tracker::{ephemeral_instance_keys, logging, setup, static_time, tracker}; #[tokio::main] async fn main() { @@ -28,7 +27,7 @@ async fn main() { let (stats_event_sender, stats_repository) = setup_statistics(config.tracker_usage_statistics); // Initialize Torrust tracker - let tracker = match TorrentTracker::new(config.clone(), stats_event_sender, stats_repository) { + let tracker = match tracker::Tracker::new(&config.clone(), stats_event_sender, stats_repository) { Ok(tracker) => Arc::new(tracker), Err(error) => { panic!("{}", error) diff --git a/src/setup.rs b/src/setup.rs index cfca5eb9e..a7b7c5a82 100644 --- a/src/setup.rs +++ b/src/setup.rs @@ -5,9 +5,9 @@ use tokio::task::JoinHandle; use crate::config::Configuration; use crate::jobs::{http_tracker, torrent_cleanup, tracker_api, udp_tracker}; -use crate::tracker::TorrentTracker; +use crate::tracker; -pub async fn setup(config: &Configuration, tracker: Arc) -> Vec> { +pub async fn setup(config: &Configuration, tracker: Arc) -> Vec> { let mut jobs: Vec> = Vec::new(); // Load peer keys diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index b3a7ab6d6..fcd9ebe2d 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -19,7 +19,7 @@ use crate::databases::database::Database; use crate::protocol::common::InfoHash; use crate::tracker::key::Auth; -pub struct TorrentTracker { +pub struct Tracker { pub config: Arc, mode: mode::Tracker, keys: RwLock>, @@ -30,15 +30,18 @@ pub struct TorrentTracker { database: Box, } -impl TorrentTracker { +impl Tracker 
{ + /// # Errors + /// + /// Will return a `r2d2::Error` if unable to connect to database. pub fn new( - config: Arc, + config: &Arc, stats_event_sender: Option>, stats_repository: statistics::Repo, - ) -> Result { + ) -> Result { let database = database::connect(&config.db_driver, &config.db_path)?; - Ok(TorrentTracker { + Ok(Tracker { config: config.clone(), mode: config.mode, keys: RwLock::new(std::collections::HashMap::new()), @@ -62,6 +65,9 @@ impl TorrentTracker { self.mode == mode::Tracker::Listed || self.mode == mode::Tracker::PrivateListed } + /// # Errors + /// + /// Will return a `database::Error` if unable to add the `auth_key` to the database. pub async fn generate_auth_key(&self, lifetime: Duration) -> Result { let auth_key = key::generate(lifetime); self.database.add_key_to_keys(&auth_key).await?; @@ -69,12 +75,18 @@ impl TorrentTracker { Ok(auth_key) } + /// # Errors + /// + /// Will return a `database::Error` if unable to remove the `key` to the database. pub async fn remove_auth_key(&self, key: &str) -> Result<(), database::Error> { self.database.remove_key_from_keys(key).await?; self.keys.write().await.remove(key); Ok(()) } + /// # Errors + /// + /// Will return a `key::Error` if unable to get any `auth_key`. pub async fn verify_auth_key(&self, auth_key: &Auth) -> Result<(), key::Error> { match self.keys.read().await.get(&auth_key.key) { None => Err(key::Error::KeyInvalid), @@ -82,6 +94,9 @@ impl TorrentTracker { } } + /// # Errors + /// + /// Will return a `database::Error` if unable to `load_keys` from the database. pub async fn load_keys(&self) -> Result<(), database::Error> { let keys_from_database = self.database.load_keys().await?; let mut keys = self.keys.write().await; @@ -89,13 +104,17 @@ impl TorrentTracker { keys.clear(); for key in keys_from_database { - let _ = keys.insert(key.key.clone(), key); + keys.insert(key.key.clone(), key); } Ok(()) } - // Adding torrents is not relevant to public trackers. 
+ /// Adding torrents is not relevant to public trackers. + /// + /// # Errors + /// + /// Will return a `database::Error` if unable to add the `info_hash` into the whitelist database. pub async fn add_torrent_to_whitelist(&self, info_hash: &InfoHash) -> Result<(), database::Error> { self.add_torrent_to_database_whitelist(info_hash).await?; self.add_torrent_to_memory_whitelist(info_hash).await; @@ -117,7 +136,11 @@ impl TorrentTracker { self.whitelist.write().await.insert(*info_hash) } - // Removing torrents is not relevant to public trackers. + /// Removing torrents is not relevant to public trackers. + /// + /// # Errors + /// + /// Will return a `database::Error` if unable to remove the `info_hash` from the whitelist database. pub async fn remove_torrent_from_whitelist(&self, info_hash: &InfoHash) -> Result<(), database::Error> { self.database.remove_info_hash_from_whitelist(*info_hash).await?; self.whitelist.write().await.remove(info_hash); @@ -128,6 +151,9 @@ impl TorrentTracker { self.whitelist.read().await.contains(info_hash) } + /// # Errors + /// + /// Will return a `database::Error` if unable to load the list whitelisted `info_hash`s from the database. pub async fn load_whitelist(&self) -> Result<(), database::Error> { let whitelisted_torrents_from_database = self.database.load_whitelist().await?; let mut whitelist = self.whitelist.write().await; @@ -141,6 +167,13 @@ impl TorrentTracker { Ok(()) } + /// # Errors + /// + /// Will return a `torrent::Error::PeerKeyNotValid` if the `key` is not valid. + /// + /// Will return a `torrent::Error::PeerNotAuthenticated` if the `key` is `None`. + /// + /// Will return a `torrent::Error::TorrentNotWhitelisted` if the the Tracker is in listed mode and the `info_hash` is not whitelisted. 
pub async fn authenticate_request(&self, info_hash: &InfoHash, key: &Option) -> Result<(), torrent::Error> { // no authentication needed in public mode if self.is_public() { @@ -169,7 +202,11 @@ impl TorrentTracker { Ok(()) } - // Loading the torrents from database into memory + /// Loading the torrents from database into memory + /// + /// # Errors + /// + /// Will return a `database::Error` if unable to load the list of `persistent_torrents` from the database. pub async fn load_persistent_torrents(&self) -> Result<(), database::Error> { let persistent_torrents = self.database.load_persistent_torrents().await?; let mut torrents = self.torrents.write().await; @@ -181,7 +218,7 @@ impl TorrentTracker { } let torrent_entry = torrent::Entry { - peers: Default::default(), + peers: BTreeMap::default(), completed, }; @@ -262,9 +299,10 @@ impl TorrentTracker { torrents_lock.retain(|_, torrent_entry| { torrent_entry.remove_inactive_peers(self.config.max_peer_timeout); - match self.config.persistent_torrent_completed_stat { - true => torrent_entry.completed > 0 || !torrent_entry.peers.is_empty(), - false => !torrent_entry.peers.is_empty(), + if self.config.persistent_torrent_completed_stat { + torrent_entry.completed > 0 || !torrent_entry.peers.is_empty() + } else { + !torrent_entry.peers.is_empty() } }); } else { diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index 632180a92..bf34326c6 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -8,11 +8,11 @@ use aquatic_udp_protocol::{ use super::connection_cookie::{check_connection_cookie, from_connection_id, into_connection_id, make_connection_cookie}; use crate::protocol::common::{InfoHash, MAX_SCRAPE_TORRENTS}; -use crate::tracker::{peer, statistics, torrent, TorrentTracker}; +use crate::tracker::{self, peer, statistics, torrent}; use crate::udp::errors::ServerError; use crate::udp::request::AnnounceRequestWrapper; -pub async fn authenticate(info_hash: &InfoHash, tracker: Arc) -> Result<(), ServerError> { 
+pub async fn authenticate(info_hash: &InfoHash, tracker: Arc) -> Result<(), ServerError> { match tracker.authenticate_request(info_hash, &None).await { Ok(_) => Ok(()), Err(e) => { @@ -30,7 +30,7 @@ pub async fn authenticate(info_hash: &InfoHash, tracker: Arc) -> } } -pub async fn handle_packet(remote_addr: SocketAddr, payload: Vec, tracker: Arc) -> Response { +pub async fn handle_packet(remote_addr: SocketAddr, payload: Vec, tracker: Arc) -> Response { match Request::from_bytes(&payload[..payload.len()], MAX_SCRAPE_TORRENTS).map_err(|_| ServerError::InternalServerError) { Ok(request) => { let transaction_id = match &request { @@ -52,7 +52,7 @@ pub async fn handle_packet(remote_addr: SocketAddr, payload: Vec, tracker: A pub async fn handle_request( request: Request, remote_addr: SocketAddr, - tracker: Arc, + tracker: Arc, ) -> Result { match request { Request::Connect(connect_request) => handle_connect(remote_addr, &connect_request, tracker).await, @@ -64,7 +64,7 @@ pub async fn handle_request( pub async fn handle_connect( remote_addr: SocketAddr, request: &ConnectRequest, - tracker: Arc, + tracker: Arc, ) -> Result { let connection_cookie = make_connection_cookie(&remote_addr); let connection_id = into_connection_id(&connection_cookie); @@ -90,7 +90,7 @@ pub async fn handle_connect( pub async fn handle_announce( remote_addr: SocketAddr, announce_request: &AnnounceRequest, - tracker: Arc, + tracker: Arc, ) -> Result { match check_connection_cookie(&remote_addr, &from_connection_id(&announce_request.connection_id)) { Ok(_) => {} @@ -179,7 +179,7 @@ pub async fn handle_announce( pub async fn handle_scrape( remote_addr: SocketAddr, request: &ScrapeRequest, - tracker: Arc, + tracker: Arc, ) -> Result { let db = tracker.get_torrents().await; @@ -252,18 +252,18 @@ mod tests { use crate::config::Configuration; use crate::protocol::clock::{Current, Time}; use crate::protocol::common::PeerId; - use crate::tracker::{mode, peer, statistics, TorrentTracker}; + use 
crate::tracker::{self, mode, peer, statistics}; fn default_tracker_config() -> Arc { Arc::new(Configuration::default()) } - fn initialized_public_tracker() -> Arc { + fn initialized_public_tracker() -> Arc { let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Tracker::Public).into()); initialized_tracker(configuration) } - fn initialized_private_tracker() -> Arc { + fn initialized_private_tracker() -> Arc { let configuration = Arc::new( TrackerConfigurationBuilder::default() .with_mode(mode::Tracker::Private) @@ -272,14 +272,14 @@ mod tests { initialized_tracker(configuration) } - fn initialized_whitelisted_tracker() -> Arc { + fn initialized_whitelisted_tracker() -> Arc { let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Tracker::Listed).into()); initialized_tracker(configuration) } - fn initialized_tracker(configuration: Arc) -> Arc { + fn initialized_tracker(configuration: Arc) -> Arc { let (stats_event_sender, stats_repository) = statistics::Keeper::new_active_instance(); - Arc::new(TorrentTracker::new(configuration, Some(stats_event_sender), stats_repository).unwrap()) + Arc::new(tracker::Tracker::new(&configuration, Some(stats_event_sender), stats_repository).unwrap()) } fn sample_ipv4_remote_addr() -> SocketAddr { @@ -372,7 +372,7 @@ mod tests { use mockall::predicate::eq; use super::{default_tracker_config, sample_ipv4_socket_address, sample_ipv6_remote_addr}; - use crate::tracker::{statistics, TorrentTracker}; + use crate::tracker::{self, statistics}; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; use crate::udp::handlers::handle_connect; use crate::udp::handlers::tests::{initialized_public_tracker, sample_ipv4_remote_addr}; @@ -434,7 +434,7 @@ mod tests { let client_socket_address = sample_ipv4_socket_address(); let torrent_tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), + 
tracker::Tracker::new(&default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); handle_connect(client_socket_address, &sample_connect_request(), torrent_tracker) .await @@ -452,7 +452,7 @@ mod tests { let stats_event_sender = Box::new(stats_event_sender_mock); let torrent_tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), + tracker::Tracker::new(&default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); handle_connect(sample_ipv6_remote_addr(), &sample_connect_request(), torrent_tracker) .await @@ -544,7 +544,7 @@ mod tests { use mockall::predicate::eq; use crate::protocol::common::PeerId; - use crate::tracker::{statistics, TorrentTracker}; + use crate::tracker::{self, statistics}; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; @@ -641,7 +641,7 @@ mod tests { assert_eq!(peers[0].peer_addr, SocketAddr::new(IpAddr::V4(remote_client_ip), client_port)); } - async fn add_a_torrent_peer_using_ipv6(tracker: Arc) { + async fn add_a_torrent_peer_using_ipv6(tracker: Arc) { let info_hash = AquaticInfoHash([0u8; 20]); let client_ip_v4 = Ipv4Addr::new(126, 0, 0, 1); @@ -659,7 +659,7 @@ mod tests { .await; } - async fn announce_a_new_peer_using_ipv4(tracker: Arc) -> Response { + async fn announce_a_new_peer_using_ipv4(tracker: Arc) -> Response { let remote_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080); let request = AnnounceRequestBuilder::default() .with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) @@ -696,7 +696,7 @@ mod tests { let stats_event_sender = Box::new(stats_event_sender_mock); let tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), + 
tracker::Tracker::new(&default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); handle_announce( @@ -768,7 +768,7 @@ mod tests { use mockall::predicate::eq; use crate::protocol::common::PeerId; - use crate::tracker::{statistics, TorrentTracker}; + use crate::tracker::{self, statistics}; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; @@ -870,7 +870,7 @@ mod tests { assert_eq!(peers[0].peer_addr, SocketAddr::new(IpAddr::V6(remote_client_ip), client_port)); } - async fn add_a_torrent_peer_using_ipv4(tracker: Arc) { + async fn add_a_torrent_peer_using_ipv4(tracker: Arc) { let info_hash = AquaticInfoHash([0u8; 20]); let client_ip_v4 = Ipv4Addr::new(126, 0, 0, 1); @@ -887,7 +887,7 @@ mod tests { .await; } - async fn announce_a_new_peer_using_ipv6(tracker: Arc) -> Response { + async fn announce_a_new_peer_using_ipv6(tracker: Arc) -> Response { let client_ip_v4 = Ipv4Addr::new(126, 0, 0, 1); let client_ip_v6 = client_ip_v4.to_ipv6_compatible(); let client_port = 8080; @@ -927,7 +927,7 @@ mod tests { let stats_event_sender = Box::new(stats_event_sender_mock); let tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), + tracker::Tracker::new(&default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); let remote_addr = sample_ipv6_remote_addr(); @@ -947,8 +947,8 @@ mod tests { use aquatic_udp_protocol::{InfoHash as AquaticInfoHash, PeerId as AquaticPeerId}; + use crate::tracker; use crate::tracker::statistics::Keeper; - use crate::tracker::TorrentTracker; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; @@ -959,7 +959,7 @@ mod tests { let 
configuration = Arc::new(TrackerConfigurationBuilder::default().with_external_ip("::126.0.0.1").into()); let (stats_event_sender, stats_repository) = Keeper::new_active_instance(); let tracker = - Arc::new(TorrentTracker::new(configuration, Some(stats_event_sender), stats_repository).unwrap()); + Arc::new(tracker::Tracker::new(&configuration, Some(stats_event_sender), stats_repository).unwrap()); let loopback_ipv4 = Ipv4Addr::new(127, 0, 0, 1); let loopback_ipv6 = Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1); @@ -1010,7 +1010,7 @@ mod tests { use super::TorrentPeerBuilder; use crate::protocol::common::PeerId; - use crate::tracker::TorrentTracker; + use crate::tracker; use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::{initialized_public_tracker, sample_ipv4_remote_addr}; @@ -1051,7 +1051,7 @@ mod tests { ); } - async fn add_a_seeder(tracker: Arc, remote_addr: &SocketAddr, info_hash: &InfoHash) { + async fn add_a_seeder(tracker: Arc, remote_addr: &SocketAddr, info_hash: &InfoHash) { let peer_id = PeerId([255u8; 20]); let peer = TorrentPeerBuilder::default() @@ -1075,7 +1075,7 @@ mod tests { } } - async fn add_a_sample_seeder_and_scrape(tracker: Arc) -> Response { + async fn add_a_sample_seeder_and_scrape(tracker: Arc) -> Response { let remote_addr = sample_ipv4_remote_addr(); let info_hash = InfoHash([0u8; 20]); @@ -1227,7 +1227,7 @@ mod tests { use mockall::predicate::eq; use super::sample_scrape_request; - use crate::tracker::{statistics, TorrentTracker}; + use crate::tracker::{self, statistics}; use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::{default_tracker_config, sample_ipv4_remote_addr}; @@ -1243,7 +1243,7 @@ mod tests { let remote_addr = sample_ipv4_remote_addr(); let tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), + 
tracker::Tracker::new(&default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); handle_scrape(remote_addr, &sample_scrape_request(&remote_addr), tracker.clone()) @@ -1259,7 +1259,7 @@ mod tests { use mockall::predicate::eq; use super::sample_scrape_request; - use crate::tracker::{statistics, TorrentTracker}; + use crate::tracker::{self, statistics}; use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::{default_tracker_config, sample_ipv6_remote_addr}; @@ -1275,7 +1275,7 @@ mod tests { let remote_addr = sample_ipv6_remote_addr(); let tracker = Arc::new( - TorrentTracker::new(default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), + tracker::Tracker::new(&default_tracker_config(), Some(stats_event_sender), statistics::Repo::new()).unwrap(), ); handle_scrape(remote_addr, &sample_scrape_request(&remote_addr), tracker.clone()) diff --git a/src/udp/server.rs b/src/udp/server.rs index 5c215f9ec..705a6c263 100644 --- a/src/udp/server.rs +++ b/src/udp/server.rs @@ -6,17 +6,17 @@ use aquatic_udp_protocol::Response; use log::{debug, info}; use tokio::net::UdpSocket; -use crate::tracker::TorrentTracker; +use crate::tracker; use crate::udp::handlers::handle_packet; use crate::udp::MAX_PACKET_SIZE; pub struct UdpServer { socket: Arc, - tracker: Arc, + tracker: Arc, } impl UdpServer { - pub async fn new(tracker: Arc, bind_address: &str) -> tokio::io::Result { + pub async fn new(tracker: Arc, bind_address: &str) -> tokio::io::Result { let socket = UdpSocket::bind(bind_address).await?; Ok(UdpServer { diff --git a/tests/api.rs b/tests/api.rs index a4043fe7c..72c3c65c7 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -26,8 +26,7 @@ mod tracker_api { use torrust_tracker::tracker::key::Auth; use torrust_tracker::tracker::peer::TorrentPeer; use torrust_tracker::tracker::statistics::Keeper; - use torrust_tracker::tracker::TorrentTracker; - use torrust_tracker::{ephemeral_instance_keys, logging, 
static_time}; + use torrust_tracker::{ephemeral_instance_keys, logging, static_time, tracker}; use crate::common::ephemeral_random_port; @@ -237,7 +236,7 @@ mod tracker_api { struct ApiServer { pub started: AtomicBool, pub job: Option>, - pub tracker: Option>, + pub tracker: Option>, pub connection_info: Option, } @@ -279,7 +278,7 @@ mod tracker_api { let (stats_event_sender, stats_repository) = Keeper::new_active_instance(); // Initialize Torrust tracker - let tracker = match TorrentTracker::new(configuration.clone(), Some(stats_event_sender), stats_repository) { + let tracker = match tracker::Tracker::new(&configuration.clone(), Some(stats_event_sender), stats_repository) { Ok(tracker) => Arc::new(tracker), Err(error) => { panic!("{}", error) diff --git a/tests/udp.rs b/tests/udp.rs index fabca137a..e93894843 100644 --- a/tests/udp.rs +++ b/tests/udp.rs @@ -21,9 +21,8 @@ mod udp_tracker_server { use torrust_tracker::config::Configuration; use torrust_tracker::jobs::udp_tracker; use torrust_tracker::tracker::statistics::Keeper; - use torrust_tracker::tracker::TorrentTracker; use torrust_tracker::udp::MAX_PACKET_SIZE; - use torrust_tracker::{ephemeral_instance_keys, logging, static_time}; + use torrust_tracker::{ephemeral_instance_keys, logging, static_time, tracker}; use crate::common::ephemeral_random_port; @@ -61,7 +60,7 @@ mod udp_tracker_server { let (stats_event_sender, stats_repository) = Keeper::new_active_instance(); // Initialize Torrust tracker - let tracker = match TorrentTracker::new(configuration.clone(), Some(stats_event_sender), stats_repository) { + let tracker = match tracker::Tracker::new(&configuration.clone(), Some(stats_event_sender), stats_repository) { Ok(tracker) => Arc::new(tracker), Err(error) => { panic!("{}", error) From 4a34f685fc3c88e01269ab68b805f07a25ab4a03 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Fri, 25 Nov 2022 02:10:57 +0100 Subject: [PATCH 31/45] clippy: fix src/udp/connection_cookie.rs --- src/tracker/peer.rs | 5 
++-- src/udp/connection_cookie.rs | 54 +++++++++++++++++++----------------- src/udp/handlers.rs | 54 ++++++++++++++++++------------------ 3 files changed, 59 insertions(+), 54 deletions(-) diff --git a/src/tracker/peer.rs b/src/tracker/peer.rs index d590b590d..dd49ffaa7 100644 --- a/src/tracker/peer.rs +++ b/src/tracker/peer.rs @@ -131,7 +131,8 @@ mod test { }; use crate::tracker::peer::TorrentPeer; - use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; + use crate::udp::connection_cookie::{into_connection_id, make}; + // todo: duplicate functions is PR 82. Remove duplication once both PR are merged. fn sample_ipv4_remote_addr() -> SocketAddr { @@ -153,7 +154,7 @@ mod test { let info_hash_aquatic = aquatic_udp_protocol::InfoHash([0u8; 20]); let default_request = AnnounceRequest { - connection_id: into_connection_id(&make_connection_cookie(&sample_ipv4_remote_addr())), + connection_id: into_connection_id(&make(&sample_ipv4_remote_addr())), transaction_id: TransactionId(0i32), info_hash: info_hash_aquatic, peer_id: AquaticPeerId(*b"-qB00000000000000000"), diff --git a/src/udp/connection_cookie.rs b/src/udp/connection_cookie.rs index 5a1e564dd..8a544fa6a 100644 --- a/src/udp/connection_cookie.rs +++ b/src/udp/connection_cookie.rs @@ -22,17 +22,21 @@ pub fn into_connection_id(connection_cookie: &Cookie) -> ConnectionId { } #[must_use] -pub fn make_connection_cookie(remote_address: &SocketAddr) -> Cookie { +pub fn make(remote_address: &SocketAddr) -> Cookie { let time_extent = cookie_builder::get_last_time_extent(); //println!("remote_address: {remote_address:?}, time_extent: {time_extent:?}, cookie: {cookie:?}"); cookie_builder::build(remote_address, &time_extent) } -pub fn check_connection_cookie( - remote_address: &SocketAddr, - connection_cookie: &Cookie, -) -> Result { +/// # Panics +/// +/// It would panic if the `COOKIE_LIFETIME` constant would be an unreasonably large number. 
+/// +/// # Errors +/// +/// Will return a `ServerError::InvalidConnectionId` if the supplied `connection_cookie` fails to verify. +pub fn check(remote_address: &SocketAddr, connection_cookie: &Cookie) -> Result { // we loop backwards testing each time_extent until we find one that matches. // (or the lifetime of time_extents is exhausted) for offset in 0..=COOKIE_LIFETIME.amount { @@ -85,19 +89,19 @@ mod tests { use super::cookie_builder::{self}; use crate::protocol::clock::time_extent::{self, Extent}; use crate::protocol::clock::{Stopped, StoppedTime}; - use crate::udp::connection_cookie::{check_connection_cookie, make_connection_cookie, Cookie, COOKIE_LIFETIME}; + use crate::udp::connection_cookie::{check, make, Cookie, COOKIE_LIFETIME}; // #![feature(const_socketaddr)] // const REMOTE_ADDRESS_IPV4_ZERO: SocketAddr = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); #[test] fn it_should_make_a_connection_cookie() { - let cookie = make_connection_cookie(&SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0)); - // Note: This constant may need to be updated in the future as the hash is not guaranteed to to be stable between versions. 
const ID_COOKIE: Cookie = [23, 204, 198, 29, 48, 180, 62, 19]; - assert_eq!(cookie, ID_COOKIE) + let cookie = make(&SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0)); + + assert_eq!(cookie, ID_COOKIE); } #[test] @@ -114,7 +118,7 @@ mod tests { //remote_address: 127.0.0.1:8080, time_extent: TimeExtent { increment: 0ns, amount: 0 }, cookie: [212, 9, 204, 223, 176, 190, 150, 153] //remote_address: 127.0.0.1:8080, time_extent: TimeExtent { increment: 0ns, amount: 0 }, cookie: [212, 9, 204, 223, 176, 190, 150, 153] - assert_eq!(cookie, cookie_2) + assert_eq!(cookie, cookie_2); } #[test] @@ -132,7 +136,7 @@ mod tests { //remote_address: 0.0.0.0:0, time_extent: TimeExtent { increment: 0ns, amount: 0 }, cookie: [151, 130, 30, 157, 190, 41, 179, 135] //remote_address: 255.255.255.255:0, time_extent: TimeExtent { increment: 0ns, amount: 0 }, cookie: [217, 87, 239, 178, 182, 126, 66, 166] - assert_ne!(cookie, cookie_2) + assert_ne!(cookie, cookie_2); } #[test] @@ -150,7 +154,7 @@ mod tests { //remote_address: 0.0.0.0:0, time_extent: TimeExtent { increment: 0ns, amount: 0 }, cookie: [151, 130, 30, 157, 190, 41, 179, 135] //remote_address: [::]:0, time_extent: TimeExtent { increment: 0ns, amount: 0 }, cookie: [99, 119, 230, 177, 20, 220, 163, 187] - assert_ne!(cookie, cookie_2) + assert_ne!(cookie, cookie_2); } #[test] @@ -168,7 +172,7 @@ mod tests { //remote_address: 0.0.0.0:0, time_extent: TimeExtent { increment: 0ns, amount: 0 }, cookie: [151, 130, 30, 157, 190, 41, 179, 135] //remote_address: 0.0.0.0:1, time_extent: TimeExtent { increment: 0ns, amount: 0 }, cookie: [38, 8, 0, 102, 92, 170, 220, 11] - assert_ne!(cookie, cookie_2) + assert_ne!(cookie, cookie_2); } #[test] @@ -186,51 +190,51 @@ mod tests { //remote_address: 0.0.0.0:0, time_extent: TimeExtent { increment: 0ns, amount: 0 }, cookie: [151, 130, 30, 157, 190, 41, 179, 135] //remote_address: 0.0.0.0:0, time_extent: TimeExtent { increment: 18446744073709551615.999999999s, amount: 18446744073709551615 }, cookie: 
[87, 111, 109, 125, 182, 206, 3, 201] - assert_ne!(cookie, cookie_2) + assert_ne!(cookie, cookie_2); } #[test] fn it_should_make_different_cookies_for_the_next_time_extent() { let remote_address = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); - let cookie = make_connection_cookie(&remote_address); + let cookie = make(&remote_address); Stopped::local_add(&COOKIE_LIFETIME.increment).unwrap(); - let cookie_next = make_connection_cookie(&remote_address); + let cookie_next = make(&remote_address); - assert_ne!(cookie, cookie_next) + assert_ne!(cookie, cookie_next); } #[test] fn it_should_be_valid_for_this_time_extent() { let remote_address = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); - let cookie = make_connection_cookie(&remote_address); + let cookie = make(&remote_address); - check_connection_cookie(&remote_address, &cookie).unwrap(); + check(&remote_address, &cookie).unwrap(); } #[test] fn it_should_be_valid_for_the_next_time_extent() { let remote_address = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); - let cookie = make_connection_cookie(&remote_address); + let cookie = make(&remote_address); Stopped::local_add(&COOKIE_LIFETIME.increment).unwrap(); - check_connection_cookie(&remote_address, &cookie).unwrap(); + check(&remote_address, &cookie).unwrap(); } #[test] fn it_should_be_valid_for_the_last_time_extent() { let remote_address = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); - let cookie = make_connection_cookie(&remote_address); + let cookie = make(&remote_address); Stopped::local_set(&COOKIE_LIFETIME.total().unwrap().unwrap()); - check_connection_cookie(&remote_address, &cookie).unwrap(); + check(&remote_address, &cookie).unwrap(); } #[test] @@ -238,10 +242,10 @@ mod tests { fn it_should_be_not_valid_after_their_last_time_extent() { let remote_address = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); - let cookie = make_connection_cookie(&remote_address); + let cookie = make(&remote_address); 
Stopped::local_set(&COOKIE_LIFETIME.total_next().unwrap().unwrap()); - check_connection_cookie(&remote_address, &cookie).unwrap(); + check(&remote_address, &cookie).unwrap(); } } diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index bf34326c6..cdf12ed6b 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -6,7 +6,7 @@ use aquatic_udp_protocol::{ NumberOfPeers, Port, Request, Response, ResponsePeer, ScrapeRequest, ScrapeResponse, TorrentScrapeStatistics, TransactionId, }; -use super::connection_cookie::{check_connection_cookie, from_connection_id, into_connection_id, make_connection_cookie}; +use super::connection_cookie::{check, from_connection_id, into_connection_id, make}; use crate::protocol::common::{InfoHash, MAX_SCRAPE_TORRENTS}; use crate::tracker::{self, peer, statistics, torrent}; use crate::udp::errors::ServerError; @@ -66,7 +66,7 @@ pub async fn handle_connect( request: &ConnectRequest, tracker: Arc, ) -> Result { - let connection_cookie = make_connection_cookie(&remote_addr); + let connection_cookie = make(&remote_addr); let connection_id = into_connection_id(&connection_cookie); let response = Response::from(ConnectResponse { @@ -92,7 +92,7 @@ pub async fn handle_announce( announce_request: &AnnounceRequest, tracker: Arc, ) -> Result { - match check_connection_cookie(&remote_addr, &from_connection_id(&announce_request.connection_id)) { + match check(&remote_addr, &from_connection_id(&announce_request.connection_id)) { Ok(_) => {} Err(e) => { return Err(e); @@ -373,7 +373,7 @@ mod tests { use super::{default_tracker_config, sample_ipv4_socket_address, sample_ipv6_remote_addr}; use crate::tracker::{self, statistics}; - use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; + use crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::handle_connect; use crate::udp::handlers::tests::{initialized_public_tracker, sample_ipv4_remote_addr}; @@ -396,7 +396,7 @@ mod tests { assert_eq!( 
response, Response::Connect(ConnectResponse { - connection_id: into_connection_id(&make_connection_cookie(&sample_ipv4_remote_addr())), + connection_id: into_connection_id(&make(&sample_ipv4_remote_addr())), transaction_id: request.transaction_id }) ); @@ -415,7 +415,7 @@ mod tests { assert_eq!( response, Response::Connect(ConnectResponse { - connection_id: into_connection_id(&make_connection_cookie(&sample_ipv4_remote_addr())), + connection_id: into_connection_id(&make(&sample_ipv4_remote_addr())), transaction_id: request.transaction_id }) ); @@ -469,7 +469,7 @@ mod tests { TransactionId, }; - use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; + use crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::tests::sample_ipv4_remote_addr; struct AnnounceRequestBuilder { @@ -483,7 +483,7 @@ mod tests { let info_hash_aquatic = aquatic_udp_protocol::InfoHash([0u8; 20]); let default_request = AnnounceRequest { - connection_id: into_connection_id(&make_connection_cookie(&sample_ipv4_remote_addr())), + connection_id: into_connection_id(&make(&sample_ipv4_remote_addr())), transaction_id: TransactionId(0i32), info_hash: info_hash_aquatic, peer_id: AquaticPeerId([255u8; 20]), @@ -545,7 +545,7 @@ mod tests { use crate::protocol::common::PeerId; use crate::tracker::{self, statistics}; - use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; + use crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; use crate::udp::handlers::tests::{ @@ -564,7 +564,7 @@ mod tests { let remote_addr = SocketAddr::new(IpAddr::V4(client_ip), client_port); let request = AnnounceRequestBuilder::default() - .with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .with_info_hash(info_hash) .with_peer_id(peer_id) 
.with_ip_address(client_ip) @@ -588,7 +588,7 @@ mod tests { let remote_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080); let request = AnnounceRequestBuilder::default() - .with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .into(); let response = handle_announce(remote_addr, &request, initialized_public_tracker()) @@ -627,7 +627,7 @@ mod tests { let remote_addr = SocketAddr::new(IpAddr::V4(remote_client_ip), remote_client_port); let request = AnnounceRequestBuilder::default() - .with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .with_info_hash(info_hash) .with_peer_id(peer_id) .with_ip_address(peer_address) @@ -662,7 +662,7 @@ mod tests { async fn announce_a_new_peer_using_ipv4(tracker: Arc) -> Response { let remote_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080); let request = AnnounceRequestBuilder::default() - .with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .into(); handle_announce(remote_addr, &request, tracker.clone()).await.unwrap() @@ -714,7 +714,7 @@ mod tests { use aquatic_udp_protocol::{InfoHash as AquaticInfoHash, PeerId as AquaticPeerId}; use crate::protocol::common::PeerId; - use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; + use crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; use crate::udp::handlers::tests::{initialized_public_tracker, TorrentPeerBuilder}; @@ -731,7 +731,7 @@ mod tests { let remote_addr = SocketAddr::new(IpAddr::V4(client_ip), client_port); let request = AnnounceRequestBuilder::default() - 
.with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .with_info_hash(info_hash) .with_peer_id(peer_id) .with_ip_address(client_ip) @@ -769,7 +769,7 @@ mod tests { use crate::protocol::common::PeerId; use crate::tracker::{self, statistics}; - use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; + use crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; use crate::udp::handlers::tests::{ @@ -789,7 +789,7 @@ mod tests { let remote_addr = SocketAddr::new(IpAddr::V6(client_ip_v6), client_port); let request = AnnounceRequestBuilder::default() - .with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .with_info_hash(info_hash) .with_peer_id(peer_id) .with_ip_address(client_ip_v4) @@ -816,7 +816,7 @@ mod tests { let remote_addr = SocketAddr::new(IpAddr::V6(client_ip_v6), 8080); let request = AnnounceRequestBuilder::default() - .with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .into(); let response = handle_announce(remote_addr, &request, initialized_public_tracker()) @@ -855,7 +855,7 @@ mod tests { let remote_addr = SocketAddr::new(IpAddr::V6(remote_client_ip), remote_client_port); let request = AnnounceRequestBuilder::default() - .with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .with_info_hash(info_hash) .with_peer_id(peer_id) .with_ip_address(peer_address) @@ -893,7 +893,7 @@ mod tests { let client_port = 8080; let remote_addr = SocketAddr::new(IpAddr::V6(client_ip_v6), client_port); let request = AnnounceRequestBuilder::default() - 
.with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .into(); handle_announce(remote_addr, &request, tracker.clone()).await.unwrap() @@ -933,7 +933,7 @@ mod tests { let remote_addr = sample_ipv6_remote_addr(); let announce_request = AnnounceRequestBuilder::default() - .with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .into(); handle_announce(remote_addr, &announce_request, tracker.clone()) @@ -949,7 +949,7 @@ mod tests { use crate::tracker; use crate::tracker::statistics::Keeper; - use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; + use crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; use crate::udp::handlers::tests::TrackerConfigurationBuilder; @@ -974,7 +974,7 @@ mod tests { let remote_addr = SocketAddr::new(IpAddr::V6(client_ip_v6), client_port); let request = AnnounceRequestBuilder::default() - .with_connection_id(into_connection_id(&make_connection_cookie(&remote_addr))) + .with_connection_id(into_connection_id(&make(&remote_addr))) .with_info_hash(info_hash) .with_peer_id(peer_id) .with_ip_address(client_ip_v4) @@ -1011,7 +1011,7 @@ mod tests { use super::TorrentPeerBuilder; use crate::protocol::common::PeerId; use crate::tracker; - use crate::udp::connection_cookie::{into_connection_id, make_connection_cookie}; + use crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::{initialized_public_tracker, sample_ipv4_remote_addr}; @@ -1031,7 +1031,7 @@ mod tests { let info_hashes = vec![info_hash]; let request = ScrapeRequest { - connection_id: into_connection_id(&make_connection_cookie(&remote_addr)), + connection_id: 
into_connection_id(&make(&remote_addr)), transaction_id: TransactionId(0i32), info_hashes, }; @@ -1069,7 +1069,7 @@ mod tests { let info_hashes = vec![*info_hash]; ScrapeRequest { - connection_id: into_connection_id(&make_connection_cookie(remote_addr)), + connection_id: into_connection_id(&make(remote_addr)), transaction_id: TransactionId(0i32), info_hashes, } @@ -1214,7 +1214,7 @@ mod tests { let info_hashes = vec![info_hash]; ScrapeRequest { - connection_id: into_connection_id(&make_connection_cookie(remote_addr)), + connection_id: into_connection_id(&make(remote_addr)), transaction_id: TransactionId(0i32), info_hashes, } From 6e2a34226b1dc17f17e9420dd25e47255fd45fe2 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Fri, 25 Nov 2022 19:35:53 +0100 Subject: [PATCH 32/45] clippy: fix src/databases/database.rs --- src/databases/database.rs | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/src/databases/database.rs b/src/databases/database.rs index 5186f96b3..5e4a7c1f9 100644 --- a/src/databases/database.rs +++ b/src/databases/database.rs @@ -61,14 +61,15 @@ pub trait Database: Sync + Send { async fn remove_key_from_keys(&self, key: &str) -> Result; async fn is_info_hash_whitelisted(&self, info_hash: &InfoHash) -> Result { - if let Err(e) = self.get_info_hash_from_whitelist(&info_hash.clone().to_string()).await { - if let Error::QueryReturnedNoRows = e { - return Ok(false); - } else { - return Err(e); - } - } - Ok(true) + self.get_info_hash_from_whitelist(&info_hash.clone().to_string()) + .await + .map_or_else( + |e| match e { + Error::QueryReturnedNoRows => Ok(false), + e => Err(e), + }, + |_| Ok(true), + ) } } From 0a7d9276b4958f56a5e4099c24cee08ed2c8084a Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Fri, 25 Nov 2022 19:44:59 +0100 Subject: [PATCH 33/45] clippy: fix src/jobs/tracker_api.rs --- src/jobs/tracker_api.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/jobs/tracker_api.rs 
b/src/jobs/tracker_api.rs index 7787ea3f4..2c00aa453 100644 --- a/src/jobs/tracker_api.rs +++ b/src/jobs/tracker_api.rs @@ -11,6 +11,9 @@ use crate::tracker; #[derive(Debug)] pub struct ApiServerJobStarted(); +/// # Panics +/// +/// It would panic if unable to send the `ApiServerJobStarted` notice. pub async fn start_job(config: &Configuration, tracker: Arc) -> JoinHandle<()> { let bind_addr = config .http_api @@ -26,7 +29,7 @@ pub async fn start_job(config: &Configuration, tracker: Arc) - let join_handle = tokio::spawn(async move { let handel = server::start(bind_addr, &tracker); - assert!(tx.send(ApiServerJobStarted()).is_ok(), "the start job dropped"); + tx.send(ApiServerJobStarted()).expect("the start job dropped"); handel.await; }); @@ -34,7 +37,7 @@ pub async fn start_job(config: &Configuration, tracker: Arc) - // Wait until the API server job is running match rx.await { Ok(_msg) => info!("Torrust API server started"), - Err(_) => panic!("the api server dropped"), + Err(e) => panic!("the api server dropped: {e}"), } join_handle From 220f83af3d6d98bd8fd181b14601b249fcc00772 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Fri, 25 Nov 2022 20:14:12 +0100 Subject: [PATCH 34/45] clippy: fix src/udp/handlers.rs --- src/udp/errors.rs | 15 ++++++++ src/udp/handlers.rs | 84 +++++++++++++++++++++------------------------ 2 files changed, 54 insertions(+), 45 deletions(-) diff --git a/src/udp/errors.rs b/src/udp/errors.rs index 8d7b04b4f..f90149a99 100644 --- a/src/udp/errors.rs +++ b/src/udp/errors.rs @@ -1,5 +1,7 @@ use thiserror::Error; +use crate::tracker::torrent; + #[derive(Error, Debug)] pub enum ServerError { #[error("internal server error")] @@ -32,3 +34,16 @@ pub enum ServerError { #[error("bad request")] BadRequest, } + +impl From for ServerError { + fn from(e: torrent::Error) -> Self { + match e { + torrent::Error::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, + torrent::Error::PeerNotAuthenticated => ServerError::PeerNotAuthenticated, + 
torrent::Error::PeerKeyNotValid => ServerError::PeerKeyNotValid, + torrent::Error::NoPeersFound => ServerError::NoPeersFound, + torrent::Error::CouldNotSendResponse => ServerError::InternalServerError, + torrent::Error::InvalidInfoHash => ServerError::InvalidInfoHash, + } + } +} diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index cdf12ed6b..dde8d14ae 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -8,28 +8,10 @@ use aquatic_udp_protocol::{ use super::connection_cookie::{check, from_connection_id, into_connection_id, make}; use crate::protocol::common::{InfoHash, MAX_SCRAPE_TORRENTS}; -use crate::tracker::{self, peer, statistics, torrent}; +use crate::tracker::{self, peer, statistics}; use crate::udp::errors::ServerError; use crate::udp::request::AnnounceRequestWrapper; -pub async fn authenticate(info_hash: &InfoHash, tracker: Arc) -> Result<(), ServerError> { - match tracker.authenticate_request(info_hash, &None).await { - Ok(_) => Ok(()), - Err(e) => { - let err = match e { - torrent::Error::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, - torrent::Error::PeerNotAuthenticated => ServerError::PeerNotAuthenticated, - torrent::Error::PeerKeyNotValid => ServerError::PeerKeyNotValid, - torrent::Error::NoPeersFound => ServerError::NoPeersFound, - torrent::Error::CouldNotSendResponse => ServerError::InternalServerError, - torrent::Error::InvalidInfoHash => ServerError::InvalidInfoHash, - }; - - Err(err) - } - } -} - pub async fn handle_packet(remote_addr: SocketAddr, payload: Vec, tracker: Arc) -> Response { match Request::from_bytes(&payload[..payload.len()], MAX_SCRAPE_TORRENTS).map_err(|_| ServerError::InternalServerError) { Ok(request) => { @@ -41,14 +23,17 @@ pub async fn handle_packet(remote_addr: SocketAddr, payload: Vec, tracker: A match handle_request(request, remote_addr, tracker).await { Ok(response) => response, - Err(e) => handle_error(e, transaction_id), + Err(e) => handle_error(&e, transaction_id), } } // bad request - 
Err(_) => handle_error(ServerError::BadRequest, TransactionId(0)), + Err(_) => handle_error(&ServerError::BadRequest, TransactionId(0)), } } +/// # Errors +/// +/// If an error happens in the `handle_request` function, it will just return the `ServerError`. pub async fn handle_request( request: Request, remote_addr: SocketAddr, @@ -61,6 +46,9 @@ pub async fn handle_request( } } +/// # Errors +/// +/// This function does not ever return an error. pub async fn handle_connect( remote_addr: SocketAddr, request: &ConnectRequest, @@ -87,21 +75,21 @@ pub async fn handle_connect( Ok(response) } +/// # Errors +/// +/// If an error happens in the `handle_announce` function, it will just return the `ServerError`. pub async fn handle_announce( remote_addr: SocketAddr, announce_request: &AnnounceRequest, tracker: Arc, ) -> Result { - match check(&remote_addr, &from_connection_id(&announce_request.connection_id)) { - Ok(_) => {} - Err(e) => { - return Err(e); - } - } + check(&remote_addr, &from_connection_id(&announce_request.connection_id))?; let wrapped_announce_request = AnnounceRequestWrapper::new(announce_request.clone()); - authenticate(&wrapped_announce_request.info_hash, tracker.clone()).await?; + tracker + .authenticate_request(&wrapped_announce_request.info_hash, &None) + .await?; let peer = peer::TorrentPeer::from_udp_announce_request( &wrapped_announce_request.announce_request, @@ -120,12 +108,13 @@ pub async fn handle_announce( .get_torrent_peers(&wrapped_announce_request.info_hash, &peer.peer_addr) .await; + #[allow(clippy::cast_possible_truncation)] let announce_response = if remote_addr.is_ipv4() { Response::from(AnnounceResponse { transaction_id: wrapped_announce_request.announce_request.transaction_id, - announce_interval: AnnounceInterval(tracker.config.announce_interval as i32), - leechers: NumberOfPeers(torrent_stats.leechers as i32), - seeders: NumberOfPeers(torrent_stats.seeders as i32), + announce_interval: 
AnnounceInterval(i64::from(tracker.config.announce_interval) as i32), + leechers: NumberOfPeers(i64::from(torrent_stats.leechers) as i32), + seeders: NumberOfPeers(i64::from(torrent_stats.seeders) as i32), peers: peers .iter() .filter_map(|peer| { @@ -143,9 +132,9 @@ pub async fn handle_announce( } else { Response::from(AnnounceResponse { transaction_id: wrapped_announce_request.announce_request.transaction_id, - announce_interval: AnnounceInterval(tracker.config.announce_interval as i32), - leechers: NumberOfPeers(torrent_stats.leechers as i32), - seeders: NumberOfPeers(torrent_stats.seeders as i32), + announce_interval: AnnounceInterval(i64::from(tracker.config.announce_interval) as i32), + leechers: NumberOfPeers(i64::from(torrent_stats.leechers) as i32), + seeders: NumberOfPeers(i64::from(torrent_stats.seeders) as i32), peers: peers .iter() .filter_map(|peer| { @@ -175,7 +164,11 @@ pub async fn handle_announce( Ok(announce_response) } -// todo: refactor this, db lock can be a lot shorter +/// # Errors +/// +/// This function does not ever return an error. 
+/// +/// TODO: refactor this, db lock can be a lot shorter pub async fn handle_scrape( remote_addr: SocketAddr, request: &ScrapeRequest, @@ -190,13 +183,14 @@ pub async fn handle_scrape( let scrape_entry = match db.get(&info_hash) { Some(torrent_info) => { - if authenticate(&info_hash, tracker.clone()).await.is_ok() { + if tracker.authenticate_request(&info_hash, &None).await.is_ok() { let (seeders, completed, leechers) = torrent_info.get_stats(); + #[allow(clippy::cast_possible_truncation)] TorrentScrapeStatistics { - seeders: NumberOfPeers(seeders as i32), - completed: NumberOfDownloads(completed as i32), - leechers: NumberOfPeers(leechers as i32), + seeders: NumberOfPeers(i64::from(seeders) as i32), + completed: NumberOfDownloads(i64::from(completed) as i32), + leechers: NumberOfPeers(i64::from(leechers) as i32), } } else { TorrentScrapeStatistics { @@ -234,7 +228,7 @@ pub async fn handle_scrape( })) } -fn handle_error(e: ServerError, transaction_id: TransactionId) -> Response { +fn handle_error(e: &ServerError, transaction_id: TransactionId) -> Response { let message = e.to_string(); Response::from(ErrorResponse { transaction_id, @@ -260,7 +254,7 @@ mod tests { fn initialized_public_tracker() -> Arc { let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Tracker::Public).into()); - initialized_tracker(configuration) + initialized_tracker(&configuration) } fn initialized_private_tracker() -> Arc { @@ -269,17 +263,17 @@ mod tests { .with_mode(mode::Tracker::Private) .into(), ); - initialized_tracker(configuration) + initialized_tracker(&configuration) } fn initialized_whitelisted_tracker() -> Arc { let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Tracker::Listed).into()); - initialized_tracker(configuration) + initialized_tracker(&configuration) } - fn initialized_tracker(configuration: Arc) -> Arc { + fn initialized_tracker(configuration: &Arc) -> Arc { let (stats_event_sender, stats_repository) = 
statistics::Keeper::new_active_instance(); - Arc::new(tracker::Tracker::new(&configuration, Some(stats_event_sender), stats_repository).unwrap()) + Arc::new(tracker::Tracker::new(configuration, Some(stats_event_sender), stats_repository).unwrap()) } fn sample_ipv4_remote_addr() -> SocketAddr { From aa30bb1c933b6091d9eae1fe790b00a349a52a6d Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Fri, 25 Nov 2022 20:15:56 +0100 Subject: [PATCH 35/45] clippy: fix src/udp/request.rs --- src/udp/handlers.rs | 2 +- src/udp/request.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index dde8d14ae..274af1e2c 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -85,7 +85,7 @@ pub async fn handle_announce( ) -> Result { check(&remote_addr, &from_connection_id(&announce_request.connection_id))?; - let wrapped_announce_request = AnnounceRequestWrapper::new(announce_request.clone()); + let wrapped_announce_request = AnnounceRequestWrapper::new(announce_request); tracker .authenticate_request(&wrapped_announce_request.info_hash, &None) diff --git a/src/udp/request.rs b/src/udp/request.rs index 53d646f1a..34139384b 100644 --- a/src/udp/request.rs +++ b/src/udp/request.rs @@ -24,7 +24,7 @@ pub struct AnnounceRequestWrapper { impl AnnounceRequestWrapper { #[must_use] - pub fn new(announce_request: AnnounceRequest) -> Self { + pub fn new(announce_request: &AnnounceRequest) -> Self { AnnounceRequestWrapper { announce_request: announce_request.clone(), info_hash: InfoHash(announce_request.info_hash.0), From 436a0c1e03b09878bbb16102f83fd3350f95e054 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Fri, 25 Nov 2022 20:25:56 +0100 Subject: [PATCH 36/45] clippy: fix src/udp/server.rs --- src/jobs/udp_tracker.rs | 4 ++-- src/udp/server.rs | 21 ++++++++++++++------- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/src/jobs/udp_tracker.rs b/src/jobs/udp_tracker.rs index d5fdae4c1..57369f660 100644 --- 
a/src/jobs/udp_tracker.rs +++ b/src/jobs/udp_tracker.rs @@ -5,14 +5,14 @@ use tokio::task::JoinHandle; use crate::config::UdpTracker; use crate::tracker; -use crate::udp::server::UdpServer; +use crate::udp::server::Udp; #[must_use] pub fn start_job(config: &UdpTracker, tracker: Arc) -> JoinHandle<()> { let bind_addr = config.bind_address.clone(); tokio::spawn(async move { - match UdpServer::new(tracker, &bind_addr).await { + match Udp::new(tracker, &bind_addr).await { Ok(udp_server) => { info!("Starting UDP server on: {}", bind_addr); udp_server.start().await; diff --git a/src/udp/server.rs b/src/udp/server.rs index 705a6c263..5bd835365 100644 --- a/src/udp/server.rs +++ b/src/udp/server.rs @@ -10,21 +10,27 @@ use crate::tracker; use crate::udp::handlers::handle_packet; use crate::udp::MAX_PACKET_SIZE; -pub struct UdpServer { +pub struct Udp { socket: Arc, tracker: Arc, } -impl UdpServer { - pub async fn new(tracker: Arc, bind_address: &str) -> tokio::io::Result { +impl Udp { + /// # Errors + /// + /// Will return `Err` if unable to bind to the supplied `bind_address`. + pub async fn new(tracker: Arc, bind_address: &str) -> tokio::io::Result { let socket = UdpSocket::bind(bind_address).await?; - Ok(UdpServer { + Ok(Udp { socket: Arc::new(socket), tracker, }) } + /// # Panics + /// + /// It would panic if unable to resolve the `local_addr` from the supplied `socket`. 
pub async fn start(&self) { loop { let mut data = [0; MAX_PACKET_SIZE]; @@ -43,7 +49,7 @@ impl UdpServer { debug!("{:?}", payload); let response = handle_packet(remote_addr, payload, tracker).await; - UdpServer::send_response(socket, remote_addr, response).await; + Udp::send_response(socket, remote_addr, response).await; } } } @@ -57,11 +63,12 @@ impl UdpServer { match response.write(&mut cursor) { Ok(_) => { + #[allow(clippy::cast_possible_truncation)] let position = cursor.position() as usize; let inner = cursor.get_ref(); debug!("{:?}", &inner[..position]); - UdpServer::send_packet(socket, &remote_addr, &inner[..position]).await; + Udp::send_packet(socket, &remote_addr, &inner[..position]).await; } Err(_) => { debug!("could not write response to bytes."); @@ -71,6 +78,6 @@ impl UdpServer { async fn send_packet(socket: Arc, remote_addr: &SocketAddr, payload: &[u8]) { // doesn't matter if it reaches or not - let _ = socket.send_to(payload, remote_addr).await; + drop(socket.send_to(payload, remote_addr).await); } } From 6564c10de70ebd1fda24443537205bf5186be44c Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Fri, 25 Nov 2022 20:30:19 +0100 Subject: [PATCH 37/45] clippy: fix src/protocol/crypto.rs --- src/protocol/crypto.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/protocol/crypto.rs b/src/protocol/crypto.rs index 6e1517ef8..2d3f8f6fa 100644 --- a/src/protocol/crypto.rs +++ b/src/protocol/crypto.rs @@ -48,12 +48,12 @@ pub mod keys { #[test] fn the_default_seed_and_the_zeroed_seed_should_be_the_same_when_testing() { - assert_eq!(DefaultSeed::get_seed(), ZeroedTestSeed::get_seed()) + assert_eq!(DefaultSeed::get_seed(), ZeroedTestSeed::get_seed()); } #[test] fn the_default_seed_and_the_instance_seed_should_be_different_when_testing() { - assert_ne!(DefaultSeed::get_seed(), InstanceSeed::get_seed()) + assert_ne!(DefaultSeed::get_seed(), InstanceSeed::get_seed()); } } @@ -79,12 +79,12 @@ pub mod keys { #[test] fn 
it_should_have_a_zero_test_seed() { - assert_eq!(*ZEROED_TEST_SEED, [0u8; 32]) + assert_eq!(*ZEROED_TEST_SEED, [0u8; 32]); } #[test] fn it_should_default_to_zeroed_seed_when_testing() { - assert_eq!(*DEFAULT_SEED, *ZEROED_TEST_SEED) + assert_eq!(*DEFAULT_SEED, *ZEROED_TEST_SEED); } #[test] From baba21b31f669904c2528b87d91dc07edbcaa60b Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Fri, 25 Nov 2022 20:37:27 +0100 Subject: [PATCH 38/45] clippy: fix tests/udp.rs --- tests/udp.rs | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/tests/udp.rs b/tests/udp.rs index e93894843..8bad37dbe 100644 --- a/tests/udp.rs +++ b/tests/udp.rs @@ -48,7 +48,7 @@ mod udp_tracker_server { } } - pub async fn start(&mut self, configuration: Arc) { + pub fn start(&mut self, configuration: &Arc) { if !self.started.load(Ordering::Relaxed) { // Set the time of Torrust app starting lazy_static::initialize(&static_time::TIME_AT_APP_START); @@ -68,7 +68,7 @@ mod udp_tracker_server { }; // Initialize logging - logging::setup(&configuration); + logging::setup(configuration); let udp_tracker_config = &configuration.udp_trackers[0]; @@ -82,9 +82,9 @@ mod udp_tracker_server { } } - async fn new_running_udp_server(configuration: Arc) -> UdpServer { + fn new_running_udp_server(configuration: &Arc) -> UdpServer { let mut udp_server = UdpServer::new(); - udp_server.start(configuration).await; + udp_server.start(configuration); udp_server } @@ -101,7 +101,7 @@ mod udp_tracker_server { } async fn connect(&self, remote_address: &str) { - self.socket.connect(remote_address).await.unwrap() + self.socket.connect(remote_address).await.unwrap(); } async fn send(&self, bytes: &[u8]) -> usize { @@ -134,12 +134,13 @@ mod udp_tracker_server { let request_data = match request.write(&mut cursor) { Ok(_) => { + #[allow(clippy::cast_possible_truncation)] let position = cursor.position() as usize; let inner_request_buffer = cursor.get_ref(); // Return slice which contains 
written request data &inner_request_buffer[..position] } - Err(_) => panic!("could not write request to bytes."), + Err(e) => panic!("could not write request to bytes: {e}."), }; self.udp_client.send(request_data).await @@ -199,7 +200,7 @@ mod udp_tracker_server { async fn should_return_a_bad_request_response_when_the_client_sends_an_empty_request() { let configuration = tracker_configuration(); - let udp_server = new_running_udp_server(configuration).await; + let udp_server = new_running_udp_server(&configuration); let client = new_connected_udp_client(&udp_server.bind_address.unwrap()).await; @@ -216,7 +217,7 @@ mod udp_tracker_server { async fn should_return_a_connect_response_when_the_client_sends_a_connection_request() { let configuration = tracker_configuration(); - let udp_server = new_running_udp_server(configuration).await; + let udp_server = new_running_udp_server(&configuration); let client = new_connected_udp_tracker_client(&udp_server.bind_address.unwrap()).await; @@ -248,7 +249,7 @@ mod udp_tracker_server { async fn should_return_an_announce_response_when_the_client_sends_an_announce_request() { let configuration = tracker_configuration(); - let udp_server = new_running_udp_server(configuration).await; + let udp_server = new_running_udp_server(&configuration); let client = new_connected_udp_tracker_client(&udp_server.bind_address.unwrap()).await; @@ -282,7 +283,7 @@ mod udp_tracker_server { async fn should_return_a_scrape_response_when_the_client_sends_a_scrape_request() { let configuration = tracker_configuration(); - let udp_server = new_running_udp_server(configuration).await; + let udp_server = new_running_udp_server(&configuration); let client = new_connected_udp_tracker_client(&udp_server.bind_address.unwrap()).await; From 0d162a12880754412a989cda09ce91c03dceb6e5 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Mon, 28 Nov 2022 18:51:58 +0100 Subject: [PATCH 39/45] refactor: correct naming of structs and enums --- 
src/api/resources/auth_key_resource.rs | 20 +- src/api/resources/torrent_resource.rs | 7 +- src/api/server.rs | 6 +- src/config.rs | 34 +-- src/databases/database.rs | 2 +- src/databases/mysql.rs | 3 +- src/databases/sqlite.rs | 2 +- src/http/{errors.rs => error.rs} | 8 +- src/http/filters.rs | 37 +-- src/http/handlers.rs | 47 ++-- src/http/mod.rs | 2 +- src/http/request.rs | 7 +- src/http/response.rs | 6 +- src/protocol/common.rs | 318 ------------------------- src/protocol/crypto.rs | 30 +-- src/protocol/info_hash.rs | 190 +++++++++++++++ src/protocol/mod.rs | 1 + src/tracker/mod.rs | 10 +- src/tracker/mode.rs | 2 +- src/tracker/peer.rs | 144 ++++++++++- src/tracker/torrent.rs | 13 +- src/udp/connection_cookie.rs | 10 +- src/udp/{errors.rs => error.rs} | 18 +- src/udp/handlers.rs | 65 +++-- src/udp/mod.rs | 2 +- src/udp/request.rs | 8 +- tests/api.rs | 10 +- 27 files changed, 495 insertions(+), 507 deletions(-) rename src/http/{errors.rs => error.rs} (86%) create mode 100644 src/protocol/info_hash.rs rename src/udp/{errors.rs => error.rs} (57%) diff --git a/src/api/resources/auth_key_resource.rs b/src/api/resources/auth_key_resource.rs index 9b3cc9646..b575984db 100644 --- a/src/api/resources/auth_key_resource.rs +++ b/src/api/resources/auth_key_resource.rs @@ -6,13 +6,13 @@ use crate::protocol::clock::DurationSinceUnixEpoch; use crate::tracker::key::Auth; #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] -pub struct AuthKeyResource { +pub struct AuthKey { pub key: String, pub valid_until: Option, } -impl From for Auth { - fn from(auth_key_resource: AuthKeyResource) -> Self { +impl From for Auth { + fn from(auth_key_resource: AuthKey) -> Self { Auth { key: auth_key_resource.key, valid_until: auth_key_resource @@ -22,9 +22,9 @@ impl From for Auth { } } -impl From for AuthKeyResource { +impl From for AuthKey { fn from(auth_key: Auth) -> Self { - AuthKeyResource { + AuthKey { key: auth_key.key, valid_until: auth_key.valid_until.map(|valid_until| 
valid_until.as_secs()), } @@ -35,7 +35,7 @@ impl From for AuthKeyResource { mod tests { use std::time::Duration; - use super::AuthKeyResource; + use super::AuthKey; use crate::protocol::clock::{Current, TimeNow}; use crate::tracker::key::Auth; @@ -43,7 +43,7 @@ mod tests { fn it_should_be_convertible_into_an_auth_key() { let duration_in_secs = 60; - let auth_key_resource = AuthKeyResource { + let auth_key_resource = AuthKey { key: "IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line valid_until: Some(duration_in_secs), }; @@ -67,8 +67,8 @@ mod tests { }; assert_eq!( - AuthKeyResource::from(auth_key), - AuthKeyResource { + AuthKey::from(auth_key), + AuthKey { key: "IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line valid_until: Some(duration_in_secs) } @@ -78,7 +78,7 @@ mod tests { #[test] fn it_should_be_convertible_into_json() { assert_eq!( - serde_json::to_string(&AuthKeyResource { + serde_json::to_string(&AuthKey { key: "IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line valid_until: Some(60) }) diff --git a/src/api/resources/torrent_resource.rs b/src/api/resources/torrent_resource.rs index eb9620d23..4063b95f5 100644 --- a/src/api/resources/torrent_resource.rs +++ b/src/api/resources/torrent_resource.rs @@ -1,7 +1,6 @@ use serde::{Deserialize, Serialize}; -use crate::protocol::common::PeerId; -use crate::tracker::peer::TorrentPeer; +use crate::tracker::peer::{self, TorrentPeer}; #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct TorrentResource { @@ -42,8 +41,8 @@ pub struct PeerIdResource { pub client: Option, } -impl From for PeerIdResource { - fn from(peer_id: PeerId) -> Self { +impl From for PeerIdResource { + fn from(peer_id: peer::Id) -> Self { PeerIdResource { id: peer_id.get_id(), client: peer_id.get_client_name().map(std::string::ToString::to_string), diff --git a/src/api/server.rs b/src/api/server.rs index fac25e297..61fd8ed3d 100644 --- a/src/api/server.rs +++ b/src/api/server.rs 
@@ -7,10 +7,10 @@ use std::time::Duration; use serde::{Deserialize, Serialize}; use warp::{filters, reply, serve, Filter}; -use super::resources::auth_key_resource::AuthKeyResource; +use super::resources::auth_key_resource::AuthKey; use super::resources::stats_resource::StatsResource; use super::resources::torrent_resource::{TorrentListItemResource, TorrentPeerResource, TorrentResource}; -use crate::protocol::common::InfoHash; +use crate::protocol::info_hash::InfoHash; use crate::tracker; #[derive(Deserialize, Debug)] @@ -243,7 +243,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl w }) .and_then(|(seconds_valid, tracker): (u64, Arc)| async move { match tracker.generate_auth_key(Duration::from_secs(seconds_valid)).await { - Ok(auth_key) => Ok(warp::reply::json(&AuthKeyResource::from(auth_key))), + Ok(auth_key) => Ok(warp::reply::json(&AuthKey::from(auth_key))), Err(..) => Err(warp::reject::custom(ActionStatus::Err { reason: "failed to generate key".into(), })), diff --git a/src/config.rs b/src/config.rs index 9f6ca7092..67177aca1 100644 --- a/src/config.rs +++ b/src/config.rs @@ -41,7 +41,7 @@ pub struct HttpApi { #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] pub struct Configuration { pub log_level: Option, - pub mode: mode::Tracker, + pub mode: mode::Mode, pub db_driver: Drivers, pub db_path: String, pub announce_interval: u32, @@ -59,7 +59,7 @@ pub struct Configuration { } #[derive(Debug)] -pub enum ConfigurationError { +pub enum Error { Message(String), ConfigError(ConfigError), IOError(std::io::Error), @@ -67,19 +67,19 @@ pub enum ConfigurationError { TrackerModeIncompatible, } -impl std::fmt::Display for ConfigurationError { +impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { - ConfigurationError::Message(e) => e.fmt(f), - ConfigurationError::ConfigError(e) => e.fmt(f), - ConfigurationError::IOError(e) => e.fmt(f), - ConfigurationError::ParseError(e) => e.fmt(f), - 
ConfigurationError::TrackerModeIncompatible => write!(f, "{:?}", self), + Error::Message(e) => e.fmt(f), + Error::ConfigError(e) => e.fmt(f), + Error::IOError(e) => e.fmt(f), + Error::ParseError(e) => e.fmt(f), + Error::TrackerModeIncompatible => write!(f, "{:?}", self), } } } -impl std::error::Error for ConfigurationError {} +impl std::error::Error for Error {} impl Configuration { #[must_use] @@ -97,7 +97,7 @@ impl Configuration { pub fn default() -> Configuration { let mut configuration = Configuration { log_level: Option::from(String::from("info")), - mode: mode::Tracker::Public, + mode: mode::Mode::Public, db_driver: Drivers::Sqlite3, db_path: String::from("data.db"), announce_interval: 120, @@ -137,7 +137,7 @@ impl Configuration { /// # Errors /// /// Will return `Err` if `path` does not exist or has a bad configuration. - pub fn load_from_file(path: &str) -> Result { + pub fn load_from_file(path: &str) -> Result { let config_builder = Config::builder(); #[allow(unused_assignments)] @@ -147,18 +147,18 @@ impl Configuration { config = config_builder .add_source(File::with_name(path)) .build() - .map_err(ConfigurationError::ConfigError)?; + .map_err(Error::ConfigError)?; } else { eprintln!("No config file found."); eprintln!("Creating config file.."); let config = Configuration::default(); config.save_to_file(path)?; - return Err(ConfigurationError::Message( + return Err(Error::Message( "Please edit the config.TOML in the root folder and restart the tracker.".to_string(), )); } - let torrust_config: Configuration = config.try_deserialize().map_err(ConfigurationError::ConfigError)?; + let torrust_config: Configuration = config.try_deserialize().map_err(Error::ConfigError)?; Ok(torrust_config) } @@ -167,7 +167,7 @@ impl Configuration { /// /// Will return `Err` if `filename` does not exist or the user does not have /// permission to read it. 
- pub fn save_to_file(&self, path: &str) -> Result<(), ConfigurationError> { + pub fn save_to_file(&self, path: &str) -> Result<(), Error> { let toml_string = toml::to_string(self).expect("Could not encode TOML value"); fs::write(path, toml_string).expect("Could not write to file!"); Ok(()) @@ -176,7 +176,7 @@ impl Configuration { #[cfg(test)] mod tests { - use crate::config::{Configuration, ConfigurationError}; + use crate::config::{Configuration, Error}; #[cfg(test)] fn default_config_toml() -> String { @@ -294,7 +294,7 @@ mod tests { #[test] fn configuration_error_could_be_displayed() { - let error = ConfigurationError::TrackerModeIncompatible; + let error = Error::TrackerModeIncompatible; assert_eq!(format!("{}", error), "TrackerModeIncompatible"); } diff --git a/src/databases/database.rs b/src/databases/database.rs index 5e4a7c1f9..7055d2a09 100644 --- a/src/databases/database.rs +++ b/src/databases/database.rs @@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize}; use crate::databases::mysql::Mysql; use crate::databases::sqlite::Sqlite; -use crate::protocol::common::InfoHash; +use crate::protocol::info_hash::InfoHash; use crate::tracker::key::Auth; #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] diff --git a/src/databases/mysql.rs b/src/databases/mysql.rs index 4fd00e31e..0dafc3a60 100644 --- a/src/databases/mysql.rs +++ b/src/databases/mysql.rs @@ -10,7 +10,8 @@ use r2d2_mysql::MysqlConnectionManager; use crate::databases::database; use crate::databases::database::{Database, Error}; -use crate::protocol::common::{InfoHash, AUTH_KEY_LENGTH}; +use crate::protocol::common::AUTH_KEY_LENGTH; +use crate::protocol::info_hash::InfoHash; use crate::tracker::key::Auth; pub struct Mysql { diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs index 159da9922..39dea8502 100644 --- a/src/databases/sqlite.rs +++ b/src/databases/sqlite.rs @@ -8,7 +8,7 @@ use r2d2_sqlite::SqliteConnectionManager; use crate::databases::database; use 
crate::databases::database::{Database, Error}; use crate::protocol::clock::DurationSinceUnixEpoch; -use crate::protocol::common::InfoHash; +use crate::protocol::info_hash::InfoHash; use crate::tracker::key::Auth; pub struct Sqlite { diff --git a/src/http/errors.rs b/src/http/error.rs similarity index 86% rename from src/http/errors.rs rename to src/http/error.rs index fe0cf26e6..b6c08a8ba 100644 --- a/src/http/errors.rs +++ b/src/http/error.rs @@ -2,12 +2,12 @@ use thiserror::Error; use warp::reject::Reject; #[derive(Error, Debug)] -pub enum ServerError { +pub enum Error { #[error("internal server error")] - InternalServerError, + InternalServer, #[error("info_hash is either missing or invalid")] - InvalidInfoHash, + InvalidInfo, #[error("peer_id is either missing or invalid")] InvalidPeerId, @@ -31,4 +31,4 @@ pub enum ServerError { ExceededInfoHashLimit, } -impl Reject for ServerError {} +impl Reject for Error {} diff --git a/src/http/filters.rs b/src/http/filters.rs index 2c3ab626d..484ae2311 100644 --- a/src/http/filters.rs +++ b/src/http/filters.rs @@ -5,12 +5,13 @@ use std::sync::Arc; use warp::{reject, Filter, Rejection}; -use super::errors::ServerError; -use super::request::{Announce, AnnounceRequestQuery, Scrape}; +use super::error::Error; +use super::request::{Announce, AnnounceQuery, Scrape}; use super::WebResult; -use crate::protocol::common::{InfoHash, PeerId, MAX_SCRAPE_TORRENTS}; -use crate::tracker; +use crate::protocol::common::MAX_SCRAPE_TORRENTS; +use crate::protocol::info_hash::InfoHash; use crate::tracker::key::Auth; +use crate::tracker::{self, peer}; /// Pass Arc along #[must_use] @@ -28,7 +29,7 @@ pub fn with_info_hash() -> impl Filter,), Error = Rejec /// Check for `PeerId` #[must_use] -pub fn with_peer_id() -> impl Filter + Clone { +pub fn with_peer_id() -> impl Filter + Clone { warp::filters::query::raw().and_then(|q| async move { peer_id(&q) }) } @@ -54,7 +55,7 @@ pub fn with_peer_addr(on_reverse_proxy: bool) -> impl Filter impl Filter + 
Clone { - warp::filters::query::query::() + warp::filters::query::query::() .and(with_info_hash()) .and(with_peer_id()) .and(with_peer_addr(on_reverse_proxy)) @@ -88,9 +89,9 @@ fn info_hashes(raw_query: &String) -> WebResult> { } if info_hashes.len() > MAX_SCRAPE_TORRENTS as usize { - Err(reject::custom(ServerError::ExceededInfoHashLimit)) + Err(reject::custom(Error::ExceededInfoHashLimit)) } else if info_hashes.is_empty() { - Err(reject::custom(ServerError::InvalidInfoHash)) + Err(reject::custom(Error::InvalidInfo)) } else { Ok(info_hashes) } @@ -98,11 +99,11 @@ fn info_hashes(raw_query: &String) -> WebResult> { /// Parse `PeerId` from raw query string #[allow(clippy::ptr_arg)] -fn peer_id(raw_query: &String) -> WebResult { +fn peer_id(raw_query: &String) -> WebResult { // put all query params in a vec let split_raw_query: Vec<&str> = raw_query.split('&').collect(); - let mut peer_id: Option = None; + let mut peer_id: Option = None; for v in split_raw_query { // look for the peer_id param @@ -115,32 +116,32 @@ fn peer_id(raw_query: &String) -> WebResult { // peer_id must be 20 bytes if peer_id_bytes.len() != 20 { - return Err(reject::custom(ServerError::InvalidPeerId)); + return Err(reject::custom(Error::InvalidPeerId)); } // clone peer_id_bytes into fixed length array let mut byte_arr: [u8; 20] = Default::default(); byte_arr.clone_from_slice(peer_id_bytes.as_slice()); - peer_id = Some(PeerId(byte_arr)); + peer_id = Some(peer::Id(byte_arr)); break; } } match peer_id { Some(id) => Ok(id), - None => Err(reject::custom(ServerError::InvalidPeerId)), + None => Err(reject::custom(Error::InvalidPeerId)), } } /// Get `PeerAddress` from `RemoteAddress` or Forwarded fn peer_addr((on_reverse_proxy, remote_addr, x_forwarded_for): (bool, Option, Option)) -> WebResult { if !on_reverse_proxy && remote_addr.is_none() { - return Err(reject::custom(ServerError::AddressNotFound)); + return Err(reject::custom(Error::AddressNotFound)); } if on_reverse_proxy && 
x_forwarded_for.is_none() { - return Err(reject::custom(ServerError::AddressNotFound)); + return Err(reject::custom(Error::AddressNotFound)); } if on_reverse_proxy { @@ -152,7 +153,7 @@ fn peer_addr((on_reverse_proxy, remote_addr, x_forwarded_for): (bool, Option, - peer_id: PeerId, + peer_id: peer::Id, peer_addr: IpAddr, ) -> WebResult { Ok(Announce { diff --git a/src/http/handlers.rs b/src/http/handlers.rs index d4ae76e65..ff5469168 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -7,11 +7,10 @@ use log::debug; use warp::http::Response; use warp::{reject, Rejection, Reply}; -use super::errors::ServerError; -use super::response::{self, Peer, ScrapeResponseEntry}; +use super::error::Error; +use super::response::{self, Peer, ScrapeEntry}; use super::{request, WebResult}; -use crate::http::response::Error; -use crate::protocol::common::InfoHash; +use crate::protocol::info_hash::InfoHash; use crate::tracker::key::Auth; use crate::tracker::{self, peer, statistics, torrent}; @@ -20,18 +19,14 @@ use crate::tracker::{self, peer, statistics, torrent}; /// # Errors /// /// Will return `ServerError` that wraps the `Error` if unable to `authenticate_request`. 
-pub async fn authenticate( - info_hash: &InfoHash, - auth_key: &Option, - tracker: Arc, -) -> Result<(), ServerError> { +pub async fn authenticate(info_hash: &InfoHash, auth_key: &Option, tracker: Arc) -> Result<(), Error> { tracker.authenticate_request(info_hash, auth_key).await.map_err(|e| match e { - torrent::Error::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, - torrent::Error::PeerNotAuthenticated => ServerError::PeerNotAuthenticated, - torrent::Error::PeerKeyNotValid => ServerError::PeerKeyNotValid, - torrent::Error::NoPeersFound => ServerError::NoPeersFound, - torrent::Error::CouldNotSendResponse => ServerError::InternalServerError, - torrent::Error::InvalidInfoHash => ServerError::InvalidInfoHash, + torrent::Error::TorrentNotWhitelisted => Error::TorrentNotWhitelisted, + torrent::Error::PeerNotAuthenticated => Error::PeerNotAuthenticated, + torrent::Error::PeerKeyNotValid => Error::PeerKeyNotValid, + torrent::Error::NoPeersFound => Error::NoPeersFound, + torrent::Error::CouldNotSendResponse => Error::InternalServer, + torrent::Error::InvalidInfoHash => Error::InvalidInfo, }) } @@ -91,7 +86,7 @@ pub async fn handle_scrape( auth_key: Option, tracker: Arc, ) -> WebResult { - let mut files: HashMap = HashMap::new(); + let mut files: HashMap = HashMap::new(); let db = tracker.get_torrents().await; for info_hash in &scrape_request.info_hashes { @@ -99,20 +94,20 @@ pub async fn handle_scrape( Some(torrent_info) => { if authenticate(info_hash, &auth_key, tracker.clone()).await.is_ok() { let (seeders, completed, leechers) = torrent_info.get_stats(); - ScrapeResponseEntry { + ScrapeEntry { complete: seeders, downloaded: completed, incomplete: leechers, } } else { - ScrapeResponseEntry { + ScrapeEntry { complete: 0, downloaded: 0, incomplete: 0, } } } - None => ScrapeResponseEntry { + None => ScrapeEntry { complete: 0, downloaded: 0, incomplete: 0, @@ -165,7 +160,7 @@ fn send_announce_response( if let Some(1) = announce_request.compact { match 
res.write_compact() { Ok(body) => Ok(Response::new(body)), - Err(_) => Err(reject::custom(ServerError::InternalServerError)), + Err(_) => Err(reject::custom(Error::InternalServer)), } } else { Ok(Response::new(res.write().into())) @@ -173,12 +168,12 @@ fn send_announce_response( } /// Send scrape response -fn send_scrape_response(files: HashMap) -> WebResult { +fn send_scrape_response(files: HashMap) -> WebResult { let res = response::Scrape { files }; match res.write() { Ok(body) => Ok(Response::new(body)), - Err(_) => Err(reject::custom(ServerError::InternalServerError)), + Err(_) => Err(reject::custom(Error::InternalServer)), } } @@ -188,15 +183,15 @@ fn send_scrape_response(files: HashMap) -> WebRes /// /// Will not return a error, `Infallible`, but instead convert the `ServerError` into a `Response`. pub fn send_error(r: &Rejection) -> std::result::Result { - let body = if let Some(server_error) = r.find::() { + let body = if let Some(server_error) = r.find::() { debug!("{:?}", server_error); - Error { + response::Error { failure_reason: server_error.to_string(), } .write() } else { - Error { - failure_reason: ServerError::InternalServerError.to_string(), + response::Error { + failure_reason: Error::InternalServer.to_string(), } .write() }; diff --git a/src/http/mod.rs b/src/http/mod.rs index 6e3ce7111..701dba407 100644 --- a/src/http/mod.rs +++ b/src/http/mod.rs @@ -1,4 +1,4 @@ -pub mod errors; +pub mod error; pub mod filters; pub mod handlers; pub mod request; diff --git a/src/http/request.rs b/src/http/request.rs index b812e1173..bc549b698 100644 --- a/src/http/request.rs +++ b/src/http/request.rs @@ -3,10 +3,11 @@ use std::net::IpAddr; use serde::Deserialize; use crate::http::Bytes; -use crate::protocol::common::{InfoHash, PeerId}; +use crate::protocol::info_hash::InfoHash; +use crate::tracker::peer; #[derive(Deserialize)] -pub struct AnnounceRequestQuery { +pub struct AnnounceQuery { pub downloaded: Option, pub uploaded: Option, pub key: Option, @@ -22,7 
+23,7 @@ pub struct Announce { pub peer_addr: IpAddr, pub downloaded: Bytes, pub uploaded: Bytes, - pub peer_id: PeerId, + pub peer_id: peer::Id, pub port: u16, pub left: Bytes, pub event: Option, diff --git a/src/http/response.rs b/src/http/response.rs index 98ea6fe73..962e72fac 100644 --- a/src/http/response.rs +++ b/src/http/response.rs @@ -5,7 +5,7 @@ use std::net::IpAddr; use serde; use serde::Serialize; -use crate::protocol::common::InfoHash; +use crate::protocol::info_hash::InfoHash; #[derive(Serialize)] pub struct Peer { @@ -78,7 +78,7 @@ impl Announce { } #[derive(Serialize)] -pub struct ScrapeResponseEntry { +pub struct ScrapeEntry { pub complete: u32, pub downloaded: u32, pub incomplete: u32, @@ -86,7 +86,7 @@ pub struct ScrapeResponseEntry { #[derive(Serialize)] pub struct Scrape { - pub files: HashMap, + pub files: HashMap, } impl Scrape { diff --git a/src/protocol/common.rs b/src/protocol/common.rs index d6a98cf03..527ae9ebc 100644 --- a/src/protocol/common.rs +++ b/src/protocol/common.rs @@ -25,321 +25,3 @@ pub enum AnnounceEventDef { #[derive(Serialize, Deserialize)] #[serde(remote = "NumberOfBytes")] pub struct NumberOfBytesDef(pub i64); - -#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)] -pub struct InfoHash(pub [u8; 20]); - -impl std::fmt::Display for InfoHash { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - let mut chars = [0u8; 40]; - binascii::bin2hex(&self.0, &mut chars).expect("failed to hexlify"); - write!(f, "{}", std::str::from_utf8(&chars).unwrap()) - } -} - -impl std::str::FromStr for InfoHash { - type Err = binascii::ConvertError; - - fn from_str(s: &str) -> Result { - let mut i = Self([0u8; 20]); - if s.len() != 40 { - return Err(binascii::ConvertError::InvalidInputLength); - } - binascii::hex2bin(s.as_bytes(), &mut i.0)?; - Ok(i) - } -} - -impl Ord for InfoHash { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.0.cmp(&other.0) - } -} - -impl std::cmp::PartialOrd for InfoHash { - fn 
partial_cmp(&self, other: &InfoHash) -> Option { - self.0.partial_cmp(&other.0) - } -} - -impl std::convert::From<&[u8]> for InfoHash { - fn from(data: &[u8]) -> InfoHash { - assert_eq!(data.len(), 20); - let mut ret = InfoHash([0u8; 20]); - ret.0.clone_from_slice(data); - ret - } -} - -impl std::convert::From<[u8; 20]> for InfoHash { - fn from(val: [u8; 20]) -> Self { - InfoHash(val) - } -} - -impl serde::ser::Serialize for InfoHash { - fn serialize(&self, serializer: S) -> Result { - let mut buffer = [0u8; 40]; - let bytes_out = binascii::bin2hex(&self.0, &mut buffer).ok().unwrap(); - let str_out = std::str::from_utf8(bytes_out).unwrap(); - serializer.serialize_str(str_out) - } -} - -impl<'de> serde::de::Deserialize<'de> for InfoHash { - fn deserialize>(des: D) -> Result { - des.deserialize_str(InfoHashVisitor) - } -} - -#[cfg(test)] -mod tests { - - use std::str::FromStr; - - use serde::{Deserialize, Serialize}; - use serde_json::json; - - use super::InfoHash; - - #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] - struct ContainingInfoHash { - pub info_hash: InfoHash, - } - - #[test] - fn an_info_hash_can_be_created_from_a_valid_40_utf8_char_string_representing_an_hexadecimal_value() { - let info_hash = InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"); - assert!(info_hash.is_ok()); - } - - #[test] - fn an_info_hash_can_not_be_created_from_a_utf8_string_representing_a_not_valid_hexadecimal_value() { - let info_hash = InfoHash::from_str("GGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGG"); - assert!(info_hash.is_err()); - } - - #[test] - fn an_info_hash_can_only_be_created_from_a_40_utf8_char_string() { - let info_hash = InfoHash::from_str(&"F".repeat(39)); - assert!(info_hash.is_err()); - - let info_hash = InfoHash::from_str(&"F".repeat(41)); - assert!(info_hash.is_err()); - } - - #[test] - fn an_info_hash_should_by_displayed_like_a_40_utf8_lowercased_char_hex_string() { - let info_hash = 
InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap(); - - let output = format!("{}", info_hash); - - assert_eq!(output, "ffffffffffffffffffffffffffffffffffffffff"); - } - - #[test] - fn an_info_hash_can_be_created_from_a_valid_20_byte_array_slice() { - let info_hash: InfoHash = [255u8; 20].as_slice().into(); - - assert_eq!( - info_hash, - InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap() - ); - } - - #[test] - fn an_info_hash_can_be_created_from_a_valid_20_byte_array() { - let info_hash: InfoHash = [255u8; 20].into(); - - assert_eq!( - info_hash, - InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap() - ); - } - - #[test] - fn an_info_hash_can_be_serialized() { - let s = ContainingInfoHash { - info_hash: InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap(), - }; - - let json_serialized_value = serde_json::to_string(&s).unwrap(); - - assert_eq!( - json_serialized_value, - r#"{"info_hash":"ffffffffffffffffffffffffffffffffffffffff"}"# - ); - } - - #[test] - fn an_info_hash_can_be_deserialized() { - let json = json!({ - "info_hash": "ffffffffffffffffffffffffffffffffffffffff", - }); - - let s: ContainingInfoHash = serde_json::from_value(json).unwrap(); - - assert_eq!( - s, - ContainingInfoHash { - info_hash: InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap() - } - ); - } -} - -struct InfoHashVisitor; - -impl<'v> serde::de::Visitor<'v> for InfoHashVisitor { - type Value = InfoHash; - - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(formatter, "a 40 character long hash") - } - - fn visit_str(self, v: &str) -> Result { - if v.len() != 40 { - return Err(serde::de::Error::invalid_value( - serde::de::Unexpected::Str(v), - &"expected a 40 character long string", - )); - } - - let mut res = InfoHash([0u8; 20]); - - if binascii::hex2bin(v.as_bytes(), &mut res.0).is_err() { - return Err(serde::de::Error::invalid_value( - 
serde::de::Unexpected::Str(v), - &"expected a hexadecimal string", - )); - }; - Ok(res) - } -} - -#[derive(PartialEq, Eq, Hash, Clone, Debug, PartialOrd, Ord, Copy)] -pub struct PeerId(pub [u8; 20]); - -impl std::fmt::Display for PeerId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut buffer = [0u8; 20]; - let bytes_out = binascii::bin2hex(&self.0, &mut buffer).ok(); - match bytes_out { - Some(bytes) => write!(f, "{}", std::str::from_utf8(bytes).unwrap()), - None => write!(f, ""), - } - } -} - -impl PeerId { - #[must_use] - pub fn get_id(&self) -> Option { - let buff_size = self.0.len() * 2; - let mut tmp: Vec = vec![0; buff_size]; - binascii::bin2hex(&self.0, &mut tmp).unwrap(); - - std::str::from_utf8(&tmp).ok().map(std::string::ToString::to_string) - } - - #[must_use] - pub fn get_client_name(&self) -> Option<&'static str> { - if self.0[0] == b'M' { - return Some("BitTorrent"); - } - if self.0[0] == b'-' { - let name = match &self.0[1..3] { - b"AG" | b"A~" => "Ares", - b"AR" => "Arctic", - b"AV" => "Avicora", - b"AX" => "BitPump", - b"AZ" => "Azureus", - b"BB" => "BitBuddy", - b"BC" => "BitComet", - b"BF" => "Bitflu", - b"BG" => "BTG (uses Rasterbar libtorrent)", - b"BR" => "BitRocket", - b"BS" => "BTSlave", - b"BX" => "~Bittorrent X", - b"CD" => "Enhanced CTorrent", - b"CT" => "CTorrent", - b"DE" => "DelugeTorrent", - b"DP" => "Propagate Data Client", - b"EB" => "EBit", - b"ES" => "electric sheep", - b"FT" => "FoxTorrent", - b"FW" => "FrostWire", - b"FX" => "Freebox BitTorrent", - b"GS" => "GSTorrent", - b"HL" => "Halite", - b"HN" => "Hydranode", - b"KG" => "KGet", - b"KT" => "KTorrent", - b"LH" => "LH-ABC", - b"LP" => "Lphant", - b"LT" => "libtorrent", - b"lt" => "libTorrent", - b"LW" => "LimeWire", - b"MO" => "MonoTorrent", - b"MP" => "MooPolice", - b"MR" => "Miro", - b"MT" => "MoonlightTorrent", - b"NX" => "Net Transport", - b"PD" => "Pando", - b"qB" => "qBittorrent", - b"QD" => "QQDownload", - b"QT" => "Qt 4 Torrent 
example", - b"RT" => "Retriever", - b"S~" => "Shareaza alpha/beta", - b"SB" => "~Swiftbit", - b"SS" => "SwarmScope", - b"ST" => "SymTorrent", - b"st" => "sharktorrent", - b"SZ" => "Shareaza", - b"TN" => "TorrentDotNET", - b"TR" => "Transmission", - b"TS" => "Torrentstorm", - b"TT" => "TuoTu", - b"UL" => "uLeecher!", - b"UT" => "µTorrent", - b"UW" => "µTorrent Web", - b"VG" => "Vagaa", - b"WD" => "WebTorrent Desktop", - b"WT" => "BitLet", - b"WW" => "WebTorrent", - b"WY" => "FireTorrent", - b"XL" => "Xunlei", - b"XT" => "XanTorrent", - b"XX" => "Xtorrent", - b"ZT" => "ZipTorrent", - _ => return None, - }; - Some(name) - } else { - None - } - } -} - -impl Serialize for PeerId { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - #[derive(Serialize)] - struct PeerIdInfo<'a> { - id: Option, - client: Option<&'a str>, - } - - let buff_size = self.0.len() * 2; - let mut tmp: Vec = vec![0; buff_size]; - binascii::bin2hex(&self.0, &mut tmp).unwrap(); - let id = std::str::from_utf8(&tmp).ok(); - - let obj = PeerIdInfo { - id: self.get_id(), - client: self.get_client_name(), - }; - obj.serialize(serializer) - } -} diff --git a/src/protocol/crypto.rs b/src/protocol/crypto.rs index 2d3f8f6fa..a335e2dba 100644 --- a/src/protocol/crypto.rs +++ b/src/protocol/crypto.rs @@ -1,18 +1,18 @@ pub mod keys { pub mod seeds { - use self::detail::DEFAULT_SEED; + use self::detail::CURRENT_SEED; use crate::ephemeral_instance_keys::{Seed, RANDOM_SEED}; - pub trait SeedKeeper { + pub trait Keeper { type Seed: Sized + Default + AsMut<[u8]>; fn get_seed() -> &'static Self::Seed; } - pub struct InstanceSeed; - pub struct DefaultSeed; + pub struct Instance; + pub struct Current; - impl SeedKeeper for InstanceSeed { + impl Keeper for Instance { type Seed = Seed; fn get_seed() -> &'static Self::Seed { @@ -20,24 +20,24 @@ pub mod keys { } } - impl SeedKeeper for DefaultSeed { + impl Keeper for Current { type Seed = Seed; #[allow(clippy::needless_borrow)] fn get_seed() 
-> &'static Self::Seed { - &DEFAULT_SEED + &CURRENT_SEED } } #[cfg(test)] mod tests { use super::detail::ZEROED_TEST_SEED; - use super::{DefaultSeed, InstanceSeed, SeedKeeper}; + use super::{Current, Instance, Keeper}; use crate::ephemeral_instance_keys::Seed; pub struct ZeroedTestSeed; - impl SeedKeeper for ZeroedTestSeed { + impl Keeper for ZeroedTestSeed { type Seed = Seed; #[allow(clippy::needless_borrow)] @@ -48,12 +48,12 @@ pub mod keys { #[test] fn the_default_seed_and_the_zeroed_seed_should_be_the_same_when_testing() { - assert_eq!(DefaultSeed::get_seed(), ZeroedTestSeed::get_seed()); + assert_eq!(Current::get_seed(), ZeroedTestSeed::get_seed()); } #[test] fn the_default_seed_and_the_instance_seed_should_be_different_when_testing() { - assert_ne!(DefaultSeed::get_seed(), InstanceSeed::get_seed()); + assert_ne!(Current::get_seed(), Instance::get_seed()); } } @@ -64,10 +64,10 @@ pub mod keys { pub const ZEROED_TEST_SEED: &Seed = &[0u8; 32]; #[cfg(test)] - pub use ZEROED_TEST_SEED as DEFAULT_SEED; + pub use ZEROED_TEST_SEED as CURRENT_SEED; #[cfg(not(test))] - pub use crate::ephemeral_instance_keys::RANDOM_SEED as DEFAULT_SEED; + pub use crate::ephemeral_instance_keys::RANDOM_SEED as CURRENT_SEED; #[cfg(test)] mod tests { @@ -75,7 +75,7 @@ pub mod keys { use crate::ephemeral_instance_keys::RANDOM_SEED; use crate::protocol::crypto::keys::seeds::detail::ZEROED_TEST_SEED; - use crate::protocol::crypto::keys::seeds::DEFAULT_SEED; + use crate::protocol::crypto::keys::seeds::CURRENT_SEED; #[test] fn it_should_have_a_zero_test_seed() { @@ -84,7 +84,7 @@ pub mod keys { #[test] fn it_should_default_to_zeroed_seed_when_testing() { - assert_eq!(*DEFAULT_SEED, *ZEROED_TEST_SEED); + assert_eq!(*CURRENT_SEED, *ZEROED_TEST_SEED); } #[test] diff --git a/src/protocol/info_hash.rs b/src/protocol/info_hash.rs new file mode 100644 index 000000000..3b9b2fa35 --- /dev/null +++ b/src/protocol/info_hash.rs @@ -0,0 +1,190 @@ +#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)] +pub 
struct InfoHash(pub [u8; 20]); + +impl std::fmt::Display for InfoHash { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + let mut chars = [0u8; 40]; + binascii::bin2hex(&self.0, &mut chars).expect("failed to hexlify"); + write!(f, "{}", std::str::from_utf8(&chars).unwrap()) + } +} + +impl std::str::FromStr for InfoHash { + type Err = binascii::ConvertError; + + fn from_str(s: &str) -> Result { + let mut i = Self([0u8; 20]); + if s.len() != 40 { + return Err(binascii::ConvertError::InvalidInputLength); + } + binascii::hex2bin(s.as_bytes(), &mut i.0)?; + Ok(i) + } +} + +impl Ord for InfoHash { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.0.cmp(&other.0) + } +} + +impl std::cmp::PartialOrd for InfoHash { + fn partial_cmp(&self, other: &InfoHash) -> Option { + self.0.partial_cmp(&other.0) + } +} + +impl std::convert::From<&[u8]> for InfoHash { + fn from(data: &[u8]) -> InfoHash { + assert_eq!(data.len(), 20); + let mut ret = InfoHash([0u8; 20]); + ret.0.clone_from_slice(data); + ret + } +} + +impl std::convert::From<[u8; 20]> for InfoHash { + fn from(val: [u8; 20]) -> Self { + InfoHash(val) + } +} + +impl serde::ser::Serialize for InfoHash { + fn serialize(&self, serializer: S) -> Result { + let mut buffer = [0u8; 40]; + let bytes_out = binascii::bin2hex(&self.0, &mut buffer).ok().unwrap(); + let str_out = std::str::from_utf8(bytes_out).unwrap(); + serializer.serialize_str(str_out) + } +} + +impl<'de> serde::de::Deserialize<'de> for InfoHash { + fn deserialize>(des: D) -> Result { + des.deserialize_str(InfoHashVisitor) + } +} + +struct InfoHashVisitor; + +impl<'v> serde::de::Visitor<'v> for InfoHashVisitor { + type Value = InfoHash; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(formatter, "a 40 character long hash") + } + + fn visit_str(self, v: &str) -> Result { + if v.len() != 40 { + return Err(serde::de::Error::invalid_value( + serde::de::Unexpected::Str(v), + &"expected a 40 character 
long string", + )); + } + + let mut res = InfoHash([0u8; 20]); + + if binascii::hex2bin(v.as_bytes(), &mut res.0).is_err() { + return Err(serde::de::Error::invalid_value( + serde::de::Unexpected::Str(v), + &"expected a hexadecimal string", + )); + }; + Ok(res) + } +} + +#[cfg(test)] +mod tests { + + use std::str::FromStr; + + use serde::{Deserialize, Serialize}; + use serde_json::json; + + use super::InfoHash; + + #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] + struct ContainingInfoHash { + pub info_hash: InfoHash, + } + + #[test] + fn an_info_hash_can_be_created_from_a_valid_40_utf8_char_string_representing_an_hexadecimal_value() { + let info_hash = InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"); + assert!(info_hash.is_ok()); + } + + #[test] + fn an_info_hash_can_not_be_created_from_a_utf8_string_representing_a_not_valid_hexadecimal_value() { + let info_hash = InfoHash::from_str("GGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGG"); + assert!(info_hash.is_err()); + } + + #[test] + fn an_info_hash_can_only_be_created_from_a_40_utf8_char_string() { + let info_hash = InfoHash::from_str(&"F".repeat(39)); + assert!(info_hash.is_err()); + + let info_hash = InfoHash::from_str(&"F".repeat(41)); + assert!(info_hash.is_err()); + } + + #[test] + fn an_info_hash_should_by_displayed_like_a_40_utf8_lowercased_char_hex_string() { + let info_hash = InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap(); + + let output = format!("{}", info_hash); + + assert_eq!(output, "ffffffffffffffffffffffffffffffffffffffff"); + } + + #[test] + fn an_info_hash_can_be_created_from_a_valid_20_byte_array_slice() { + let info_hash: InfoHash = [255u8; 20].as_slice().into(); + + assert_eq!( + info_hash, + InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap() + ); + } + + #[test] + fn an_info_hash_can_be_created_from_a_valid_20_byte_array() { + let info_hash: InfoHash = [255u8; 20].into(); + + assert_eq!( + info_hash, + 
InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap() + ); + } + + #[test] + fn an_info_hash_can_be_serialized() { + let s = ContainingInfoHash { + info_hash: InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap(), + }; + + let json_serialized_value = serde_json::to_string(&s).unwrap(); + + assert_eq!( + json_serialized_value, + r#"{"info_hash":"ffffffffffffffffffffffffffffffffffffffff"}"# + ); + } + + #[test] + fn an_info_hash_can_be_deserialized() { + let json = json!({ + "info_hash": "ffffffffffffffffffffffffffffffffffffffff", + }); + + let s: ContainingInfoHash = serde_json::from_value(json).unwrap(); + + assert_eq!( + s, + ContainingInfoHash { + info_hash: InfoHash::from_str("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF").unwrap() + } + ); + } +} diff --git a/src/protocol/mod.rs b/src/protocol/mod.rs index 85e4f90ad..bd4310dcf 100644 --- a/src/protocol/mod.rs +++ b/src/protocol/mod.rs @@ -1,4 +1,5 @@ pub mod clock; pub mod common; pub mod crypto; +pub mod info_hash; pub mod utils; diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index fcd9ebe2d..508280b1a 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -16,12 +16,12 @@ use tokio::sync::{RwLock, RwLockReadGuard}; use crate::config::Configuration; use crate::databases::database; use crate::databases::database::Database; -use crate::protocol::common::InfoHash; +use crate::protocol::info_hash::InfoHash; use crate::tracker::key::Auth; pub struct Tracker { pub config: Arc, - mode: mode::Tracker, + mode: mode::Mode, keys: RwLock>, whitelist: RwLock>, torrents: RwLock>, @@ -54,15 +54,15 @@ impl Tracker { } pub fn is_public(&self) -> bool { - self.mode == mode::Tracker::Public + self.mode == mode::Mode::Public } pub fn is_private(&self) -> bool { - self.mode == mode::Tracker::Private || self.mode == mode::Tracker::PrivateListed + self.mode == mode::Mode::Private || self.mode == mode::Mode::PrivateListed } pub fn is_whitelisted(&self) -> bool { - self.mode == 
mode::Tracker::Listed || self.mode == mode::Tracker::PrivateListed + self.mode == mode::Mode::Listed || self.mode == mode::Mode::PrivateListed } /// # Errors diff --git a/src/tracker/mode.rs b/src/tracker/mode.rs index f1fff169e..a0dba6e67 100644 --- a/src/tracker/mode.rs +++ b/src/tracker/mode.rs @@ -2,7 +2,7 @@ use serde; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Copy, Clone, PartialEq, Eq, Debug)] -pub enum Tracker { +pub enum Mode { // Will track every new info hash and serve every peer. #[serde(rename = "public")] Public, diff --git a/src/tracker/peer.rs b/src/tracker/peer.rs index dd49ffaa7..16aada0ed 100644 --- a/src/tracker/peer.rs +++ b/src/tracker/peer.rs @@ -6,12 +6,12 @@ use serde::Serialize; use crate::http::request::Announce; use crate::protocol::clock::{Current, DurationSinceUnixEpoch, Time}; -use crate::protocol::common::{AnnounceEventDef, NumberOfBytesDef, PeerId}; +use crate::protocol::common::{AnnounceEventDef, NumberOfBytesDef}; use crate::protocol::utils::ser_unix_time_value; #[derive(PartialEq, Eq, Debug, Clone, Serialize, Copy)] pub struct TorrentPeer { - pub peer_id: PeerId, + pub peer_id: Id, pub peer_addr: SocketAddr, #[serde(serialize_with = "ser_unix_time_value")] pub updated: DurationSinceUnixEpoch, @@ -35,7 +35,7 @@ impl TorrentPeer { let peer_addr = TorrentPeer::peer_addr_from_ip_and_port_and_opt_host_ip(remote_ip, host_opt_ip, announce_request.port.0); TorrentPeer { - peer_id: PeerId(announce_request.peer_id.0), + peer_id: Id(announce_request.peer_id.0), peer_addr, updated: Current::now(), uploaded: announce_request.bytes_uploaded, @@ -88,6 +88,133 @@ impl TorrentPeer { } } +#[derive(PartialEq, Eq, Hash, Clone, Debug, PartialOrd, Ord, Copy)] +pub struct Id(pub [u8; 20]); + +impl std::fmt::Display for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut buffer = [0u8; 20]; + let bytes_out = binascii::bin2hex(&self.0, &mut buffer).ok(); + match bytes_out { + Some(bytes) => 
write!(f, "{}", std::str::from_utf8(bytes).unwrap()), + None => write!(f, ""), + } + } +} + +impl Id { + #[must_use] + pub fn get_id(&self) -> Option { + let buff_size = self.0.len() * 2; + let mut tmp: Vec = vec![0; buff_size]; + binascii::bin2hex(&self.0, &mut tmp).unwrap(); + + std::str::from_utf8(&tmp).ok().map(std::string::ToString::to_string) + } + + #[must_use] + pub fn get_client_name(&self) -> Option<&'static str> { + if self.0[0] == b'M' { + return Some("BitTorrent"); + } + if self.0[0] == b'-' { + let name = match &self.0[1..3] { + b"AG" | b"A~" => "Ares", + b"AR" => "Arctic", + b"AV" => "Avicora", + b"AX" => "BitPump", + b"AZ" => "Azureus", + b"BB" => "BitBuddy", + b"BC" => "BitComet", + b"BF" => "Bitflu", + b"BG" => "BTG (uses Rasterbar libtorrent)", + b"BR" => "BitRocket", + b"BS" => "BTSlave", + b"BX" => "~Bittorrent X", + b"CD" => "Enhanced CTorrent", + b"CT" => "CTorrent", + b"DE" => "DelugeTorrent", + b"DP" => "Propagate Data Client", + b"EB" => "EBit", + b"ES" => "electric sheep", + b"FT" => "FoxTorrent", + b"FW" => "FrostWire", + b"FX" => "Freebox BitTorrent", + b"GS" => "GSTorrent", + b"HL" => "Halite", + b"HN" => "Hydranode", + b"KG" => "KGet", + b"KT" => "KTorrent", + b"LH" => "LH-ABC", + b"LP" => "Lphant", + b"LT" => "libtorrent", + b"lt" => "libTorrent", + b"LW" => "LimeWire", + b"MO" => "MonoTorrent", + b"MP" => "MooPolice", + b"MR" => "Miro", + b"MT" => "MoonlightTorrent", + b"NX" => "Net Transport", + b"PD" => "Pando", + b"qB" => "qBittorrent", + b"QD" => "QQDownload", + b"QT" => "Qt 4 Torrent example", + b"RT" => "Retriever", + b"S~" => "Shareaza alpha/beta", + b"SB" => "~Swiftbit", + b"SS" => "SwarmScope", + b"ST" => "SymTorrent", + b"st" => "sharktorrent", + b"SZ" => "Shareaza", + b"TN" => "TorrentDotNET", + b"TR" => "Transmission", + b"TS" => "Torrentstorm", + b"TT" => "TuoTu", + b"UL" => "uLeecher!", + b"UT" => "µTorrent", + b"UW" => "µTorrent Web", + b"VG" => "Vagaa", + b"WD" => "WebTorrent Desktop", + b"WT" => "BitLet", + b"WW" => 
"WebTorrent", + b"WY" => "FireTorrent", + b"XL" => "Xunlei", + b"XT" => "XanTorrent", + b"XX" => "Xtorrent", + b"ZT" => "ZipTorrent", + _ => return None, + }; + Some(name) + } else { + None + } + } +} + +impl Serialize for Id { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + #[derive(Serialize)] + struct PeerIdInfo<'a> { + id: Option, + client: Option<&'a str>, + } + + let buff_size = self.0.len() * 2; + let mut tmp: Vec = vec![0; buff_size]; + binascii::bin2hex(&self.0, &mut tmp).unwrap(); + let id = std::str::from_utf8(&tmp).ok(); + + let obj = PeerIdInfo { + id: self.get_id(), + client: self.get_client_name(), + }; + obj.serialize(serializer) + } +} + #[cfg(test)] mod test { mod torrent_peer { @@ -97,13 +224,12 @@ mod test { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; use crate::protocol::clock::{Current, Time}; - use crate::protocol::common::PeerId; - use crate::tracker::peer::TorrentPeer; + use crate::tracker::peer::{self, TorrentPeer}; #[test] fn it_should_be_serializable() { let torrent_peer = TorrentPeer { - peer_id: PeerId(*b"-qB00000000000000000"), + peer_id: peer::Id(*b"-qB00000000000000000"), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), updated: Current::now(), uploaded: NumberOfBytes(0), @@ -284,8 +410,8 @@ mod test { use std::net::{IpAddr, Ipv4Addr}; use crate::http::request::Announce; - use crate::protocol::common::{InfoHash, PeerId}; - use crate::tracker::peer::TorrentPeer; + use crate::protocol::info_hash::InfoHash; + use crate::tracker::peer::{self, TorrentPeer}; fn sample_http_announce_request(peer_addr: IpAddr, port: u16) -> Announce { Announce { @@ -293,7 +419,7 @@ mod test { peer_addr, downloaded: 0u64, uploaded: 0u64, - peer_id: PeerId(*b"-qB00000000000000000"), + peer_id: peer::Id(*b"-qB00000000000000000"), port, left: 0u64, event: None, diff --git a/src/tracker/torrent.rs b/src/tracker/torrent.rs index 21bcfc513..3e38d2340 100644 --- a/src/tracker/torrent.rs 
+++ b/src/tracker/torrent.rs @@ -6,12 +6,12 @@ use serde::{Deserialize, Serialize}; use super::peer; use crate::protocol::clock::{Current, TimeNow}; -use crate::protocol::common::{PeerId, MAX_SCRAPE_TORRENTS}; +use crate::protocol::common::MAX_SCRAPE_TORRENTS; #[derive(Serialize, Deserialize, Clone, Debug)] pub struct Entry { #[serde(skip)] - pub peers: std::collections::BTreeMap, + pub peers: std::collections::BTreeMap, pub completed: u32, } @@ -118,7 +118,6 @@ mod tests { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; use crate::protocol::clock::{Current, DurationSinceUnixEpoch, Stopped, StoppedTime, Time, Working}; - use crate::protocol::common::PeerId; use crate::tracker::peer; use crate::tracker::torrent::Entry; @@ -129,7 +128,7 @@ mod tests { impl TorrentPeerBuilder { pub fn default() -> TorrentPeerBuilder { let default_peer = peer::TorrentPeer { - peer_id: PeerId([0u8; 20]), + peer_id: peer::Id([0u8; 20]), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8080), updated: Current::now(), uploaded: NumberOfBytes(0), @@ -150,7 +149,7 @@ mod tests { self } - pub fn with_peer_id(mut self, peer_id: PeerId) -> Self { + pub fn with_peer_id(mut self, peer_id: peer::Id) -> Self { self.peer.peer_id = peer_id; self } @@ -278,9 +277,9 @@ mod tests { assert_eq!(peers.len(), 0); } - fn peer_id_from_i32(number: i32) -> PeerId { + fn peer_id_from_i32(number: i32) -> peer::Id { let peer_id = number.to_le_bytes(); - PeerId([ + peer::Id([ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, peer_id[0], peer_id[1], peer_id[2], peer_id[3], ]) diff --git a/src/udp/connection_cookie.rs b/src/udp/connection_cookie.rs index 8a544fa6a..3daa3e0f6 100644 --- a/src/udp/connection_cookie.rs +++ b/src/udp/connection_cookie.rs @@ -2,7 +2,7 @@ use std::net::SocketAddr; use aquatic_udp_protocol::ConnectionId; -use super::errors::ServerError; +use super::error::Error; use crate::protocol::clock::time_extent::{Extent, TimeExtent}; pub type 
Cookie = [u8; 8]; @@ -36,7 +36,7 @@ pub fn make(remote_address: &SocketAddr) -> Cookie { /// # Errors /// /// Will return a `ServerError::InvalidConnectionId` if the supplied `connection_cookie` fails to verify. -pub fn check(remote_address: &SocketAddr, connection_cookie: &Cookie) -> Result { +pub fn check(remote_address: &SocketAddr, connection_cookie: &Cookie) -> Result { // we loop backwards testing each time_extent until we find one that matches. // (or the lifetime of time_extents is exhausted) for offset in 0..=COOKIE_LIFETIME.amount { @@ -49,7 +49,7 @@ pub fn check(remote_address: &SocketAddr, connection_cookie: &Cookie) -> Result< return Ok(checking_time_extent); } } - Err(ServerError::InvalidConnectionId) + Err(Error::InvalidConnectionId) } mod cookie_builder { @@ -59,7 +59,7 @@ mod cookie_builder { use super::{Cookie, SinceUnixEpochTimeExtent, COOKIE_LIFETIME}; use crate::protocol::clock::time_extent::{DefaultTimeExtentMaker, Extent, Make, TimeExtent}; - use crate::protocol::crypto::keys::seeds::{DefaultSeed, SeedKeeper}; + use crate::protocol::crypto::keys::seeds::{Current, Keeper}; pub(super) fn get_last_time_extent() -> SinceUnixEpochTimeExtent { DefaultTimeExtentMaker::now(&COOKIE_LIFETIME.increment) @@ -70,7 +70,7 @@ mod cookie_builder { } pub(super) fn build(remote_address: &SocketAddr, time_extent: &TimeExtent) -> Cookie { - let seed = DefaultSeed::get_seed(); + let seed = Current::get_seed(); let mut hasher = DefaultHasher::new(); diff --git a/src/udp/errors.rs b/src/udp/error.rs similarity index 57% rename from src/udp/errors.rs rename to src/udp/error.rs index f90149a99..c5fbb3929 100644 --- a/src/udp/errors.rs +++ b/src/udp/error.rs @@ -3,9 +3,9 @@ use thiserror::Error; use crate::tracker::torrent; #[derive(Error, Debug)] -pub enum ServerError { +pub enum Error { #[error("internal server error")] - InternalServerError, + InternalServer, #[error("info_hash is either missing or invalid")] InvalidInfoHash, @@ -35,15 +35,15 @@ pub enum ServerError 
{ BadRequest, } -impl From for ServerError { +impl From for Error { fn from(e: torrent::Error) -> Self { match e { - torrent::Error::TorrentNotWhitelisted => ServerError::TorrentNotWhitelisted, - torrent::Error::PeerNotAuthenticated => ServerError::PeerNotAuthenticated, - torrent::Error::PeerKeyNotValid => ServerError::PeerKeyNotValid, - torrent::Error::NoPeersFound => ServerError::NoPeersFound, - torrent::Error::CouldNotSendResponse => ServerError::InternalServerError, - torrent::Error::InvalidInfoHash => ServerError::InvalidInfoHash, + torrent::Error::TorrentNotWhitelisted => Error::TorrentNotWhitelisted, + torrent::Error::PeerNotAuthenticated => Error::PeerNotAuthenticated, + torrent::Error::PeerKeyNotValid => Error::PeerKeyNotValid, + torrent::Error::NoPeersFound => Error::NoPeersFound, + torrent::Error::CouldNotSendResponse => Error::InternalServer, + torrent::Error::InvalidInfoHash => Error::InvalidInfoHash, } } } diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index 274af1e2c..da4bdbf35 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -7,13 +7,14 @@ use aquatic_udp_protocol::{ }; use super::connection_cookie::{check, from_connection_id, into_connection_id, make}; -use crate::protocol::common::{InfoHash, MAX_SCRAPE_TORRENTS}; +use crate::protocol::common::MAX_SCRAPE_TORRENTS; +use crate::protocol::info_hash::InfoHash; use crate::tracker::{self, peer, statistics}; -use crate::udp::errors::ServerError; -use crate::udp::request::AnnounceRequestWrapper; +use crate::udp::error::Error; +use crate::udp::request::AnnounceWrapper; pub async fn handle_packet(remote_addr: SocketAddr, payload: Vec, tracker: Arc) -> Response { - match Request::from_bytes(&payload[..payload.len()], MAX_SCRAPE_TORRENTS).map_err(|_| ServerError::InternalServerError) { + match Request::from_bytes(&payload[..payload.len()], MAX_SCRAPE_TORRENTS).map_err(|_| Error::InternalServer) { Ok(request) => { let transaction_id = match &request { Request::Connect(connect_request) => 
connect_request.transaction_id, @@ -27,7 +28,7 @@ pub async fn handle_packet(remote_addr: SocketAddr, payload: Vec, tracker: A } } // bad request - Err(_) => handle_error(&ServerError::BadRequest, TransactionId(0)), + Err(_) => handle_error(&Error::BadRequest, TransactionId(0)), } } @@ -38,7 +39,7 @@ pub async fn handle_request( request: Request, remote_addr: SocketAddr, tracker: Arc, -) -> Result { +) -> Result { match request { Request::Connect(connect_request) => handle_connect(remote_addr, &connect_request, tracker).await, Request::Announce(announce_request) => handle_announce(remote_addr, &announce_request, tracker).await, @@ -53,7 +54,7 @@ pub async fn handle_connect( remote_addr: SocketAddr, request: &ConnectRequest, tracker: Arc, -) -> Result { +) -> Result { let connection_cookie = make(&remote_addr); let connection_id = into_connection_id(&connection_cookie); @@ -82,10 +83,10 @@ pub async fn handle_announce( remote_addr: SocketAddr, announce_request: &AnnounceRequest, tracker: Arc, -) -> Result { +) -> Result { check(&remote_addr, &from_connection_id(&announce_request.connection_id))?; - let wrapped_announce_request = AnnounceRequestWrapper::new(announce_request); + let wrapped_announce_request = AnnounceWrapper::new(announce_request); tracker .authenticate_request(&wrapped_announce_request.info_hash, &None) @@ -173,7 +174,7 @@ pub async fn handle_scrape( remote_addr: SocketAddr, request: &ScrapeRequest, tracker: Arc, -) -> Result { +) -> Result { let db = tracker.get_torrents().await; let mut torrent_stats: Vec = Vec::new(); @@ -228,7 +229,7 @@ pub async fn handle_scrape( })) } -fn handle_error(e: &ServerError, transaction_id: TransactionId) -> Response { +fn handle_error(e: &Error, transaction_id: TransactionId) -> Response { let message = e.to_string(); Response::from(ErrorResponse { transaction_id, @@ -245,7 +246,6 @@ mod tests { use crate::config::Configuration; use crate::protocol::clock::{Current, Time}; - use crate::protocol::common::PeerId; use 
crate::tracker::{self, mode, peer, statistics}; fn default_tracker_config() -> Arc { @@ -253,21 +253,17 @@ mod tests { } fn initialized_public_tracker() -> Arc { - let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Tracker::Public).into()); + let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Mode::Public).into()); initialized_tracker(&configuration) } fn initialized_private_tracker() -> Arc { - let configuration = Arc::new( - TrackerConfigurationBuilder::default() - .with_mode(mode::Tracker::Private) - .into(), - ); + let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Mode::Private).into()); initialized_tracker(&configuration) } fn initialized_whitelisted_tracker() -> Arc { - let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Tracker::Listed).into()); + let configuration = Arc::new(TrackerConfigurationBuilder::default().with_mode(mode::Mode::Listed).into()); initialized_tracker(&configuration) } @@ -299,7 +295,7 @@ mod tests { impl TorrentPeerBuilder { pub fn default() -> TorrentPeerBuilder { let default_peer = peer::TorrentPeer { - peer_id: PeerId([255u8; 20]), + peer_id: peer::Id([255u8; 20]), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), updated: Current::now(), uploaded: NumberOfBytes(0), @@ -310,7 +306,7 @@ mod tests { TorrentPeerBuilder { peer: default_peer } } - pub fn with_peer_id(mut self, peer_id: PeerId) -> Self { + pub fn with_peer_id(mut self, peer_id: peer::Id) -> Self { self.peer.peer_id = peer_id; self } @@ -347,7 +343,7 @@ mod tests { self } - pub fn with_mode(mut self, mode: mode::Tracker) -> Self { + pub fn with_mode(mut self, mode: mode::Mode) -> Self { self.configuration.mode = mode; self } @@ -537,8 +533,7 @@ mod tests { }; use mockall::predicate::eq; - use crate::protocol::common::PeerId; - use crate::tracker::{self, statistics}; + use crate::tracker::{self, peer, statistics}; use 
crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; @@ -570,7 +565,7 @@ mod tests { let peers = tracker.get_all_torrent_peers(&info_hash.0.into()).await; let expected_peer = TorrentPeerBuilder::default() - .with_peer_id(PeerId(peer_id.0)) + .with_peer_id(peer::Id(peer_id.0)) .with_peer_addr(SocketAddr::new(IpAddr::V4(client_ip), client_port)) .into(); @@ -644,7 +639,7 @@ mod tests { let peer_id = AquaticPeerId([255u8; 20]); let peer_using_ipv6 = TorrentPeerBuilder::default() - .with_peer_id(PeerId(peer_id.0)) + .with_peer_id(peer::Id(peer_id.0)) .with_peer_addr(SocketAddr::new(IpAddr::V6(client_ip_v6), client_port)) .into(); @@ -707,7 +702,7 @@ mod tests { use aquatic_udp_protocol::{InfoHash as AquaticInfoHash, PeerId as AquaticPeerId}; - use crate::protocol::common::PeerId; + use crate::tracker::peer; use crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; @@ -740,7 +735,7 @@ mod tests { tracker.config.external_ip.clone().unwrap().parse::().unwrap(); let expected_peer = TorrentPeerBuilder::default() - .with_peer_id(PeerId(peer_id.0)) + .with_peer_id(peer::Id(peer_id.0)) .with_peer_addr(SocketAddr::new(IpAddr::V4(external_ip_in_tracker_configuration), client_port)) .into(); @@ -761,8 +756,7 @@ mod tests { }; use mockall::predicate::eq; - use crate::protocol::common::PeerId; - use crate::tracker::{self, statistics}; + use crate::tracker::{self, peer, statistics}; use crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::handle_announce; use crate::udp::handlers::tests::announce_request::AnnounceRequestBuilder; @@ -795,7 +789,7 @@ mod tests { let peers = tracker.get_all_torrent_peers(&info_hash.0.into()).await; let expected_peer = TorrentPeerBuilder::default() - 
.with_peer_id(PeerId(peer_id.0)) + .with_peer_id(peer::Id(peer_id.0)) .with_peer_addr(SocketAddr::new(IpAddr::V6(client_ip_v6), client_port)) .into(); @@ -872,7 +866,7 @@ mod tests { let peer_id = AquaticPeerId([255u8; 20]); let peer_using_ipv4 = TorrentPeerBuilder::default() - .with_peer_id(PeerId(peer_id.0)) + .with_peer_id(peer::Id(peer_id.0)) .with_peer_addr(SocketAddr::new(IpAddr::V4(client_ip_v4), client_port)) .into(); @@ -1003,8 +997,7 @@ mod tests { }; use super::TorrentPeerBuilder; - use crate::protocol::common::PeerId; - use crate::tracker; + use crate::tracker::{self, peer}; use crate::udp::connection_cookie::{into_connection_id, make}; use crate::udp::handlers::handle_scrape; use crate::udp::handlers::tests::{initialized_public_tracker, sample_ipv4_remote_addr}; @@ -1046,10 +1039,10 @@ mod tests { } async fn add_a_seeder(tracker: Arc, remote_addr: &SocketAddr, info_hash: &InfoHash) { - let peer_id = PeerId([255u8; 20]); + let peer_id = peer::Id([255u8; 20]); let peer = TorrentPeerBuilder::default() - .with_peer_id(PeerId(peer_id.0)) + .with_peer_id(peer::Id(peer_id.0)) .with_peer_addr(*remote_addr) .with_bytes_left(0) .into(); diff --git a/src/udp/mod.rs b/src/udp/mod.rs index 2a8d42d9f..8b8c8c4f8 100644 --- a/src/udp/mod.rs +++ b/src/udp/mod.rs @@ -1,5 +1,5 @@ pub mod connection_cookie; -pub mod errors; +pub mod error; pub mod handlers; pub mod request; pub mod server; diff --git a/src/udp/request.rs b/src/udp/request.rs index 34139384b..c4326b291 100644 --- a/src/udp/request.rs +++ b/src/udp/request.rs @@ -1,6 +1,6 @@ use aquatic_udp_protocol::AnnounceRequest; -use crate::protocol::common::InfoHash; +use crate::protocol::info_hash::InfoHash; // struct AnnounceRequest { // pub connection_id: i64, @@ -17,15 +17,15 @@ use crate::protocol::common::InfoHash; // pub port: Port // } -pub struct AnnounceRequestWrapper { +pub struct AnnounceWrapper { pub announce_request: AnnounceRequest, pub info_hash: InfoHash, } -impl AnnounceRequestWrapper { +impl 
AnnounceWrapper { #[must_use] pub fn new(announce_request: &AnnounceRequest) -> Self { - AnnounceRequestWrapper { + AnnounceWrapper { announce_request: announce_request.clone(), info_hash: InfoHash(announce_request.info_hash.0), } diff --git a/tests/api.rs b/tests/api.rs index 72c3c65c7..4f119e6d0 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -16,15 +16,15 @@ mod tracker_api { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; use reqwest::Response; use tokio::task::JoinHandle; - use torrust_tracker::api::resources::auth_key_resource::AuthKeyResource; + use torrust_tracker::api::resources::auth_key_resource::AuthKey; use torrust_tracker::api::resources::stats_resource::StatsResource; use torrust_tracker::api::resources::torrent_resource::{TorrentListItemResource, TorrentPeerResource, TorrentResource}; use torrust_tracker::config::Configuration; use torrust_tracker::jobs::tracker_api; use torrust_tracker::protocol::clock::DurationSinceUnixEpoch; - use torrust_tracker::protocol::common::{InfoHash, PeerId}; + use torrust_tracker::protocol::info_hash::InfoHash; use torrust_tracker::tracker::key::Auth; - use torrust_tracker::tracker::peer::TorrentPeer; + use torrust_tracker::tracker::peer::{self, TorrentPeer}; use torrust_tracker::tracker::statistics::Keeper; use torrust_tracker::{ephemeral_instance_keys, logging, static_time, tracker}; @@ -189,7 +189,7 @@ mod tracker_api { fn sample_torrent_peer() -> (TorrentPeer, TorrentPeerResource) { let torrent_peer = TorrentPeer { - peer_id: PeerId(*b"-qB00000000000000000"), + peer_id: peer::Id(*b"-qB00000000000000000"), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), updated: DurationSinceUnixEpoch::new(1_669_397_478_934, 0), uploaded: NumberOfBytes(0), @@ -310,7 +310,7 @@ mod tracker_api { Self { connection_info } } - pub async fn generate_auth_key(&self, seconds_valid: i32) -> AuthKeyResource { + pub async fn generate_auth_key(&self, seconds_valid: i32) -> AuthKey { let url = format!( 
"http://{}/api/key/{}?token={}", &self.connection_info.bind_address, &seconds_valid, &self.connection_info.api_token From ec21df90b3f6bb949f150ad13dcafadba6a7d18d Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Mon, 28 Nov 2022 19:38:50 +0100 Subject: [PATCH 40/45] rename: Key::Auth to auth::Key --- .../{auth_key_resource.rs => auth_key.rs} | 18 +++++----- src/api/resources/mod.rs | 3 +- src/api/server.rs | 2 +- src/databases/database.rs | 8 ++--- src/databases/mysql.rs | 16 ++++----- src/databases/sqlite.rs | 14 ++++---- src/http/filters.rs | 9 +++-- src/http/handlers.rs | 15 ++++---- src/tracker/{key.rs => auth.rs} | 34 +++++++++---------- src/tracker/mod.rs | 17 +++++----- tests/api.rs | 6 ++-- 11 files changed, 72 insertions(+), 70 deletions(-) rename src/api/resources/{auth_key_resource.rs => auth_key.rs} (88%) rename src/tracker/{key.rs => auth.rs} (83%) diff --git a/src/api/resources/auth_key_resource.rs b/src/api/resources/auth_key.rs similarity index 88% rename from src/api/resources/auth_key_resource.rs rename to src/api/resources/auth_key.rs index b575984db..d5c08f496 100644 --- a/src/api/resources/auth_key_resource.rs +++ b/src/api/resources/auth_key.rs @@ -3,7 +3,7 @@ use std::convert::From; use serde::{Deserialize, Serialize}; use crate::protocol::clock::DurationSinceUnixEpoch; -use crate::tracker::key::Auth; +use crate::tracker::auth; #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct AuthKey { @@ -11,9 +11,9 @@ pub struct AuthKey { pub valid_until: Option, } -impl From for Auth { +impl From for auth::Key { fn from(auth_key_resource: AuthKey) -> Self { - Auth { + auth::Key { key: auth_key_resource.key, valid_until: auth_key_resource .valid_until @@ -22,8 +22,8 @@ impl From for Auth { } } -impl From for AuthKey { - fn from(auth_key: Auth) -> Self { +impl From for AuthKey { + fn from(auth_key: auth::Key) -> Self { AuthKey { key: auth_key.key, valid_until: auth_key.valid_until.map(|valid_until| valid_until.as_secs()), @@ -37,7 +37,7 
@@ mod tests { use super::AuthKey; use crate::protocol::clock::{Current, TimeNow}; - use crate::tracker::key::Auth; + use crate::tracker::auth; #[test] fn it_should_be_convertible_into_an_auth_key() { @@ -49,8 +49,8 @@ mod tests { }; assert_eq!( - Auth::from(auth_key_resource), - Auth { + auth::Key::from(auth_key_resource), + auth::Key { key: "IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line valid_until: Some(Current::add(&Duration::new(duration_in_secs, 0)).unwrap()) } @@ -61,7 +61,7 @@ mod tests { fn it_should_be_convertible_from_an_auth_key() { let duration_in_secs = 60; - let auth_key = Auth { + let auth_key = auth::Key { key: "IaWDneuFNZi8IB4MPA3qW1CD0M30EZSM".to_string(), // cspell:disable-line valid_until: Some(Current::add(&Duration::new(duration_in_secs, 0)).unwrap()), }; diff --git a/src/api/resources/mod.rs b/src/api/resources/mod.rs index 2b3e4b886..f708fc2e4 100644 --- a/src/api/resources/mod.rs +++ b/src/api/resources/mod.rs @@ -6,6 +6,7 @@ //! - [ ] `TorrentResource`, `TorrentListItemResource`, `TorrentPeerResource`, `PeerIdResource` //! - [ ] `StatsResource` //! - [ ] ... 
-pub mod auth_key_resource; + +pub mod auth_key; pub mod stats_resource; pub mod torrent_resource; diff --git a/src/api/server.rs b/src/api/server.rs index 61fd8ed3d..af2d66458 100644 --- a/src/api/server.rs +++ b/src/api/server.rs @@ -7,7 +7,7 @@ use std::time::Duration; use serde::{Deserialize, Serialize}; use warp::{filters, reply, serve, Filter}; -use super::resources::auth_key_resource::AuthKey; +use super::resources::auth_key::AuthKey; use super::resources::stats_resource::StatsResource; use super::resources::torrent_resource::{TorrentListItemResource, TorrentPeerResource, TorrentResource}; use crate::protocol::info_hash::InfoHash; diff --git a/src/databases/database.rs b/src/databases/database.rs index 7055d2a09..a4dae57ee 100644 --- a/src/databases/database.rs +++ b/src/databases/database.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; use crate::databases::mysql::Mysql; use crate::databases::sqlite::Sqlite; use crate::protocol::info_hash::InfoHash; -use crate::tracker::key::Auth; +use crate::tracker::auth; #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] pub enum Drivers { @@ -42,7 +42,7 @@ pub trait Database: Sync + Send { async fn load_persistent_torrents(&self) -> Result, Error>; - async fn load_keys(&self) -> Result, Error>; + async fn load_keys(&self) -> Result, Error>; async fn load_whitelist(&self) -> Result, Error>; @@ -54,9 +54,9 @@ pub trait Database: Sync + Send { async fn remove_info_hash_from_whitelist(&self, info_hash: InfoHash) -> Result; - async fn get_key_from_keys(&self, key: &str) -> Result; + async fn get_key_from_keys(&self, key: &str) -> Result; - async fn add_key_to_keys(&self, auth_key: &Auth) -> Result; + async fn add_key_to_keys(&self, auth_key: &auth::Key) -> Result; async fn remove_key_from_keys(&self, key: &str) -> Result; diff --git a/src/databases/mysql.rs b/src/databases/mysql.rs index 0dafc3a60..0d79315c6 100644 --- a/src/databases/mysql.rs +++ b/src/databases/mysql.rs @@ -12,7 +12,7 @@ use 
crate::databases::database; use crate::databases::database::{Database, Error}; use crate::protocol::common::AUTH_KEY_LENGTH; use crate::protocol::info_hash::InfoHash; -use crate::tracker::key::Auth; +use crate::tracker::auth; pub struct Mysql { pool: Pool, @@ -61,7 +61,7 @@ impl Database for Mysql { PRIMARY KEY (`id`), UNIQUE (`key`) );", - i8::try_from(AUTH_KEY_LENGTH).expect("Auth Key Length Should fit within a i8!") + i8::try_from(AUTH_KEY_LENGTH).expect("auth::Auth Key Length Should fit within a i8!") ); let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; @@ -91,13 +91,13 @@ impl Database for Mysql { Ok(torrents) } - async fn load_keys(&self) -> Result, Error> { + async fn load_keys(&self) -> Result, Error> { let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; - let keys: Vec = conn + let keys: Vec = conn .query_map( "SELECT `key`, valid_until FROM `keys`", - |(key, valid_until): (String, i64)| Auth { + |(key, valid_until): (String, i64)| auth::Key { key, valid_until: Some(Duration::from_secs(valid_until.unsigned_abs())), }, @@ -183,14 +183,14 @@ impl Database for Mysql { } } - async fn get_key_from_keys(&self, key: &str) -> Result { + async fn get_key_from_keys(&self, key: &str) -> Result { let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; match conn .exec_first::<(String, i64), _, _>("SELECT `key`, valid_until FROM `keys` WHERE `key` = :key", params! { key }) .map_err(|_| database::Error::QueryReturnedNoRows)? 
{ - Some((key, valid_until)) => Ok(Auth { + Some((key, valid_until)) => Ok(auth::Key { key, valid_until: Some(Duration::from_secs(valid_until.unsigned_abs())), }), @@ -198,7 +198,7 @@ impl Database for Mysql { } } - async fn add_key_to_keys(&self, auth_key: &Auth) -> Result { + async fn add_key_to_keys(&self, auth_key: &auth::Key) -> Result { let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; let key = auth_key.key.to_string(); diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs index 39dea8502..c42e9382d 100644 --- a/src/databases/sqlite.rs +++ b/src/databases/sqlite.rs @@ -9,7 +9,7 @@ use crate::databases::database; use crate::databases::database::{Database, Error}; use crate::protocol::clock::DurationSinceUnixEpoch; use crate::protocol::info_hash::InfoHash; -use crate::tracker::key::Auth; +use crate::tracker::auth; pub struct Sqlite { pool: Pool, @@ -78,7 +78,7 @@ impl Database for Sqlite { Ok(torrents) } - async fn load_keys(&self) -> Result, Error> { + async fn load_keys(&self) -> Result, Error> { let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; let mut stmt = conn.prepare("SELECT key, valid_until FROM keys")?; @@ -87,13 +87,13 @@ impl Database for Sqlite { let key = row.get(0)?; let valid_until: i64 = row.get(1)?; - Ok(Auth { + Ok(auth::Key { key, valid_until: Some(DurationSinceUnixEpoch::from_secs(valid_until.unsigned_abs())), }) })?; - let keys: Vec = keys_iter.filter_map(std::result::Result::ok).collect(); + let keys: Vec = keys_iter.filter_map(std::result::Result::ok).collect(); Ok(keys) } @@ -186,7 +186,7 @@ impl Database for Sqlite { } } - async fn get_key_from_keys(&self, key: &str) -> Result { + async fn get_key_from_keys(&self, key: &str) -> Result { let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; let mut stmt = conn.prepare("SELECT key, valid_until FROM keys WHERE key = ?")?; @@ -196,7 +196,7 @@ impl Database for Sqlite { let key: String = row.get(0).unwrap(); let 
valid_until: i64 = row.get(1).unwrap(); - Ok(Auth { + Ok(auth::Key { key, valid_until: Some(DurationSinceUnixEpoch::from_secs(valid_until.unsigned_abs())), }) @@ -205,7 +205,7 @@ impl Database for Sqlite { } } - async fn add_key_to_keys(&self, auth_key: &Auth) -> Result { + async fn add_key_to_keys(&self, auth_key: &auth::Key) -> Result { let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; match conn.execute( diff --git a/src/http/filters.rs b/src/http/filters.rs index 484ae2311..e9432e191 100644 --- a/src/http/filters.rs +++ b/src/http/filters.rs @@ -10,8 +10,7 @@ use super::request::{Announce, AnnounceQuery, Scrape}; use super::WebResult; use crate::protocol::common::MAX_SCRAPE_TORRENTS; use crate::protocol::info_hash::InfoHash; -use crate::tracker::key::Auth; -use crate::tracker::{self, peer}; +use crate::tracker::{self, auth, peer}; /// Pass Arc along #[must_use] @@ -35,10 +34,10 @@ pub fn with_peer_id() -> impl Filter + /// Pass Arc along #[must_use] -pub fn with_auth_key() -> impl Filter,), Error = Infallible> + Clone { +pub fn with_auth_key() -> impl Filter,), Error = Infallible> + Clone { warp::path::param::() - .map(|key: String| Auth::from_string(&key)) - .or_else(|_| async { Ok::<(Option,), Infallible>((None,)) }) + .map(|key: String| auth::Key::from_string(&key)) + .or_else(|_| async { Ok::<(Option,), Infallible>((None,)) }) } /// Check for `PeerAddress` diff --git a/src/http/handlers.rs b/src/http/handlers.rs index ff5469168..8d8816885 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -11,15 +11,18 @@ use super::error::Error; use super::response::{self, Peer, ScrapeEntry}; use super::{request, WebResult}; use crate::protocol::info_hash::InfoHash; -use crate::tracker::key::Auth; -use crate::tracker::{self, peer, statistics, torrent}; +use crate::tracker::{self, auth, peer, statistics, torrent}; -/// Authenticate `InfoHash` using optional `AuthKey` +/// Authenticate `InfoHash` using optional `auth::Key` /// /// # Errors 
/// /// Will return `ServerError` that wraps the `Error` if unable to `authenticate_request`. -pub async fn authenticate(info_hash: &InfoHash, auth_key: &Option, tracker: Arc) -> Result<(), Error> { +pub async fn authenticate( + info_hash: &InfoHash, + auth_key: &Option, + tracker: Arc, +) -> Result<(), Error> { tracker.authenticate_request(info_hash, auth_key).await.map_err(|e| match e { torrent::Error::TorrentNotWhitelisted => Error::TorrentNotWhitelisted, torrent::Error::PeerNotAuthenticated => Error::PeerNotAuthenticated, @@ -37,7 +40,7 @@ pub async fn authenticate(info_hash: &InfoHash, auth_key: &Option, tracker /// Will return `warp::Rejection` that wraps the `ServerError` if unable to `send_scrape_response`. pub async fn handle_announce( announce_request: request::Announce, - auth_key: Option, + auth_key: Option, tracker: Arc, ) -> WebResult { authenticate(&announce_request.info_hash, &auth_key, tracker.clone()) @@ -83,7 +86,7 @@ pub async fn handle_announce( /// Will return `warp::Rejection` that wraps the `ServerError` if unable to `send_scrape_response`. pub async fn handle_scrape( scrape_request: request::Scrape, - auth_key: Option, + auth_key: Option, tracker: Arc, ) -> WebResult { let mut files: HashMap = HashMap::new(); diff --git a/src/tracker/key.rs b/src/tracker/auth.rs similarity index 83% rename from src/tracker/key.rs rename to src/tracker/auth.rs index 673780ad0..7ac6d6939 100644 --- a/src/tracker/key.rs +++ b/src/tracker/auth.rs @@ -13,7 +13,7 @@ use crate::protocol::common::AUTH_KEY_LENGTH; /// # Panics /// /// It would panic if the `lifetime: Duration` + Duration is more than `Duration::MAX`. 
-pub fn generate(lifetime: Duration) -> Auth { +pub fn generate(lifetime: Duration) -> Key { let key: String = thread_rng() .sample_iter(&Alphanumeric) .take(AUTH_KEY_LENGTH) @@ -22,7 +22,7 @@ pub fn generate(lifetime: Duration) -> Auth { debug!("Generated key: {}, valid for: {:?} seconds", key, lifetime); - Auth { + Key { key, valid_until: Some(Current::add(&lifetime).unwrap()), } @@ -33,7 +33,7 @@ pub fn generate(lifetime: Duration) -> Auth { /// Will return `Error::KeyExpired` if `auth_key.valid_until` is past the `current_time`. /// /// Will return `Error::KeyInvalid` if `auth_key.valid_until` is past the `None`. -pub fn verify(auth_key: &Auth) -> Result<(), Error> { +pub fn verify(auth_key: &Key) -> Result<(), Error> { let current_time: DurationSinceUnixEpoch = Current::now(); match auth_key.valid_until { @@ -49,25 +49,25 @@ pub fn verify(auth_key: &Auth) -> Result<(), Error> { } #[derive(Serialize, Debug, Eq, PartialEq, Clone)] -pub struct Auth { +pub struct Key { pub key: String, pub valid_until: Option, } -impl Auth { +impl Key { #[must_use] - pub fn from_buffer(key_buffer: [u8; AUTH_KEY_LENGTH]) -> Option { + pub fn from_buffer(key_buffer: [u8; AUTH_KEY_LENGTH]) -> Option { if let Ok(key) = String::from_utf8(Vec::from(key_buffer)) { - Some(Auth { key, valid_until: None }) + Some(Key { key, valid_until: None }) } else { None } } #[must_use] - pub fn from_string(key: &str) -> Option { + pub fn from_string(key: &str) -> Option { if key.len() == AUTH_KEY_LENGTH { - Some(Auth { + Some(Key { key: key.to_string(), valid_until: None, }) @@ -100,11 +100,11 @@ mod tests { use std::time::Duration; use crate::protocol::clock::{Current, StoppedTime}; - use crate::tracker::key; + use crate::tracker::auth; #[test] fn auth_key_from_buffer() { - let auth_key = key::Auth::from_buffer([ + let auth_key = auth::Key::from_buffer([ 89, 90, 83, 108, 52, 108, 77, 90, 117, 112, 82, 117, 79, 112, 83, 82, 67, 51, 107, 114, 73, 75, 82, 53, 66, 80, 66, 49, 52, 110, 114, 74, ]); @@ 
-116,7 +116,7 @@ mod tests { #[test] fn auth_key_from_string() { let key_string = "YZSl4lMZupRuOpSRC3krIKR5BPB14nrJ"; - let auth_key = key::Auth::from_string(key_string); + let auth_key = auth::Key::from_string(key_string); assert!(auth_key.is_some()); assert_eq!(auth_key.unwrap().key, key_string); @@ -124,9 +124,9 @@ mod tests { #[test] fn generate_valid_auth_key() { - let auth_key = key::generate(Duration::new(9999, 0)); + let auth_key = auth::generate(Duration::new(9999, 0)); - assert!(key::verify(&auth_key).is_ok()); + assert!(auth::verify(&auth_key).is_ok()); } #[test] @@ -135,16 +135,16 @@ mod tests { Current::local_set_to_system_time_now(); // Make key that is valid for 19 seconds. - let auth_key = key::generate(Duration::from_secs(19)); + let auth_key = auth::generate(Duration::from_secs(19)); // Mock the time has passed 10 sec. Current::local_add(&Duration::from_secs(10)).unwrap(); - assert!(key::verify(&auth_key).is_ok()); + assert!(auth::verify(&auth_key).is_ok()); // Mock the time has passed another 10 sec. Current::local_add(&Duration::from_secs(10)).unwrap(); - assert!(key::verify(&auth_key).is_err()); + assert!(auth::verify(&auth_key).is_err()); } } diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index 508280b1a..806efee54 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -1,4 +1,4 @@ -pub mod key; +pub mod auth; pub mod mode; pub mod peer; pub mod statistics; @@ -17,12 +17,11 @@ use crate::config::Configuration; use crate::databases::database; use crate::databases::database::Database; use crate::protocol::info_hash::InfoHash; -use crate::tracker::key::Auth; pub struct Tracker { pub config: Arc, mode: mode::Mode, - keys: RwLock>, + keys: RwLock>, whitelist: RwLock>, torrents: RwLock>, stats_event_sender: Option>, @@ -68,8 +67,8 @@ impl Tracker { /// # Errors /// /// Will return a `database::Error` if unable to add the `auth_key` to the database. 
- pub async fn generate_auth_key(&self, lifetime: Duration) -> Result { - let auth_key = key::generate(lifetime); + pub async fn generate_auth_key(&self, lifetime: Duration) -> Result { + let auth_key = auth::generate(lifetime); self.database.add_key_to_keys(&auth_key).await?; self.keys.write().await.insert(auth_key.key.clone(), auth_key.clone()); Ok(auth_key) @@ -87,10 +86,10 @@ impl Tracker { /// # Errors /// /// Will return a `key::Error` if unable to get any `auth_key`. - pub async fn verify_auth_key(&self, auth_key: &Auth) -> Result<(), key::Error> { + pub async fn verify_auth_key(&self, auth_key: &auth::Key) -> Result<(), auth::Error> { match self.keys.read().await.get(&auth_key.key) { - None => Err(key::Error::KeyInvalid), - Some(key) => key::verify(key), + None => Err(auth::Error::KeyInvalid), + Some(key) => auth::verify(key), } } @@ -174,7 +173,7 @@ impl Tracker { /// Will return a `torrent::Error::PeerNotAuthenticated` if the `key` is `None`. /// /// Will return a `torrent::Error::TorrentNotWhitelisted` if the the Tracker is in listed mode and the `info_hash` is not whitelisted. 
- pub async fn authenticate_request(&self, info_hash: &InfoHash, key: &Option) -> Result<(), torrent::Error> { + pub async fn authenticate_request(&self, info_hash: &InfoHash, key: &Option) -> Result<(), torrent::Error> { // no authentication needed in public mode if self.is_public() { return Ok(()); diff --git a/tests/api.rs b/tests/api.rs index 4f119e6d0..22a222698 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -16,14 +16,14 @@ mod tracker_api { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; use reqwest::Response; use tokio::task::JoinHandle; - use torrust_tracker::api::resources::auth_key_resource::AuthKey; + use torrust_tracker::api::resources::auth_key::AuthKey; use torrust_tracker::api::resources::stats_resource::StatsResource; use torrust_tracker::api::resources::torrent_resource::{TorrentListItemResource, TorrentPeerResource, TorrentResource}; use torrust_tracker::config::Configuration; use torrust_tracker::jobs::tracker_api; use torrust_tracker::protocol::clock::DurationSinceUnixEpoch; use torrust_tracker::protocol::info_hash::InfoHash; - use torrust_tracker::tracker::key::Auth; + use torrust_tracker::tracker::auth; use torrust_tracker::tracker::peer::{self, TorrentPeer}; use torrust_tracker::tracker::statistics::Keeper; use torrust_tracker::{ephemeral_instance_keys, logging, static_time, tracker}; @@ -44,7 +44,7 @@ mod tracker_api { assert!(api_server .tracker .unwrap() - .verify_auth_key(&Auth::from(auth_key)) + .verify_auth_key(&auth::Key::from(auth_key)) .await .is_ok()); } From 36452717a0b21e850adcc7d00282500359cc4a26 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Mon, 28 Nov 2022 19:57:03 +0100 Subject: [PATCH 41/45] refactor: rename inside databases --- src/config.rs | 6 +-- src/databases/database.rs | 94 --------------------------------------- src/databases/driver.rs | 7 +++ src/databases/error.rs | 21 +++++++++ src/databases/mod.rs | 72 +++++++++++++++++++++++++++++- src/databases/mysql.rs | 67 ++++++++++++++-------------- 
src/databases/sqlite.rs | 71 +++++++++++++++-------------- src/tracker/mod.rs | 21 +++++---- 8 files changed, 180 insertions(+), 179 deletions(-) delete mode 100644 src/databases/database.rs create mode 100644 src/databases/driver.rs create mode 100644 src/databases/error.rs diff --git a/src/config.rs b/src/config.rs index 67177aca1..a7e7e9df6 100644 --- a/src/config.rs +++ b/src/config.rs @@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize}; use serde_with::{serde_as, NoneAsEmptyString}; use {std, toml}; -use crate::databases::database::Drivers; +use crate::databases::driver::Driver; use crate::tracker::mode; #[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] @@ -42,7 +42,7 @@ pub struct HttpApi { pub struct Configuration { pub log_level: Option, pub mode: mode::Mode, - pub db_driver: Drivers, + pub db_driver: Driver, pub db_path: String, pub announce_interval: u32, pub min_announce_interval: u32, @@ -98,7 +98,7 @@ impl Configuration { let mut configuration = Configuration { log_level: Option::from(String::from("info")), mode: mode::Mode::Public, - db_driver: Drivers::Sqlite3, + db_driver: Driver::Sqlite3, db_path: String::from("data.db"), announce_interval: 120, min_announce_interval: 120, diff --git a/src/databases/database.rs b/src/databases/database.rs deleted file mode 100644 index a4dae57ee..000000000 --- a/src/databases/database.rs +++ /dev/null @@ -1,94 +0,0 @@ -use async_trait::async_trait; -use derive_more::{Display, Error}; -use serde::{Deserialize, Serialize}; - -use crate::databases::mysql::Mysql; -use crate::databases::sqlite::Sqlite; -use crate::protocol::info_hash::InfoHash; -use crate::tracker::auth; - -#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] -pub enum Drivers { - Sqlite3, - MySQL, -} - -/// # Errors -/// -/// Will return `r2d2::Error` if `db_path` is not able to create a database. 
-pub fn connect(db_driver: &Drivers, db_path: &str) -> Result, r2d2::Error> { - let database: Box = match db_driver { - Drivers::Sqlite3 => { - let db = Sqlite::new(db_path)?; - Box::new(db) - } - Drivers::MySQL => { - let db = Mysql::new(db_path)?; - Box::new(db) - } - }; - - database.create_database_tables().expect("Could not create database tables."); - - Ok(database) -} - -#[async_trait] -pub trait Database: Sync + Send { - /// # Errors - /// - /// Will return `Error` if unable to create own tables. - fn create_database_tables(&self) -> Result<(), Error>; - - async fn load_persistent_torrents(&self) -> Result, Error>; - - async fn load_keys(&self) -> Result, Error>; - - async fn load_whitelist(&self) -> Result, Error>; - - async fn save_persistent_torrent(&self, info_hash: &InfoHash, completed: u32) -> Result<(), Error>; - - async fn get_info_hash_from_whitelist(&self, info_hash: &str) -> Result; - - async fn add_info_hash_to_whitelist(&self, info_hash: InfoHash) -> Result; - - async fn remove_info_hash_from_whitelist(&self, info_hash: InfoHash) -> Result; - - async fn get_key_from_keys(&self, key: &str) -> Result; - - async fn add_key_to_keys(&self, auth_key: &auth::Key) -> Result; - - async fn remove_key_from_keys(&self, key: &str) -> Result; - - async fn is_info_hash_whitelisted(&self, info_hash: &InfoHash) -> Result { - self.get_info_hash_from_whitelist(&info_hash.clone().to_string()) - .await - .map_or_else( - |e| match e { - Error::QueryReturnedNoRows => Ok(false), - e => Err(e), - }, - |_| Ok(true), - ) - } -} - -#[derive(Debug, Display, PartialEq, Eq, Error)] -#[allow(dead_code)] -pub enum Error { - #[display(fmt = "Query returned no rows.")] - QueryReturnedNoRows, - #[display(fmt = "Invalid query.")] - InvalidQuery, - #[display(fmt = "Database error.")] - DatabaseError, -} - -impl From for Error { - fn from(e: r2d2_sqlite::rusqlite::Error) -> Self { - match e { - r2d2_sqlite::rusqlite::Error::QueryReturnedNoRows => Error::QueryReturnedNoRows, - _ => 
Error::InvalidQuery, - } - } -} diff --git a/src/databases/driver.rs b/src/databases/driver.rs new file mode 100644 index 000000000..7eaa9064e --- /dev/null +++ b/src/databases/driver.rs @@ -0,0 +1,7 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] +pub enum Driver { + Sqlite3, + MySQL, +} diff --git a/src/databases/error.rs b/src/databases/error.rs new file mode 100644 index 000000000..467db407f --- /dev/null +++ b/src/databases/error.rs @@ -0,0 +1,21 @@ +use derive_more::{Display, Error}; + +#[derive(Debug, Display, PartialEq, Eq, Error)] +#[allow(dead_code)] +pub enum Error { + #[display(fmt = "Query returned no rows.")] + QueryReturnedNoRows, + #[display(fmt = "Invalid query.")] + InvalidQuery, + #[display(fmt = "Database error.")] + DatabaseError, +} + +impl From for Error { + fn from(e: r2d2_sqlite::rusqlite::Error) -> Self { + match e { + r2d2_sqlite::rusqlite::Error::QueryReturnedNoRows => Error::QueryReturnedNoRows, + _ => Error::InvalidQuery, + } + } +} diff --git a/src/databases/mod.rs b/src/databases/mod.rs index 169d99f4d..c1d265b56 100644 --- a/src/databases/mod.rs +++ b/src/databases/mod.rs @@ -1,3 +1,73 @@ -pub mod database; +pub mod driver; +pub mod error; pub mod mysql; pub mod sqlite; + +use async_trait::async_trait; + +use self::driver::Driver; +use self::error::Error; +use crate::databases::mysql::Mysql; +use crate::databases::sqlite::Sqlite; +use crate::protocol::info_hash::InfoHash; +use crate::tracker::auth; + +/// # Errors +/// +/// Will return `r2d2::Error` if `db_path` is not able to create a database. 
+pub fn connect(db_driver: &Driver, db_path: &str) -> Result, r2d2::Error> { + let database: Box = match db_driver { + Driver::Sqlite3 => { + let db = Sqlite::new(db_path)?; + Box::new(db) + } + Driver::MySQL => { + let db = Mysql::new(db_path)?; + Box::new(db) + } + }; + + database.create_database_tables().expect("Could not create database tables."); + + Ok(database) +} + +#[async_trait] +pub trait Database: Sync + Send { + /// # Errors + /// + /// Will return `Error` if unable to create own tables. + fn create_database_tables(&self) -> Result<(), Error>; + + async fn load_persistent_torrents(&self) -> Result, Error>; + + async fn load_keys(&self) -> Result, Error>; + + async fn load_whitelist(&self) -> Result, Error>; + + async fn save_persistent_torrent(&self, info_hash: &InfoHash, completed: u32) -> Result<(), Error>; + + async fn get_info_hash_from_whitelist(&self, info_hash: &str) -> Result; + + async fn add_info_hash_to_whitelist(&self, info_hash: InfoHash) -> Result; + + async fn remove_info_hash_from_whitelist(&self, info_hash: InfoHash) -> Result; + + async fn get_key_from_keys(&self, key: &str) -> Result; + + async fn add_key_to_keys(&self, auth_key: &auth::Key) -> Result; + + async fn remove_key_from_keys(&self, key: &str) -> Result; + + async fn is_info_hash_whitelisted(&self, info_hash: &InfoHash) -> Result { + self.get_info_hash_from_whitelist(&info_hash.clone().to_string()) + .await + .map_or_else( + |e| match e { + Error::QueryReturnedNoRows => Ok(false), + e => Err(e), + }, + |_| Ok(true), + ) + } +} diff --git a/src/databases/mysql.rs b/src/databases/mysql.rs index 0d79315c6..8322b2273 100644 --- a/src/databases/mysql.rs +++ b/src/databases/mysql.rs @@ -8,8 +8,7 @@ use r2d2_mysql::mysql::prelude::Queryable; use r2d2_mysql::mysql::{params, Opts, OptsBuilder}; use r2d2_mysql::MysqlConnectionManager; -use crate::databases::database; -use crate::databases::database::{Database, Error}; +use crate::databases::{Database, Error}; use 
crate::protocol::common::AUTH_KEY_LENGTH; use crate::protocol::info_hash::InfoHash; use crate::tracker::auth; @@ -36,7 +35,7 @@ impl Mysql { #[async_trait] impl Database for Mysql { - fn create_database_tables(&self) -> Result<(), database::Error> { + fn create_database_tables(&self) -> Result<(), Error> { let create_whitelist_table = " CREATE TABLE IF NOT EXISTS whitelist ( id integer PRIMARY KEY AUTO_INCREMENT, @@ -64,7 +63,7 @@ impl Database for Mysql { i8::try_from(AUTH_KEY_LENGTH).expect("auth::Auth Key Length Should fit within a i8!") ); - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + let mut conn = self.pool.get().map_err(|_| Error::DatabaseError)?; conn.query_drop(&create_torrents_table) .expect("Could not create torrents table."); @@ -75,8 +74,8 @@ impl Database for Mysql { Ok(()) } - async fn load_persistent_torrents(&self) -> Result, database::Error> { - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn load_persistent_torrents(&self) -> Result, Error> { + let mut conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let torrents: Vec<(InfoHash, u32)> = conn .query_map( @@ -86,13 +85,13 @@ impl Database for Mysql { (info_hash, completed) }, ) - .map_err(|_| database::Error::QueryReturnedNoRows)?; + .map_err(|_| Error::QueryReturnedNoRows)?; Ok(torrents) } async fn load_keys(&self) -> Result, Error> { - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + let mut conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let keys: Vec = conn .query_map( @@ -102,25 +101,25 @@ impl Database for Mysql { valid_until: Some(Duration::from_secs(valid_until.unsigned_abs())), }, ) - .map_err(|_| database::Error::QueryReturnedNoRows)?; + .map_err(|_| Error::QueryReturnedNoRows)?; Ok(keys) } async fn load_whitelist(&self) -> Result, Error> { - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + let mut conn = self.pool.get().map_err(|_| 
Error::DatabaseError)?; let info_hashes: Vec = conn .query_map("SELECT info_hash FROM whitelist", |info_hash: String| { InfoHash::from_str(&info_hash).unwrap() }) - .map_err(|_| database::Error::QueryReturnedNoRows)?; + .map_err(|_| Error::QueryReturnedNoRows)?; Ok(info_hashes) } - async fn save_persistent_torrent(&self, info_hash: &InfoHash, completed: u32) -> Result<(), database::Error> { - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn save_persistent_torrent(&self, info_hash: &InfoHash, completed: u32) -> Result<(), Error> { + let mut conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let info_hash_str = info_hash.to_string(); @@ -132,28 +131,28 @@ impl Database for Mysql { } Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } - async fn get_info_hash_from_whitelist(&self, info_hash: &str) -> Result { - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn get_info_hash_from_whitelist(&self, info_hash: &str) -> Result { + let mut conn = self.pool.get().map_err(|_| Error::DatabaseError)?; match conn .exec_first::( "SELECT info_hash FROM whitelist WHERE info_hash = :info_hash", params! { info_hash }, ) - .map_err(|_| database::Error::DatabaseError)? + .map_err(|_| Error::DatabaseError)? 
{ Some(info_hash) => Ok(InfoHash::from_str(&info_hash).unwrap()), - None => Err(database::Error::QueryReturnedNoRows), + None => Err(Error::QueryReturnedNoRows), } } - async fn add_info_hash_to_whitelist(&self, info_hash: InfoHash) -> Result { - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn add_info_hash_to_whitelist(&self, info_hash: InfoHash) -> Result { + let mut conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let info_hash_str = info_hash.to_string(); @@ -164,13 +163,13 @@ impl Database for Mysql { Ok(_) => Ok(1), Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } - async fn remove_info_hash_from_whitelist(&self, info_hash: InfoHash) -> Result { - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn remove_info_hash_from_whitelist(&self, info_hash: InfoHash) -> Result { + let mut conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let info_hash = info_hash.to_string(); @@ -178,28 +177,28 @@ impl Database for Mysql { Ok(_) => Ok(1), Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } - async fn get_key_from_keys(&self, key: &str) -> Result { - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn get_key_from_keys(&self, key: &str) -> Result { + let mut conn = self.pool.get().map_err(|_| Error::DatabaseError)?; match conn .exec_first::<(String, i64), _, _>("SELECT `key`, valid_until FROM `keys` WHERE `key` = :key", params! { key }) - .map_err(|_| database::Error::QueryReturnedNoRows)? + .map_err(|_| Error::QueryReturnedNoRows)? 
{ Some((key, valid_until)) => Ok(auth::Key { key, valid_until: Some(Duration::from_secs(valid_until.unsigned_abs())), }), - None => Err(database::Error::InvalidQuery), + None => Err(Error::InvalidQuery), } } - async fn add_key_to_keys(&self, auth_key: &auth::Key) -> Result { - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn add_key_to_keys(&self, auth_key: &auth::Key) -> Result { + let mut conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let key = auth_key.key.to_string(); let valid_until = auth_key.valid_until.unwrap_or(Duration::ZERO).as_secs().to_string(); @@ -211,19 +210,19 @@ impl Database for Mysql { Ok(_) => Ok(1), Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } - async fn remove_key_from_keys(&self, key: &str) -> Result { - let mut conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn remove_key_from_keys(&self, key: &str) -> Result { + let mut conn = self.pool.get().map_err(|_| Error::DatabaseError)?; match conn.exec_drop("DELETE FROM `keys` WHERE key = :key", params! 
{ key }) { Ok(_) => Ok(1), Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs index c42e9382d..c5401aacf 100644 --- a/src/databases/sqlite.rs +++ b/src/databases/sqlite.rs @@ -5,8 +5,7 @@ use log::debug; use r2d2::Pool; use r2d2_sqlite::SqliteConnectionManager; -use crate::databases::database; -use crate::databases::database::{Database, Error}; +use crate::databases::{Database, Error}; use crate::protocol::clock::DurationSinceUnixEpoch; use crate::protocol::info_hash::InfoHash; use crate::tracker::auth; @@ -28,7 +27,7 @@ impl Sqlite { #[async_trait] impl Database for Sqlite { - fn create_database_tables(&self) -> Result<(), database::Error> { + fn create_database_tables(&self) -> Result<(), Error> { let create_whitelist_table = " CREATE TABLE IF NOT EXISTS whitelist ( id INTEGER PRIMARY KEY AUTOINCREMENT, @@ -52,17 +51,17 @@ impl Database for Sqlite { );" .to_string(); - let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; conn.execute(&create_whitelist_table, []) .and_then(|_| conn.execute(&create_keys_table, [])) .and_then(|_| conn.execute(&create_torrents_table, [])) - .map_err(|_| database::Error::InvalidQuery) + .map_err(|_| Error::InvalidQuery) .map(|_| ()) } - async fn load_persistent_torrents(&self) -> Result, database::Error> { - let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn load_persistent_torrents(&self) -> Result, Error> { + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let mut stmt = conn.prepare("SELECT info_hash, completed FROM torrents")?; @@ -79,7 +78,7 @@ impl Database for Sqlite { } async fn load_keys(&self) -> Result, Error> { - let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let mut stmt = 
conn.prepare("SELECT key, valid_until FROM keys")?; @@ -99,7 +98,7 @@ impl Database for Sqlite { } async fn load_whitelist(&self) -> Result, Error> { - let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let mut stmt = conn.prepare("SELECT info_hash FROM whitelist")?; @@ -114,8 +113,8 @@ impl Database for Sqlite { Ok(info_hashes) } - async fn save_persistent_torrent(&self, info_hash: &InfoHash, completed: u32) -> Result<(), database::Error> { - let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn save_persistent_torrent(&self, info_hash: &InfoHash, completed: u32) -> Result<(), Error> { + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; match conn.execute( "INSERT INTO torrents (info_hash, completed) VALUES (?1, ?2) ON CONFLICT(info_hash) DO UPDATE SET completed = ?2", @@ -125,17 +124,17 @@ impl Database for Sqlite { if updated > 0 { return Ok(()); } - Err(database::Error::QueryReturnedNoRows) + Err(Error::QueryReturnedNoRows) } Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } - async fn get_info_hash_from_whitelist(&self, info_hash: &str) -> Result { - let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn get_info_hash_from_whitelist(&self, info_hash: &str) -> Result { + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let mut stmt = conn.prepare("SELECT info_hash FROM whitelist WHERE info_hash = ?")?; let mut rows = stmt.query([info_hash])?; @@ -143,51 +142,51 @@ impl Database for Sqlite { match rows.next() { Ok(row) => match row { Some(row) => Ok(InfoHash::from_str(&row.get_unwrap::<_, String>(0)).unwrap()), - None => Err(database::Error::QueryReturnedNoRows), + None => Err(Error::QueryReturnedNoRows), }, Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } - async fn 
add_info_hash_to_whitelist(&self, info_hash: InfoHash) -> Result { - let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn add_info_hash_to_whitelist(&self, info_hash: InfoHash) -> Result { + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; match conn.execute("INSERT INTO whitelist (info_hash) VALUES (?)", [info_hash.to_string()]) { Ok(updated) => { if updated > 0 { return Ok(updated); } - Err(database::Error::QueryReturnedNoRows) + Err(Error::QueryReturnedNoRows) } Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } - async fn remove_info_hash_from_whitelist(&self, info_hash: InfoHash) -> Result { - let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn remove_info_hash_from_whitelist(&self, info_hash: InfoHash) -> Result { + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; match conn.execute("DELETE FROM whitelist WHERE info_hash = ?", [info_hash.to_string()]) { Ok(updated) => { if updated > 0 { return Ok(updated); } - Err(database::Error::QueryReturnedNoRows) + Err(Error::QueryReturnedNoRows) } Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } - async fn get_key_from_keys(&self, key: &str) -> Result { - let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn get_key_from_keys(&self, key: &str) -> Result { + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; let mut stmt = conn.prepare("SELECT key, valid_until FROM keys WHERE key = ?")?; let mut rows = stmt.query([key.to_string()])?; @@ -201,12 +200,12 @@ impl Database for Sqlite { valid_until: Some(DurationSinceUnixEpoch::from_secs(valid_until.unsigned_abs())), }) } else { - Err(database::Error::QueryReturnedNoRows) + Err(Error::QueryReturnedNoRows) } } - async fn add_key_to_keys(&self, auth_key: &auth::Key) -> Result { - let conn = self.pool.get().map_err(|_| 
database::Error::DatabaseError)?; + async fn add_key_to_keys(&self, auth_key: &auth::Key) -> Result { + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; match conn.execute( "INSERT INTO keys (key, valid_until) VALUES (?1, ?2)", @@ -216,28 +215,28 @@ impl Database for Sqlite { if updated > 0 { return Ok(updated); } - Err(database::Error::QueryReturnedNoRows) + Err(Error::QueryReturnedNoRows) } Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } - async fn remove_key_from_keys(&self, key: &str) -> Result { - let conn = self.pool.get().map_err(|_| database::Error::DatabaseError)?; + async fn remove_key_from_keys(&self, key: &str) -> Result { + let conn = self.pool.get().map_err(|_| Error::DatabaseError)?; match conn.execute("DELETE FROM keys WHERE key = ?", [key]) { Ok(updated) => { if updated > 0 { return Ok(updated); } - Err(database::Error::QueryReturnedNoRows) + Err(Error::QueryReturnedNoRows) } Err(e) => { debug!("{:?}", e); - Err(database::Error::InvalidQuery) + Err(Error::InvalidQuery) } } } diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index 806efee54..bd2da93f0 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -14,8 +14,7 @@ use tokio::sync::mpsc::error::SendError; use tokio::sync::{RwLock, RwLockReadGuard}; use crate::config::Configuration; -use crate::databases::database; -use crate::databases::database::Database; +use crate::databases::{self, Database}; use crate::protocol::info_hash::InfoHash; pub struct Tracker { @@ -38,7 +37,7 @@ impl Tracker { stats_event_sender: Option>, stats_repository: statistics::Repo, ) -> Result { - let database = database::connect(&config.db_driver, &config.db_path)?; + let database = databases::connect(&config.db_driver, &config.db_path)?; Ok(Tracker { config: config.clone(), @@ -67,7 +66,7 @@ impl Tracker { /// # Errors /// /// Will return a `database::Error` if unable to add the `auth_key` to the database. 
- pub async fn generate_auth_key(&self, lifetime: Duration) -> Result { + pub async fn generate_auth_key(&self, lifetime: Duration) -> Result { let auth_key = auth::generate(lifetime); self.database.add_key_to_keys(&auth_key).await?; self.keys.write().await.insert(auth_key.key.clone(), auth_key.clone()); @@ -77,7 +76,7 @@ impl Tracker { /// # Errors /// /// Will return a `database::Error` if unable to remove the `key` to the database. - pub async fn remove_auth_key(&self, key: &str) -> Result<(), database::Error> { + pub async fn remove_auth_key(&self, key: &str) -> Result<(), databases::error::Error> { self.database.remove_key_from_keys(key).await?; self.keys.write().await.remove(key); Ok(()) @@ -96,7 +95,7 @@ impl Tracker { /// # Errors /// /// Will return a `database::Error` if unable to `load_keys` from the database. - pub async fn load_keys(&self) -> Result<(), database::Error> { + pub async fn load_keys(&self) -> Result<(), databases::error::Error> { let keys_from_database = self.database.load_keys().await?; let mut keys = self.keys.write().await; @@ -114,14 +113,14 @@ impl Tracker { /// # Errors /// /// Will return a `database::Error` if unable to add the `info_hash` into the whitelist database. 
- pub async fn add_torrent_to_whitelist(&self, info_hash: &InfoHash) -> Result<(), database::Error> { + pub async fn add_torrent_to_whitelist(&self, info_hash: &InfoHash) -> Result<(), databases::error::Error> { self.add_torrent_to_database_whitelist(info_hash).await?; self.add_torrent_to_memory_whitelist(info_hash).await; Ok(()) } /// It adds a torrent to the whitelist if it has not been whitelisted previously - async fn add_torrent_to_database_whitelist(&self, info_hash: &InfoHash) -> Result<(), database::Error> { + async fn add_torrent_to_database_whitelist(&self, info_hash: &InfoHash) -> Result<(), databases::error::Error> { if self.database.is_info_hash_whitelisted(info_hash).await.unwrap() { return Ok(()); } @@ -140,7 +139,7 @@ impl Tracker { /// # Errors /// /// Will return a `database::Error` if unable to remove the `info_hash` from the whitelist database. - pub async fn remove_torrent_from_whitelist(&self, info_hash: &InfoHash) -> Result<(), database::Error> { + pub async fn remove_torrent_from_whitelist(&self, info_hash: &InfoHash) -> Result<(), databases::error::Error> { self.database.remove_info_hash_from_whitelist(*info_hash).await?; self.whitelist.write().await.remove(info_hash); Ok(()) @@ -153,7 +152,7 @@ impl Tracker { /// # Errors /// /// Will return a `database::Error` if unable to load the list whitelisted `info_hash`s from the database. - pub async fn load_whitelist(&self) -> Result<(), database::Error> { + pub async fn load_whitelist(&self) -> Result<(), databases::error::Error> { let whitelisted_torrents_from_database = self.database.load_whitelist().await?; let mut whitelist = self.whitelist.write().await; @@ -206,7 +205,7 @@ impl Tracker { /// # Errors /// /// Will return a `database::Error` if unable to load the list of `persistent_torrents` from the database. 
- pub async fn load_persistent_torrents(&self) -> Result<(), database::Error> { + pub async fn load_persistent_torrents(&self) -> Result<(), databases::error::Error> { let persistent_torrents = self.database.load_persistent_torrents().await?; let mut torrents = self.torrents.write().await; From 32eb44b318ddc57b574bae940ee384984bf7a24e Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Mon, 28 Nov 2022 20:08:37 +0100 Subject: [PATCH 42/45] refactor: rename inside http --- src/http/filters.rs | 21 ++++++++++----------- src/http/handlers.rs | 17 ++++++++--------- 2 files changed, 18 insertions(+), 20 deletions(-) diff --git a/src/http/filters.rs b/src/http/filters.rs index e9432e191..0fe369eba 100644 --- a/src/http/filters.rs +++ b/src/http/filters.rs @@ -6,8 +6,7 @@ use std::sync::Arc; use warp::{reject, Filter, Rejection}; use super::error::Error; -use super::request::{Announce, AnnounceQuery, Scrape}; -use super::WebResult; +use super::{request, WebResult}; use crate::protocol::common::MAX_SCRAPE_TORRENTS; use crate::protocol::info_hash::InfoHash; use crate::tracker::{self, auth, peer}; @@ -51,10 +50,10 @@ pub fn with_peer_addr(on_reverse_proxy: bool) -> impl Filter impl Filter + Clone { - warp::filters::query::query::() +pub fn with_announce_request(on_reverse_proxy: bool) -> impl Filter + Clone { + warp::filters::query::query::() .and(with_info_hash()) .and(with_peer_id()) .and(with_peer_addr(on_reverse_proxy)) @@ -63,7 +62,7 @@ pub fn with_announce_request(on_reverse_proxy: bool) -> impl Filter impl Filter + Clone { +pub fn with_scrape_request(on_reverse_proxy: bool) -> impl Filter + Clone { warp::any() .and(with_info_hash()) .and(with_peer_addr(on_reverse_proxy)) @@ -162,12 +161,12 @@ fn peer_addr((on_reverse_proxy, remote_addr, x_forwarded_for): (bool, Option, peer_id: peer::Id, peer_addr: IpAddr, -) -> WebResult { - Ok(Announce { +) -> WebResult { + Ok(request::Announce { info_hash: info_hashes[0], peer_addr, downloaded: 
announce_request_query.downloaded.unwrap_or(0), @@ -182,6 +181,6 @@ fn announce_request( /// Parse `ScrapeRequest` from `InfoHash` #[allow(clippy::unnecessary_wraps)] -fn scrape_request(info_hashes: Vec, peer_addr: IpAddr) -> WebResult { - Ok(Scrape { info_hashes, peer_addr }) +fn scrape_request(info_hashes: Vec, peer_addr: IpAddr) -> WebResult { + Ok(request::Scrape { info_hashes, peer_addr }) } diff --git a/src/http/handlers.rs b/src/http/handlers.rs index 8d8816885..0e230e785 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -8,8 +8,7 @@ use warp::http::Response; use warp::{reject, Rejection, Reply}; use super::error::Error; -use super::response::{self, Peer, ScrapeEntry}; -use super::{request, WebResult}; +use super::{request, response, WebResult}; use crate::protocol::info_hash::InfoHash; use crate::tracker::{self, auth, peer, statistics, torrent}; @@ -89,7 +88,7 @@ pub async fn handle_scrape( auth_key: Option, tracker: Arc, ) -> WebResult { - let mut files: HashMap = HashMap::new(); + let mut files: HashMap = HashMap::new(); let db = tracker.get_torrents().await; for info_hash in &scrape_request.info_hashes { @@ -97,20 +96,20 @@ pub async fn handle_scrape( Some(torrent_info) => { if authenticate(info_hash, &auth_key, tracker.clone()).await.is_ok() { let (seeders, completed, leechers) = torrent_info.get_stats(); - ScrapeEntry { + response::ScrapeEntry { complete: seeders, downloaded: completed, incomplete: leechers, } } else { - ScrapeEntry { + response::ScrapeEntry { complete: 0, downloaded: 0, incomplete: 0, } } } - None => ScrapeEntry { + None => response::ScrapeEntry { complete: 0, downloaded: 0, incomplete: 0, @@ -142,9 +141,9 @@ fn send_announce_response( interval: u32, interval_min: u32, ) -> WebResult { - let http_peers: Vec = peers + let http_peers: Vec = peers .iter() - .map(|peer| Peer { + .map(|peer| response::Peer { peer_id: peer.peer_id.to_string(), ip: peer.peer_addr.ip(), port: peer.peer_addr.port(), @@ -171,7 +170,7 @@ fn 
send_announce_response( } /// Send scrape response -fn send_scrape_response(files: HashMap) -> WebResult { +fn send_scrape_response(files: HashMap) -> WebResult { let res = response::Scrape { files }; match res.write() { From 49a6acbcb922d24016a4d33a91bf4e89f0c09cf8 Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 23 Nov 2022 17:21:10 +0100 Subject: [PATCH 43/45] ci: clippy warning as errors --- .github/workflows/test_build_release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test_build_release.yml b/.github/workflows/test_build_release.yml index 4acf14277..3924eea4b 100644 --- a/.github/workflows/test_build_release.yml +++ b/.github/workflows/test_build_release.yml @@ -45,7 +45,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: clippy - args: --all-targets + args: --all-targets -- -D clippy::pedantic - uses: taiki-e/install-action@cargo-llvm-cov - uses: taiki-e/install-action@nextest - name: Run Tests From 01e71bfe5c0a0c7cd0e54af8096cd6adf8d67efe Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 30 Nov 2022 15:31:52 +0100 Subject: [PATCH 44/45] clippy: fix src/tracker/peer.rs --- src/api/resources/torrent_resource.rs | 6 +-- src/http/handlers.rs | 5 +-- src/tracker/mod.rs | 6 +-- src/tracker/peer.rs | 54 +++++++++++++-------------- src/tracker/torrent.rs | 16 ++++---- src/udp/handlers.rs | 8 ++-- tests/api.rs | 6 +-- 7 files changed, 49 insertions(+), 52 deletions(-) diff --git a/src/api/resources/torrent_resource.rs b/src/api/resources/torrent_resource.rs index 4063b95f5..bc1a9acf5 100644 --- a/src/api/resources/torrent_resource.rs +++ b/src/api/resources/torrent_resource.rs @@ -1,6 +1,6 @@ use serde::{Deserialize, Serialize}; -use crate::tracker::peer::{self, TorrentPeer}; +use crate::tracker::peer::{self, Peer}; #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] pub struct TorrentResource { @@ -50,9 +50,9 @@ impl From for PeerIdResource { } } -impl From for TorrentPeerResource { +impl From for 
TorrentPeerResource { #[allow(deprecated)] - fn from(peer: TorrentPeer) -> Self { + fn from(peer: Peer) -> Self { TorrentPeerResource { peer_id: PeerIdResource::from(peer.peer_id), peer_addr: peer.peer_addr.to_string(), diff --git a/src/http/handlers.rs b/src/http/handlers.rs index 0e230e785..2fc878354 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -48,8 +48,7 @@ pub async fn handle_announce( debug!("{:?}", announce_request); - let peer = - peer::TorrentPeer::from_http_announce_request(&announce_request, announce_request.peer_addr, tracker.config.get_ext_ip()); + let peer = peer::Peer::from_http_announce_request(&announce_request, announce_request.peer_addr, tracker.config.get_ext_ip()); let torrent_stats = tracker .update_torrent_with_peer_and_get_stats(&announce_request.info_hash, &peer) .await; @@ -137,7 +136,7 @@ pub async fn handle_scrape( fn send_announce_response( announce_request: &request::Announce, torrent_stats: &torrent::Stats, - peers: &Vec, + peers: &Vec, interval: u32, interval_min: u32, ) -> WebResult { diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index bd2da93f0..4b2dabebb 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -227,7 +227,7 @@ impl Tracker { } /// Get all torrent peers for a given torrent filtering out the peer with the client address - pub async fn get_torrent_peers(&self, info_hash: &InfoHash, client_addr: &SocketAddr) -> Vec { + pub async fn get_torrent_peers(&self, info_hash: &InfoHash, client_addr: &SocketAddr) -> Vec { let read_lock = self.torrents.read().await; match read_lock.get(info_hash) { @@ -237,7 +237,7 @@ impl Tracker { } /// Get all torrent peers for a given torrent - pub async fn get_all_torrent_peers(&self, info_hash: &InfoHash) -> Vec { + pub async fn get_all_torrent_peers(&self, info_hash: &InfoHash) -> Vec { let read_lock = self.torrents.read().await; match read_lock.get(info_hash) { @@ -246,7 +246,7 @@ impl Tracker { } } - pub async fn update_torrent_with_peer_and_get_stats(&self, 
info_hash: &InfoHash, peer: &peer::TorrentPeer) -> torrent::Stats { + pub async fn update_torrent_with_peer_and_get_stats(&self, info_hash: &InfoHash, peer: &peer::Peer) -> torrent::Stats { let mut torrents = self.torrents.write().await; let torrent_entry = match torrents.entry(*info_hash) { diff --git a/src/tracker/peer.rs b/src/tracker/peer.rs index 16aada0ed..2da257d3e 100644 --- a/src/tracker/peer.rs +++ b/src/tracker/peer.rs @@ -10,7 +10,7 @@ use crate::protocol::common::{AnnounceEventDef, NumberOfBytesDef}; use crate::protocol::utils::ser_unix_time_value; #[derive(PartialEq, Eq, Debug, Clone, Serialize, Copy)] -pub struct TorrentPeer { +pub struct Peer { pub peer_id: Id, pub peer_addr: SocketAddr, #[serde(serialize_with = "ser_unix_time_value")] @@ -25,16 +25,16 @@ pub struct TorrentPeer { pub event: AnnounceEvent, } -impl TorrentPeer { +impl Peer { #[must_use] pub fn from_udp_announce_request( announce_request: &aquatic_udp_protocol::AnnounceRequest, remote_ip: IpAddr, host_opt_ip: Option, ) -> Self { - let peer_addr = TorrentPeer::peer_addr_from_ip_and_port_and_opt_host_ip(remote_ip, host_opt_ip, announce_request.port.0); + let peer_addr = Peer::peer_addr_from_ip_and_port_and_opt_host_ip(remote_ip, host_opt_ip, announce_request.port.0); - TorrentPeer { + Peer { peer_id: Id(announce_request.peer_id.0), peer_addr, updated: Current::now(), @@ -47,7 +47,7 @@ impl TorrentPeer { #[must_use] pub fn from_http_announce_request(announce_request: &Announce, remote_ip: IpAddr, host_opt_ip: Option) -> Self { - let peer_addr = TorrentPeer::peer_addr_from_ip_and_port_and_opt_host_ip(remote_ip, host_opt_ip, announce_request.port); + let peer_addr = Peer::peer_addr_from_ip_and_port_and_opt_host_ip(remote_ip, host_opt_ip, announce_request.port); let event: AnnounceEvent = if let Some(event) = &announce_request.event { match event.as_ref() { @@ -61,8 +61,8 @@ impl TorrentPeer { }; #[allow(clippy::cast_possible_truncation)] - TorrentPeer { - peer_id: 
announce_request.peer_id.clone(), + Peer { + peer_id: announce_request.peer_id, peer_addr, updated: Current::now(), uploaded: NumberOfBytes(i128::from(announce_request.uploaded) as i64), @@ -104,6 +104,9 @@ impl std::fmt::Display for Id { impl Id { #[must_use] + /// # Panics + /// + /// It will panic if the `binascii::bin2hex` from a too-small output buffer. pub fn get_id(&self) -> Option { let buff_size = self.0.len() * 2; let mut tmp: Vec = vec![0; buff_size]; @@ -202,11 +205,6 @@ impl Serialize for Id { client: Option<&'a str>, } - let buff_size = self.0.len() * 2; - let mut tmp: Vec = vec![0; buff_size]; - binascii::bin2hex(&self.0, &mut tmp).unwrap(); - let id = std::str::from_utf8(&tmp).ok(); - let obj = PeerIdInfo { id: self.get_id(), client: self.get_client_name(), @@ -224,11 +222,11 @@ mod test { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; use crate::protocol::clock::{Current, Time}; - use crate::tracker::peer::{self, TorrentPeer}; + use crate::tracker::peer::{self, Peer}; #[test] fn it_should_be_serializable() { - let torrent_peer = TorrentPeer { + let torrent_peer = Peer { peer_id: peer::Id(*b"-qB00000000000000000"), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), updated: Current::now(), @@ -256,7 +254,7 @@ mod test { AnnounceEvent, AnnounceRequest, NumberOfBytes, NumberOfPeers, PeerId as AquaticPeerId, PeerKey, Port, TransactionId, }; - use crate::tracker::peer::TorrentPeer; + use crate::tracker::peer::Peer; use crate::udp::connection_cookie::{into_connection_id, make}; // todo: duplicate functions is PR 82. Remove duplication once both PR are merged. 
@@ -308,7 +306,7 @@ mod test { let remote_ip = IpAddr::V4(Ipv4Addr::new(126, 0, 0, 2)); let announce_request = AnnounceRequestBuilder::default().into(); - let torrent_peer = TorrentPeer::from_udp_announce_request(&announce_request, remote_ip, None); + let torrent_peer = Peer::from_udp_announce_request(&announce_request, remote_ip, None); assert_eq!(torrent_peer.peer_addr, SocketAddr::new(remote_ip, announce_request.port.0)); } @@ -318,7 +316,7 @@ mod test { let remote_ip = IpAddr::V4(Ipv4Addr::new(126, 0, 0, 2)); let announce_request = AnnounceRequestBuilder::default().into(); - let torrent_peer = TorrentPeer::from_udp_announce_request(&announce_request, remote_ip, None); + let torrent_peer = Peer::from_udp_announce_request(&announce_request, remote_ip, None); assert_eq!(torrent_peer.peer_addr, SocketAddr::new(remote_ip, announce_request.port.0)); } @@ -329,14 +327,14 @@ mod test { use std::str::FromStr; use crate::tracker::peer::test::torrent_peer_constructor_from_udp_requests::AnnounceRequestBuilder; - use crate::tracker::peer::TorrentPeer; + use crate::tracker::peer::Peer; #[test] fn it_should_use_the_loopback_ip_if_the_server_does_not_have_the_external_ip_configuration() { let remote_ip = IpAddr::V4(Ipv4Addr::LOCALHOST); let announce_request = AnnounceRequestBuilder::default().into(); - let torrent_peer = TorrentPeer::from_udp_announce_request(&announce_request, remote_ip, None); + let torrent_peer = Peer::from_udp_announce_request(&announce_request, remote_ip, None); assert_eq!(torrent_peer.peer_addr, SocketAddr::new(remote_ip, announce_request.port.0)); } @@ -347,7 +345,7 @@ mod test { let announce_request = AnnounceRequestBuilder::default().into(); let host_opt_ip = IpAddr::V4(Ipv4Addr::from_str("126.0.0.1").unwrap()); - let torrent_peer = TorrentPeer::from_udp_announce_request(&announce_request, remote_ip, Some(host_opt_ip)); + let torrent_peer = Peer::from_udp_announce_request(&announce_request, remote_ip, Some(host_opt_ip)); 
assert_eq!(torrent_peer.peer_addr, SocketAddr::new(host_opt_ip, announce_request.port.0)); } @@ -358,7 +356,7 @@ mod test { let announce_request = AnnounceRequestBuilder::default().into(); let host_opt_ip = IpAddr::V6(Ipv6Addr::from_str("2345:0425:2CA1:0000:0000:0567:5673:23b5").unwrap()); - let torrent_peer = TorrentPeer::from_udp_announce_request(&announce_request, remote_ip, Some(host_opt_ip)); + let torrent_peer = Peer::from_udp_announce_request(&announce_request, remote_ip, Some(host_opt_ip)); assert_eq!(torrent_peer.peer_addr, SocketAddr::new(host_opt_ip, announce_request.port.0)); } @@ -370,14 +368,14 @@ mod test { use std::str::FromStr; use crate::tracker::peer::test::torrent_peer_constructor_from_udp_requests::AnnounceRequestBuilder; - use crate::tracker::peer::TorrentPeer; + use crate::tracker::peer::Peer; #[test] fn it_should_use_the_loopback_ip_if_the_server_does_not_have_the_external_ip_configuration() { let remote_ip = IpAddr::V6(Ipv6Addr::LOCALHOST); let announce_request = AnnounceRequestBuilder::default().into(); - let torrent_peer = TorrentPeer::from_udp_announce_request(&announce_request, remote_ip, None); + let torrent_peer = Peer::from_udp_announce_request(&announce_request, remote_ip, None); assert_eq!(torrent_peer.peer_addr, SocketAddr::new(remote_ip, announce_request.port.0)); } @@ -388,7 +386,7 @@ mod test { let announce_request = AnnounceRequestBuilder::default().into(); let host_opt_ip = IpAddr::V6(Ipv6Addr::from_str("2345:0425:2CA1:0000:0000:0567:5673:23b5").unwrap()); - let torrent_peer = TorrentPeer::from_udp_announce_request(&announce_request, remote_ip, Some(host_opt_ip)); + let torrent_peer = Peer::from_udp_announce_request(&announce_request, remote_ip, Some(host_opt_ip)); assert_eq!(torrent_peer.peer_addr, SocketAddr::new(host_opt_ip, announce_request.port.0)); } @@ -399,7 +397,7 @@ mod test { let announce_request = AnnounceRequestBuilder::default().into(); let host_opt_ip = IpAddr::V4(Ipv4Addr::from_str("126.0.0.1").unwrap()); - 
let torrent_peer = TorrentPeer::from_udp_announce_request(&announce_request, remote_ip, Some(host_opt_ip)); + let torrent_peer = Peer::from_udp_announce_request(&announce_request, remote_ip, Some(host_opt_ip)); assert_eq!(torrent_peer.peer_addr, SocketAddr::new(host_opt_ip, announce_request.port.0)); } @@ -411,7 +409,7 @@ mod test { use crate::http::request::Announce; use crate::protocol::info_hash::InfoHash; - use crate::tracker::peer::{self, TorrentPeer}; + use crate::tracker::peer::{self, Peer}; fn sample_http_announce_request(peer_addr: IpAddr, port: u16) -> Announce { Announce { @@ -434,7 +432,7 @@ mod test { let ip_in_announce_request = IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)); let announce_request = sample_http_announce_request(ip_in_announce_request, 8080); - let torrent_peer = TorrentPeer::from_http_announce_request(&announce_request, remote_ip, None); + let torrent_peer = Peer::from_http_announce_request(&announce_request, remote_ip, None); assert_eq!(torrent_peer.peer_addr.ip(), remote_ip); assert_ne!(torrent_peer.peer_addr.ip(), ip_in_announce_request); @@ -449,7 +447,7 @@ mod test { let announce_request = sample_http_announce_request(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), port_in_announce_request); - let torrent_peer = TorrentPeer::from_http_announce_request(&announce_request, remote_ip, None); + let torrent_peer = Peer::from_http_announce_request(&announce_request, remote_ip, None); assert_eq!(torrent_peer.peer_addr.port(), announce_request.port); assert_ne!(torrent_peer.peer_addr.port(), remote_port); diff --git a/src/tracker/torrent.rs b/src/tracker/torrent.rs index 3e38d2340..8058ab891 100644 --- a/src/tracker/torrent.rs +++ b/src/tracker/torrent.rs @@ -11,7 +11,7 @@ use crate::protocol::common::MAX_SCRAPE_TORRENTS; #[derive(Serialize, Deserialize, Clone, Debug)] pub struct Entry { #[serde(skip)] - pub peers: std::collections::BTreeMap, + pub peers: std::collections::BTreeMap, pub completed: u32, } @@ -25,7 +25,7 @@ impl Entry { } // Update peer 
and return completed (times torrent has been downloaded) - pub fn update_peer(&mut self, peer: &peer::TorrentPeer) -> bool { + pub fn update_peer(&mut self, peer: &peer::Peer) -> bool { let mut did_torrent_stats_change: bool = false; match peer.event { @@ -49,7 +49,7 @@ impl Entry { } #[must_use] - pub fn get_peers(&self, client_addr: Option<&SocketAddr>) -> Vec<&peer::TorrentPeer> { + pub fn get_peers(&self, client_addr: Option<&SocketAddr>) -> Vec<&peer::Peer> { self.peers .values() .filter(|peer| match client_addr { @@ -122,12 +122,12 @@ mod tests { use crate::tracker::torrent::Entry; struct TorrentPeerBuilder { - peer: peer::TorrentPeer, + peer: peer::Peer, } impl TorrentPeerBuilder { pub fn default() -> TorrentPeerBuilder { - let default_peer = peer::TorrentPeer { + let default_peer = peer::Peer { peer_id: peer::Id([0u8; 20]), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8080), updated: Current::now(), @@ -164,14 +164,14 @@ mod tests { self } - pub fn into(self) -> peer::TorrentPeer { + pub fn into(self) -> peer::Peer { self.peer } } /// A torrent seeder is a peer with 0 bytes left to download which /// has not announced it has stopped - fn a_torrent_seeder() -> peer::TorrentPeer { + fn a_torrent_seeder() -> peer::Peer { TorrentPeerBuilder::default() .with_number_of_bytes_left(0) .with_event_completed() @@ -180,7 +180,7 @@ mod tests { /// A torrent leecher is a peer that is not a seeder. 
/// Leecher: left > 0 OR event = Stopped - fn a_torrent_leecher() -> peer::TorrentPeer { + fn a_torrent_leecher() -> peer::Peer { TorrentPeerBuilder::default() .with_number_of_bytes_left(1) .with_event_completed() diff --git a/src/udp/handlers.rs b/src/udp/handlers.rs index da4bdbf35..625f42d40 100644 --- a/src/udp/handlers.rs +++ b/src/udp/handlers.rs @@ -92,7 +92,7 @@ pub async fn handle_announce( .authenticate_request(&wrapped_announce_request.info_hash, &None) .await?; - let peer = peer::TorrentPeer::from_udp_announce_request( + let peer = peer::Peer::from_udp_announce_request( &wrapped_announce_request.announce_request, remote_addr.ip(), tracker.config.get_ext_ip(), @@ -289,12 +289,12 @@ mod tests { } struct TorrentPeerBuilder { - peer: peer::TorrentPeer, + peer: peer::Peer, } impl TorrentPeerBuilder { pub fn default() -> TorrentPeerBuilder { - let default_peer = peer::TorrentPeer { + let default_peer = peer::Peer { peer_id: peer::Id([255u8; 20]), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), updated: Current::now(), @@ -321,7 +321,7 @@ mod tests { self } - pub fn into(self) -> peer::TorrentPeer { + pub fn into(self) -> peer::Peer { self.peer } } diff --git a/tests/api.rs b/tests/api.rs index 22a222698..824c198e2 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -24,7 +24,7 @@ mod tracker_api { use torrust_tracker::protocol::clock::DurationSinceUnixEpoch; use torrust_tracker::protocol::info_hash::InfoHash; use torrust_tracker::tracker::auth; - use torrust_tracker::tracker::peer::{self, TorrentPeer}; + use torrust_tracker::tracker::peer::{self, Peer}; use torrust_tracker::tracker::statistics::Keeper; use torrust_tracker::{ephemeral_instance_keys, logging, static_time, tracker}; @@ -187,8 +187,8 @@ mod tracker_api { ); } - fn sample_torrent_peer() -> (TorrentPeer, TorrentPeerResource) { - let torrent_peer = TorrentPeer { + fn sample_torrent_peer() -> (Peer, TorrentPeerResource) { + let torrent_peer = Peer { peer_id: 
peer::Id(*b"-qB00000000000000000"), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), updated: DurationSinceUnixEpoch::new(1_669_397_478_934, 0), From 0f075e4daee476db840a4dbf98e3639a84ecd1bc Mon Sep 17 00:00:00 2001 From: Cameron Garnham Date: Wed, 30 Nov 2022 17:57:13 +0100 Subject: [PATCH 45/45] refactor: src/api/resource(s) --- src/api/mod.rs | 2 +- src/api/{resources => resource}/auth_key.rs | 0 src/api/{resources => resource}/mod.rs | 7 +- src/api/resource/peer.rs | 47 +++++++++++++ .../stats_resource.rs => resource/stats.rs} | 2 +- src/api/resource/torrent.rs | 21 ++++++ src/api/resources/torrent_resource.rs | 67 ------------------- src/api/server.rs | 15 +++-- src/http/handlers.rs | 2 +- src/tracker/mod.rs | 4 +- src/tracker/torrent.rs | 2 +- tests/api.rs | 34 +++++----- 12 files changed, 103 insertions(+), 100 deletions(-) rename src/api/{resources => resource}/auth_key.rs (100%) rename src/api/{resources => resource}/mod.rs (55%) create mode 100644 src/api/resource/peer.rs rename src/api/{resources/stats_resource.rs => resource/stats.rs} (95%) create mode 100644 src/api/resource/torrent.rs delete mode 100644 src/api/resources/torrent_resource.rs diff --git a/src/api/mod.rs b/src/api/mod.rs index 46ad24218..16abb8e27 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -1,2 +1,2 @@ -pub mod resources; +pub mod resource; pub mod server; diff --git a/src/api/resources/auth_key.rs b/src/api/resource/auth_key.rs similarity index 100% rename from src/api/resources/auth_key.rs rename to src/api/resource/auth_key.rs diff --git a/src/api/resources/mod.rs b/src/api/resource/mod.rs similarity index 55% rename from src/api/resources/mod.rs rename to src/api/resource/mod.rs index f708fc2e4..e86c550ca 100644 --- a/src/api/resources/mod.rs +++ b/src/api/resource/mod.rs @@ -3,10 +3,11 @@ //! WIP. Not all endpoints have their resource structs. //! //! - [x] `AuthKeys` -//! 
- [ ] `TorrentResource`, `TorrentListItemResource`, `TorrentPeerResource`, `PeerIdResource` +//! - [ ] `Torrent`, `ListItem`, `Peer`, `PeerId` //! - [ ] `StatsResource` //! - [ ] ... pub mod auth_key; -pub mod stats_resource; -pub mod torrent_resource; +pub mod peer; +pub mod stats; +pub mod torrent; diff --git a/src/api/resource/peer.rs b/src/api/resource/peer.rs new file mode 100644 index 000000000..ff84be197 --- /dev/null +++ b/src/api/resource/peer.rs @@ -0,0 +1,47 @@ +use serde::{Deserialize, Serialize}; + +use crate::tracker; + +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] +pub struct Peer { + pub peer_id: Id, + pub peer_addr: String, + #[deprecated(since = "2.0.0", note = "please use `updated_milliseconds_ago` instead")] + pub updated: u128, + pub updated_milliseconds_ago: u128, + pub uploaded: i64, + pub downloaded: i64, + pub left: i64, + pub event: String, +} + +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] +pub struct Id { + pub id: Option, + pub client: Option, +} + +impl From for Id { + fn from(peer_id: tracker::peer::Id) -> Self { + Id { + id: peer_id.get_id(), + client: peer_id.get_client_name().map(std::string::ToString::to_string), + } + } +} + +impl From for Peer { + #[allow(deprecated)] + fn from(peer: tracker::peer::Peer) -> Self { + Peer { + peer_id: Id::from(peer.peer_id), + peer_addr: peer.peer_addr.to_string(), + updated: peer.updated.as_millis(), + updated_milliseconds_ago: peer.updated.as_millis(), + uploaded: peer.uploaded.0, + downloaded: peer.downloaded.0, + left: peer.left.0, + event: format!("{:?}", peer.event), + } + } +} diff --git a/src/api/resources/stats_resource.rs b/src/api/resource/stats.rs similarity index 95% rename from src/api/resources/stats_resource.rs rename to src/api/resource/stats.rs index e6f184897..e87f08f63 100644 --- a/src/api/resources/stats_resource.rs +++ b/src/api/resource/stats.rs @@ -1,7 +1,7 @@ use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] 
-pub struct StatsResource { +pub struct Stats { pub torrents: u32, pub seeders: u32, pub completed: u32, diff --git a/src/api/resource/torrent.rs b/src/api/resource/torrent.rs new file mode 100644 index 000000000..924b61b8c --- /dev/null +++ b/src/api/resource/torrent.rs @@ -0,0 +1,21 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] +pub struct Torrent { + pub info_hash: String, + pub seeders: u32, + pub completed: u32, + pub leechers: u32, + #[serde(skip_serializing_if = "Option::is_none")] + pub peers: Option>, +} + +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] +pub struct ListItem { + pub info_hash: String, + pub seeders: u32, + pub completed: u32, + pub leechers: u32, + // todo: this is always None. Remove field from endpoint? + pub peers: Option>, +} diff --git a/src/api/resources/torrent_resource.rs b/src/api/resources/torrent_resource.rs deleted file mode 100644 index bc1a9acf5..000000000 --- a/src/api/resources/torrent_resource.rs +++ /dev/null @@ -1,67 +0,0 @@ -use serde::{Deserialize, Serialize}; - -use crate::tracker::peer::{self, Peer}; - -#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] -pub struct TorrentResource { - pub info_hash: String, - pub seeders: u32, - pub completed: u32, - pub leechers: u32, - #[serde(skip_serializing_if = "Option::is_none")] - pub peers: Option>, -} - -#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] -pub struct TorrentListItemResource { - pub info_hash: String, - pub seeders: u32, - pub completed: u32, - pub leechers: u32, - // todo: this is always None. Remove field from endpoint? 
- pub peers: Option>, -} - -#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] -pub struct TorrentPeerResource { - pub peer_id: PeerIdResource, - pub peer_addr: String, - #[deprecated(since = "2.0.0", note = "please use `updated_milliseconds_ago` instead")] - pub updated: u128, - pub updated_milliseconds_ago: u128, - pub uploaded: i64, - pub downloaded: i64, - pub left: i64, - pub event: String, -} - -#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] -pub struct PeerIdResource { - pub id: Option, - pub client: Option, -} - -impl From for PeerIdResource { - fn from(peer_id: peer::Id) -> Self { - PeerIdResource { - id: peer_id.get_id(), - client: peer_id.get_client_name().map(std::string::ToString::to_string), - } - } -} - -impl From for TorrentPeerResource { - #[allow(deprecated)] - fn from(peer: Peer) -> Self { - TorrentPeerResource { - peer_id: PeerIdResource::from(peer.peer_id), - peer_addr: peer.peer_addr.to_string(), - updated: peer.updated.as_millis(), - updated_milliseconds_ago: peer.updated.as_millis(), - uploaded: peer.uploaded.0, - downloaded: peer.downloaded.0, - left: peer.left.0, - event: format!("{:?}", peer.event), - } - } -} diff --git a/src/api/server.rs b/src/api/server.rs index af2d66458..5967a8be4 100644 --- a/src/api/server.rs +++ b/src/api/server.rs @@ -7,9 +7,10 @@ use std::time::Duration; use serde::{Deserialize, Serialize}; use warp::{filters, reply, serve, Filter}; -use super::resources::auth_key::AuthKey; -use super::resources::stats_resource::StatsResource; -use super::resources::torrent_resource::{TorrentListItemResource, TorrentPeerResource, TorrentResource}; +use super::resource::auth_key::AuthKey; +use super::resource::peer; +use super::resource::stats::Stats; +use super::resource::torrent::{ListItem, Torrent}; use crate::protocol::info_hash::InfoHash; use crate::tracker; @@ -81,7 +82,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl w .iter() .map(|(info_hash, torrent_entry)| { let (seeders, completed, 
leechers) = torrent_entry.get_stats(); - TorrentListItemResource { + ListItem { info_hash: info_hash.to_string(), seeders, completed, @@ -104,7 +105,7 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl w .and(filters::path::end()) .map(move || api_stats.clone()) .and_then(|tracker: Arc| async move { - let mut results = StatsResource { + let mut results = Stats { torrents: 0, seeders: 0, completed: 0, @@ -179,9 +180,9 @@ pub fn start(socket_addr: SocketAddr, tracker: &Arc) -> impl w let peers = torrent_entry.get_peers(None); - let peer_resources = peers.iter().map(|peer| TorrentPeerResource::from(**peer)).collect(); + let peer_resources = peers.iter().map(|peer| peer::Peer::from(**peer)).collect(); - Ok(reply::json(&TorrentResource { + Ok(reply::json(&Torrent { info_hash: info_hash.to_string(), seeders, completed, diff --git a/src/http/handlers.rs b/src/http/handlers.rs index 2fc878354..1170b7188 100644 --- a/src/http/handlers.rs +++ b/src/http/handlers.rs @@ -135,7 +135,7 @@ pub async fn handle_scrape( #[allow(clippy::ptr_arg)] fn send_announce_response( announce_request: &request::Announce, - torrent_stats: &torrent::Stats, + torrent_stats: &torrent::SwamStats, peers: &Vec, interval: u32, interval_min: u32, diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs index 4b2dabebb..4469d682b 100644 --- a/src/tracker/mod.rs +++ b/src/tracker/mod.rs @@ -246,7 +246,7 @@ impl Tracker { } } - pub async fn update_torrent_with_peer_and_get_stats(&self, info_hash: &InfoHash, peer: &peer::Peer) -> torrent::Stats { + pub async fn update_torrent_with_peer_and_get_stats(&self, info_hash: &InfoHash, peer: &peer::Peer) -> torrent::SwamStats { let mut torrents = self.torrents.write().await; let torrent_entry = match torrents.entry(*info_hash) { @@ -266,7 +266,7 @@ impl Tracker { let (seeders, completed, leechers) = torrent_entry.get_stats(); - torrent::Stats { + torrent::SwamStats { completed, seeders, leechers, diff --git a/src/tracker/torrent.rs b/src/tracker/torrent.rs 
index 8058ab891..e292dff54 100644 --- a/src/tracker/torrent.rs +++ b/src/tracker/torrent.rs @@ -93,7 +93,7 @@ impl Default for Entry { } #[derive(Debug)] -pub struct Stats { +pub struct SwamStats { pub completed: u32, pub seeders: u32, pub leechers: u32, diff --git a/tests/api.rs b/tests/api.rs index 824c198e2..706cd0b8d 100644 --- a/tests/api.rs +++ b/tests/api.rs @@ -16,16 +16,16 @@ mod tracker_api { use aquatic_udp_protocol::{AnnounceEvent, NumberOfBytes}; use reqwest::Response; use tokio::task::JoinHandle; - use torrust_tracker::api::resources::auth_key::AuthKey; - use torrust_tracker::api::resources::stats_resource::StatsResource; - use torrust_tracker::api::resources::torrent_resource::{TorrentListItemResource, TorrentPeerResource, TorrentResource}; + use torrust_tracker::api::resource; + use torrust_tracker::api::resource::auth_key::AuthKey; + use torrust_tracker::api::resource::stats::Stats; + use torrust_tracker::api::resource::torrent::{self, Torrent}; use torrust_tracker::config::Configuration; use torrust_tracker::jobs::tracker_api; use torrust_tracker::protocol::clock::DurationSinceUnixEpoch; use torrust_tracker::protocol::info_hash::InfoHash; - use torrust_tracker::tracker::auth; - use torrust_tracker::tracker::peer::{self, Peer}; use torrust_tracker::tracker::statistics::Keeper; + use torrust_tracker::tracker::{auth, peer}; use torrust_tracker::{ephemeral_instance_keys, logging, static_time, tracker}; use crate::common::ephemeral_random_port; @@ -104,7 +104,7 @@ mod tracker_api { assert_eq!( torrent_resource, - TorrentResource { + Torrent { info_hash: "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_string(), seeders: 1, completed: 0, @@ -135,7 +135,7 @@ mod tracker_api { assert_eq!( torrent_resources, - vec![TorrentListItemResource { + vec![torrent::ListItem { info_hash: "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_string(), seeders: 1, completed: 0, @@ -166,7 +166,7 @@ mod tracker_api { assert_eq!( stats_resource, - StatsResource { + Stats { 
torrents: 1, seeders: 1, completed: 0, @@ -187,8 +187,8 @@ mod tracker_api { ); } - fn sample_torrent_peer() -> (Peer, TorrentPeerResource) { - let torrent_peer = Peer { + fn sample_torrent_peer() -> (peer::Peer, resource::peer::Peer) { + let torrent_peer = peer::Peer { peer_id: peer::Id(*b"-qB00000000000000000"), peer_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::new(126, 0, 0, 1)), 8080), updated: DurationSinceUnixEpoch::new(1_669_397_478_934, 0), @@ -197,7 +197,7 @@ mod tracker_api { left: NumberOfBytes(0), event: AnnounceEvent::Started, }; - let torrent_peer_resource = TorrentPeerResource::from(torrent_peer); + let torrent_peer_resource = resource::peer::Peer::from(torrent_peer); (torrent_peer, torrent_peer_resource) } @@ -326,7 +326,7 @@ mod tracker_api { reqwest::Client::new().post(url.clone()).send().await.unwrap() } - pub async fn get_torrent(&self, info_hash: &str) -> TorrentResource { + pub async fn get_torrent(&self, info_hash: &str) -> Torrent { let url = format!( "http://{}/api/torrent/{}?token={}", &self.connection_info.bind_address, &info_hash, &self.connection_info.api_token @@ -338,12 +338,12 @@ mod tracker_api { .send() .await .unwrap() - .json::() + .json::() .await .unwrap() } - pub async fn get_torrents(&self) -> Vec { + pub async fn get_torrents(&self) -> Vec { let url = format!( "http://{}/api/torrents?token={}", &self.connection_info.bind_address, &self.connection_info.api_token @@ -355,12 +355,12 @@ mod tracker_api { .send() .await .unwrap() - .json::>() + .json::>() .await .unwrap() } - pub async fn get_tracker_statistics(&self) -> StatsResource { + pub async fn get_tracker_statistics(&self) -> Stats { let url = format!( "http://{}/api/stats?token={}", &self.connection_info.bind_address, &self.connection_info.api_token @@ -372,7 +372,7 @@ mod tracker_api { .send() .await .unwrap() - .json::() + .json::() .await .unwrap() }