Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Performance optimization: create a new torrent repository using DashMap #784

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ clap = { version = "4", features = ["derive", "env"] }
colored = "2"
config = "0"
crossbeam-skiplist = "0.1"
dashmap = "5.5.3"
derive_more = "0"
fern = "0"
futures = "0"
Expand Down Expand Up @@ -77,7 +78,7 @@ url = "2"
uuid = { version = "1", features = ["v4"] }

[package.metadata.cargo-machete]
ignored = ["serde_bytes", "crossbeam-skiplist"]
ignored = ["serde_bytes", "crossbeam-skiplist", "dashmap"]

[dev-dependencies]
local-ip-address = "0"
Expand Down
1 change: 1 addition & 0 deletions cSpell.json
Original file line number Diff line number Diff line change
Expand Up @@ -163,6 +163,7 @@
"Weidendorfer",
"Werror",
"whitespaces",
"Xacrimon",
"XBTT",
"Xdebug",
"Xeon",
Expand Down
1 change: 1 addition & 0 deletions packages/torrent-repository/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ version.workspace = true

[dependencies]
crossbeam-skiplist = "0.1"
dashmap = "5.5.3"
futures = "0.3.29"
tokio = { version = "1", features = ["macros", "net", "rt-multi-thread", "signal", "sync"] }
torrust-tracker-clock = { version = "3.0.0-alpha.12-develop", path = "../clock" }
Expand Down
23 changes: 21 additions & 2 deletions packages/torrent-repository/benches/repository_benchmark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@ mod helpers;

use criterion::{criterion_group, criterion_main, Criterion};
use torrust_tracker_torrent_repository::{
TorrentsRwLockStd, TorrentsRwLockStdMutexStd, TorrentsRwLockStdMutexTokio, TorrentsRwLockTokio, TorrentsRwLockTokioMutexStd,
TorrentsRwLockTokioMutexTokio, TorrentsSkipMapMutexStd,
TorrentsDashMapMutexStd, TorrentsRwLockStd, TorrentsRwLockStdMutexStd, TorrentsRwLockStdMutexTokio, TorrentsRwLockTokio,
TorrentsRwLockTokioMutexStd, TorrentsRwLockTokioMutexTokio, TorrentsSkipMapMutexStd,
};

use crate::helpers::{asyn, sync};
Expand Down Expand Up @@ -49,6 +49,10 @@ fn add_one_torrent(c: &mut Criterion) {
b.iter_custom(sync::add_one_torrent::<TorrentsSkipMapMutexStd, _>);
});

group.bench_function("DashMapMutexStd", |b| {
b.iter_custom(sync::add_one_torrent::<TorrentsDashMapMutexStd, _>);
});

group.finish();
}

Expand Down Expand Up @@ -98,6 +102,11 @@ fn add_multiple_torrents_in_parallel(c: &mut Criterion) {
.iter_custom(|iters| sync::add_multiple_torrents_in_parallel::<TorrentsSkipMapMutexStd, _>(&rt, iters, None));
});

group.bench_function("DashMapMutexStd", |b| {
b.to_async(&rt)
.iter_custom(|iters| sync::add_multiple_torrents_in_parallel::<TorrentsDashMapMutexStd, _>(&rt, iters, None));
});

group.finish();
}

Expand Down Expand Up @@ -147,6 +156,11 @@ fn update_one_torrent_in_parallel(c: &mut Criterion) {
.iter_custom(|iters| sync::update_one_torrent_in_parallel::<TorrentsSkipMapMutexStd, _>(&rt, iters, None));
});

group.bench_function("DashMapMutexStd", |b| {
b.to_async(&rt)
.iter_custom(|iters| sync::update_one_torrent_in_parallel::<TorrentsDashMapMutexStd, _>(&rt, iters, None));
});

group.finish();
}

Expand Down Expand Up @@ -197,6 +211,11 @@ fn update_multiple_torrents_in_parallel(c: &mut Criterion) {
.iter_custom(|iters| sync::update_multiple_torrents_in_parallel::<TorrentsSkipMapMutexStd, _>(&rt, iters, None));
});

group.bench_function("DashMapMutexStd", |b| {
b.to_async(&rt)
.iter_custom(|iters| sync::update_multiple_torrents_in_parallel::<TorrentsDashMapMutexStd, _>(&rt, iters, None));
});

group.finish();
}

Expand Down
2 changes: 2 additions & 0 deletions packages/torrent-repository/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use std::sync::Arc;

use repository::dash_map_mutex_std::XacrimonDashMap;
use repository::rw_lock_std::RwLockStd;
use repository::rw_lock_tokio::RwLockTokio;
use repository::skip_map_mutex_std::CrossbeamSkipList;
Expand All @@ -20,6 +21,7 @@ pub type TorrentsRwLockTokioMutexStd = RwLockTokio<EntryMutexStd>;
pub type TorrentsRwLockTokioMutexTokio = RwLockTokio<EntryMutexTokio>;

pub type TorrentsSkipMapMutexStd = CrossbeamSkipList<EntryMutexStd>;
pub type TorrentsDashMapMutexStd = XacrimonDashMap<EntryMutexStd>;

/// This code needs to be copied into each crate.
/// Working version, for production.
Expand Down
106 changes: 106 additions & 0 deletions packages/torrent-repository/src/repository/dash_map_mutex_std.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
use std::collections::BTreeMap;
use std::sync::Arc;

use dashmap::DashMap;
use torrust_tracker_configuration::TrackerPolicy;
use torrust_tracker_primitives::info_hash::InfoHash;
use torrust_tracker_primitives::pagination::Pagination;
use torrust_tracker_primitives::swarm_metadata::SwarmMetadata;
use torrust_tracker_primitives::torrent_metrics::TorrentsMetrics;
use torrust_tracker_primitives::{peer, DurationSinceUnixEpoch, PersistentTorrents};

use super::Repository;
use crate::entry::{Entry, EntrySync};
use crate::{EntryMutexStd, EntrySingle};

/// A concurrent torrent repository backed by [`dashmap::DashMap`], a sharded
/// lock-based concurrent hash map. Named after the crate's author ("Xacrimon")
/// to distinguish it from the other repository implementations in this package.
#[derive(Default, Debug)]
pub struct XacrimonDashMap<T> {
    // Torrent entries keyed by info-hash. Public so test helpers can insert
    // entries directly (see tests/common/repo.rs in this package).
    pub torrents: DashMap<InfoHash, T>,
}

impl Repository<EntryMutexStd> for XacrimonDashMap<EntryMutexStd>
where
EntryMutexStd: EntrySync,
EntrySingle: Entry,
{
fn update_torrent_with_peer_and_get_stats(&self, info_hash: &InfoHash, peer: &peer::Peer) -> (bool, SwarmMetadata) {
if let Some(entry) = self.torrents.get(info_hash) {
entry.insert_or_update_peer_and_get_stats(peer)
} else {
let _unused = self.torrents.insert(*info_hash, Arc::default());

match self.torrents.get(info_hash) {
Some(entry) => entry.insert_or_update_peer_and_get_stats(peer),
None => (false, SwarmMetadata::zeroed()),

Check warning on line 34 in packages/torrent-repository/src/repository/dash_map_mutex_std.rs

View check run for this annotation

Codecov / codecov/patch

packages/torrent-repository/src/repository/dash_map_mutex_std.rs#L34

Added line #L34 was not covered by tests
}
}
}

fn get(&self, key: &InfoHash) -> Option<EntryMutexStd> {
let maybe_entry = self.torrents.get(key);
maybe_entry.map(|entry| entry.clone())
}

fn get_metrics(&self) -> TorrentsMetrics {
let mut metrics = TorrentsMetrics::default();

for entry in &self.torrents {
let stats = entry.value().lock().expect("it should get a lock").get_stats();
metrics.complete += u64::from(stats.complete);
metrics.downloaded += u64::from(stats.downloaded);
metrics.incomplete += u64::from(stats.incomplete);
metrics.torrents += 1;
}

metrics
}

fn get_paginated(&self, pagination: Option<&Pagination>) -> Vec<(InfoHash, EntryMutexStd)> {
match pagination {
Some(pagination) => self

Check warning on line 60 in packages/torrent-repository/src/repository/dash_map_mutex_std.rs

View check run for this annotation

Codecov / codecov/patch

packages/torrent-repository/src/repository/dash_map_mutex_std.rs#L60

Added line #L60 was not covered by tests
.torrents
.iter()
.skip(pagination.offset as usize)
.take(pagination.limit as usize)
.map(|entry| (*entry.key(), entry.value().clone()))

Check warning on line 65 in packages/torrent-repository/src/repository/dash_map_mutex_std.rs

View check run for this annotation

Codecov / codecov/patch

packages/torrent-repository/src/repository/dash_map_mutex_std.rs#L63-L65

Added lines #L63 - L65 were not covered by tests
.collect(),
None => self
.torrents
.iter()
.map(|entry| (*entry.key(), entry.value().clone()))
.collect(),
}
}

fn import_persistent(&self, persistent_torrents: &PersistentTorrents) {
for (info_hash, completed) in persistent_torrents {
if self.torrents.contains_key(info_hash) {
continue;
}

let entry = EntryMutexStd::new(
EntrySingle {
peers: BTreeMap::default(),
downloaded: *completed,
}
.into(),
);

self.torrents.insert(*info_hash, entry);
}
}

fn remove(&self, key: &InfoHash) -> Option<EntryMutexStd> {
self.torrents.remove(key).map(|(_key, value)| value.clone())
}

fn remove_inactive_peers(&self, current_cutoff: DurationSinceUnixEpoch) {
for entry in &self.torrents {
entry.value().remove_inactive_peers(current_cutoff);
}
}

fn remove_peerless_torrents(&self, policy: &TrackerPolicy) {
self.torrents.retain(|_, entry| entry.is_good(policy));
}
}
1 change: 1 addition & 0 deletions packages/torrent-repository/src/repository/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ use torrust_tracker_primitives::swarm_metadata::SwarmMetadata;
use torrust_tracker_primitives::torrent_metrics::TorrentsMetrics;
use torrust_tracker_primitives::{peer, DurationSinceUnixEpoch, PersistentTorrents};

pub mod dash_map_mutex_std;
pub mod rw_lock_std;
pub mod rw_lock_std_mutex_std;
pub mod rw_lock_std_mutex_tokio;
Expand Down
20 changes: 18 additions & 2 deletions packages/torrent-repository/tests/common/repo.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ use torrust_tracker_primitives::torrent_metrics::TorrentsMetrics;
use torrust_tracker_primitives::{peer, DurationSinceUnixEpoch, PersistentTorrents};
use torrust_tracker_torrent_repository::repository::{Repository as _, RepositoryAsync as _};
use torrust_tracker_torrent_repository::{
EntrySingle, TorrentsRwLockStd, TorrentsRwLockStdMutexStd, TorrentsRwLockStdMutexTokio, TorrentsRwLockTokio,
TorrentsRwLockTokioMutexStd, TorrentsRwLockTokioMutexTokio, TorrentsSkipMapMutexStd,
EntrySingle, TorrentsDashMapMutexStd, TorrentsRwLockStd, TorrentsRwLockStdMutexStd, TorrentsRwLockStdMutexTokio,
TorrentsRwLockTokio, TorrentsRwLockTokioMutexStd, TorrentsRwLockTokioMutexTokio, TorrentsSkipMapMutexStd,
};

#[derive(Debug)]
Expand All @@ -19,6 +19,7 @@ pub(crate) enum Repo {
RwLockTokioMutexStd(TorrentsRwLockTokioMutexStd),
RwLockTokioMutexTokio(TorrentsRwLockTokioMutexTokio),
SkipMapMutexStd(TorrentsSkipMapMutexStd),
DashMapMutexStd(TorrentsDashMapMutexStd),
}

impl Repo {
Expand All @@ -31,6 +32,7 @@ impl Repo {
Repo::RwLockTokioMutexStd(repo) => Some(repo.get(key).await?.lock().unwrap().clone()),
Repo::RwLockTokioMutexTokio(repo) => Some(repo.get(key).await?.lock().await.clone()),
Repo::SkipMapMutexStd(repo) => Some(repo.get(key)?.lock().unwrap().clone()),
Repo::DashMapMutexStd(repo) => Some(repo.get(key)?.lock().unwrap().clone()),
}
}

Expand All @@ -43,6 +45,7 @@ impl Repo {
Repo::RwLockTokioMutexStd(repo) => repo.get_metrics().await,
Repo::RwLockTokioMutexTokio(repo) => repo.get_metrics().await,
Repo::SkipMapMutexStd(repo) => repo.get_metrics(),
Repo::DashMapMutexStd(repo) => repo.get_metrics(),
}
}

Expand Down Expand Up @@ -82,6 +85,11 @@ impl Repo {
.iter()
.map(|(i, t)| (*i, t.lock().expect("it should get a lock").clone()))
.collect(),
Repo::DashMapMutexStd(repo) => repo
.get_paginated(pagination)
.iter()
.map(|(i, t)| (*i, t.lock().expect("it should get a lock").clone()))
.collect(),
}
}

Expand All @@ -94,6 +102,7 @@ impl Repo {
Repo::RwLockTokioMutexStd(repo) => repo.import_persistent(persistent_torrents).await,
Repo::RwLockTokioMutexTokio(repo) => repo.import_persistent(persistent_torrents).await,
Repo::SkipMapMutexStd(repo) => repo.import_persistent(persistent_torrents),
Repo::DashMapMutexStd(repo) => repo.import_persistent(persistent_torrents),
}
}

Expand All @@ -106,6 +115,7 @@ impl Repo {
Repo::RwLockTokioMutexStd(repo) => Some(repo.remove(key).await?.lock().unwrap().clone()),
Repo::RwLockTokioMutexTokio(repo) => Some(repo.remove(key).await?.lock().await.clone()),
Repo::SkipMapMutexStd(repo) => Some(repo.remove(key)?.lock().unwrap().clone()),
Repo::DashMapMutexStd(repo) => Some(repo.remove(key)?.lock().unwrap().clone()),
}
}

Expand All @@ -118,6 +128,7 @@ impl Repo {
Repo::RwLockTokioMutexStd(repo) => repo.remove_inactive_peers(current_cutoff).await,
Repo::RwLockTokioMutexTokio(repo) => repo.remove_inactive_peers(current_cutoff).await,
Repo::SkipMapMutexStd(repo) => repo.remove_inactive_peers(current_cutoff),
Repo::DashMapMutexStd(repo) => repo.remove_inactive_peers(current_cutoff),
}
}

Expand All @@ -130,6 +141,7 @@ impl Repo {
Repo::RwLockTokioMutexStd(repo) => repo.remove_peerless_torrents(policy).await,
Repo::RwLockTokioMutexTokio(repo) => repo.remove_peerless_torrents(policy).await,
Repo::SkipMapMutexStd(repo) => repo.remove_peerless_torrents(policy),
Repo::DashMapMutexStd(repo) => repo.remove_peerless_torrents(policy),
}
}

Expand All @@ -146,6 +158,7 @@ impl Repo {
Repo::RwLockTokioMutexStd(repo) => repo.update_torrent_with_peer_and_get_stats(info_hash, peer).await,
Repo::RwLockTokioMutexTokio(repo) => repo.update_torrent_with_peer_and_get_stats(info_hash, peer).await,
Repo::SkipMapMutexStd(repo) => repo.update_torrent_with_peer_and_get_stats(info_hash, peer),
Repo::DashMapMutexStd(repo) => repo.update_torrent_with_peer_and_get_stats(info_hash, peer),
}
}

Expand All @@ -172,6 +185,9 @@ impl Repo {
Repo::SkipMapMutexStd(repo) => {
repo.torrents.insert(*info_hash, torrent.into());
}
Repo::DashMapMutexStd(repo) => {
repo.torrents.insert(*info_hash, torrent.into());
}
};
self.get(info_hash).await
}
Expand Down
Loading
Loading