author     Timo Kösters <timo@koesters.xyz>          2022-10-05 20:34:31 +0200
committer  Nyaaori <+@nyaaori.cat>                   2022-10-10 14:02:01 +0200
commit     a4637e2ba1093065a6fda3fa2ad2b2b9f30eea63 (patch)
tree       2d31313957d699875fc61f570686318b523ae0f1
parent     33a2b2b7729bb40253fd174d99ad773869b5ecfe (diff)
download   conduit-a4637e2ba1093065a6fda3fa2ad2b2b9f30eea63.zip
cargo fmt
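
This commit applies rustfmt's default style across the workspace; no functional changes are intended. A typical way to produce and verify such a commit (assumed here, not recorded in the commit itself) is:

    cargo fmt --all            # rewrite every crate in the workspace in place
    cargo fmt --all -- --check # exit non-zero if any file would still be reformatted (useful in CI)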
-rw-r--r--  src/api/appservice_server.rs | 8
-rw-r--r--  src/api/client_server/account.rs | 103
-rw-r--r--  src/api/client_server/alias.rs | 29
-rw-r--r--  src/api/client_server/backup.rs | 101
-rw-r--r--  src/api/client_server/capabilities.rs | 2
-rw-r--r--  src/api/client_server/config.rs | 2
-rw-r--r--  src/api/client_server/context.rs | 19
-rw-r--r--  src/api/client_server/device.rs | 35
-rw-r--r--  src/api/client_server/directory.rs | 5
-rw-r--r--  src/api/client_server/filter.rs | 2
-rw-r--r--  src/api/client_server/keys.rs | 80
-rw-r--r--  src/api/client_server/media.rs | 16
-rw-r--r--  src/api/client_server/membership.rs | 248
-rw-r--r--  src/api/client_server/message.rs | 40
-rw-r--r--  src/api/client_server/presence.rs | 5
-rw-r--r--  src/api/client_server/profile.rs | 48
-rw-r--r--  src/api/client_server/push.rs | 2
-rw-r--r--  src/api/client_server/read_marker.rs | 25
-rw-r--r--  src/api/client_server/redact.rs | 5
-rw-r--r--  src/api/client_server/report.rs | 2
-rw-r--r--  src/api/client_server/room.rs | 108
-rw-r--r--  src/api/client_server/search.rs | 11
-rw-r--r--  src/api/client_server/session.rs | 34
-rw-r--r--  src/api/client_server/state.rs | 48
-rw-r--r--  src/api/client_server/sync.rs | 207
-rw-r--r--  src/api/client_server/tag.rs | 101
-rw-r--r--  src/api/client_server/to_device.rs | 25
-rw-r--r--  src/api/client_server/typing.rs | 14
-rw-r--r--  src/api/client_server/user_directory.rs | 38
-rw-r--r--  src/api/client_server/voip.rs | 2
-rw-r--r--  src/api/mod.rs | 4
-rw-r--r--  src/api/ruma_wrapper/axum.rs | 12
-rw-r--r--  src/api/server_server.rs | 388
-rw-r--r--  src/database/abstraction/rocksdb.rs | 2
-rw-r--r--  src/database/key_value/account_data.rs | 14
-rw-r--r--  src/database/key_value/appservice.rs | 11
-rw-r--r--  src/database/key_value/globals.rs | 50
-rw-r--r--  src/database/key_value/key_backups.rs | 24
-rw-r--r--  src/database/key_value/media.rs | 28
-rw-r--r--  src/database/key_value/pusher.rs | 12
-rw-r--r--  src/database/key_value/rooms/alias.rs | 20
-rw-r--r--  src/database/key_value/rooms/auth_chain.rs | 15
-rw-r--r--  src/database/key_value/rooms/directory.rs | 2
-rw-r--r--  src/database/key_value/rooms/edus/mod.rs | 4
-rw-r--r--  src/database/key_value/rooms/edus/presence.rs | 4
-rw-r--r--  src/database/key_value/rooms/edus/read_receipt.rs | 88
-rw-r--r--  src/database/key_value/rooms/edus/typing.rs | 35
-rw-r--r--  src/database/key_value/rooms/lazy_load.rs | 4
-rw-r--r--  src/database/key_value/rooms/metadata.rs | 2
-rw-r--r--  src/database/key_value/rooms/outlier.rs | 4
-rw-r--r--  src/database/key_value/rooms/pdu_metadata.rs | 4
-rw-r--r--  src/database/key_value/rooms/search.rs | 14
-rw-r--r--  src/database/key_value/rooms/short.rs | 19
-rw-r--r--  src/database/key_value/rooms/state.rs | 15
-rw-r--r--  src/database/key_value/rooms/state_accessor.rs | 58
-rw-r--r--  src/database/key_value/rooms/state_cache.rs | 195
-rw-r--r--  src/database/key_value/rooms/state_compressor.rs | 18
-rw-r--r--  src/database/key_value/rooms/timeline.rs | 123
-rw-r--r--  src/database/key_value/rooms/user.rs | 34
-rw-r--r--  src/database/key_value/transaction_ids.rs | 4
-rw-r--r--  src/database/key_value/uiaa.rs | 6
-rw-r--r--  src/database/key_value/users.rs | 138
-rw-r--r--  src/database/mod.rs | 306
-rw-r--r--  src/lib.rs | 17
-rw-r--r--  src/service/account_data/data.rs | 6
-rw-r--r--  src/service/account_data/mod.rs | 4
-rw-r--r--  src/service/admin/mod.rs | 132
-rw-r--r--  src/service/globals/data.rs | 6
-rw-r--r--  src/service/globals/mod.rs | 8
-rw-r--r--  src/service/key_backups/data.rs | 25
-rw-r--r--  src/service/key_backups/mod.rs | 8
-rw-r--r--  src/service/media/data.rs | 16
-rw-r--r--  src/service/media/mod.rs | 31
-rw-r--r--  src/service/mod.rs | 41
-rw-r--r--  src/service/pdu.rs | 2
-rw-r--r--  src/service/pusher/data.rs | 10
-rw-r--r--  src/service/pusher/mod.rs | 22
-rw-r--r--  src/service/rooms/alias/data.rs | 18
-rw-r--r--  src/service/rooms/alias/mod.rs | 13
-rw-r--r--  src/service/rooms/auth_chain/data.rs | 10
-rw-r--r--  src/service/rooms/auth_chain/mod.rs | 39
-rw-r--r--  src/service/rooms/directory/data.rs | 2
-rw-r--r--  src/service/rooms/edus/presence/data.rs | 2
-rw-r--r--  src/service/rooms/edus/presence/mod.rs | 2
-rw-r--r--  src/service/rooms/edus/read_receipt/data.rs | 18
-rw-r--r--  src/service/rooms/edus/read_receipt/mod.rs | 2
-rw-r--r--  src/service/rooms/edus/typing/data.rs | 4
-rw-r--r--  src/service/rooms/edus/typing/mod.rs | 2
-rw-r--r--  src/service/rooms/event_handler/mod.rs | 330
-rw-r--r--  src/service/rooms/lazy_loading/data.rs | 4
-rw-r--r--  src/service/rooms/lazy_loading/mod.rs | 20
-rw-r--r--  src/service/rooms/metadata/data.rs | 2
-rw-r--r--  src/service/rooms/mod.rs | 20
-rw-r--r--  src/service/rooms/outlier/mod.rs | 4
-rw-r--r--  src/service/rooms/pdu_metadata/data.rs | 2
-rw-r--r--  src/service/rooms/pdu_metadata/mod.rs | 2
-rw-r--r--  src/service/rooms/search/data.rs | 2
-rw-r--r--  src/service/rooms/search/mod.rs | 7
-rw-r--r--  src/service/rooms/short/data.rs | 17
-rw-r--r--  src/service/rooms/short/mod.rs | 19
-rw-r--r--  src/service/rooms/state/data.rs | 12
-rw-r--r--  src/service/rooms/state/mod.rs | 181
-rw-r--r--  src/service/rooms/state_accessor/data.rs | 9
-rw-r--r--  src/service/rooms/state_accessor/mod.rs | 9
-rw-r--r--  src/service/rooms/state_cache/data.rs | 13
-rw-r--r--  src/service/rooms/state_cache/mod.rs | 40
-rw-r--r--  src/service/rooms/state_compressor/mod.rs | 46
-rw-r--r--  src/service/rooms/timeline/data.rs | 24
-rw-r--r--  src/service/rooms/timeline/mod.rs | 195
-rw-r--r--  src/service/rooms/user/data.rs | 2
-rw-r--r--  src/service/rooms/user/mod.rs | 3
-rw-r--r--  src/service/sending/mod.rs | 55
-rw-r--r--  src/service/transaction_ids/data.rs | 2
-rw-r--r--  src/service/transaction_ids/mod.rs | 2
-rw-r--r--  src/service/uiaa/data.rs | 2
-rw-r--r--  src/service/uiaa/mod.rs | 21
-rw-r--r--  src/service/users/data.rs | 28
-rw-r--r--  src/service/users/mod.rs | 56
-rw-r--r--  src/utils/mod.rs | 1
119 files changed, 2809 insertions(+), 1783 deletions(-)
diff --git a/src/api/appservice_server.rs b/src/api/appservice_server.rs
index 1f6e2c9..6dca60b 100644
--- a/src/api/appservice_server.rs
+++ b/src/api/appservice_server.rs
@@ -1,4 +1,4 @@
-use crate::{utils, Error, Result, services};
+use crate::{services, utils, Error, Result};
use bytes::BytesMut;
use ruma::api::{IncomingResponse, MatrixVersion, OutgoingRequest, SendAccessToken};
use std::{fmt::Debug, mem, time::Duration};
@@ -45,7 +45,11 @@ where
*reqwest_request.timeout_mut() = Some(Duration::from_secs(30));
let url = reqwest_request.url().clone();
- let mut response = services().globals.default_client().execute(reqwest_request).await?;
+ let mut response = services()
+ .globals
+ .default_client()
+ .execute(reqwest_request)
+ .await?;
// reqwest::Response -> http::Response conversion
let status = response.status();
diff --git a/src/api/client_server/account.rs b/src/api/client_server/account.rs
index 6d37ce9..28d6c07 100644
--- a/src/api/client_server/account.rs
+++ b/src/api/client_server/account.rs
@@ -1,9 +1,7 @@
use std::sync::Arc;
use super::{DEVICE_ID_LENGTH, SESSION_ID_LENGTH, TOKEN_LENGTH};
-use crate::{
- utils, Error, Result, Ruma, services, api::client_server,
-};
+use crate::{api::client_server, services, utils, Error, Result, Ruma};
use ruma::{
api::client::{
account::{
@@ -43,16 +41,18 @@ pub async fn get_register_available_route(
body: Ruma<get_username_availability::v3::IncomingRequest>,
) -> Result<get_username_availability::v3::Response> {
// Validate user id
- let user_id =
- UserId::parse_with_server_name(body.username.to_lowercase(), services().globals.server_name())
- .ok()
- .filter(|user_id| {
- !user_id.is_historical() && user_id.server_name() == services().globals.server_name()
- })
- .ok_or(Error::BadRequest(
- ErrorKind::InvalidUsername,
- "Username is invalid.",
- ))?;
+ let user_id = UserId::parse_with_server_name(
+ body.username.to_lowercase(),
+ services().globals.server_name(),
+ )
+ .ok()
+ .filter(|user_id| {
+ !user_id.is_historical() && user_id.server_name() == services().globals.server_name()
+ })
+ .ok_or(Error::BadRequest(
+ ErrorKind::InvalidUsername,
+ "Username is invalid.",
+ ))?;
// Check if username is creative enough
if services().users.exists(&user_id)? {
@@ -95,17 +95,19 @@ pub async fn register_route(
let user_id = match (&body.username, is_guest) {
(Some(username), false) => {
- let proposed_user_id =
- UserId::parse_with_server_name(username.to_lowercase(), services().globals.server_name())
- .ok()
- .filter(|user_id| {
- !user_id.is_historical()
- && user_id.server_name() == services().globals.server_name()
- })
- .ok_or(Error::BadRequest(
- ErrorKind::InvalidUsername,
- "Username is invalid.",
- ))?;
+ let proposed_user_id = UserId::parse_with_server_name(
+ username.to_lowercase(),
+ services().globals.server_name(),
+ )
+ .ok()
+ .filter(|user_id| {
+ !user_id.is_historical()
+ && user_id.server_name() == services().globals.server_name()
+ })
+ .ok_or(Error::BadRequest(
+ ErrorKind::InvalidUsername,
+ "Username is invalid.",
+ ))?;
if services().users.exists(&proposed_user_id)? {
return Err(Error::BadRequest(
ErrorKind::UserInUse,
@@ -176,7 +178,8 @@ pub async fn register_route(
// Default to pretty displayname
let displayname = format!("{} ⚡️", user_id.localpart());
- services().users
+ services()
+ .users
.set_displayname(&user_id, Some(displayname.clone()))?;
// Initial account data
@@ -188,7 +191,8 @@ pub async fn register_route(
content: ruma::events::push_rules::PushRulesEventContent {
global: push::Ruleset::server_default(&user_id),
},
- }).expect("to json always works"),
+ })
+ .expect("to json always works"),
)?;
// Inhibit login does not work for guests
@@ -220,7 +224,8 @@ pub async fn register_route(
)?;
info!("New user {} registered on this server.", user_id);
- services().admin
+ services()
+ .admin
.send_message(RoomMessageEventContent::notice_plain(format!(
"New user {} registered on this server.",
user_id
@@ -229,7 +234,10 @@ pub async fn register_route(
// If this is the first real user, grant them admin privileges
// Note: the server user, @conduit:servername, is generated first
if services().users.count()? == 2 {
- services().admin.make_user_admin(&user_id, displayname).await?;
+ services()
+ .admin
+ .make_user_admin(&user_id, displayname)
+ .await?;
warn!("Granting {} admin privileges as the first user", user_id);
}
@@ -272,26 +280,26 @@ pub async fn change_password_route(
};
if let Some(auth) = &body.auth {
- let (worked, uiaainfo) = services().uiaa.try_auth(
- sender_user,
- sender_device,
- auth,
- &uiaainfo,
- )?;
+ let (worked, uiaainfo) =
+ services()
+ .uiaa
+ .try_auth(sender_user, sender_device, auth, &uiaainfo)?;
if !worked {
return Err(Error::Uiaa(uiaainfo));
}
// Success!
} else if let Some(json) = body.json_body {
uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
- services().uiaa
+ services()
+ .uiaa
.create(sender_user, sender_device, &uiaainfo, &json)?;
return Err(Error::Uiaa(uiaainfo));
} else {
return Err(Error::BadRequest(ErrorKind::NotJson, "Not json."));
}
- services().users
+ services()
+ .users
.set_password(sender_user, Some(&body.new_password))?;
if body.logout_devices {
@@ -307,7 +315,8 @@ pub async fn change_password_route(
}
info!("User {} changed their password.", sender_user);
- services().admin
+ services()
+ .admin
.send_message(RoomMessageEventContent::notice_plain(format!(
"User {} changed their password.",
sender_user
@@ -321,9 +330,7 @@ pub async fn change_password_route(
/// Get user_id of the sender user.
///
/// Note: Also works for Application Services
-pub async fn whoami_route(
- body: Ruma<whoami::v3::Request>,
-) -> Result<whoami::v3::Response> {
+pub async fn whoami_route(body: Ruma<whoami::v3::Request>) -> Result<whoami::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
let device_id = body.sender_device.as_ref().cloned();
@@ -361,19 +368,18 @@ pub async fn deactivate_route(
};
if let Some(auth) = &body.auth {
- let (worked, uiaainfo) = services().uiaa.try_auth(
- sender_user,
- sender_device,
- auth,
- &uiaainfo,
- )?;
+ let (worked, uiaainfo) =
+ services()
+ .uiaa
+ .try_auth(sender_user, sender_device, auth, &uiaainfo)?;
if !worked {
return Err(Error::Uiaa(uiaainfo));
}
// Success!
} else if let Some(json) = body.json_body {
uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
- services().uiaa
+ services()
+ .uiaa
.create(sender_user, sender_device, &uiaainfo, &json)?;
return Err(Error::Uiaa(uiaainfo));
} else {
@@ -387,7 +393,8 @@ pub async fn deactivate_route(
services().users.deactivate_account(sender_user)?;
info!("User {} deactivated their account.", sender_user);
- services().admin
+ services()
+ .admin
.send_message(RoomMessageEventContent::notice_plain(format!(
"User {} deactivated their account.",
sender_user
diff --git a/src/api/client_server/alias.rs b/src/api/client_server/alias.rs
index 444cc15..b28606c 100644
--- a/src/api/client_server/alias.rs
+++ b/src/api/client_server/alias.rs
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
use regex::Regex;
use ruma::{
api::{
@@ -25,11 +25,18 @@ pub async fn create_alias_route(
));
}
- if services().rooms.alias.resolve_local_alias(&body.room_alias)?.is_some() {
+ if services()
+ .rooms
+ .alias
+ .resolve_local_alias(&body.room_alias)?
+ .is_some()
+ {
return Err(Error::Conflict("Alias already exists."));
}
- services().rooms.alias
+ services()
+ .rooms
+ .alias
.set_alias(&body.room_alias, &body.room_id)?;
Ok(create_alias::v3::Response::new())
@@ -69,9 +76,7 @@ pub async fn get_alias_route(
get_alias_helper(&body.room_alias).await
}
-pub(crate) async fn get_alias_helper(
- room_alias: &RoomAliasId,
-) -> Result<get_alias::v3::Response> {
+pub(crate) async fn get_alias_helper(room_alias: &RoomAliasId) -> Result<get_alias::v3::Response> {
if room_alias.server_name() != services().globals.server_name() {
let response = services()
.sending
@@ -115,9 +120,15 @@ pub(crate) async fn get_alias_helper(
.await
.is_ok()
{
- room_id = Some(services().rooms.alias.resolve_local_alias(room_alias)?.ok_or_else(|| {
- Error::bad_config("Appservice lied to us. Room does not exist.")
- })?);
+ room_id = Some(
+ services()
+ .rooms
+ .alias
+ .resolve_local_alias(room_alias)?
+ .ok_or_else(|| {
+ Error::bad_config("Appservice lied to us. Room does not exist.")
+ })?,
+ );
break;
}
}
diff --git a/src/api/client_server/backup.rs b/src/api/client_server/backup.rs
index e413893..f3d5ddc 100644
--- a/src/api/client_server/backup.rs
+++ b/src/api/client_server/backup.rs
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
use ruma::api::client::{
backup::{
add_backup_keys, add_backup_keys_for_room, add_backup_keys_for_session,
@@ -31,7 +31,8 @@ pub async fn update_backup_version_route(
body: Ruma<update_backup_version::v3::IncomingRequest>,
) -> Result<update_backup_version::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- services().key_backups
+ services()
+ .key_backups
.update_backup(sender_user, &body.version, &body.algorithm)?;
Ok(update_backup_version::v3::Response {})
@@ -45,13 +46,13 @@ pub async fn get_latest_backup_info_route(
) -> Result<get_latest_backup_info::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- let (version, algorithm) =
- services().key_backups
- .get_latest_backup(sender_user)?
- .ok_or(Error::BadRequest(
- ErrorKind::NotFound,
- "Key backup does not exist.",
- ))?;
+ let (version, algorithm) = services()
+ .key_backups
+ .get_latest_backup(sender_user)?
+ .ok_or(Error::BadRequest(
+ ErrorKind::NotFound,
+ "Key backup does not exist.",
+ ))?;
Ok(get_latest_backup_info::v3::Response {
algorithm,
@@ -78,8 +79,13 @@ pub async fn get_backup_info_route(
Ok(get_backup_info::v3::Response {
algorithm,
- count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
- etag: services().key_backups.get_etag(sender_user, &body.version)?,
+ count: (services()
+ .key_backups
+ .count_keys(sender_user, &body.version)? as u32)
+ .into(),
+ etag: services()
+ .key_backups
+ .get_etag(sender_user, &body.version)?,
version: body.version.to_owned(),
})
}
@@ -94,7 +100,9 @@ pub async fn delete_backup_version_route(
) -> Result<delete_backup_version::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- services().key_backups.delete_backup(sender_user, &body.version)?;
+ services()
+ .key_backups
+ .delete_backup(sender_user, &body.version)?;
Ok(delete_backup_version::v3::Response {})
}
@@ -136,8 +144,13 @@ pub async fn add_backup_keys_route(
}
Ok(add_backup_keys::v3::Response {
- count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
- etag: services().key_backups.get_etag(sender_user, &body.version)?,
+ count: (services()
+ .key_backups
+ .count_keys(sender_user, &body.version)? as u32)
+ .into(),
+ etag: services()
+ .key_backups
+ .get_etag(sender_user, &body.version)?,
})
}
@@ -176,8 +189,13 @@ pub async fn add_backup_keys_for_room_route(
}
Ok(add_backup_keys_for_room::v3::Response {
- count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
- etag: services().key_backups.get_etag(sender_user, &body.version)?,
+ count: (services()
+ .key_backups
+ .count_keys(sender_user, &body.version)? as u32)
+ .into(),
+ etag: services()
+ .key_backups
+ .get_etag(sender_user, &body.version)?,
})
}
@@ -214,8 +232,13 @@ pub async fn add_backup_keys_for_session_route(
)?;
Ok(add_backup_keys_for_session::v3::Response {
- count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
- etag: services().key_backups.get_etag(sender_user, &body.version)?,
+ count: (services()
+ .key_backups
+ .count_keys(sender_user, &body.version)? as u32)
+ .into(),
+ etag: services()
+ .key_backups
+ .get_etag(sender_user, &body.version)?,
})
}
@@ -274,11 +297,18 @@ pub async fn delete_backup_keys_route(
) -> Result<delete_backup_keys::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- services().key_backups.delete_all_keys(sender_user, &body.version)?;
+ services()
+ .key_backups
+ .delete_all_keys(sender_user, &body.version)?;
Ok(delete_backup_keys::v3::Response {
- count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
- etag: services().key_backups.get_etag(sender_user, &body.version)?,
+ count: (services()
+ .key_backups
+ .count_keys(sender_user, &body.version)? as u32)
+ .into(),
+ etag: services()
+ .key_backups
+ .get_etag(sender_user, &body.version)?,
})
}
@@ -290,12 +320,18 @@ pub async fn delete_backup_keys_for_room_route(
) -> Result<delete_backup_keys_for_room::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- services().key_backups
+ services()
+ .key_backups
.delete_room_keys(sender_user, &body.version, &body.room_id)?;
Ok(delete_backup_keys_for_room::v3::Response {
- count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
- etag: services().key_backups.get_etag(sender_user, &body.version)?,
+ count: (services()
+ .key_backups
+ .count_keys(sender_user, &body.version)? as u32)
+ .into(),
+ etag: services()
+ .key_backups
+ .get_etag(sender_user, &body.version)?,
})
}
@@ -307,11 +343,20 @@ pub async fn delete_backup_keys_for_session_route(
) -> Result<delete_backup_keys_for_session::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- services().key_backups
- .delete_room_key(sender_user, &body.version, &body.room_id, &body.session_id)?;
+ services().key_backups.delete_room_key(
+ sender_user,
+ &body.version,
+ &body.room_id,
+ &body.session_id,
+ )?;
Ok(delete_backup_keys_for_session::v3::Response {
- count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
- etag: services().key_backups.get_etag(sender_user, &body.version)?,
+ count: (services()
+ .key_backups
+ .count_keys(sender_user, &body.version)? as u32)
+ .into(),
+ etag: services()
+ .key_backups
+ .get_etag(sender_user, &body.version)?,
})
}
diff --git a/src/api/client_server/capabilities.rs b/src/api/client_server/capabilities.rs
index e4283b7..97529cf 100644
--- a/src/api/client_server/capabilities.rs
+++ b/src/api/client_server/capabilities.rs
@@ -1,4 +1,4 @@
-use crate::{Result, Ruma, services};
+use crate::{services, Result, Ruma};
use ruma::api::client::discovery::get_capabilities::{
self, Capabilities, RoomVersionStability, RoomVersionsCapability,
};
diff --git a/src/api/client_server/config.rs b/src/api/client_server/config.rs
index 36f4fcb..dbd2b2c 100644
--- a/src/api/client_server/config.rs
+++ b/src/api/client_server/config.rs
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
use ruma::{
api::client::{
config::{
diff --git a/src/api/client_server/context.rs b/src/api/client_server/context.rs
index c407c71..2e0f257 100644
--- a/src/api/client_server/context.rs
+++ b/src/api/client_server/context.rs
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
use ruma::{
api::client::{context::get_context, error::ErrorKind, filter::LazyLoadOptions},
events::StateEventType,
@@ -49,7 +49,11 @@ pub async fn get_context_route(
let room_id = base_event.room_id.clone();
- if !services().rooms.state_cache.is_joined(sender_user, &room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You don't have permission to view this room.",
@@ -141,7 +145,11 @@ pub async fn get_context_route(
.expect("All rooms have state"),
};
- let state_ids = services().rooms.state_accessor.state_full_ids(shortstatehash).await?;
+ let state_ids = services()
+ .rooms
+ .state_accessor
+ .state_full_ids(shortstatehash)
+ .await?;
let end_token = events_after
.last()
@@ -156,7 +164,10 @@ pub async fn get_context_route(
let mut state = Vec::new();
for (shortstatekey, id) in state_ids {
- let (event_type, state_key) = services().rooms.short.get_statekey_from_short(shortstatekey)?;
+ let (event_type, state_key) = services()
+ .rooms
+ .short
+ .get_statekey_from_short(shortstatekey)?;
if event_type != StateEventType::RoomMember {
let pdu = match services().rooms.timeline.get_pdu(&id)? {
diff --git a/src/api/client_server/device.rs b/src/api/client_server/device.rs
index 2f55993..d4c4178 100644
--- a/src/api/client_server/device.rs
+++ b/src/api/client_server/device.rs
@@ -1,4 +1,4 @@
-use crate::{utils, Error, Result, Ruma, services};
+use crate::{services, utils, Error, Result, Ruma};
use ruma::api::client::{
device::{self, delete_device, delete_devices, get_device, get_devices, update_device},
error::ErrorKind,
@@ -55,7 +55,8 @@ pub async fn update_device_route(
device.display_name = body.display_name.clone();
- services().users
+ services()
+ .users
.update_device_metadata(sender_user, &body.device_id, &device)?;
Ok(update_device::v3::Response {})
@@ -88,26 +89,27 @@ pub async fn delete_device_route(
};
if let Some(auth) = &body.auth {
- let (worked, uiaainfo) = services().uiaa.try_auth(
- sender_user,
- sender_device,
- auth,
- &uiaainfo,
- )?;
+ let (worked, uiaainfo) =
+ services()
+ .uiaa
+ .try_auth(sender_user, sender_device, auth, &uiaainfo)?;
if !worked {
return Err(Error::Uiaa(uiaainfo));
}
// Success!
} else if let Some(json) = body.json_body {
uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
- services().uiaa
+ services()
+ .uiaa
.create(sender_user, sender_device, &uiaainfo, &json)?;
return Err(Error::Uiaa(uiaainfo));
} else {
return Err(Error::BadRequest(ErrorKind::NotJson, "Not json."));
}
- services().users.remove_device(sender_user, &body.device_id)?;
+ services()
+ .users
+ .remove_device(sender_user, &body.device_id)?;
Ok(delete_device::v3::Response {})
}
@@ -141,19 +143,18 @@ pub async fn delete_devices_route(
};
if let Some(auth) = &body.auth {
- let (worked, uiaainfo) = services().uiaa.try_auth(
- sender_user,
- sender_device,
- auth,
- &uiaainfo,
- )?;
+ let (worked, uiaainfo) =
+ services()
+ .uiaa
+ .try_auth(sender_user, sender_device, auth, &uiaainfo)?;
if !worked {
return Err(Error::Uiaa(uiaainfo));
}
// Success!
} else if let Some(json) = body.json_body {
uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
- services().uiaa
+ services()
+ .uiaa
.create(sender_user, sender_device, &uiaainfo, &json)?;
return Err(Error::Uiaa(uiaainfo));
} else {
diff --git a/src/api/client_server/directory.rs b/src/api/client_server/directory.rs
index 2a60f67..c1b0eda 100644
--- a/src/api/client_server/directory.rs
+++ b/src/api/client_server/directory.rs
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
use ruma::{
api::{
client::{
@@ -123,7 +123,8 @@ pub(crate) async fn get_public_rooms_filtered_helper(
filter: &IncomingFilter,
_network: &IncomingRoomNetwork,
) -> Result<get_public_rooms_filtered::v3::Response> {
- if let Some(other_server) = server.filter(|server| *server != services().globals.server_name().as_str())
+ if let Some(other_server) =
+ server.filter(|server| *server != services().globals.server_name().as_str())
{
let response = services()
.sending
diff --git a/src/api/client_server/filter.rs b/src/api/client_server/filter.rs
index e0c9506..a0d5a19 100644
--- a/src/api/client_server/filter.rs
+++ b/src/api/client_server/filter.rs
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
use ruma::api::client::{
error::ErrorKind,
filter::{create_filter, get_filter},
diff --git a/src/api/client_server/keys.rs b/src/api/client_server/keys.rs
index 4ce5d4c..be62cc2 100644
--- a/src/api/client_server/keys.rs
+++ b/src/api/client_server/keys.rs
@@ -1,5 +1,5 @@
use super::SESSION_ID_LENGTH;
-use crate::{utils, Error, Result, Ruma, services};
+use crate::{services, utils, Error, Result, Ruma};
use futures_util::{stream::FuturesUnordered, StreamExt};
use ruma::{
api::{
@@ -32,7 +32,8 @@ pub async fn upload_keys_route(
let sender_device = body.sender_device.as_ref().expect("user is authenticated");
for (key_key, key_value) in &body.one_time_keys {
- services().users
+ services()
+ .users
.add_one_time_key(sender_user, sender_device, key_key, key_value)?;
}
@@ -44,16 +45,16 @@ pub async fn upload_keys_route(
.get_device_keys(sender_user, sender_device)?
.is_none()
{
- services().users.add_device_keys(
- sender_user,
- sender_device,
- device_keys,
- )?;
+ services()
+ .users
+ .add_device_keys(sender_user, sender_device, device_keys)?;
}
}
Ok(upload_keys::v3::Response {
- one_time_key_counts: services().users.count_one_time_keys(sender_user, sender_device)?,
+ one_time_key_counts: services()
+ .users
+ .count_one_time_keys(sender_user, sender_device)?,
})
}
@@ -69,12 +70,8 @@ pub async fn get_keys_route(
) -> Result<get_keys::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- let response = get_keys_helper(
- Some(sender_user),
- &body.device_keys,
- |u| u == sender_user,
- )
- .await?;
+ let response =
+ get_keys_helper(Some(sender_user), &body.device_keys, |u| u == sender_user).await?;
Ok(response)
}
@@ -113,19 +110,18 @@ pub async fn upload_signing_keys_route(
};
if let Some(auth) = &body.auth {
- let (worked, uiaainfo) = services().uiaa.try_auth(
- sender_user,
- sender_device,
- auth,
- &uiaainfo,
- )?;
+ let (worked, uiaainfo) =
+ services()
+ .uiaa
+ .try_auth(sender_user, sender_device, auth, &uiaainfo)?;
if !worked {
return Err(Error::Uiaa(uiaainfo));
}
// Success!
} else if let Some(json) = body.json_body {
uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
- services().uiaa
+ services()
+ .uiaa
.create(sender_user, sender_device, &uiaainfo, &json)?;
return Err(Error::Uiaa(uiaainfo));
} else {
@@ -187,12 +183,9 @@ pub async fn upload_signatures_route(
))?
.to_owned(),
);
- services().users.sign_key(
- user_id,
- key_id,
- signature,
- sender_user,
- )?;
+ services()
+ .users
+ .sign_key(user_id, key_id, signature, sender_user)?;
}
}
}
@@ -215,7 +208,8 @@ pub async fn get_key_changes_route(
let mut device_list_updates = HashSet::new();
device_list_updates.extend(
- services().users
+ services()
+ .users
.keys_changed(
sender_user.as_str(),
body.from
@@ -230,9 +224,15 @@ pub async fn get_key_changes_route(
.filter_map(|r| r.ok()),
);
- for room_id in services().rooms.state_cache.rooms_joined(sender_user).filter_map(|r| r.ok()) {
+ for room_id in services()
+ .rooms
+ .state_cache
+ .rooms_joined(sender_user)
+ .filter_map(|r| r.ok())
+ {
device_list_updates.extend(
- services().users
+ services()
+ .users
.keys_changed(
&room_id.to_string(),
body.from.parse().map_err(|_| {
@@ -296,12 +296,13 @@ pub(crate) async fn get_keys_helper<F: Fn(&UserId) -> bool>(
for device_id in device_ids {
let mut container = BTreeMap::new();
if let Some(mut keys) = services().users.get_device_keys(user_id, device_id)? {
- let metadata = services().users.get_device_metadata(user_id, device_id)?.ok_or(
- Error::BadRequest(
+ let metadata = services()
+ .users
+ .get_device_metadata(user_id, device_id)?
+ .ok_or(Error::BadRequest(
ErrorKind::InvalidParam,
"Tried to get keys for nonexistent device.",
- ),
- )?;
+ ))?;
add_unsigned_device_display_name(&mut keys, metadata)
.map_err(|_| Error::bad_database("invalid device keys in database"))?;
@@ -311,7 +312,10 @@ pub(crate) async fn get_keys_helper<F: Fn(&UserId) -> bool>(
}
}
- if let Some(master_key) = services().users.get_master_key(user_id, &allowed_signatures)? {
+ if let Some(master_key) = services()
+ .users
+ .get_master_key(user_id, &allowed_signatures)?
+ {
master_keys.insert(user_id.to_owned(), master_key);
}
if let Some(self_signing_key) = services()
@@ -338,7 +342,8 @@ pub(crate) async fn get_keys_helper<F: Fn(&UserId) -> bool>(
}
(
server,
- services().sending
+ services()
+ .sending
.send_federation_request(
server,
federation::keys::get_keys::v1::Request {
@@ -408,7 +413,8 @@ pub(crate) async fn claim_keys_helper(
let mut container = BTreeMap::new();
for (device_id, key_algorithm) in map {
if let Some(one_time_keys) =
- services().users
+ services()
+ .users
.take_one_time_key(user_id, device_id, key_algorithm)?
{
let mut c = BTreeMap::new();
diff --git a/src/api/client_server/media.rs b/src/api/client_server/media.rs
index 80cbb61..c1f5e1d 100644
--- a/src/api/client_server/media.rs
+++ b/src/api/client_server/media.rs
@@ -1,6 +1,4 @@
-use crate::{
- utils, Error, Result, Ruma, services, service::media::FileMeta,
-};
+use crate::{service::media::FileMeta, services, utils, Error, Result, Ruma};
use ruma::api::client::{
error::ErrorKind,
media::{
@@ -37,11 +35,11 @@ pub async fn create_content_route(
utils::random_string(MXC_LENGTH)
);
- services().media
+ services()
+ .media
.create(
mxc.clone(),
- body
- .filename
+ body.filename
.as_ref()
.map(|filename| "inline; filename=".to_owned() + filename)
.as_deref(),
@@ -73,7 +71,8 @@ pub async fn get_remote_content(
)
.await?;
- services().media
+ services()
+ .media
.create(
mxc.to_string(),
content_response.content_disposition.as_deref(),
@@ -192,7 +191,8 @@ pub async fn get_content_thumbnail_route(
)
.await?;
- services().media
+ services()
+ .media
.upload_thumbnail(
mxc,
None,
diff --git a/src/api/client_server/membership.rs b/src/api/client_server/membership.rs
index c930ce4..5de8ce1 100644
--- a/src/api/client_server/membership.rs
+++ b/src/api/client_server/membership.rs
@@ -30,7 +30,11 @@ use std::{
};
use tracing::{debug, error, warn};
-use crate::{Result, services, PduEvent, service::pdu::{gen_event_id_canonical_json, PduBuilder}, Error, api::{server_server, client_server}, utils, Ruma};
+use crate::{
+ api::{client_server, server_server},
+ service::pdu::{gen_event_id_canonical_json, PduBuilder},
+ services, utils, Error, PduEvent, Result, Ruma,
+};
use super::get_alias_helper;
@@ -47,8 +51,9 @@ pub async fn join_room_by_id_route(
let mut servers = Vec::new(); // There is no body.server_name for /roomId/join
servers.extend(
- services().rooms
- .state_cache
+ services()
+ .rooms
+ .state_cache
.invite_state(sender_user, &body.room_id)?
.unwrap_or_default()
.iter()
@@ -88,8 +93,9 @@ pub async fn join_room_by_id_or_alias_route(
Ok(room_id) => {
let mut servers = body.server_name.clone();
servers.extend(
- services().rooms
- .state_cache
+ services()
+ .rooms
+ .state_cache
.invite_state(sender_user, &room_id)?
.unwrap_or_default()
.iter()
@@ -163,8 +169,9 @@ pub async fn kick_user_route(
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
let mut event: RoomMemberEventContent = serde_json::from_str(
- services().rooms
- .state_accessor
+ services()
+ .rooms
+ .state_accessor
.room_state_get(
&body.room_id,
&StateEventType::RoomMember,
@@ -183,7 +190,8 @@ pub async fn kick_user_route(
// TODO: reason
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -250,7 +258,8 @@ pub async fn ban_user_route(
)?;
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -286,8 +295,9 @@ pub async fn unban_user_route(
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
let mut event: RoomMemberEventContent = serde_json::from_str(
- services().rooms
- .state_accessor
+ services()
+ .rooms
+ .state_accessor
.room_state_get(
&body.room_id,
&StateEventType::RoomMember,
@@ -305,7 +315,8 @@ pub async fn unban_user_route(
event.membership = MembershipState::Leave;
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -345,7 +356,10 @@ pub async fn forget_room_route(
) -> Result<forget_room::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- services().rooms.state_cache.forget(&body.room_id, sender_user)?;
+ services()
+ .rooms
+ .state_cache
+ .forget(&body.room_id, sender_user)?;
Ok(forget_room::v3::Response::new())
}
@@ -379,7 +393,11 @@ pub async fn get_member_events_route(
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
// TODO: check history visibility?
- if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &body.room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You don't have permission to view this room.",
@@ -410,7 +428,11 @@ pub async fn joined_members_route(
) -> Result<joined_members::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &body.room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You aren't a member of the room.",
@@ -418,7 +440,12 @@ pub async fn joined_members_route(
}
let mut joined = BTreeMap::new();
- for user_id in services().rooms.state_cache.room_members(&body.room_id).filter_map(|r| r.ok()) {
+ for user_id in services()
+ .rooms
+ .state_cache
+ .room_members(&body.room_id)
+ .filter_map(|r| r.ok())
+ {
let display_name = services().users.displayname(&user_id)?;
let avatar_url = services().users.avatar_url(&user_id)?;
@@ -443,7 +470,8 @@ async fn join_room_by_id_helper(
let sender_user = sender_user.expect("user is authenticated");
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -481,7 +509,14 @@ async fn join_room_by_id_helper(
let (make_join_response, remote_server) = make_join_response_and_server?;
let room_version = match make_join_response.room_version {
- Some(room_version) if services().globals.supported_room_versions().contains(&room_version) => room_version,
+ Some(room_version)
+ if services()
+ .globals
+ .supported_room_versions()
+ .contains(&room_version) =>
+ {
+ room_version
+ }
_ => return Err(Error::BadServerResponse("Room version is not supported")),
};
@@ -568,12 +603,11 @@ async fn join_room_by_id_helper(
let mut state = HashMap::new();
let pub_key_map = RwLock::new(BTreeMap::new());
- services().rooms.event_handler.fetch_join_signing_keys(
- &send_join_response,
- &room_version,
- &pub_key_map,
- )
- .await?;
+ services()
+ .rooms
+ .event_handler
+ .fetch_join_signing_keys(&send_join_response, &room_version, &pub_key_map)
+ .await?;
for result in send_join_response
.room_state
@@ -591,12 +625,15 @@ async fn join_room_by_id_helper(
Error::BadServerResponse("Invalid PDU in send_join response.")
})?;
- services().rooms.outlier.add_pdu_outlier(&event_id, &value)?;
+ services()
+ .rooms
+ .outlier
+ .add_pdu_outlier(&event_id, &value)?;
if let Some(state_key) = &pdu.state_key {
- let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
- &pdu.kind.to_string().into(),
- state_key,
- )?;
+ let shortstatekey = services()
+ .rooms
+ .short
+ .get_or_create_shortstatekey(&pdu.kind.to_string().into(), state_key)?;
state.insert(shortstatekey, pdu.event_id.clone());
}
}
@@ -632,7 +669,10 @@ async fn join_room_by_id_helper(
Err(_) => continue,
};
- services().rooms.outlier.add_pdu_outlier(&event_id, &value)?;
+ services()
+ .rooms
+ .outlier
+ .add_pdu_outlier(&event_id, &value)?;
}
let shortstatehash = services().rooms.state.set_event_state(
@@ -640,7 +680,12 @@ async fn join_room_by_id_helper(
room_id,
state
.into_iter()
- .map(|(k, id)| services().rooms.state_compressor.compress_state_event(k, &id))
+ .map(|(k, id)| {
+ services()
+ .rooms
+ .state_compressor
+ .compress_state_event(k, &id)
+ })
.collect::<Result<_>>()?,
)?;
@@ -650,12 +695,15 @@ async fn join_room_by_id_helper(
&parsed_pdu,
join_event,
vec![(*parsed_pdu.event_id).to_owned()],
- &state_lock
+ &state_lock,
)?;
// We set the room state after inserting the pdu, so that we never have a moment in time
// where events in the current room state do not exist
- services().rooms.state.set_room_state(room_id, shortstatehash, &state_lock)?;
+ services()
+ .rooms
+ .state
+ .set_room_state(room_id, shortstatehash, &state_lock)?;
let statehashid = services().rooms.state.append_to_state(&parsed_pdu)?;
} else {
@@ -705,7 +753,13 @@ fn validate_and_add_event_id(
))
.expect("ruma's reference hashes are valid event ids");
- let back_off = |id| match services().globals.bad_event_ratelimiter.write().unwrap().entry(id) {
+ let back_off = |id| match services()
+ .globals
+ .bad_event_ratelimiter
+ .write()
+ .unwrap()
+ .entry(id)
+ {
Entry::Vacant(e) => {
e.insert((Instant::now(), 1));
}
@@ -760,7 +814,8 @@ pub(crate) async fn invite_helper<'a>(
if user_id.server_name() != services().globals.server_name() {
let (pdu_json, invite_room_state) = {
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -781,13 +836,18 @@ pub(crate) async fn invite_helper<'a>(
})
.expect("member event is valid value");
- let (pdu, pdu_json) = services().rooms.timeline.create_hash_and_sign_event(PduBuilder {
- event_type: RoomEventType::RoomMember,
- content,
- unsigned: None,
- state_key: Some(user_id.to_string()),
- redacts: None,
- }, sender_user, room_id, &state_lock)?;
+ let (pdu, pdu_json) = services().rooms.timeline.create_hash_and_sign_event(
+ PduBuilder {
+ event_type: RoomEventType::RoomMember,
+ content,
+ unsigned: None,
+ state_key: Some(user_id.to_string()),
+ redacts: None,
+ },
+ sender_user,
+ room_id,
+ &state_lock,
+ )?;
let invite_room_state = services().rooms.state.calculate_invite_state(&pdu)?;
@@ -799,8 +859,11 @@ pub(crate) async fn invite_helper<'a>(
// Generate event id
let expected_event_id = format!(
"${}",
- ruma::signatures::reference_hash(&pdu_json, &services().rooms.state.get_room_version(&room_id)?)
- .expect("ruma can calculate reference hashes")
+ ruma::signatures::reference_hash(
+ &pdu_json,
+ &services().rooms.state.get_room_version(&room_id)?
+ )
+ .expect("ruma can calculate reference hashes")
);
let expected_event_id = <&EventId>::try_from(expected_event_id.as_str())
.expect("ruma's reference hashes are valid event ids");
@@ -822,8 +885,7 @@ pub(crate) async fn invite_helper<'a>(
let pub_key_map = RwLock::new(BTreeMap::new());
// We do not add the event_id field to the pdu here because of signature and hashes checks
- let (event_id, value) = match gen_event_id_canonical_json(&response.event)
- {
+ let (event_id, value) = match gen_event_id_canonical_json(&response.event) {
Ok(t) => t,
Err(_) => {
// Event could not be converted to canonical json
@@ -847,22 +909,20 @@ pub(crate) async fn invite_helper<'a>(
)
.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Origin field is invalid."))?;
- let pdu_id: Vec<u8> = services().rooms.event_handler.handle_incoming_pdu(
- &origin,
- &event_id,
- room_id,
- value,
- true,
- &pub_key_map,
- )
- .await?
- .ok_or(Error::BadRequest(
- ErrorKind::InvalidParam,
- "Could not accept incoming PDU as timeline event.",
- ))?;
+ let pdu_id: Vec<u8> = services()
+ .rooms
+ .event_handler
+ .handle_incoming_pdu(&origin, &event_id, room_id, value, true, &pub_key_map)
+ .await?
+ .ok_or(Error::BadRequest(
+ ErrorKind::InvalidParam,
+ "Could not accept incoming PDU as timeline event.",
+ ))?;
// Bind to variable because of lifetimes
- let servers = services().rooms.state_cache
+ let servers = services()
+ .rooms
+ .state_cache
.room_servers(room_id)
.filter_map(|r| r.ok())
.filter(|server| &**server != services().globals.server_name());
@@ -872,7 +932,11 @@ pub(crate) async fn invite_helper<'a>(
return Ok(());
}
- if !services().rooms.state_cache.is_joined(sender_user, &room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You don't have permission to view this room.",
@@ -880,7 +944,8 @@ pub(crate) async fn invite_helper<'a>(
}
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -923,7 +988,13 @@ pub async fn leave_all_rooms(user_id: &UserId) -> Result<()> {
.rooms
.state_cache
.rooms_joined(user_id)
- .chain(services().rooms.state_cache.rooms_invited(user_id).map(|t| t.map(|(r, _)| r)))
+ .chain(
+ services()
+ .rooms
+ .state_cache
+ .rooms_invited(user_id)
+ .map(|t| t.map(|(r, _)| r)),
+ )
.collect::<Vec<_>>();
for room_id in all_rooms {
@@ -938,20 +1009,24 @@ pub async fn leave_all_rooms(user_id: &UserId) -> Result<()> {
Ok(())
}
-pub async fn leave_room(
- user_id: &UserId,
- room_id: &RoomId,
-) -> Result<()> {
+pub async fn leave_room(user_id: &UserId, room_id: &RoomId) -> Result<()> {
// Ask a remote server if we don't have this room
- if !services().rooms.metadata.exists(room_id)? && room_id.server_name() != services().globals.server_name() {
+ if !services().rooms.metadata.exists(room_id)?
+ && room_id.server_name() != services().globals.server_name()
+ {
if let Err(e) = remote_leave_room(user_id, room_id).await {
warn!("Failed to leave room {} remotely: {}", user_id, e);
// Don't tell the client about this error
}
- let last_state = services().rooms.state_cache
+ let last_state = services()
+ .rooms
+ .state_cache
.invite_state(user_id, room_id)?
- .map_or_else(|| services().rooms.state_cache.left_state(user_id, room_id), |s| Ok(Some(s)))?;
+ .map_or_else(
+ || services().rooms.state_cache.left_state(user_id, room_id),
+ |s| Ok(Some(s)),
+ )?;
// We always drop the invite, we can't rely on other servers
services().rooms.state_cache.update_membership(
@@ -964,7 +1039,8 @@ pub async fn leave_room(
)?;
} else {
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -974,7 +1050,10 @@ pub async fn leave_room(
let state_lock = mutex_state.lock().await;
let mut event: RoomMemberEventContent = serde_json::from_str(
- services().rooms.state_accessor.room_state_get(room_id, &StateEventType::RoomMember, user_id.as_str())?
+ services()
+ .rooms
+ .state_accessor
+ .room_state_get(room_id, &StateEventType::RoomMember, user_id.as_str())?
.ok_or(Error::BadRequest(
ErrorKind::BadState,
"Cannot leave a room you are not a member of.",
@@ -1003,10 +1082,7 @@ pub async fn leave_room(
Ok(())
}
-async fn remote_leave_room(
- user_id: &UserId,
- room_id: &RoomId,
-) -> Result<()> {
+async fn remote_leave_room(user_id: &UserId, room_id: &RoomId) -> Result<()> {
let mut make_leave_response_and_server = Err(Error::BadServerResponse(
"No server available to assist in leaving.",
));
@@ -1048,14 +1124,21 @@ async fn remote_leave_room(
let (make_leave_response, remote_server) = make_leave_response_and_server?;
let room_version_id = match make_leave_response.room_version {
- Some(version) if services().globals.supported_room_versions().contains(&version) => version,
+ Some(version)
+ if services()
+ .globals
+ .supported_room_versions()
+ .contains(&version) =>
+ {
+ version
+ }
_ => return Err(Error::BadServerResponse("Room version is not supported")),
};
- let mut leave_event_stub =
- serde_json::from_str::<CanonicalJsonObject>(make_leave_response.event.get()).map_err(
- |_| Error::BadServerResponse("Invalid make_leave event json received from server."),
- )?;
+ let mut leave_event_stub = serde_json::from_str::<CanonicalJsonObject>(
+ make_leave_response.event.get(),
+ )
+ .map_err(|_| Error::BadServerResponse("Invalid make_leave event json received from server."))?;
// TODO: Is origin needed?
leave_event_stub.insert(
@@ -1099,7 +1182,8 @@ async fn remote_leave_room(
// It has enough fields to be called a proper event now
let leave_event = leave_event_stub;
- services().sending
+ services()
+ .sending
.send_federation_request(
&remote_server,
federation::membership::create_leave_event::v2::Request {
diff --git a/src/api/client_server/message.rs b/src/api/client_server/message.rs
index bfdc2fd..e086e4a 100644
--- a/src/api/client_server/message.rs
+++ b/src/api/client_server/message.rs
@@ -1,4 +1,4 @@
-use crate::{utils, Error, Result, Ruma, services, service::pdu::PduBuilder};
+use crate::{service::pdu::PduBuilder, services, utils, Error, Result, Ruma};
use ruma::{
api::client::{
error::ErrorKind,
@@ -25,7 +25,8 @@ pub async fn send_message_event_route(
let sender_device = body.sender_device.as_deref();
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -46,7 +47,8 @@ pub async fn send_message_event_route(
// Check if this is a new transaction id
if let Some(response) =
- services().transaction_ids
+ services()
+ .transaction_ids
.existing_txnid(sender_user, sender_device, &body.txn_id)?
{
// The client might have sent a txnid of the /sendToDevice endpoint
@@ -108,7 +110,11 @@ pub async fn get_message_events_route(
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
let sender_device = body.sender_device.as_ref().expect("user is authenticated");
- if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &body.room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You don't have permission to view this room.",
@@ -128,8 +134,12 @@ pub async fn get_message_events_route(
let to = body.to.as_ref().map(|t| t.parse());
- services().rooms
- .lazy_loading.lazy_load_confirm_delivery(sender_user, sender_device, &body.room_id, from)?;
+ services().rooms.lazy_loading.lazy_load_confirm_delivery(
+ sender_user,
+ sender_device,
+ &body.room_id,
+ from,
+ )?;
// Use limit or else 10
let limit = body.limit.try_into().map_or(10_usize, |l: u32| l as usize);
@@ -149,8 +159,10 @@ pub async fn get_message_events_route(
.take(limit)
.filter_map(|r| r.ok()) // Filter out buggy events
.filter_map(|(pdu_id, pdu)| {
- services().rooms
- .timeline.pdu_count(&pdu_id)
+ services()
+ .rooms
+ .timeline
+ .pdu_count(&pdu_id)
.map(|pdu_count| (pdu_count, pdu))
.ok()
})
@@ -187,7 +199,8 @@ pub async fn get_message_events_route(
.take(limit)
.filter_map(|r| r.ok()) // Filter out buggy events
.filter_map(|(pdu_id, pdu)| {
- services().rooms
+ services()
+ .rooms
.timeline
.pdu_count(&pdu_id)
.map(|pdu_count| (pdu_count, pdu))
@@ -222,10 +235,11 @@ pub async fn get_message_events_route(
resp.state = Vec::new();
for ll_id in &lazy_loaded {
- if let Some(member_event) =
- services().rooms.state_accessor
- .room_state_get(&body.room_id, &StateEventType::RoomMember, ll_id.as_str())?
- {
+ if let Some(member_event) = services().rooms.state_accessor.room_state_get(
+ &body.room_id,
+ &StateEventType::RoomMember,
+ ll_id.as_str(),
+ )? {
resp.state.push(member_event.to_state_event());
}
}
diff --git a/src/api/client_server/presence.rs b/src/api/client_server/presence.rs
index 6a915e4..dfac3db 100644
--- a/src/api/client_server/presence.rs
+++ b/src/api/client_server/presence.rs
@@ -1,4 +1,4 @@
-use crate::{utils, Result, Ruma, services};
+use crate::{services, utils, Result, Ruma};
use ruma::api::client::presence::{get_presence, set_presence};
use std::time::Duration;
@@ -51,7 +51,8 @@ pub async fn get_presence_route(
for room_id in services()
.rooms
- .user.get_shared_rooms(vec![sender_user.clone(), body.user_id.clone()])?
+ .user
+ .get_shared_rooms(vec![sender_user.clone(), body.user_id.clone()])?
{
let room_id = room_id?;
diff --git a/src/api/client_server/profile.rs b/src/api/client_server/profile.rs
index 3e1d736..5ace177 100644
--- a/src/api/client_server/profile.rs
+++ b/src/api/client_server/profile.rs
@@ -1,4 +1,4 @@
-use crate::{utils, Error, Result, Ruma, services, service::pdu::PduBuilder};
+use crate::{service::pdu::PduBuilder, services, utils, Error, Result, Ruma};
use ruma::{
api::{
client::{
@@ -24,7 +24,8 @@ pub async fn set_displayname_route(
) -> Result<set_display_name::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- services().users
+ services()
+ .users
.set_displayname(sender_user, body.displayname.clone())?;
// Send a new membership event and presence update into all joined rooms
@@ -40,8 +41,9 @@ pub async fn set_displayname_route(
content: to_raw_value(&RoomMemberEventContent {
displayname: body.displayname.clone(),
..serde_json::from_str(
- services().rooms
- .state_accessor
+ services()
+ .rooms
+ .state_accessor
.room_state_get(
&room_id,
&StateEventType::RoomMember,
@@ -71,7 +73,8 @@ pub async fn set_displayname_route(
for (pdu_builder, room_id) in all_rooms_joined {
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -80,10 +83,12 @@ pub async fn set_displayname_route(
);
let state_lock = mutex_state.lock().await;
- let _ = services()
- .rooms
- .timeline
- .build_and_append_pdu(pdu_builder, sender_user, &room_id, &state_lock);
+ let _ = services().rooms.timeline.build_and_append_pdu(
+ pdu_builder,
+ sender_user,
+ &room_id,
+ &state_lock,
+ );
// Presence update
services().rooms.edus.presence.update_presence(
@@ -150,10 +155,13 @@ pub async fn set_avatar_url_route(
) -> Result<set_avatar_url::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- services().users
+ services()
+ .users
.set_avatar_url(sender_user, body.avatar_url.clone())?;
- services().users.set_blurhash(sender_user, body.blurhash.clone())?;
+ services()
+ .users
+ .set_blurhash(sender_user, body.blurhash.clone())?;
// Send a new membership event and presence update into all joined rooms
let all_joined_rooms: Vec<_> = services()
@@ -168,8 +176,9 @@ pub async fn set_avatar_url_route(
content: to_raw_value(&RoomMemberEventContent {
avatar_url: body.avatar_url.clone(),
..serde_json::from_str(
- services().rooms
- .state_accessor
+ services()
+ .rooms
+ .state_accessor
.room_state_get(
&room_id,
&StateEventType::RoomMember,
@@ -199,7 +208,8 @@ pub async fn set_avatar_url_route(
for (pdu_builder, room_id) in all_joined_rooms {
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -208,10 +218,12 @@ pub async fn set_avatar_url_route(
);
let state_lock = mutex_state.lock().await;
- let _ = services()
- .rooms
- .timeline
- .build_and_append_pdu(pdu_builder, sender_user, &room_id, &state_lock);
+ let _ = services().rooms.timeline.build_and_append_pdu(
+ pdu_builder,
+ sender_user,
+ &room_id,
+ &state_lock,
+ );
// Presence update
services().rooms.edus.presence.update_presence(
diff --git a/src/api/client_server/push.rs b/src/api/client_server/push.rs
index 12ec25d..2301ddc 100644
--- a/src/api/client_server/push.rs
+++ b/src/api/client_server/push.rs
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
use ruma::{
api::client::{
error::ErrorKind,
diff --git a/src/api/client_server/read_marker.rs b/src/api/client_server/read_marker.rs
index c6d77c1..fd0e090 100644
--- a/src/api/client_server/read_marker.rs
+++ b/src/api/client_server/read_marker.rs
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
use ruma::{
api::client::{error::ErrorKind, read_marker::set_read_marker, receipt::create_receipt},
events::RoomAccountDataEventType,
@@ -34,12 +34,18 @@ pub async fn set_read_marker_route(
services().rooms.edus.read_receipt.private_read_set(
&body.room_id,
sender_user,
- services().rooms.timeline.get_pdu_count(event)?.ok_or(Error::BadRequest(
- ErrorKind::InvalidParam,
- "Event does not exist.",
- ))?,
+ services()
+ .rooms
+ .timeline
+ .get_pdu_count(event)?
+ .ok_or(Error::BadRequest(
+ ErrorKind::InvalidParam,
+ "Event does not exist.",
+ ))?,
)?;
- services().rooms.user
+ services()
+ .rooms
+ .user
.reset_notification_counts(sender_user, &body.room_id)?;
let mut user_receipts = BTreeMap::new();
@@ -80,7 +86,8 @@ pub async fn create_receipt_route(
services().rooms.edus.read_receipt.private_read_set(
&body.room_id,
sender_user,
- services().rooms
+ services()
+ .rooms
.timeline
.get_pdu_count(&body.event_id)?
.ok_or(Error::BadRequest(
@@ -88,7 +95,9 @@ pub async fn create_receipt_route(
"Event does not exist.",
))?,
)?;
- services().rooms.user
+ services()
+ .rooms
+ .user
.reset_notification_counts(sender_user, &body.room_id)?;
let mut user_receipts = BTreeMap::new();
diff --git a/src/api/client_server/redact.rs b/src/api/client_server/redact.rs
index 57e442a..ab586c0 100644
--- a/src/api/client_server/redact.rs
+++ b/src/api/client_server/redact.rs
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use crate::{Result, Ruma, services, service::pdu::PduBuilder};
+use crate::{service::pdu::PduBuilder, services, Result, Ruma};
use ruma::{
api::client::redact::redact_event,
events::{room::redaction::RoomRedactionEventContent, RoomEventType},
@@ -20,7 +20,8 @@ pub async fn redact_event_route(
let body = body.body;
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
diff --git a/src/api/client_server/report.rs b/src/api/client_server/report.rs
index efcc434..e45820e 100644
--- a/src/api/client_server/report.rs
+++ b/src/api/client_server/report.rs
@@ -1,4 +1,4 @@
-use crate::{utils::HtmlEscape, Error, Result, Ruma, services};
+use crate::{services, utils::HtmlEscape, Error, Result, Ruma};
use ruma::{
api::client::{error::ErrorKind, room::report_content},
events::room::message,
diff --git a/src/api/client_server/room.rs b/src/api/client_server/room.rs
index 939fbaa..ca191d6 100644
--- a/src/api/client_server/room.rs
+++ b/src/api/client_server/room.rs
@@ -1,5 +1,5 @@
use crate::{
- Error, Result, Ruma, service::pdu::PduBuilder, services, api::client_server::invite_helper,
+ api::client_server::invite_helper, service::pdu::PduBuilder, services, Error, Result, Ruma,
};
use ruma::{
api::client::{
@@ -57,7 +57,8 @@ pub async fn create_room_route(
services().rooms.short.get_or_create_shortroomid(&room_id)?;
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -81,13 +82,19 @@ pub async fn create_room_route(
.as_ref()
.map_or(Ok(None), |localpart| {
// TODO: Check for invalid characters and maximum length
- let alias =
- RoomAliasId::parse(format!("#{}:{}", localpart, services().globals.server_name()))
- .map_err(|_| {
- Error::BadRequest(ErrorKind::InvalidParam, "Invalid alias.")
- })?;
-
- if services().rooms.alias.resolve_local_alias(&alias)?.is_some() {
+ let alias = RoomAliasId::parse(format!(
+ "#{}:{}",
+ localpart,
+ services().globals.server_name()
+ ))
+ .map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid alias."))?;
+
+ if services()
+ .rooms
+ .alias
+ .resolve_local_alias(&alias)?
+ .is_some()
+ {
Err(Error::BadRequest(
ErrorKind::RoomInUse,
"Room alias already exists.",
@@ -99,7 +106,11 @@ pub async fn create_room_route(
let room_version = match body.room_version.clone() {
Some(room_version) => {
- if services().globals.supported_room_versions().contains(&room_version) {
+ if services()
+ .globals
+ .supported_room_versions()
+ .contains(&room_version)
+ {
room_version
} else {
return Err(Error::BadRequest(
@@ -338,13 +349,18 @@ pub async fn create_room_route(
pdu_builder.state_key.get_or_insert_with(|| "".to_owned());
// Silently skip encryption events if they are not allowed
- if pdu_builder.event_type == RoomEventType::RoomEncryption && !services().globals.allow_encryption()
+ if pdu_builder.event_type == RoomEventType::RoomEncryption
+ && !services().globals.allow_encryption()
{
continue;
}
- services().rooms
- .timeline.build_and_append_pdu(pdu_builder, sender_user, &room_id, &state_lock)?;
+ services().rooms.timeline.build_and_append_pdu(
+ pdu_builder,
+ sender_user,
+ &room_id,
+ &state_lock,
+ )?;
}
// 7. Events implied by name and topic
@@ -412,7 +428,11 @@ pub async fn get_room_event_route(
) -> Result<get_room_event::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &body.room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You don't have permission to view this room.",
@@ -439,7 +459,11 @@ pub async fn get_room_aliases_route(
) -> Result<aliases::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &body.room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You don't have permission to view this room.",
@@ -449,7 +473,8 @@ pub async fn get_room_aliases_route(
Ok(aliases::v3::Response {
aliases: services()
.rooms
- .alias.local_aliases_for_room(&body.room_id)
+ .alias
+ .local_aliases_for_room(&body.room_id)
.filter_map(|a| a.ok())
.collect(),
})
@@ -470,7 +495,11 @@ pub async fn upgrade_room_route(
) -> Result<upgrade_room::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- if !services().globals.supported_room_versions().contains(&body.new_version) {
+ if !services()
+ .globals
+ .supported_room_versions()
+ .contains(&body.new_version)
+ {
return Err(Error::BadRequest(
ErrorKind::UnsupportedRoomVersion,
"This server does not support that room version.",
@@ -479,11 +508,14 @@ pub async fn upgrade_room_route(
// Create a replacement room
let replacement_room = RoomId::new(services().globals.server_name());
- services().rooms
- .short.get_or_create_shortroomid(&replacement_room)?;
+ services()
+ .rooms
+ .short
+ .get_or_create_shortroomid(&replacement_room)?;
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -514,7 +546,8 @@ pub async fn upgrade_room_route(
// Change lock to replacement room
drop(state_lock);
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -525,7 +558,8 @@ pub async fn upgrade_room_route(
// Get the old room creation event
let mut create_event_content = serde_json::from_str::<CanonicalJsonObject>(
- services().rooms
+ services()
+ .rooms
.state_accessor
.room_state_get(&body.room_id, &StateEventType::RoomCreate, "")?
.ok_or_else(|| Error::bad_database("Found room without m.room.create event."))?
@@ -627,10 +661,15 @@ pub async fn upgrade_room_route(
// Replicate transferable state events to the new room
for event_type in transferable_state_events {
- let event_content = match services().rooms.state_accessor.room_state_get(&body.room_id, &event_type, "")? {
- Some(v) => v.content.clone(),
- None => continue, // Skipping missing events.
- };
+ let event_content =
+ match services()
+ .rooms
+ .state_accessor
+ .room_state_get(&body.room_id, &event_type, "")?
+ {
+ Some(v) => v.content.clone(),
+ None => continue, // Skipping missing events.
+ };
services().rooms.timeline.build_and_append_pdu(
PduBuilder {
@@ -647,14 +686,22 @@ pub async fn upgrade_room_route(
}
// Moves any local aliases to the new room
- for alias in services().rooms.alias.local_aliases_for_room(&body.room_id).filter_map(|r| r.ok()) {
- services().rooms
- .alias.set_alias(&alias, &replacement_room)?;
+ for alias in services()
+ .rooms
+ .alias
+ .local_aliases_for_room(&body.room_id)
+ .filter_map(|r| r.ok())
+ {
+ services()
+ .rooms
+ .alias
+ .set_alias(&alias, &replacement_room)?;
}
// Get the old room power levels
let mut power_levels_event_content: RoomPowerLevelsEventContent = serde_json::from_str(
- services().rooms
+ services()
+ .rooms
.state_accessor
.room_state_get(&body.room_id, &StateEventType::RoomPowerLevels, "")?
.ok_or_else(|| Error::bad_database("Found room without m.room.create event."))?
@@ -688,4 +735,3 @@ pub async fn upgrade_room_route(
// Return the replacement room id
Ok(upgrade_room::v3::Response { replacement_room })
}
-
diff --git a/src/api/client_server/search.rs b/src/api/client_server/search.rs
index f648649..1ba9cdf 100644
--- a/src/api/client_server/search.rs
+++ b/src/api/client_server/search.rs
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
use ruma::api::client::{
error::ErrorKind,
search::search_events::{
@@ -23,7 +23,8 @@ pub async fn search_events_route(
let filter = &search_criteria.filter;
let room_ids = filter.rooms.clone().unwrap_or_else(|| {
- services().rooms
+ services()
+ .rooms
.state_cache
.rooms_joined(sender_user)
.filter_map(|r| r.ok())
@@ -35,7 +36,11 @@ pub async fn search_events_route(
let mut searches = Vec::new();
for room_id in room_ids {
- if !services().rooms.state_cache.is_joined(sender_user, &room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You don't have permission to view this room.",
diff --git a/src/api/client_server/session.rs b/src/api/client_server/session.rs
index 7feeb66..14f1404 100644
--- a/src/api/client_server/session.rs
+++ b/src/api/client_server/session.rs
@@ -1,5 +1,5 @@
use super::{DEVICE_ID_LENGTH, TOKEN_LENGTH};
-use crate::{utils, Error, Result, Ruma, services};
+use crate::{services, utils, Error, Result, Ruma};
use ruma::{
api::client::{
error::ErrorKind,
@@ -40,9 +40,7 @@ pub async fn get_login_types_route(
///
/// Note: You can use [`GET /_matrix/client/r0/login`](fn.get_supported_versions_route.html) to see
/// supported login types.
-pub async fn login_route(
- body: Ruma<login::v3::IncomingRequest>,
-) -> Result<login::v3::Response> {
+pub async fn login_route(body: Ruma<login::v3::IncomingRequest>) -> Result<login::v3::Response> {
// Validate login method
// TODO: Other login methods
let user_id = match &body.login_info {
@@ -55,15 +53,18 @@ pub async fn login_route(
} else {
return Err(Error::BadRequest(ErrorKind::Forbidden, "Bad login type."));
};
- let user_id =
- UserId::parse_with_server_name(username.to_owned(), services().globals.server_name())
- .map_err(|_| {
- Error::BadRequest(ErrorKind::InvalidUsername, "Username is invalid.")
- })?;
- let hash = services().users.password_hash(&user_id)?.ok_or(Error::BadRequest(
- ErrorKind::Forbidden,
- "Wrong username or password.",
- ))?;
+ let user_id = UserId::parse_with_server_name(
+ username.to_owned(),
+ services().globals.server_name(),
+ )
+ .map_err(|_| Error::BadRequest(ErrorKind::InvalidUsername, "Username is invalid."))?;
+ let hash = services()
+ .users
+ .password_hash(&user_id)?
+ .ok_or(Error::BadRequest(
+ ErrorKind::Forbidden,
+ "Wrong username or password.",
+ ))?;
if hash.is_empty() {
return Err(Error::BadRequest(
@@ -121,7 +122,8 @@ pub async fn login_route(
// Determine if device_id was provided and exists in the db for this user
let device_exists = body.device_id.as_ref().map_or(false, |device_id| {
- services().users
+ services()
+ .users
.all_device_ids(&user_id)
.any(|x| x.as_ref().map_or(false, |v| v == device_id))
});
@@ -156,9 +158,7 @@ pub async fn login_route(
/// - Deletes device metadata (device id, device display name, last seen ip, last seen ts)
/// - Forgets to-device events
/// - Triggers device list updates
-pub async fn logout_route(
- body: Ruma<logout::v3::Request>,
-) -> Result<logout::v3::Response> {
+pub async fn logout_route(body: Ruma<logout::v3::Request>) -> Result<logout::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
let sender_device = body.sender_device.as_ref().expect("user is authenticated");
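The login hunk also reflows the `device_exists` check, which tests an optional client-supplied device id against the user's known devices via `Option::map_or` and `Iterator::any`. A std-only sketch of the same shape, with plain strings standing in for the real id types:

```rust
fn device_exists(requested: Option<&str>, known_devices: &[&str]) -> bool {
    // No device id supplied -> treat it as not existing, so a new one gets generated.
    requested.map_or(false, |device_id| {
        known_devices.iter().any(|known| *known == device_id)
    })
}

fn main() {
    let known = ["GHTYAJCE", "ABCDEFGH"];
    assert!(device_exists(Some("GHTYAJCE"), &known));
    assert!(!device_exists(Some("UNKNOWN0"), &known));
    assert!(!device_exists(None, &known));
}
```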
diff --git a/src/api/client_server/state.rs b/src/api/client_server/state.rs
index ece7453..36466b8 100644
--- a/src/api/client_server/state.rs
+++ b/src/api/client_server/state.rs
@@ -1,8 +1,6 @@
use std::sync::Arc;
-use crate::{
- Error, Result, Ruma, RumaResponse, services, service::pdu::PduBuilder,
-};
+use crate::{service::pdu::PduBuilder, services, Error, Result, Ruma, RumaResponse};
use ruma::{
api::client::{
error::ErrorKind,
@@ -90,10 +88,14 @@ pub async fn get_state_events_route(
#[allow(clippy::blocks_in_if_conditions)]
// Users not in the room should not be able to access the state unless history_visibility is
// WorldReadable
- if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)?
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &body.room_id)?
&& !matches!(
- services().rooms
- .state_accessor
+ services()
+ .rooms
+ .state_accessor
.room_state_get(&body.room_id, &StateEventType::RoomHistoryVisibility, "")?
.map(|event| {
serde_json::from_str(event.content.get())
@@ -138,10 +140,15 @@ pub async fn get_state_events_for_key_route(
#[allow(clippy::blocks_in_if_conditions)]
// Users not in the room should not be able to access the state unless history_visibility is
// WorldReadable
- if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)?
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &body.room_id)?
&& !matches!(
- services().rooms
- .state_accessor.room_state_get(&body.room_id, &StateEventType::RoomHistoryVisibility, "")?
+ services()
+ .rooms
+ .state_accessor
+ .room_state_get(&body.room_id, &StateEventType::RoomHistoryVisibility, "")?
.map(|event| {
serde_json::from_str(event.content.get())
.map(|e: RoomHistoryVisibilityEventContent| e.history_visibility)
@@ -162,7 +169,8 @@ pub async fn get_state_events_for_key_route(
let event = services()
.rooms
- .state_accessor.room_state_get(&body.room_id, &body.event_type, &body.state_key)?
+ .state_accessor
+ .room_state_get(&body.room_id, &body.event_type, &body.state_key)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
"State event not found.",
@@ -187,10 +195,15 @@ pub async fn get_state_events_for_empty_key_route(
#[allow(clippy::blocks_in_if_conditions)]
// Users not in the room should not be able to access the state unless history_visibility is
// WorldReadable
- if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)?
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &body.room_id)?
&& !matches!(
- services().rooms
- .state_accessor.room_state_get(&body.room_id, &StateEventType::RoomHistoryVisibility, "")?
+ services()
+ .rooms
+ .state_accessor
+ .room_state_get(&body.room_id, &StateEventType::RoomHistoryVisibility, "")?
.map(|event| {
serde_json::from_str(event.content.get())
.map(|e: RoomHistoryVisibilityEventContent| e.history_visibility)
@@ -211,7 +224,8 @@ pub async fn get_state_events_for_empty_key_route(
let event = services()
.rooms
- .state_accessor.room_state_get(&body.room_id, &body.event_type, "")?
+ .state_accessor
+ .room_state_get(&body.room_id, &body.event_type, "")?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
"State event not found.",
@@ -248,7 +262,8 @@ async fn send_state_event_for_key_helper(
if alias.server_name() != services().globals.server_name()
|| services()
.rooms
- .alias.resolve_local_alias(&alias)?
+ .alias
+ .resolve_local_alias(&alias)?
.filter(|room| room == room_id) // Make sure it's the right room
.is_none()
{
@@ -262,7 +277,8 @@ async fn send_state_event_for_key_helper(
}
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
diff --git a/src/api/client_server/sync.rs b/src/api/client_server/sync.rs
index 9eb6383..9ce98b7 100644
--- a/src/api/client_server/sync.rs
+++ b/src/api/client_server/sync.rs
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, RumaResponse, services};
+use crate::{services, Error, Result, Ruma, RumaResponse};
use ruma::{
api::client::{
filter::{IncomingFilterDefinition, LazyLoadOptions},
@@ -129,12 +129,7 @@ async fn sync_helper_wrapper(
) {
let since = body.since.clone();
- let r = sync_helper(
- sender_user.clone(),
- sender_device.clone(),
- body,
- )
- .await;
+ let r = sync_helper(sender_user.clone(), sender_device.clone(), body).await;
if let Ok((_, caching_allowed)) = r {
if !caching_allowed {
@@ -211,12 +206,17 @@ async fn sync_helper(
// Look for device list updates of this account
device_list_updates.extend(
- services().users
+ services()
+ .users
.keys_changed(&sender_user.to_string(), since, None)
.filter_map(|r| r.ok()),
);
- let all_joined_rooms = services().rooms.state_cache.rooms_joined(&sender_user).collect::<Vec<_>>();
+ let all_joined_rooms = services()
+ .rooms
+ .state_cache
+ .rooms_joined(&sender_user)
+ .collect::<Vec<_>>();
for room_id in all_joined_rooms {
let room_id = room_id?;
@@ -224,7 +224,8 @@ async fn sync_helper(
// Get and drop the lock to wait for remaining operations to finish
// This will make sure the we have all events until next_batch
let mutex_insert = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_insert
.write()
.unwrap()
@@ -237,7 +238,12 @@ async fn sync_helper(
let timeline_pdus;
let limited;
- if services().rooms.timeline.last_timeline_count(&sender_user, &room_id)? > since {
+ if services()
+ .rooms
+ .timeline
+ .last_timeline_count(&sender_user, &room_id)?
+ > since
+ {
let mut non_timeline_pdus = services()
.rooms
.timeline
@@ -250,7 +256,8 @@ async fn sync_helper(
r.ok()
})
.take_while(|(pduid, _)| {
- services().rooms
+ services()
+ .rooms
.timeline
.pdu_count(pduid)
.map_or(false, |count| count > since)
@@ -286,24 +293,40 @@ async fn sync_helper(
timeline_users.insert(event.sender.as_str().to_owned());
}
- services().rooms.lazy_loading
- .lazy_load_confirm_delivery(&sender_user, &sender_device, &room_id, since)?;
+ services().rooms.lazy_loading.lazy_load_confirm_delivery(
+ &sender_user,
+ &sender_device,
+ &room_id,
+ since,
+ )?;
// Database queries:
- let current_shortstatehash = if let Some(s) = services().rooms.state.get_room_shortstatehash(&room_id)? {
- s
- } else {
- error!("Room {} has no state", room_id);
- continue;
- };
+ let current_shortstatehash =
+ if let Some(s) = services().rooms.state.get_room_shortstatehash(&room_id)? {
+ s
+ } else {
+ error!("Room {} has no state", room_id);
+ continue;
+ };
- let since_shortstatehash = services().rooms.user.get_token_shortstatehash(&room_id, since)?;
+ let since_shortstatehash = services()
+ .rooms
+ .user
+ .get_token_shortstatehash(&room_id, since)?;
// Calculates joined_member_count, invited_member_count and heroes
let calculate_counts = || {
- let joined_member_count = services().rooms.state_cache.room_joined_count(&room_id)?.unwrap_or(0);
- let invited_member_count = services().rooms.state_cache.room_invited_count(&room_id)?.unwrap_or(0);
+ let joined_member_count = services()
+ .rooms
+ .state_cache
+ .room_joined_count(&room_id)?
+ .unwrap_or(0);
+ let invited_member_count = services()
+ .rooms
+ .state_cache
+ .room_invited_count(&room_id)?
+ .unwrap_or(0);
// Recalculate heroes (first 5 members)
let mut heroes = Vec::new();
@@ -314,7 +337,8 @@ async fn sync_helper(
for hero in services()
.rooms
- .timeline.all_pdus(&sender_user, &room_id)?
+ .timeline
+ .all_pdus(&sender_user, &room_id)?
.filter_map(|pdu| pdu.ok()) // Ignore all broken pdus
.filter(|(_, pdu)| pdu.kind == RoomEventType::RoomMember)
.map(|(_, pdu)| {
@@ -333,7 +357,10 @@ async fn sync_helper(
content.membership,
MembershipState::Join | MembershipState::Invite
) && (services().rooms.state_cache.is_joined(&user_id, &room_id)?
- || services().rooms.state_cache.is_invited(&user_id, &room_id)?)
+ || services()
+ .rooms
+ .state_cache
+ .is_invited(&user_id, &room_id)?)
{
Ok::<_, Error>(Some(state_key.clone()))
} else {
@@ -374,14 +401,21 @@ async fn sync_helper(
let (joined_member_count, invited_member_count, heroes) = calculate_counts()?;
- let current_state_ids = services().rooms.state_accessor.state_full_ids(current_shortstatehash).await?;
+ let current_state_ids = services()
+ .rooms
+ .state_accessor
+ .state_full_ids(current_shortstatehash)
+ .await?;
let mut state_events = Vec::new();
let mut lazy_loaded = HashSet::new();
let mut i = 0;
for (shortstatekey, id) in current_state_ids {
- let (event_type, state_key) = services().rooms.short.get_statekey_from_short(shortstatekey)?;
+ let (event_type, state_key) = services()
+ .rooms
+ .short
+ .get_statekey_from_short(shortstatekey)?;
if event_type != StateEventType::RoomMember {
let pdu = match services().rooms.timeline.get_pdu(&id)? {
@@ -423,8 +457,11 @@ async fn sync_helper(
}
// Reset lazy loading because this is an initial sync
- services().rooms.lazy_loading
- .lazy_load_reset(&sender_user, &sender_device, &room_id)?;
+ services().rooms.lazy_loading.lazy_load_reset(
+ &sender_user,
+ &sender_device,
+ &room_id,
+ )?;
// The state_events above should contain all timeline_users, let's mark them as lazy
// loaded.
@@ -471,8 +508,16 @@ async fn sync_helper(
let mut lazy_loaded = HashSet::new();
if since_shortstatehash != current_shortstatehash {
- let current_state_ids = services().rooms.state_accessor.state_full_ids(current_shortstatehash).await?;
- let since_state_ids = services().rooms.state_accessor.state_full_ids(since_shortstatehash).await?;
+ let current_state_ids = services()
+ .rooms
+ .state_accessor
+ .state_full_ids(current_shortstatehash)
+ .await?;
+ let since_state_ids = services()
+ .rooms
+ .state_accessor
+ .state_full_ids(since_shortstatehash)
+ .await?;
for (key, id) in current_state_ids {
if body.full_state || since_state_ids.get(&key) != Some(&id) {
@@ -537,13 +582,15 @@ async fn sync_helper(
let encrypted_room = services()
.rooms
- .state_accessor.state_get(current_shortstatehash, &StateEventType::RoomEncryption, "")?
+ .state_accessor
+ .state_get(current_shortstatehash, &StateEventType::RoomEncryption, "")?
.is_some();
- let since_encryption =
- services().rooms
- .state_accessor
- .state_get(since_shortstatehash, &StateEventType::RoomEncryption, "")?;
+ let since_encryption = services().rooms.state_accessor.state_get(
+ since_shortstatehash,
+ &StateEventType::RoomEncryption,
+ "",
+ )?;
// Calculations:
let new_encrypted_room = encrypted_room && since_encryption.is_none();
@@ -592,8 +639,9 @@ async fn sync_helper(
if joined_since_last_sync && encrypted_room || new_encrypted_room {
// If the user is in a new encrypted room, give them all joined users
device_list_updates.extend(
- services().rooms
- .state_cache
+ services()
+ .rooms
+ .state_cache
.room_members(&room_id)
.flatten()
.filter(|user_id| {
@@ -602,8 +650,7 @@ async fn sync_helper(
})
.filter(|user_id| {
// Only send keys if the sender doesn't share an encrypted room with the target already
- !share_encrypted_room(&sender_user, user_id, &room_id)
- .unwrap_or(false)
+ !share_encrypted_room(&sender_user, user_id, &room_id).unwrap_or(false)
}),
);
}
@@ -625,15 +672,17 @@ async fn sync_helper(
// Look for device list updates in this room
device_list_updates.extend(
- services().users
+ services()
+ .users
.keys_changed(&room_id.to_string(), since, None)
.filter_map(|r| r.ok()),
);
let notification_count = if send_notification_counts {
Some(
- services().rooms
- .user
+ services()
+ .rooms
+ .user
.notification_count(&sender_user, &room_id)?
.try_into()
.expect("notification count can't go that high"),
@@ -644,8 +693,9 @@ async fn sync_helper(
let highlight_count = if send_notification_counts {
Some(
- services().rooms
- .user
+ services()
+ .rooms
+ .user
.highlight_count(&sender_user, &room_id)?
.try_into()
.expect("highlight count can't go that high"),
@@ -657,7 +707,9 @@ async fn sync_helper(
let prev_batch = timeline_pdus
.first()
.map_or(Ok::<_, Error>(None), |(pdu_id, _)| {
- Ok(Some(services().rooms.timeline.pdu_count(pdu_id)?.to_string()))
+ Ok(Some(
+ services().rooms.timeline.pdu_count(pdu_id)?.to_string(),
+ ))
})?;
let room_events: Vec<_> = timeline_pdus
@@ -685,8 +737,11 @@ async fn sync_helper(
}
// Save the state after this sync so we can send the correct state diff next sync
- services().rooms.user
- .associate_token_shortstatehash(&room_id, next_batch, current_shortstatehash)?;
+ services().rooms.user.associate_token_shortstatehash(
+ &room_id,
+ next_batch,
+ current_shortstatehash,
+ )?;
let joined_room = JoinedRoom {
account_data: RoomAccountData {
@@ -729,11 +784,11 @@ async fn sync_helper(
}
// Take presence updates from this room
- for (user_id, presence) in
- services().rooms
- .edus
- .presence
- .presence_since(&room_id, since)?
+ for (user_id, presence) in services()
+ .rooms
+ .edus
+ .presence
+ .presence_since(&room_id, since)?
{
match presence_updates.entry(user_id) {
Entry::Vacant(v) => {
@@ -765,14 +820,19 @@ async fn sync_helper(
}
let mut left_rooms = BTreeMap::new();
- let all_left_rooms: Vec<_> = services().rooms.state_cache.rooms_left(&sender_user).collect();
+ let all_left_rooms: Vec<_> = services()
+ .rooms
+ .state_cache
+ .rooms_left(&sender_user)
+ .collect();
for result in all_left_rooms {
let (room_id, left_state_events) = result?;
{
// Get and drop the lock to wait for remaining operations to finish
let mutex_insert = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_insert
.write()
.unwrap()
@@ -783,7 +843,10 @@ async fn sync_helper(
drop(insert_lock);
}
- let left_count = services().rooms.state_cache.get_left_count(&room_id, &sender_user)?;
+ let left_count = services()
+ .rooms
+ .state_cache
+ .get_left_count(&room_id, &sender_user)?;
// Left before last sync
if Some(since) >= left_count {
@@ -807,14 +870,19 @@ async fn sync_helper(
}
let mut invited_rooms = BTreeMap::new();
- let all_invited_rooms: Vec<_> = services().rooms.state_cache.rooms_invited(&sender_user).collect();
+ let all_invited_rooms: Vec<_> = services()
+ .rooms
+ .state_cache
+ .rooms_invited(&sender_user)
+ .collect();
for result in all_invited_rooms {
let (room_id, invite_state_events) = result?;
{
// Get and drop the lock to wait for remaining operations to finish
let mutex_insert = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_insert
.write()
.unwrap()
@@ -825,7 +893,10 @@ async fn sync_helper(
drop(insert_lock);
}
- let invite_count = services().rooms.state_cache.get_invite_count(&room_id, &sender_user)?;
+ let invite_count = services()
+ .rooms
+ .state_cache
+ .get_invite_count(&room_id, &sender_user)?;
// Invited before last sync
if Some(since) >= invite_count {
@@ -850,8 +921,10 @@ async fn sync_helper(
.filter_map(|r| r.ok())
.filter_map(|other_room_id| {
Some(
- services().rooms
- .state_accessor.room_state_get(&other_room_id, &StateEventType::RoomEncryption, "")
+ services()
+ .rooms
+ .state_accessor
+ .room_state_get(&other_room_id, &StateEventType::RoomEncryption, "")
.ok()?
.is_some(),
)
@@ -865,7 +938,8 @@ async fn sync_helper(
}
// Remove all to-device events the device received *last time*
- services().users
+ services()
+ .users
.remove_to_device_events(&sender_user, &sender_device, since)?;
let response = sync_events::v3::Response {
@@ -898,7 +972,9 @@ async fn sync_helper(
changed: device_list_updates.into_iter().collect(),
left: device_list_left.into_iter().collect(),
},
- device_one_time_keys_count: services().users.count_one_time_keys(&sender_user, &sender_device)?,
+ device_one_time_keys_count: services()
+ .users
+ .count_one_time_keys(&sender_user, &sender_device)?,
to_device: ToDevice {
events: services()
.users
@@ -942,8 +1018,9 @@ fn share_encrypted_room(
.filter(|room_id| room_id != ignore_room)
.filter_map(|other_room_id| {
Some(
- services().rooms
- .state_accessor
+ services()
+ .rooms
+ .state_accessor
.room_state_get(&other_room_id, &StateEventType::RoomEncryption, "")
.ok()?
.is_some(),
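The sync hunks repeatedly walk fallible iterators with `filter_map(|r| r.ok())` and cut the timeline off at the `since` token with `take_while` on the PDU count. A simplified, std-only sketch of that pagination step, with `(count, event)` tuples standing in for the real PDU rows:

```rust
// Hypothetical timeline rows, newest first; Err(..) models a row that failed to decode.
fn timeline_rows() -> Vec<Result<(u64, &'static str), &'static str>> {
    vec![
        Ok((30, "$newest")),
        Err("broken row"),
        Ok((20, "$newer")),
        Ok((10, "$old")),
    ]
}

/// Keep decodable rows that are strictly newer than `since`, stopping at the first older one.
fn rows_since(since: u64) -> Vec<(u64, &'static str)> {
    timeline_rows()
        .into_iter()
        .filter_map(|r| r.ok()) // skip broken rows, as the sync code does
        .take_while(|(count, _)| *count > since)
        .collect()
}

fn main() {
    assert_eq!(rows_since(15), vec![(30, "$newest"), (20, "$newer")]);
    assert!(rows_since(40).is_empty());
}
```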
diff --git a/src/api/client_server/tag.rs b/src/api/client_server/tag.rs
index abf2b87..cb46d9c 100644
--- a/src/api/client_server/tag.rs
+++ b/src/api/client_server/tag.rs
@@ -1,4 +1,4 @@
-use crate::{Result, Ruma, services, Error};
+use crate::{services, Error, Result, Ruma};
use ruma::{
api::client::tag::{create_tag, delete_tag, get_tags},
events::{
@@ -18,21 +18,24 @@ pub async fn update_tag_route(
) -> Result<create_tag::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- let event = services()
- .account_data
- .get(
- Some(&body.room_id),
- sender_user,
- RoomAccountDataEventType::Tag,
- )?;
-
- let mut tags_event = event.map(|e| serde_json::from_str(e.get())
- .map_err(|_| Error::bad_database("Invalid account data event in db.")))
- .unwrap_or_else(|| Ok(TagEvent {
- content: TagEventContent {
- tags: BTreeMap::new(),
- },
- }))?;
+ let event = services().account_data.get(
+ Some(&body.room_id),
+ sender_user,
+ RoomAccountDataEventType::Tag,
+ )?;
+
+ let mut tags_event = event
+ .map(|e| {
+ serde_json::from_str(e.get())
+ .map_err(|_| Error::bad_database("Invalid account data event in db."))
+ })
+ .unwrap_or_else(|| {
+ Ok(TagEvent {
+ content: TagEventContent {
+ tags: BTreeMap::new(),
+ },
+ })
+ })?;
tags_event
.content
@@ -59,21 +62,24 @@ pub async fn delete_tag_route(
) -> Result<delete_tag::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- let mut event = services()
- .account_data
- .get(
- Some(&body.room_id),
- sender_user,
- RoomAccountDataEventType::Tag,
- )?;
-
- let mut tags_event = event.map(|e| serde_json::from_str(e.get())
- .map_err(|_| Error::bad_database("Invalid account data event in db.")))
- .unwrap_or_else(|| Ok(TagEvent {
- content: TagEventContent {
- tags: BTreeMap::new(),
- },
- }))?;
+ let mut event = services().account_data.get(
+ Some(&body.room_id),
+ sender_user,
+ RoomAccountDataEventType::Tag,
+ )?;
+
+ let mut tags_event = event
+ .map(|e| {
+ serde_json::from_str(e.get())
+ .map_err(|_| Error::bad_database("Invalid account data event in db."))
+ })
+ .unwrap_or_else(|| {
+ Ok(TagEvent {
+ content: TagEventContent {
+ tags: BTreeMap::new(),
+ },
+ })
+ })?;
tags_event.content.tags.remove(&body.tag.clone().into());
@@ -97,21 +103,24 @@ pub async fn get_tags_route(
) -> Result<get_tags::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- let mut event = services()
- .account_data
- .get(
- Some(&body.room_id),
- sender_user,
- RoomAccountDataEventType::Tag,
- )?;
-
- let mut tags_event = event.map(|e| serde_json::from_str(e.get())
- .map_err(|_| Error::bad_database("Invalid account data event in db.")))
- .unwrap_or_else(|| Ok(TagEvent {
- content: TagEventContent {
- tags: BTreeMap::new(),
- },
- }))?;
+ let mut event = services().account_data.get(
+ Some(&body.room_id),
+ sender_user,
+ RoomAccountDataEventType::Tag,
+ )?;
+
+ let mut tags_event = event
+ .map(|e| {
+ serde_json::from_str(e.get())
+ .map_err(|_| Error::bad_database("Invalid account data event in db."))
+ })
+ .unwrap_or_else(|| {
+ Ok(TagEvent {
+ content: TagEventContent {
+ tags: BTreeMap::new(),
+ },
+ })
+ })?;
Ok(get_tags::v3::Response {
tags: tags_event.content.tags,
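All three tag hunks share one shape: read the optional `m.tag` account-data event, deserialize it if present, and otherwise fall back to an empty tag map. A small sketch of that get-or-default step, using `serde_json::Value` in place of the typed `TagEvent` (assumes the `serde_json` crate, which the hunks above already use):

```rust
use serde_json::{json, Value};

/// Parse the stored event if there is one, otherwise start from an empty tag map.
fn tags_or_default(stored: Option<&str>) -> Result<Value, String> {
    stored
        .map(|raw| {
            serde_json::from_str(raw)
                .map_err(|_| "Invalid account data event in db.".to_owned())
        })
        .unwrap_or_else(|| Ok(json!({ "content": { "tags": {} } })))
}

fn main() {
    let fresh = tags_or_default(None).unwrap();
    assert_eq!(fresh["content"]["tags"], json!({}));

    let stored = r#"{"content":{"tags":{"m.favourite":{"order":0.5}}}}"#;
    let parsed = tags_or_default(Some(stored)).unwrap();
    assert_eq!(parsed["content"]["tags"]["m.favourite"]["order"], json!(0.5));

    assert!(tags_or_default(Some("not json")).is_err());
}
```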
diff --git a/src/api/client_server/to_device.rs b/src/api/client_server/to_device.rs
index 3a2f6c0..34db3f9 100644
--- a/src/api/client_server/to_device.rs
+++ b/src/api/client_server/to_device.rs
@@ -1,7 +1,7 @@
use ruma::events::ToDeviceEventType;
use std::collections::BTreeMap;
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
use ruma::{
api::{
client::{error::ErrorKind, to_device::send_event_to_device},
@@ -54,15 +54,17 @@ pub async fn send_event_to_device_route(
}
match target_device_id_maybe {
- DeviceIdOrAllDevices::DeviceId(target_device_id) => services().users.add_to_device_event(
- sender_user,
- target_user_id,
- &target_device_id,
- &body.event_type,
- event.deserialize_as().map_err(|_| {
- Error::BadRequest(ErrorKind::InvalidParam, "Event is invalid")
- })?,
- )?,
+ DeviceIdOrAllDevices::DeviceId(target_device_id) => {
+ services().users.add_to_device_event(
+ sender_user,
+ target_user_id,
+ &target_device_id,
+ &body.event_type,
+ event.deserialize_as().map_err(|_| {
+ Error::BadRequest(ErrorKind::InvalidParam, "Event is invalid")
+ })?,
+ )?
+ }
DeviceIdOrAllDevices::AllDevices => {
for target_device_id in services().users.all_device_ids(target_user_id) {
@@ -82,7 +84,8 @@ pub async fn send_event_to_device_route(
}
// Save transaction id with empty data
- services().transaction_ids
+ services()
+ .transaction_ids
.add_txnid(sender_user, sender_device, &body.txn_id, &[])?;
Ok(send_event_to_device::v3::Response {})
diff --git a/src/api/client_server/typing.rs b/src/api/client_server/typing.rs
index abb669b..ecc926f 100644
--- a/src/api/client_server/typing.rs
+++ b/src/api/client_server/typing.rs
@@ -1,4 +1,4 @@
-use crate::{utils, Error, Result, Ruma, services};
+use crate::{services, utils, Error, Result, Ruma};
use ruma::api::client::{error::ErrorKind, typing::create_typing_event};
/// # `PUT /_matrix/client/r0/rooms/{roomId}/typing/{userId}`
@@ -11,7 +11,11 @@ pub async fn create_typing_event_route(
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
- if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .is_joined(sender_user, &body.room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You are not in this room.",
@@ -25,8 +29,10 @@ pub async fn create_typing_event_route(
duration.as_millis() as u64 + utils::millis_since_unix_epoch(),
)?;
} else {
- services().rooms
- .edus.typing
+ services()
+ .rooms
+ .edus
+ .typing
.typing_remove(sender_user, &body.room_id)?;
}
diff --git a/src/api/client_server/user_directory.rs b/src/api/client_server/user_directory.rs
index c94a283..9d7a828 100644
--- a/src/api/client_server/user_directory.rs
+++ b/src/api/client_server/user_directory.rs
@@ -1,4 +1,4 @@
-use crate::{Result, Ruma, services};
+use crate::{services, Result, Ruma};
use ruma::{
api::client::user_directory::search_users,
events::{
@@ -48,22 +48,25 @@ pub async fn search_users_route(
return None;
}
- let user_is_in_public_rooms =
- services().rooms
- .state_cache.rooms_joined(&user_id)
- .filter_map(|r| r.ok())
- .any(|room| {
- services().rooms
- .state_accessor.room_state_get(&room, &StateEventType::RoomJoinRules, "")
- .map_or(false, |event| {
- event.map_or(false, |event| {
- serde_json::from_str(event.content.get())
- .map_or(false, |r: RoomJoinRulesEventContent| {
- r.join_rule == JoinRule::Public
- })
- })
+ let user_is_in_public_rooms = services()
+ .rooms
+ .state_cache
+ .rooms_joined(&user_id)
+ .filter_map(|r| r.ok())
+ .any(|room| {
+ services()
+ .rooms
+ .state_accessor
+ .room_state_get(&room, &StateEventType::RoomJoinRules, "")
+ .map_or(false, |event| {
+ event.map_or(false, |event| {
+ serde_json::from_str(event.content.get())
+ .map_or(false, |r: RoomJoinRulesEventContent| {
+ r.join_rule == JoinRule::Public
+ })
})
- });
+ })
+ });
if user_is_in_public_rooms {
return Some(user);
@@ -71,7 +74,8 @@ pub async fn search_users_route(
let user_is_in_shared_rooms = services()
.rooms
- .user.get_shared_rooms(vec![sender_user.clone(), user_id.clone()])
+ .user
+ .get_shared_rooms(vec![sender_user.clone(), user_id.clone()])
.ok()?
.next()
.is_some();
diff --git a/src/api/client_server/voip.rs b/src/api/client_server/voip.rs
index 9917979..dc9caaa 100644
--- a/src/api/client_server/voip.rs
+++ b/src/api/client_server/voip.rs
@@ -1,4 +1,4 @@
-use crate::{Result, Ruma, services};
+use crate::{services, Result, Ruma};
use hmac::{Hmac, Mac, NewMac};
use ruma::{api::client::voip::get_turn_server_info, SecondsSinceUnixEpoch};
use sha1::Sha1;
diff --git a/src/api/mod.rs b/src/api/mod.rs
index 68589be..0d2cd66 100644
--- a/src/api/mod.rs
+++ b/src/api/mod.rs
@@ -1,4 +1,4 @@
-pub mod client_server;
-pub mod server_server;
pub mod appservice_server;
+pub mod client_server;
pub mod ruma_wrapper;
+pub mod server_server;
diff --git a/src/api/ruma_wrapper/axum.rs b/src/api/ruma_wrapper/axum.rs
index d926b89..ee8c9e7 100644
--- a/src/api/ruma_wrapper/axum.rs
+++ b/src/api/ruma_wrapper/axum.rs
@@ -24,7 +24,7 @@ use serde::Deserialize;
use tracing::{debug, error, warn};
use super::{Ruma, RumaResponse};
-use crate::{Error, Result, api::server_server, services};
+use crate::{api::server_server, services, Error, Result};
#[async_trait]
impl<T, B> FromRequest<B> for Ruma<T>
@@ -197,11 +197,11 @@ where
request_map.insert("content".to_owned(), json_body.clone());
};
- let keys_result = services().rooms.event_handler.fetch_signing_keys(
- &x_matrix.origin,
- vec![x_matrix.key.to_owned()],
- )
- .await;
+ let keys_result = services()
+ .rooms
+ .event_handler
+ .fetch_signing_keys(&x_matrix.origin, vec![x_matrix.key.to_owned()])
+ .await;
let keys = match keys_result {
Ok(b) => b,
diff --git a/src/api/server_server.rs b/src/api/server_server.rs
index 11f7ec3..dba4489 100644
--- a/src/api/server_server.rs
+++ b/src/api/server_server.rs
@@ -1,6 +1,7 @@
use crate::{
api::client_server::{self, claim_keys_helper, get_keys_helper},
- utils, Error, PduEvent, Result, Ruma, services, service::pdu::{gen_event_id_canonical_json, PduBuilder},
+ service::pdu::{gen_event_id_canonical_json, PduBuilder},
+ services, utils, Error, PduEvent, Result, Ruma,
};
use axum::{response::IntoResponse, Json};
use futures_util::{stream::FuturesUnordered, StreamExt};
@@ -138,7 +139,8 @@ where
let mut write_destination_to_cache = false;
- let cached_result = services().globals
+ let cached_result = services()
+ .globals
.actual_destination_cache
.read()
.unwrap()
@@ -191,7 +193,10 @@ where
.to_string()
.into(),
);
- request_map.insert("origin".to_owned(), services().globals.server_name().as_str().into());
+ request_map.insert(
+ "origin".to_owned(),
+ services().globals.server_name().as_str().into(),
+ );
request_map.insert("destination".to_owned(), destination.as_str().into());
let mut request_json =
@@ -238,7 +243,11 @@ where
let url = reqwest_request.url().clone();
- let response = services().globals.federation_client().execute(reqwest_request).await;
+ let response = services()
+ .globals
+ .federation_client()
+ .execute(reqwest_request)
+ .await;
match response {
Ok(mut response) => {
@@ -278,10 +287,15 @@ where
if status == 200 {
let response = T::IncomingResponse::try_from_http_response(http_response);
if response.is_ok() && write_destination_to_cache {
- services().globals.actual_destination_cache.write().unwrap().insert(
- Box::<ServerName>::from(destination),
- (actual_destination, host),
- );
+ services()
+ .globals
+ .actual_destination_cache
+ .write()
+ .unwrap()
+ .insert(
+ Box::<ServerName>::from(destination),
+ (actual_destination, host),
+ );
}
response.map_err(|e| {
@@ -329,9 +343,7 @@ fn add_port_to_hostname(destination_str: &str) -> FedDest {
/// Returns: actual_destination, host header
/// Implemented according to the specification at https://matrix.org/docs/spec/server_server/r0.1.4#resolving-server-names
/// Numbers in comments below refer to bullet points in linked section of specification
-async fn find_actual_destination(
- destination: &'_ ServerName,
-) -> (FedDest, FedDest) {
+async fn find_actual_destination(destination: &'_ ServerName) -> (FedDest, FedDest) {
let destination_str = destination.as_str().to_owned();
let mut hostname = destination_str.clone();
let actual_destination = match get_ip_with_port(&destination_str) {
@@ -364,18 +376,24 @@ async fn find_actual_destination(
// 3.3: SRV lookup successful
let force_port = hostname_override.port();
- if let Ok(override_ip) = services().globals
+ if let Ok(override_ip) = services()
+ .globals
.dns_resolver()
.lookup_ip(hostname_override.hostname())
.await
{
- services().globals.tls_name_override.write().unwrap().insert(
- delegated_hostname.clone(),
- (
- override_ip.iter().collect(),
- force_port.unwrap_or(8448),
- ),
- );
+ services()
+ .globals
+ .tls_name_override
+ .write()
+ .unwrap()
+ .insert(
+ delegated_hostname.clone(),
+ (
+ override_ip.iter().collect(),
+ force_port.unwrap_or(8448),
+ ),
+ );
} else {
warn!("Using SRV record, but could not resolve to IP");
}
@@ -400,15 +418,24 @@ async fn find_actual_destination(
Some(hostname_override) => {
let force_port = hostname_override.port();
- if let Ok(override_ip) = services().globals
+ if let Ok(override_ip) = services()
+ .globals
.dns_resolver()
.lookup_ip(hostname_override.hostname())
.await
{
- services().globals.tls_name_override.write().unwrap().insert(
- hostname.clone(),
- (override_ip.iter().collect(), force_port.unwrap_or(8448)),
- );
+ services()
+ .globals
+ .tls_name_override
+ .write()
+ .unwrap()
+ .insert(
+ hostname.clone(),
+ (
+ override_ip.iter().collect(),
+ force_port.unwrap_or(8448),
+ ),
+ );
} else {
warn!("Using SRV record, but could not resolve to IP");
}
@@ -443,10 +470,9 @@ async fn find_actual_destination(
(actual_destination, hostname)
}
-async fn query_srv_record(
- hostname: &'_ str,
-) -> Option<FedDest> {
- if let Ok(Some(host_port)) = services().globals
+async fn query_srv_record(hostname: &'_ str) -> Option<FedDest> {
+ if let Ok(Some(host_port)) = services()
+ .globals
.dns_resolver()
.srv_lookup(format!("_matrix._tcp.{}", hostname))
.await
@@ -465,11 +491,10 @@ async fn query_srv_record(
}
}
-async fn request_well_known(
- destination: &str,
-) -> Option<String> {
+async fn request_well_known(destination: &str) -> Option<String> {
let body: serde_json::Value = serde_json::from_str(
- &services().globals
+ &services()
+ .globals
.default_client()
.get(&format!(
"https://{}/.well-known/matrix/server",
@@ -664,15 +689,22 @@ pub async fn send_transaction_message_route(
Some(id) => id,
None => {
// Event is invalid
- resolved_map.insert(event_id, Err(Error::bad_database("Event needs a valid RoomId.")));
+ resolved_map.insert(
+ event_id,
+ Err(Error::bad_database("Event needs a valid RoomId.")),
+ );
continue;
}
};
- services().rooms.event_handler.acl_check(&sender_servername, &room_id)?;
+ services()
+ .rooms
+ .event_handler
+ .acl_check(&sender_servername, &room_id)?;
let mutex = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_federation
.write()
.unwrap()
@@ -683,16 +715,19 @@ pub async fn send_transaction_message_route(
let start_time = Instant::now();
resolved_map.insert(
event_id.clone(),
- services().rooms.event_handler.handle_incoming_pdu(
- &sender_servername,
- &event_id,
- &room_id,
- value,
- true,
- &pub_key_map,
- )
- .await
- .map(|_| ()),
+ services()
+ .rooms
+ .event_handler
+ .handle_incoming_pdu(
+ &sender_servername,
+ &event_id,
+ &room_id,
+ value,
+ true,
+ &pub_key_map,
+ )
+ .await
+ .map(|_| ()),
);
drop(mutex_lock);
@@ -727,7 +762,13 @@ pub async fn send_transaction_message_route(
.event_ids
.iter()
.filter_map(|id| {
- services().rooms.timeline.get_pdu_count(id).ok().flatten().map(|r| (id, r))
+ services()
+ .rooms
+ .timeline
+ .get_pdu_count(id)
+ .ok()
+ .flatten()
+ .map(|r| (id, r))
})
.max_by_key(|(_, count)| *count)
{
@@ -744,11 +785,11 @@ pub async fn send_transaction_message_route(
content: ReceiptEventContent(receipt_content),
room_id: room_id.clone(),
};
- services().rooms.edus.read_receipt.readreceipt_update(
- &user_id,
- &room_id,
- event,
- )?;
+ services()
+ .rooms
+ .edus
+ .read_receipt
+ .readreceipt_update(&user_id, &room_id, event)?;
} else {
// TODO fetch missing events
info!("No known event ids in read receipt: {:?}", user_updates);
@@ -757,7 +798,11 @@ pub async fn send_transaction_message_route(
}
}
Edu::Typing(typing) => {
- if services().rooms.state_cache.is_joined(&typing.user_id, &typing.room_id)? {
+ if services()
+ .rooms
+ .state_cache
+ .is_joined(&typing.user_id, &typing.room_id)?
+ {
if typing.typing {
services().rooms.edus.typing.typing_add(
&typing.user_id,
@@ -765,16 +810,16 @@ pub async fn send_transaction_message_route(
3000 + utils::millis_since_unix_epoch(),
)?;
} else {
- services().rooms.edus.typing.typing_remove(
- &typing.user_id,
- &typing.room_id,
- )?;
+ services()
+ .rooms
+ .edus
+ .typing
+ .typing_remove(&typing.user_id, &typing.room_id)?;
}
}
}
Edu::DeviceListUpdate(DeviceListUpdateContent { user_id, .. }) => {
- services().users
- .mark_device_key_update(&user_id)?;
+ services().users.mark_device_key_update(&user_id)?;
}
Edu::DirectToDevice(DirectDeviceContent {
sender,
@@ -810,7 +855,9 @@ pub async fn send_transaction_message_route(
}
DeviceIdOrAllDevices::AllDevices => {
- for target_device_id in services().users.all_device_ids(target_user_id) {
+ for target_device_id in
+ services().users.all_device_ids(target_user_id)
+ {
services().users.add_to_device_event(
&sender,
target_user_id,
@@ -830,7 +877,8 @@ pub async fn send_transaction_message_route(
}
// Save transaction id with empty data
- services().transaction_ids
+ services()
+ .transaction_ids
.add_txnid(&sender, None, &message_id, &[])?;
}
Edu::SigningKeyUpdate(SigningKeyUpdateContent {
@@ -854,7 +902,12 @@ pub async fn send_transaction_message_route(
}
}
- Ok(send_transaction_message::v1::Response { pdus: resolved_map.into_iter().map(|(e, r)| (e, r.map_err(|e| e.to_string()))).collect() })
+ Ok(send_transaction_message::v1::Response {
+ pdus: resolved_map
+ .into_iter()
+ .map(|(e, r)| (e, r.map_err(|e| e.to_string())))
+ .collect(),
+ })
}
/// # `GET /_matrix/federation/v1/event/{eventId}`
@@ -875,7 +928,8 @@ pub async fn get_event_route(
.expect("server is authenticated");
let event = services()
- .rooms.timeline
+ .rooms
+ .timeline
.get_pdu_json(&body.event_id)?
.ok_or(Error::BadRequest(ErrorKind::NotFound, "Event not found."))?;
@@ -887,7 +941,11 @@ pub async fn get_event_route(
let room_id = <&RoomId>::try_from(room_id_str)
.map_err(|_| Error::bad_database("Invalid room id field in event in database"))?;
- if !services().rooms.state_cache.server_in_room(sender_servername, room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .server_in_room(sender_servername, room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"Server is not in room",
@@ -916,14 +974,21 @@ pub async fn get_missing_events_route(
.as_ref()
.expect("server is authenticated");
- if !services().rooms.state_cache.server_in_room(sender_servername, &body.room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .server_in_room(sender_servername, &body.room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"Server is not in room",
));
}
- services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
+ services()
+ .rooms
+ .event_handler
+ .acl_check(&sender_servername, &body.room_id)?;
let mut queued_events = body.latest_events.clone();
let mut events = Vec::new();
@@ -988,17 +1053,25 @@ pub async fn get_event_authorization_route(
.as_ref()
.expect("server is authenticated");
- if !services().rooms.state_cache.server_in_room(sender_servername, &body.room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .server_in_room(sender_servername, &body.room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"Server is not in room.",
));
}
- services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
+ services()
+ .rooms
+ .event_handler
+ .acl_check(&sender_servername, &body.room_id)?;
let event = services()
- .rooms.timeline
+ .rooms
+ .timeline
.get_pdu_json(&body.event_id)?
.ok_or(Error::BadRequest(ErrorKind::NotFound, "Event not found."))?;
@@ -1010,7 +1083,11 @@ pub async fn get_event_authorization_route(
let room_id = <&RoomId>::try_from(room_id_str)
.map_err(|_| Error::bad_database("Invalid room id field in event in database"))?;
- let auth_chain_ids = services().rooms.auth_chain.get_auth_chain(room_id, vec![Arc::from(&*body.event_id)]).await?;
+ let auth_chain_ids = services()
+ .rooms
+ .auth_chain
+ .get_auth_chain(room_id, vec![Arc::from(&*body.event_id)])
+ .await?;
Ok(get_event_authorization::v1::Response {
auth_chain: auth_chain_ids
@@ -1035,17 +1112,25 @@ pub async fn get_room_state_route(
.as_ref()
.expect("server is authenticated");
- if !services().rooms.state_cache.server_in_room(sender_servername, &body.room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .server_in_room(sender_servername, &body.room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"Server is not in room.",
));
}
- services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
+ services()
+ .rooms
+ .event_handler
+ .acl_check(&sender_servername, &body.room_id)?;
let shortstatehash = services()
- .rooms.state_accessor
+ .rooms
+ .state_accessor
.pdu_shortstatehash(&body.event_id)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
@@ -1053,26 +1138,39 @@ pub async fn get_room_state_route(
))?;
let pdus = services()
- .rooms.state_accessor
+ .rooms
+ .state_accessor
.state_full_ids(shortstatehash)
.await?
.into_iter()
.map(|(_, id)| {
PduEvent::convert_to_outgoing_federation_event(
- services().rooms.timeline.get_pdu_json(&id).unwrap().unwrap(),
+ services()
+ .rooms
+ .timeline
+ .get_pdu_json(&id)
+ .unwrap()
+ .unwrap(),
)
})
.collect();
- let auth_chain_ids =
- services().rooms.auth_chain.get_auth_chain(&body.room_id, vec![Arc::from(&*body.event_id)]).await?;
+ let auth_chain_ids = services()
+ .rooms
+ .auth_chain
+ .get_auth_chain(&body.room_id, vec![Arc::from(&*body.event_id)])
+ .await?;
Ok(get_room_state::v1::Response {
auth_chain: auth_chain_ids
.map(|id| {
- services().rooms.timeline.get_pdu_json(&id).map(|maybe_json| {
- PduEvent::convert_to_outgoing_federation_event(maybe_json.unwrap())
- })
+ services()
+ .rooms
+ .timeline
+ .get_pdu_json(&id)
+ .map(|maybe_json| {
+ PduEvent::convert_to_outgoing_federation_event(maybe_json.unwrap())
+ })
})
.filter_map(|r| r.ok())
.collect(),
@@ -1095,17 +1193,25 @@ pub async fn get_room_state_ids_route(
.as_ref()
.expect("server is authenticated");
- if !services().rooms.state_cache.server_in_room(sender_servername, &body.room_id)? {
+ if !services()
+ .rooms
+ .state_cache
+ .server_in_room(sender_servername, &body.room_id)?
+ {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"Server is not in room.",
));
}
- services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
+ services()
+ .rooms
+ .event_handler
+ .acl_check(&sender_servername, &body.room_id)?;
let shortstatehash = services()
- .rooms.state_accessor
+ .rooms
+ .state_accessor
.pdu_shortstatehash(&body.event_id)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
@@ -1113,15 +1219,19 @@ pub async fn get_room_state_ids_route(
))?;
let pdu_ids = services()
- .rooms.state_accessor
+ .rooms
+ .state_accessor
.state_full_ids(shortstatehash)
.await?
.into_iter()
.map(|(_, id)| (*id).to_owned())
.collect();
- let auth_chain_ids =
- services().rooms.auth_chain.get_auth_chain(&body.room_id, vec![Arc::from(&*body.event_id)]).await?;
+ let auth_chain_ids = services()
+ .rooms
+ .auth_chain
+ .get_auth_chain(&body.room_id, vec![Arc::from(&*body.event_id)])
+ .await?;
Ok(get_room_state_ids::v1::Response {
auth_chain_ids: auth_chain_ids.map(|id| (*id).to_owned()).collect(),
@@ -1151,10 +1261,14 @@ pub async fn create_join_event_template_route(
.as_ref()
.expect("server is authenticated");
- services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
+ services()
+ .rooms
+ .event_handler
+ .acl_check(&sender_servername, &body.room_id)?;
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -1164,9 +1278,11 @@ pub async fn create_join_event_template_route(
let state_lock = mutex_state.lock().await;
// TODO: Conduit does not implement restricted join rules yet, we always reject
- let join_rules_event =
- services().rooms.state_accessor
- .room_state_get(&body.room_id, &StateEventType::RoomJoinRules, "")?;
+ let join_rules_event = services().rooms.state_accessor.room_state_get(
+ &body.room_id,
+ &StateEventType::RoomJoinRules,
+ "",
+ )?;
let join_rules_event_content: Option<RoomJoinRulesEventContent> = join_rules_event
.as_ref()
@@ -1212,13 +1328,18 @@ pub async fn create_join_event_template_route(
})
.expect("member event is valid value");
- let (pdu, pdu_json) = services().rooms.timeline.create_hash_and_sign_event(PduBuilder {
- event_type: RoomEventType::RoomMember,
- content,
- unsigned: None,
- state_key: Some(body.user_id.to_string()),
- redacts: None,
- }, &body.user_id, &body.room_id, &state_lock)?;
+ let (pdu, pdu_json) = services().rooms.timeline.create_hash_and_sign_event(
+ PduBuilder {
+ event_type: RoomEventType::RoomMember,
+ content,
+ unsigned: None,
+ state_key: Some(body.user_id.to_string()),
+ redacts: None,
+ },
+ &body.user_id,
+ &body.room_id,
+ &state_lock,
+ )?;
drop(state_lock);
@@ -1244,12 +1365,17 @@ async fn create_join_event(
));
}
- services().rooms.event_handler.acl_check(&sender_servername, room_id)?;
+ services()
+ .rooms
+ .event_handler
+ .acl_check(&sender_servername, room_id)?;
// TODO: Conduit does not implement restricted join rules yet, we always reject
- let join_rules_event = services()
- .rooms.state_accessor
- .room_state_get(room_id, &StateEventType::RoomJoinRules, "")?;
+ let join_rules_event = services().rooms.state_accessor.room_state_get(
+ room_id,
+ &StateEventType::RoomJoinRules,
+ "",
+ )?;
let join_rules_event_content: Option<RoomJoinRulesEventContent> = join_rules_event
.as_ref()
@@ -1275,7 +1401,8 @@ async fn create_join_event(
// We need to return the state prior to joining, let's keep a reference to that here
let shortstatehash = services()
- .rooms.state
+ .rooms
+ .state
.get_room_shortstatehash(room_id)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
@@ -1307,7 +1434,8 @@ async fn create_join_event(
.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Origin field is invalid."))?;
let mutex = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_federation
.write()
.unwrap()
@@ -1315,7 +1443,10 @@ async fn create_join_event(
.or_default(),
);
let mutex_lock = mutex.lock().await;
- let pdu_id: Vec<u8> = services().rooms.event_handler.handle_incoming_pdu(&origin, &event_id, room_id, value, true, &pub_key_map)
+ let pdu_id: Vec<u8> = services()
+ .rooms
+ .event_handler
+ .handle_incoming_pdu(&origin, &event_id, room_id, value, true, &pub_key_map)
.await?
.ok_or(Error::BadRequest(
ErrorKind::InvalidParam,
@@ -1323,12 +1454,19 @@ async fn create_join_event(
))?;
drop(mutex_lock);
- let state_ids = services().rooms.state_accessor.state_full_ids(shortstatehash).await?;
- let auth_chain_ids = services().rooms.auth_chain.get_auth_chain(
- room_id,
- state_ids.iter().map(|(_, id)| id.clone()).collect(),
- )
- .await?;
+ let state_ids = services()
+ .rooms
+ .state_accessor
+ .state_full_ids(shortstatehash)
+ .await?;
+ let auth_chain_ids = services()
+ .rooms
+ .auth_chain
+ .get_auth_chain(
+ room_id,
+ state_ids.iter().map(|(_, id)| id.clone()).collect(),
+ )
+ .await?;
let servers = services()
.rooms
@@ -1399,9 +1537,16 @@ pub async fn create_invite_route(
.as_ref()
.expect("server is authenticated");
- services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
+ services()
+ .rooms
+ .event_handler
+ .acl_check(&sender_servername, &body.room_id)?;
- if !services().globals.supported_room_versions().contains(&body.room_version) {
+ if !services()
+ .globals
+ .supported_room_versions()
+ .contains(&body.room_version)
+ {
return Err(Error::BadRequest(
ErrorKind::IncompatibleRoomVersion {
room_version: body.room_version.clone(),
@@ -1549,7 +1694,8 @@ pub async fn get_room_information_route(
let room_id = services()
.rooms
- .alias.resolve_local_alias(&body.room_alias)?
+ .alias
+ .resolve_local_alias(&body.room_alias)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
"Room alias not found.",
@@ -1576,7 +1722,9 @@ pub async fn get_profile_information_route(
let mut blurhash = None;
match &body.field {
- Some(ProfileField::DisplayName) => displayname = services().users.displayname(&body.user_id)?,
+ Some(ProfileField::DisplayName) => {
+ displayname = services().users.displayname(&body.user_id)?
+ }
Some(ProfileField::AvatarUrl) => {
avatar_url = services().users.avatar_url(&body.user_id)?;
blurhash = services().users.blurhash(&body.user_id)?
@@ -1600,18 +1748,14 @@ pub async fn get_profile_information_route(
/// # `POST /_matrix/federation/v1/user/keys/query`
///
/// Gets devices and identity keys for the given users.
-pub async fn get_keys_route(
- body: Ruma<get_keys::v1::Request>,
-) -> Result<get_keys::v1::Response> {
+pub async fn get_keys_route(body: Ruma<get_keys::v1::Request>) -> Result<get_keys::v1::Response> {
if !services().globals.allow_federation() {
return Err(Error::bad_config("Federation is disabled."));
}
- let result = get_keys_helper(
- None,
- &body.device_keys,
- |u| Some(u.server_name()) == body.sender_servername.as_deref(),
- )
+ let result = get_keys_helper(None, &body.device_keys, |u| {
+ Some(u.server_name()) == body.sender_servername.as_deref()
+ })
.await?;
Ok(get_keys::v1::Response {
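The `find_actual_destination` and `query_srv_record` hunks keep the resolution details intact: the SRV owner name is built as `_matrix._tcp.<hostname>` and a port the record does not force falls back to 8448, the default federation port. A tiny sketch of those two details, with plain strings and no DNS involved:

```rust
/// Owner name queried for federation SRV delegation, as in the hunks above.
fn srv_name(hostname: &str) -> String {
    format!("_matrix._tcp.{}", hostname)
}

/// Use the explicitly configured port if there is one, otherwise the
/// default federation port 8448.
fn federation_port(forced: Option<u16>) -> u16 {
    forced.unwrap_or(8448)
}

fn main() {
    assert_eq!(srv_name("matrix.example.org"), "_matrix._tcp.matrix.example.org");
    assert_eq!(federation_port(None), 8448);
    assert_eq!(federation_port(Some(443)), 443);
}
```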
diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs
index 1388dc3..0727728 100644
--- a/src/database/abstraction/rocksdb.rs
+++ b/src/database/abstraction/rocksdb.rs
@@ -1,4 +1,4 @@
-use super::{super::Config, watchers::Watchers, KvTree, KeyValueDatabaseEngine};
+use super::{super::Config, watchers::Watchers, KeyValueDatabaseEngine, KvTree};
use crate::{utils, Result};
use std::{
future::Future,
diff --git a/src/database/key_value/account_data.rs b/src/database/key_value/account_data.rs
index 5674ac0..7d2a870 100644
--- a/src/database/key_value/account_data.rs
+++ b/src/database/key_value/account_data.rs
@@ -1,9 +1,15 @@
use std::collections::HashMap;
-use ruma::{UserId, DeviceId, signatures::CanonicalJsonValue, api::client::{uiaa::UiaaInfo, error::ErrorKind}, events::{RoomAccountDataEventType, AnyEphemeralRoomEvent}, serde::Raw, RoomId};
-use serde::{Serialize, de::DeserializeOwned};
-
-use crate::{Result, database::KeyValueDatabase, service, Error, utils, services};
+use ruma::{
+ api::client::{error::ErrorKind, uiaa::UiaaInfo},
+ events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
+ serde::Raw,
+ signatures::CanonicalJsonValue,
+ DeviceId, RoomId, UserId,
+};
+use serde::{de::DeserializeOwned, Serialize};
+
+use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
impl service::account_data::Data for KeyValueDatabase {
/// Places one event in the account data of the user and removes the previous entry.
diff --git a/src/database/key_value/appservice.rs b/src/database/key_value/appservice.rs
index f427ba7..9a821a6 100644
--- a/src/database/key_value/appservice.rs
+++ b/src/database/key_value/appservice.rs
@@ -55,10 +55,13 @@ impl service::appservice::Data for KeyValueDatabase {
}
fn iter_ids<'a>(&'a self) -> Result<Box<dyn Iterator<Item = Result<String>> + 'a>> {
- Ok(Box::new(self.id_appserviceregistrations.iter().map(|(id, _)| {
- utils::string_from_bytes(&id)
- .map_err(|_| Error::bad_database("Invalid id bytes in id_appserviceregistrations."))
- })))
+ Ok(Box::new(self.id_appserviceregistrations.iter().map(
+ |(id, _)| {
+ utils::string_from_bytes(&id).map_err(|_| {
+ Error::bad_database("Invalid id bytes in id_appserviceregistrations.")
+ })
+ },
+ )))
}
fn all(&self) -> Result<Vec<(String, serde_yaml::Value)>> {
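`iter_ids` decodes raw key bytes back into `String`s and maps a decode failure to a database error. A sketch of that step with std's `String::from_utf8` standing in for `utils::string_from_bytes` (an assumption about the helper's behaviour; the error type is also a hypothetical stand-in):

```rust
#[derive(Debug)]
struct BadDatabase(&'static str); // hypothetical stand-in for Error::bad_database(..)

fn id_from_bytes(id: &[u8]) -> Result<String, BadDatabase> {
    String::from_utf8(id.to_vec())
        .map_err(|_| BadDatabase("Invalid id bytes in id_appserviceregistrations."))
}

fn main() {
    assert_eq!(id_from_bytes(b"my_bridge").unwrap(), "my_bridge");
    assert!(id_from_bytes(&[0xff, 0xfe]).is_err());
}
```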
diff --git a/src/database/key_value/globals.rs b/src/database/key_value/globals.rs
index 199cbf6..fafaf49 100644
--- a/src/database/key_value/globals.rs
+++ b/src/database/key_value/globals.rs
@@ -2,9 +2,13 @@ use std::collections::BTreeMap;
use async_trait::async_trait;
use futures_util::{stream::FuturesUnordered, StreamExt};
-use ruma::{signatures::Ed25519KeyPair, UserId, DeviceId, ServerName, api::federation::discovery::{ServerSigningKeys, VerifyKey}, ServerSigningKeyId, MilliSecondsSinceUnixEpoch};
+use ruma::{
+ api::federation::discovery::{ServerSigningKeys, VerifyKey},
+ signatures::Ed25519KeyPair,
+ DeviceId, MilliSecondsSinceUnixEpoch, ServerName, ServerSigningKeyId, UserId,
+};
-use crate::{Result, service, database::KeyValueDatabase, Error, utils, services};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
pub const COUNTER: &[u8] = b"c";
@@ -35,28 +39,24 @@ impl service::globals::Data for KeyValueDatabase {
// Return when *any* user changed his key
// TODO: only send for user they share a room with
- futures.push(
- self.todeviceid_events
- .watch_prefix(&userdeviceid_prefix),
- );
+ futures.push(self.todeviceid_events.watch_prefix(&userdeviceid_prefix));
futures.push(self.userroomid_joined.watch_prefix(&userid_prefix));
- futures.push(
- self.userroomid_invitestate
- .watch_prefix(&userid_prefix),
- );
+ futures.push(self.userroomid_invitestate.watch_prefix(&userid_prefix));
futures.push(self.userroomid_leftstate.watch_prefix(&userid_prefix));
futures.push(
self.userroomid_notificationcount
.watch_prefix(&userid_prefix),
);
- futures.push(
- self.userroomid_highlightcount
- .watch_prefix(&userid_prefix),
- );
+ futures.push(self.userroomid_highlightcount.watch_prefix(&userid_prefix));
// Events for rooms we are in
- for room_id in services().rooms.state_cache.rooms_joined(user_id).filter_map(|r| r.ok()) {
+ for room_id in services()
+ .rooms
+ .state_cache
+ .rooms_joined(user_id)
+ .filter_map(|r| r.ok())
+ {
let short_roomid = services()
.rooms
.short
@@ -75,15 +75,9 @@ impl service::globals::Data for KeyValueDatabase {
futures.push(self.pduid_pdu.watch_prefix(&short_roomid));
// EDUs
- futures.push(
- self.roomid_lasttypingupdate
- .watch_prefix(&roomid_bytes),
- );
+ futures.push(self.roomid_lasttypingupdate.watch_prefix(&roomid_bytes));
- futures.push(
- self.readreceiptid_readreceipt
- .watch_prefix(&roomid_prefix),
- );
+ futures.push(self.readreceiptid_readreceipt.watch_prefix(&roomid_prefix));
// Key changes
futures.push(self.keychangeid_userid.watch_prefix(&roomid_prefix));
@@ -110,10 +104,7 @@ impl service::globals::Data for KeyValueDatabase {
futures.push(self.keychangeid_userid.watch_prefix(&userid_prefix));
// One time keys
- futures.push(
- self.userid_lastonetimekeyupdate
- .watch_prefix(&userid_bytes),
- );
+ futures.push(self.userid_lastonetimekeyupdate.watch_prefix(&userid_bytes));
futures.push(Box::pin(services().globals.rotate.watch()));
@@ -238,10 +229,7 @@ impl service::globals::Data for KeyValueDatabase {
}
fn bump_database_version(&self, new_version: u64) -> Result<()> {
- self.global
- .insert(b"version", &new_version.to_be_bytes())?;
+ self.global.insert(b"version", &new_version.to_be_bytes())?;
Ok(())
}
-
-
}
diff --git a/src/database/key_value/key_backups.rs b/src/database/key_value/key_backups.rs
index 8171451..0738f73 100644
--- a/src/database/key_value/key_backups.rs
+++ b/src/database/key_value/key_backups.rs
@@ -1,8 +1,15 @@
use std::collections::BTreeMap;
-use ruma::{UserId, serde::Raw, api::client::{backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup}, error::ErrorKind}, RoomId};
+use ruma::{
+ api::client::{
+ backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup},
+ error::ErrorKind,
+ },
+ serde::Raw,
+ RoomId, UserId,
+};
-use crate::{Result, service, database::KeyValueDatabase, services, Error, utils};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
impl service::key_backups::Data for KeyValueDatabase {
fn create_backup(
@@ -118,11 +125,7 @@ impl service::key_backups::Data for KeyValueDatabase {
.transpose()
}
- fn get_backup(
- &self,
- user_id: &UserId,
- version: &str,
- ) -> Result<Option<Raw<BackupAlgorithm>>> {
+ fn get_backup(&self, user_id: &UserId, version: &str) -> Result<Option<Raw<BackupAlgorithm>>> {
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
@@ -322,12 +325,7 @@ impl service::key_backups::Data for KeyValueDatabase {
Ok(())
}
- fn delete_room_keys(
- &self,
- user_id: &UserId,
- version: &str,
- room_id: &RoomId,
- ) -> Result<()> {
+ fn delete_room_keys(&self, user_id: &UserId, version: &str, room_id: &RoomId) -> Result<()> {
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
diff --git a/src/database/key_value/media.rs b/src/database/key_value/media.rs
index f024487..de96ace 100644
--- a/src/database/key_value/media.rs
+++ b/src/database/key_value/media.rs
@@ -1,9 +1,16 @@
use ruma::api::client::error::ErrorKind;
-use crate::{database::KeyValueDatabase, service, Error, utils, Result};
+use crate::{database::KeyValueDatabase, service, utils, Error, Result};
impl service::media::Data for KeyValueDatabase {
- fn create_file_metadata(&self, mxc: String, width: u32, height: u32, content_disposition: Option<&str>, content_type: Option<&str>) -> Result<Vec<u8>> {
+ fn create_file_metadata(
+ &self,
+ mxc: String,
+ width: u32,
+ height: u32,
+ content_disposition: Option<&str>,
+ content_type: Option<&str>,
+ ) -> Result<Vec<u8>> {
let mut key = mxc.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(&width.to_be_bytes());
@@ -28,14 +35,23 @@ impl service::media::Data for KeyValueDatabase {
Ok(key)
}
- fn search_file_metadata(&self, mxc: String, width: u32, height: u32) -> Result<(Option<String>, Option<String>, Vec<u8>)> {
+ fn search_file_metadata(
+ &self,
+ mxc: String,
+ width: u32,
+ height: u32,
+ ) -> Result<(Option<String>, Option<String>, Vec<u8>)> {
let mut prefix = mxc.as_bytes().to_vec();
prefix.push(0xff);
prefix.extend_from_slice(&0_u32.to_be_bytes()); // Width = 0 if it's not a thumbnail
prefix.extend_from_slice(&0_u32.to_be_bytes()); // Height = 0 if it's not a thumbnail
prefix.push(0xff);
- let (key, _) = self.mediaid_file.scan_prefix(prefix).next().ok_or(Error::BadRequest(ErrorKind::NotFound, "Media not found"))?;
+ let (key, _) = self
+ .mediaid_file
+ .scan_prefix(prefix)
+ .next()
+ .ok_or(Error::BadRequest(ErrorKind::NotFound, "Media not found"))?;
let mut parts = key.rsplit(|&b| b == 0xff);
@@ -57,9 +73,7 @@ impl service::media::Data for KeyValueDatabase {
} else {
Some(
utils::string_from_bytes(content_disposition_bytes).map_err(|_| {
- Error::bad_database(
- "Content Disposition in mediaid_file is invalid unicode.",
- )
+ Error::bad_database("Content Disposition in mediaid_file is invalid unicode.")
})?,
)
};
diff --git a/src/database/key_value/pusher.rs b/src/database/key_value/pusher.rs
index b05e47b..15f4e26 100644
--- a/src/database/key_value/pusher.rs
+++ b/src/database/key_value/pusher.rs
@@ -1,6 +1,9 @@
-use ruma::{UserId, api::client::push::{set_pusher, get_pushers}};
+use ruma::{
+ api::client::push::{get_pushers, set_pusher},
+ UserId,
+};
-use crate::{service, database::KeyValueDatabase, Error, Result};
+use crate::{database::KeyValueDatabase, service, Error, Result};
impl service::pusher::Data for KeyValueDatabase {
fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::Pusher) -> Result<()> {
@@ -48,10 +51,7 @@ impl service::pusher::Data for KeyValueDatabase {
.collect()
}
- fn get_pusher_senderkeys<'a>(
- &'a self,
- sender: &UserId,
- ) -> Box<dyn Iterator<Item = Vec<u8>>> {
+ fn get_pusher_senderkeys<'a>(&'a self, sender: &UserId) -> Box<dyn Iterator<Item = Vec<u8>>> {
let mut prefix = sender.as_bytes().to_vec();
prefix.push(0xff);
diff --git a/src/database/key_value/rooms/alias.rs b/src/database/key_value/rooms/alias.rs
index 0aa8dd4..112d6eb 100644
--- a/src/database/key_value/rooms/alias.rs
+++ b/src/database/key_value/rooms/alias.rs
@@ -1,13 +1,9 @@
-use ruma::{RoomId, RoomAliasId, api::client::error::ErrorKind};
+use ruma::{api::client::error::ErrorKind, RoomAliasId, RoomId};
-use crate::{service, database::KeyValueDatabase, utils, Error, services, Result};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
impl service::rooms::alias::Data for KeyValueDatabase {
- fn set_alias(
- &self,
- alias: &RoomAliasId,
- room_id: &RoomId
- ) -> Result<()> {
+ fn set_alias(&self, alias: &RoomAliasId, room_id: &RoomId) -> Result<()> {
self.alias_roomid
.insert(alias.alias().as_bytes(), room_id.as_bytes())?;
let mut aliasid = room_id.as_bytes().to_vec();
@@ -17,10 +13,7 @@ impl service::rooms::alias::Data for KeyValueDatabase {
Ok(())
}
- fn remove_alias(
- &self,
- alias: &RoomAliasId,
- ) -> Result<()> {
+ fn remove_alias(&self, alias: &RoomAliasId) -> Result<()> {
if let Some(room_id) = self.alias_roomid.get(alias.alias().as_bytes())? {
let mut prefix = room_id.to_vec();
prefix.push(0xff);
@@ -38,10 +31,7 @@ impl service::rooms::alias::Data for KeyValueDatabase {
Ok(())
}
- fn resolve_local_alias(
- &self,
- alias: &RoomAliasId
- ) -> Result<Option<Box<RoomId>>> {
+ fn resolve_local_alias(&self, alias: &RoomAliasId) -> Result<Option<Box<RoomId>>> {
self.alias_roomid
.get(alias.alias().as_bytes())?
.map(|bytes| {
diff --git a/src/database/key_value/rooms/auth_chain.rs b/src/database/key_value/rooms/auth_chain.rs
index 49d3956..60057ac 100644
--- a/src/database/key_value/rooms/auth_chain.rs
+++ b/src/database/key_value/rooms/auth_chain.rs
@@ -1,6 +1,6 @@
use std::{collections::HashSet, mem::size_of, sync::Arc};
-use crate::{service, database::KeyValueDatabase, Result, utils};
+use crate::{database::KeyValueDatabase, service, utils, Result};
impl service::rooms::auth_chain::Data for KeyValueDatabase {
fn get_cached_eventid_authchain(&self, key: &[u64]) -> Result<Option<Arc<HashSet<u64>>>> {
@@ -12,14 +12,13 @@ impl service::rooms::auth_chain::Data for KeyValueDatabase {
// We only save auth chains for single events in the db
if key.len() == 1 {
// Check DB cache
- let chain = self.shorteventid_authchain
+ let chain = self
+ .shorteventid_authchain
.get(&key[0].to_be_bytes())?
.map(|chain| {
chain
.chunks_exact(size_of::<u64>())
- .map(|chunk| {
- utils::u64_from_bytes(chunk).expect("byte length is correct")
- })
+ .map(|chunk| utils::u64_from_bytes(chunk).expect("byte length is correct"))
.collect()
});
@@ -37,7 +36,6 @@ impl service::rooms::auth_chain::Data for KeyValueDatabase {
}
Ok(None)
-
}
fn cache_auth_chain(&self, key: Vec<u64>, auth_chain: Arc<HashSet<u64>>) -> Result<()> {
@@ -53,7 +51,10 @@ impl service::rooms::auth_chain::Data for KeyValueDatabase {
}
// Cache in RAM
- self.auth_chain_cache.lock().unwrap().insert(key, auth_chain);
+ self.auth_chain_cache
+ .lock()
+ .unwrap()
+ .insert(key, auth_chain);
Ok(())
}
diff --git a/src/database/key_value/rooms/directory.rs b/src/database/key_value/rooms/directory.rs
index 727004e..661c202 100644
--- a/src/database/key_value/rooms/directory.rs
+++ b/src/database/key_value/rooms/directory.rs
@@ -1,6 +1,6 @@
use ruma::RoomId;
-use crate::{service, database::KeyValueDatabase, utils, Error, Result};
+use crate::{database::KeyValueDatabase, service, utils, Error, Result};
impl service::rooms::directory::Data for KeyValueDatabase {
fn set_public(&self, room_id: &RoomId) -> Result<()> {
diff --git a/src/database/key_value/rooms/edus/mod.rs b/src/database/key_value/rooms/edus/mod.rs
index b5007f8..6c65291 100644
--- a/src/database/key_value/rooms/edus/mod.rs
+++ b/src/database/key_value/rooms/edus/mod.rs
@@ -1,7 +1,7 @@
mod presence;
-mod typing;
mod read_receipt;
+mod typing;
-use crate::{service, database::KeyValueDatabase};
+use crate::{database::KeyValueDatabase, service};
impl service::rooms::edus::Data for KeyValueDatabase {}
diff --git a/src/database/key_value/rooms/edus/presence.rs b/src/database/key_value/rooms/edus/presence.rs
index 1477c28..fdd51ce 100644
--- a/src/database/key_value/rooms/edus/presence.rs
+++ b/src/database/key_value/rooms/edus/presence.rs
@@ -1,8 +1,8 @@
use std::collections::HashMap;
-use ruma::{UserId, RoomId, events::presence::PresenceEvent, presence::PresenceState, UInt};
+use ruma::{events::presence::PresenceEvent, presence::PresenceState, RoomId, UInt, UserId};
-use crate::{service, database::KeyValueDatabase, utils, Error, services, Result};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
impl service::rooms::edus::presence::Data for KeyValueDatabase {
fn update_presence(
diff --git a/src/database/key_value/rooms/edus/read_receipt.rs b/src/database/key_value/rooms/edus/read_receipt.rs
index a12e265..c78f0f5 100644
--- a/src/database/key_value/rooms/edus/read_receipt.rs
+++ b/src/database/key_value/rooms/edus/read_receipt.rs
@@ -1,8 +1,10 @@
use std::mem;
-use ruma::{UserId, RoomId, events::receipt::ReceiptEvent, serde::Raw, signatures::CanonicalJsonObject};
+use ruma::{
+ events::receipt::ReceiptEvent, serde::Raw, signatures::CanonicalJsonObject, RoomId, UserId,
+};
-use crate::{database::KeyValueDatabase, service, utils, Error, services, Result};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
impl service::rooms::edus::read_receipt::Data for KeyValueDatabase {
fn readreceipt_update(
@@ -50,13 +52,15 @@ impl service::rooms::edus::read_receipt::Data for KeyValueDatabase {
&'a self,
room_id: &RoomId,
since: u64,
- ) -> Box<dyn Iterator<
- Item=Result<(
- Box<UserId>,
- u64,
- Raw<ruma::events::AnySyncEphemeralRoomEvent>,
- )>,
- >> {
+ ) -> Box<
+ dyn Iterator<
+ Item = Result<(
+ Box<UserId>,
+ u64,
+ Raw<ruma::events::AnySyncEphemeralRoomEvent>,
+ )>,
+ >,
+ > {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);
let prefix2 = prefix.clone();
@@ -64,42 +68,44 @@ impl service::rooms::edus::read_receipt::Data for KeyValueDatabase {
let mut first_possible_edu = prefix.clone();
first_possible_edu.extend_from_slice(&(since + 1).to_be_bytes()); // +1 so we don't send the event at since
- Box::new(self.readreceiptid_readreceipt
- .iter_from(&first_possible_edu, false)
- .take_while(move |(k, _)| k.starts_with(&prefix2))
- .map(move |(k, v)| {
- let count =
- utils::u64_from_bytes(&k[prefix.len()..prefix.len() + mem::size_of::<u64>()])
- .map_err(|_| Error::bad_database("Invalid readreceiptid count in db."))?;
- let user_id = UserId::parse(
- utils::string_from_bytes(&k[prefix.len() + mem::size_of::<u64>() + 1..])
- .map_err(|_| {
- Error::bad_database("Invalid readreceiptid userid bytes in db.")
- })?,
- )
+ Box::new(
+ self.readreceiptid_readreceipt
+ .iter_from(&first_possible_edu, false)
+ .take_while(move |(k, _)| k.starts_with(&prefix2))
+ .map(move |(k, v)| {
+ let count = utils::u64_from_bytes(
+ &k[prefix.len()..prefix.len() + mem::size_of::<u64>()],
+ )
+ .map_err(|_| Error::bad_database("Invalid readreceiptid count in db."))?;
+ let user_id = UserId::parse(
+ utils::string_from_bytes(&k[prefix.len() + mem::size_of::<u64>() + 1..])
+ .map_err(|_| {
+ Error::bad_database("Invalid readreceiptid userid bytes in db.")
+ })?,
+ )
.map_err(|_| Error::bad_database("Invalid readreceiptid userid in db."))?;
- let mut json = serde_json::from_slice::<CanonicalJsonObject>(&v).map_err(|_| {
- Error::bad_database("Read receipt in roomlatestid_roomlatest is invalid json.")
- })?;
- json.remove("room_id");
-
- Ok((
- user_id,
- count,
- Raw::from_json(
- serde_json::value::to_raw_value(&json).expect("json is valid raw value"),
- ),
- ))
- }))
+ let mut json =
+ serde_json::from_slice::<CanonicalJsonObject>(&v).map_err(|_| {
+ Error::bad_database(
+ "Read receipt in roomlatestid_roomlatest is invalid json.",
+ )
+ })?;
+ json.remove("room_id");
+
+ Ok((
+ user_id,
+ count,
+ Raw::from_json(
+ serde_json::value::to_raw_value(&json)
+ .expect("json is valid raw value"),
+ ),
+ ))
+ }),
+ )
}
- fn private_read_set(
- &self,
- room_id: &RoomId,
- user_id: &UserId,
- count: u64,
- ) -> Result<()> {
+ fn private_read_set(&self, room_id: &RoomId, user_id: &UserId, count: u64) -> Result<()> {
let mut key = room_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(user_id.as_bytes());
diff --git a/src/database/key_value/rooms/edus/typing.rs b/src/database/key_value/rooms/edus/typing.rs
index b7d3596..7b211e7 100644
--- a/src/database/key_value/rooms/edus/typing.rs
+++ b/src/database/key_value/rooms/edus/typing.rs
@@ -1,16 +1,11 @@
use std::collections::HashSet;
-use ruma::{UserId, RoomId};
+use ruma::{RoomId, UserId};
-use crate::{database::KeyValueDatabase, service, utils, Error, services, Result};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
impl service::rooms::edus::typing::Data for KeyValueDatabase {
- fn typing_add(
- &self,
- user_id: &UserId,
- room_id: &RoomId,
- timeout: u64,
- ) -> Result<()> {
+ fn typing_add(&self, user_id: &UserId, room_id: &RoomId, timeout: u64) -> Result<()> {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);
@@ -30,11 +25,7 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase {
Ok(())
}
- fn typing_remove(
- &self,
- user_id: &UserId,
- room_id: &RoomId,
- ) -> Result<()> {
+ fn typing_remove(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);
@@ -53,17 +44,16 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase {
}
if found_outdated {
- self.roomid_lasttypingupdate
- .insert(room_id.as_bytes(), &services().globals.next_count()?.to_be_bytes())?;
+ self.roomid_lasttypingupdate.insert(
+ room_id.as_bytes(),
+ &services().globals.next_count()?.to_be_bytes(),
+ )?;
}
Ok(())
}
- fn last_typing_update(
- &self,
- room_id: &RoomId,
- ) -> Result<u64> {
+ fn last_typing_update(&self, room_id: &RoomId) -> Result<u64> {
Ok(self
.roomid_lasttypingupdate
.get(room_id.as_bytes())?
@@ -76,10 +66,7 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase {
.unwrap_or(0))
}
- fn typings_all(
- &self,
- room_id: &RoomId,
- ) -> Result<HashSet<Box<UserId>>> {
+ fn typings_all(&self, room_id: &RoomId) -> Result<HashSet<Box<UserId>>> {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);
@@ -89,7 +76,7 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase {
let user_id = UserId::parse(utils::string_from_bytes(&user_id).map_err(|_| {
Error::bad_database("User ID in typingid_userid is invalid unicode.")
})?)
- .map_err(|_| Error::bad_database("User ID in typingid_userid is invalid."))?;
+ .map_err(|_| Error::bad_database("User ID in typingid_userid is invalid."))?;
user_ids.insert(user_id);
}
diff --git a/src/database/key_value/rooms/lazy_load.rs b/src/database/key_value/rooms/lazy_load.rs
index 133e1d0..a19d52c 100644
--- a/src/database/key_value/rooms/lazy_load.rs
+++ b/src/database/key_value/rooms/lazy_load.rs
@@ -1,6 +1,6 @@
-use ruma::{UserId, DeviceId, RoomId};
+use ruma::{DeviceId, RoomId, UserId};
-use crate::{service, database::KeyValueDatabase, Result};
+use crate::{database::KeyValueDatabase, service, Result};
impl service::rooms::lazy_loading::Data for KeyValueDatabase {
fn lazy_load_was_sent_before(
diff --git a/src/database/key_value/rooms/metadata.rs b/src/database/key_value/rooms/metadata.rs
index 72f6251..63a6b1a 100644
--- a/src/database/key_value/rooms/metadata.rs
+++ b/src/database/key_value/rooms/metadata.rs
@@ -1,6 +1,6 @@
use ruma::RoomId;
-use crate::{service, database::KeyValueDatabase, Result, services};
+use crate::{database::KeyValueDatabase, service, services, Result};
impl service::rooms::metadata::Data for KeyValueDatabase {
fn exists(&self, room_id: &RoomId) -> Result<bool> {
diff --git a/src/database/key_value/rooms/outlier.rs b/src/database/key_value/rooms/outlier.rs
index aa97544..2ecaadb 100644
--- a/src/database/key_value/rooms/outlier.rs
+++ b/src/database/key_value/rooms/outlier.rs
@@ -1,6 +1,6 @@
-use ruma::{EventId, signatures::CanonicalJsonObject};
+use ruma::{signatures::CanonicalJsonObject, EventId};
-use crate::{service, database::KeyValueDatabase, PduEvent, Error, Result};
+use crate::{database::KeyValueDatabase, service, Error, PduEvent, Result};
impl service::rooms::outlier::Data for KeyValueDatabase {
fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
diff --git a/src/database/key_value/rooms/pdu_metadata.rs b/src/database/key_value/rooms/pdu_metadata.rs
index f3ac414..76ec734 100644
--- a/src/database/key_value/rooms/pdu_metadata.rs
+++ b/src/database/key_value/rooms/pdu_metadata.rs
@@ -1,8 +1,8 @@
use std::sync::Arc;
-use ruma::{RoomId, EventId};
+use ruma::{EventId, RoomId};
-use crate::{service, database::KeyValueDatabase, Result};
+use crate::{database::KeyValueDatabase, service, Result};
impl service::rooms::pdu_metadata::Data for KeyValueDatabase {
fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()> {
diff --git a/src/database/key_value/rooms/search.rs b/src/database/key_value/rooms/search.rs
index 41df544..79e6a32 100644
--- a/src/database/key_value/rooms/search.rs
+++ b/src/database/key_value/rooms/search.rs
@@ -2,7 +2,7 @@ use std::mem::size_of;
use ruma::RoomId;
-use crate::{service, database::KeyValueDatabase, utils, Result, services};
+use crate::{database::KeyValueDatabase, service, services, utils, Result};
impl service::rooms::search::Data for KeyValueDatabase {
fn index_pdu<'a>(&self, shortroomid: u64, pdu_id: &[u8], message_body: String) -> Result<()> {
@@ -27,7 +27,9 @@ impl service::rooms::search::Data for KeyValueDatabase {
room_id: &RoomId,
search_string: &str,
) -> Result<Option<(Box<dyn Iterator<Item = Vec<u8>>>, Vec<String>)>> {
- let prefix = services().rooms.short
+ let prefix = services()
+ .rooms
+ .short
.get_shortroomid(room_id)?
.expect("room exists")
.to_be_bytes()
@@ -63,10 +65,10 @@ impl service::rooms::search::Data for KeyValueDatabase {
};
let mapped = common_elements.map(move |id| {
- let mut pduid = prefix_clone.clone();
- pduid.extend_from_slice(&id);
- pduid
- });
+ let mut pduid = prefix_clone.clone();
+ pduid.extend_from_slice(&id);
+ pduid
+ });
Ok(Some((Box::new(mapped), words)))
}
diff --git a/src/database/key_value/rooms/short.rs b/src/database/key_value/rooms/short.rs
index ecd12da..c022317 100644
--- a/src/database/key_value/rooms/short.rs
+++ b/src/database/key_value/rooms/short.rs
@@ -1,14 +1,11 @@
use std::sync::Arc;
-use ruma::{EventId, events::StateEventType, RoomId};
+use ruma::{events::StateEventType, EventId, RoomId};
-use crate::{Result, database::KeyValueDatabase, service, utils, Error, services};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
impl service::rooms::short::Data for KeyValueDatabase {
- fn get_or_create_shorteventid(
- &self,
- event_id: &EventId,
- ) -> Result<u64> {
+ fn get_or_create_shorteventid(&self, event_id: &EventId) -> Result<u64> {
if let Some(short) = self.eventidshort_cache.lock().unwrap().get_mut(event_id) {
return Ok(*short);
}
@@ -180,10 +177,7 @@ impl service::rooms::short::Data for KeyValueDatabase {
}
/// Returns (shortstatehash, already_existed)
- fn get_or_create_shortstatehash(
- &self,
- state_hash: &[u8],
- ) -> Result<(u64, bool)> {
+ fn get_or_create_shortstatehash(&self, state_hash: &[u8]) -> Result<(u64, bool)> {
Ok(match self.statehash_shortstatehash.get(state_hash)? {
Some(shortstatehash) => (
utils::u64_from_bytes(&shortstatehash)
@@ -209,10 +203,7 @@ impl service::rooms::short::Data for KeyValueDatabase {
.transpose()
}
- fn get_or_create_shortroomid(
- &self,
- room_id: &RoomId,
- ) -> Result<u64> {
+ fn get_or_create_shortroomid(&self, room_id: &RoomId) -> Result<u64> {
Ok(match self.roomid_shortroomid.get(room_id.as_bytes())? {
Some(short) => utils::u64_from_bytes(&short)
.map_err(|_| Error::bad_database("Invalid shortroomid in db."))?,
diff --git a/src/database/key_value/rooms/state.rs b/src/database/key_value/rooms/state.rs
index 90ac0d5..80a7458 100644
--- a/src/database/key_value/rooms/state.rs
+++ b/src/database/key_value/rooms/state.rs
@@ -1,10 +1,10 @@
-use ruma::{RoomId, EventId};
-use tokio::sync::MutexGuard;
-use std::sync::Arc;
+use ruma::{EventId, RoomId};
use std::collections::HashSet;
use std::fmt::Debug;
+use std::sync::Arc;
+use tokio::sync::MutexGuard;
-use crate::{service, database::KeyValueDatabase, utils, Error, Result};
+use crate::{database::KeyValueDatabase, service, utils, Error, Result};
impl service::rooms::state::Data for KeyValueDatabase {
fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>> {
@@ -17,9 +17,12 @@ impl service::rooms::state::Data for KeyValueDatabase {
})
}
- fn set_room_state(&self, room_id: &RoomId, new_shortstatehash: u64,
+ fn set_room_state(
+ &self,
+ room_id: &RoomId,
+ new_shortstatehash: u64,
_mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
- ) -> Result<()> {
+ ) -> Result<()> {
self.roomid_shortstatehash
.insert(room_id.as_bytes(), &new_shortstatehash.to_be_bytes())?;
Ok(())
diff --git a/src/database/key_value/rooms/state_accessor.rs b/src/database/key_value/rooms/state_accessor.rs
index 4d5bd4a..39c261f 100644
--- a/src/database/key_value/rooms/state_accessor.rs
+++ b/src/database/key_value/rooms/state_accessor.rs
@@ -1,13 +1,18 @@
-use std::{collections::{BTreeMap, HashMap}, sync::Arc};
+use std::{
+ collections::{BTreeMap, HashMap},
+ sync::Arc,
+};
-use crate::{database::KeyValueDatabase, service, PduEvent, Error, utils, Result, services};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, PduEvent, Result};
use async_trait::async_trait;
-use ruma::{EventId, events::StateEventType, RoomId};
+use ruma::{events::StateEventType, EventId, RoomId};
#[async_trait]
impl service::rooms::state_accessor::Data for KeyValueDatabase {
async fn state_full_ids(&self, shortstatehash: u64) -> Result<BTreeMap<u64, Arc<EventId>>> {
- let full_state = services().rooms.state_compressor
+ let full_state = services()
+ .rooms
+ .state_compressor
.load_shortstatehash_info(shortstatehash)?
.pop()
.expect("there is always one layer")
@@ -15,7 +20,10 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
let mut result = BTreeMap::new();
let mut i = 0;
for compressed in full_state.into_iter() {
- let parsed = services().rooms.state_compressor.parse_compressed_state_event(compressed)?;
+ let parsed = services()
+ .rooms
+ .state_compressor
+ .parse_compressed_state_event(compressed)?;
result.insert(parsed.0, parsed.1);
i += 1;
@@ -30,7 +38,9 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
&self,
shortstatehash: u64,
) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>> {
- let full_state = services().rooms.state_compressor
+ let full_state = services()
+ .rooms
+ .state_compressor
.load_shortstatehash_info(shortstatehash)?
.pop()
.expect("there is always one layer")
@@ -39,7 +49,10 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
let mut result = HashMap::new();
let mut i = 0;
for compressed in full_state {
- let (_, eventid) = services().rooms.state_compressor.parse_compressed_state_event(compressed)?;
+ let (_, eventid) = services()
+ .rooms
+ .state_compressor
+ .parse_compressed_state_event(compressed)?;
if let Some(pdu) = services().rooms.timeline.get_pdu(&eventid)? {
result.insert(
(
@@ -69,11 +82,17 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
event_type: &StateEventType,
state_key: &str,
) -> Result<Option<Arc<EventId>>> {
- let shortstatekey = match services().rooms.short.get_shortstatekey(event_type, state_key)? {
+ let shortstatekey = match services()
+ .rooms
+ .short
+ .get_shortstatekey(event_type, state_key)?
+ {
Some(s) => s,
None => return Ok(None),
};
- let full_state = services().rooms.state_compressor
+ let full_state = services()
+ .rooms
+ .state_compressor
.load_shortstatehash_info(shortstatehash)?
.pop()
.expect("there is always one layer")
@@ -82,7 +101,10 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
.into_iter()
.find(|bytes| bytes.starts_with(&shortstatekey.to_be_bytes()))
.and_then(|compressed| {
- services().rooms.state_compressor.parse_compressed_state_event(compressed)
+ services()
+ .rooms
+ .state_compressor
+ .parse_compressed_state_event(compressed)
.ok()
.map(|(_, id)| id)
}))
@@ -96,7 +118,9 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
state_key: &str,
) -> Result<Option<Arc<PduEvent>>> {
self.state_get_id(shortstatehash, event_type, state_key)?
- .map_or(Ok(None), |event_id| services().rooms.timeline.get_pdu(&event_id))
+ .map_or(Ok(None), |event_id| {
+ services().rooms.timeline.get_pdu(&event_id)
+ })
}
/// Returns the state hash for this pdu.
@@ -122,7 +146,9 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
&self,
room_id: &RoomId,
) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>> {
- if let Some(current_shortstatehash) = services().rooms.state.get_room_shortstatehash(room_id)? {
+ if let Some(current_shortstatehash) =
+ services().rooms.state.get_room_shortstatehash(room_id)?
+ {
self.state_full(current_shortstatehash).await
} else {
Ok(HashMap::new())
@@ -136,7 +162,9 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
event_type: &StateEventType,
state_key: &str,
) -> Result<Option<Arc<EventId>>> {
- if let Some(current_shortstatehash) = services().rooms.state.get_room_shortstatehash(room_id)? {
+ if let Some(current_shortstatehash) =
+ services().rooms.state.get_room_shortstatehash(room_id)?
+ {
self.state_get_id(current_shortstatehash, event_type, state_key)
} else {
Ok(None)
@@ -150,7 +178,9 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
event_type: &StateEventType,
state_key: &str,
) -> Result<Option<Arc<PduEvent>>> {
- if let Some(current_shortstatehash) = services().rooms.state.get_room_shortstatehash(room_id)? {
+ if let Some(current_shortstatehash) =
+ services().rooms.state.get_room_shortstatehash(room_id)?
+ {
self.state_get(current_shortstatehash, event_type, state_key)
} else {
Ok(None)
diff --git a/src/database/key_value/rooms/state_cache.rs b/src/database/key_value/rooms/state_cache.rs
index 4043bc4..4ca6ac4 100644
--- a/src/database/key_value/rooms/state_cache.rs
+++ b/src/database/key_value/rooms/state_cache.rs
@@ -1,9 +1,13 @@
use std::{collections::HashSet, sync::Arc};
use regex::Regex;
-use ruma::{UserId, RoomId, events::{AnyStrippedStateEvent, AnySyncStateEvent}, serde::Raw, ServerName};
+use ruma::{
+ events::{AnyStrippedStateEvent, AnySyncStateEvent},
+ serde::Raw,
+ RoomId, ServerName, UserId,
+};
-use crate::{service, database::KeyValueDatabase, services, Result, Error, utils};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
impl service::rooms::state_cache::Data for KeyValueDatabase {
fn mark_as_once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
@@ -31,8 +35,13 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
Ok(())
}
-
- fn mark_as_invited(&self, user_id: &UserId, room_id: &RoomId, last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>) -> Result<()> {
+
+ fn mark_as_invited(
+ &self,
+ user_id: &UserId,
+ room_id: &RoomId,
+ last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>,
+ ) -> Result<()> {
let mut roomuser_id = room_id.as_bytes().to_vec();
roomuser_id.push(0xff);
roomuser_id.extend_from_slice(user_id.as_bytes());
@@ -46,8 +55,10 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
&serde_json::to_vec(&last_state.unwrap_or_default())
.expect("state to bytes always works"),
)?;
- self.roomuserid_invitecount
- .insert(&roomuser_id, &services().globals.next_count()?.to_be_bytes())?;
+ self.roomuserid_invitecount.insert(
+ &roomuser_id,
+ &services().globals.next_count()?.to_be_bytes(),
+ )?;
self.userroomid_joined.remove(&userroom_id)?;
self.roomuserid_joined.remove(&roomuser_id)?;
self.userroomid_leftstate.remove(&userroom_id)?;
@@ -69,8 +80,10 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
&userroom_id,
&serde_json::to_vec(&Vec::<Raw<AnySyncStateEvent>>::new()).unwrap(),
)?; // TODO
- self.roomuserid_leftcount
- .insert(&roomuser_id, &services().globals.next_count()?.to_be_bytes())?;
+ self.roomuserid_leftcount.insert(
+ &roomuser_id,
+ &services().globals.next_count()?.to_be_bytes(),
+ )?;
self.userroomid_joined.remove(&userroom_id)?;
self.roomuserid_joined.remove(&roomuser_id)?;
self.userroomid_invitestate.remove(&userroom_id)?;
@@ -324,21 +337,25 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);
- Box::new(self.roomuseroncejoinedids
- .scan_prefix(prefix)
- .map(|(key, _)| {
- UserId::parse(
- utils::string_from_bytes(
- key.rsplit(|&b| b == 0xff)
- .next()
- .expect("rsplit always returns an element"),
+ Box::new(
+ self.roomuseroncejoinedids
+ .scan_prefix(prefix)
+ .map(|(key, _)| {
+ UserId::parse(
+ utils::string_from_bytes(
+ key.rsplit(|&b| b == 0xff)
+ .next()
+ .expect("rsplit always returns an element"),
+ )
+ .map_err(|_| {
+ Error::bad_database(
+ "User ID in room_useroncejoined is invalid unicode.",
+ )
+ })?,
)
- .map_err(|_| {
- Error::bad_database("User ID in room_useroncejoined is invalid unicode.")
- })?,
- )
- .map_err(|_| Error::bad_database("User ID in room_useroncejoined is invalid."))
- }))
+ .map_err(|_| Error::bad_database("User ID in room_useroncejoined is invalid."))
+ }),
+ )
}
/// Returns an iterator over all invited members of a room.
@@ -350,21 +367,23 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);
- Box::new(self.roomuserid_invitecount
- .scan_prefix(prefix)
- .map(|(key, _)| {
- UserId::parse(
- utils::string_from_bytes(
- key.rsplit(|&b| b == 0xff)
- .next()
- .expect("rsplit always returns an element"),
+ Box::new(
+ self.roomuserid_invitecount
+ .scan_prefix(prefix)
+ .map(|(key, _)| {
+ UserId::parse(
+ utils::string_from_bytes(
+ key.rsplit(|&b| b == 0xff)
+ .next()
+ .expect("rsplit always returns an element"),
+ )
+ .map_err(|_| {
+ Error::bad_database("User ID in roomuserid_invited is invalid unicode.")
+ })?,
)
- .map_err(|_| {
- Error::bad_database("User ID in roomuserid_invited is invalid unicode.")
- })?,
- )
- .map_err(|_| Error::bad_database("User ID in roomuserid_invited is invalid."))
- }))
+ .map_err(|_| Error::bad_database("User ID in roomuserid_invited is invalid."))
+ }),
+ )
}
#[tracing::instrument(skip(self))]
@@ -403,21 +422,23 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
&'a self,
user_id: &UserId,
) -> Box<dyn Iterator<Item = Result<Box<RoomId>>> + 'a> {
- Box::new(self.userroomid_joined
- .scan_prefix(user_id.as_bytes().to_vec())
- .map(|(key, _)| {
- RoomId::parse(
- utils::string_from_bytes(
- key.rsplit(|&b| b == 0xff)
- .next()
- .expect("rsplit always returns an element"),
+ Box::new(
+ self.userroomid_joined
+ .scan_prefix(user_id.as_bytes().to_vec())
+ .map(|(key, _)| {
+ RoomId::parse(
+ utils::string_from_bytes(
+ key.rsplit(|&b| b == 0xff)
+ .next()
+ .expect("rsplit always returns an element"),
+ )
+ .map_err(|_| {
+ Error::bad_database("Room ID in userroomid_joined is invalid unicode.")
+ })?,
)
- .map_err(|_| {
- Error::bad_database("Room ID in userroomid_joined is invalid unicode.")
- })?,
- )
- .map_err(|_| Error::bad_database("Room ID in userroomid_joined is invalid."))
- }))
+ .map_err(|_| Error::bad_database("Room ID in userroomid_joined is invalid."))
+ }),
+ )
}
/// Returns an iterator over all rooms a user was invited to.
@@ -429,26 +450,31 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);
- Box::new(self.userroomid_invitestate
- .scan_prefix(prefix)
- .map(|(key, state)| {
- let room_id = RoomId::parse(
- utils::string_from_bytes(
- key.rsplit(|&b| b == 0xff)
- .next()
- .expect("rsplit always returns an element"),
+ Box::new(
+ self.userroomid_invitestate
+ .scan_prefix(prefix)
+ .map(|(key, state)| {
+ let room_id = RoomId::parse(
+ utils::string_from_bytes(
+ key.rsplit(|&b| b == 0xff)
+ .next()
+ .expect("rsplit always returns an element"),
+ )
+ .map_err(|_| {
+ Error::bad_database("Room ID in userroomid_invited is invalid unicode.")
+ })?,
)
.map_err(|_| {
- Error::bad_database("Room ID in userroomid_invited is invalid unicode.")
- })?,
- )
- .map_err(|_| Error::bad_database("Room ID in userroomid_invited is invalid."))?;
+ Error::bad_database("Room ID in userroomid_invited is invalid.")
+ })?;
- let state = serde_json::from_slice(&state)
- .map_err(|_| Error::bad_database("Invalid state in userroomid_invitestate."))?;
+ let state = serde_json::from_slice(&state).map_err(|_| {
+ Error::bad_database("Invalid state in userroomid_invitestate.")
+ })?;
- Ok((room_id, state))
- }))
+ Ok((room_id, state))
+ }),
+ )
}
#[tracing::instrument(skip(self))]
@@ -502,26 +528,31 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);
- Box::new(self.userroomid_leftstate
- .scan_prefix(prefix)
- .map(|(key, state)| {
- let room_id = RoomId::parse(
- utils::string_from_bytes(
- key.rsplit(|&b| b == 0xff)
- .next()
- .expect("rsplit always returns an element"),
+ Box::new(
+ self.userroomid_leftstate
+ .scan_prefix(prefix)
+ .map(|(key, state)| {
+ let room_id = RoomId::parse(
+ utils::string_from_bytes(
+ key.rsplit(|&b| b == 0xff)
+ .next()
+ .expect("rsplit always returns an element"),
+ )
+ .map_err(|_| {
+ Error::bad_database("Room ID in userroomid_invited is invalid unicode.")
+ })?,
)
.map_err(|_| {
- Error::bad_database("Room ID in userroomid_invited is invalid unicode.")
- })?,
- )
- .map_err(|_| Error::bad_database("Room ID in userroomid_invited is invalid."))?;
+ Error::bad_database("Room ID in userroomid_invited is invalid.")
+ })?;
- let state = serde_json::from_slice(&state)
- .map_err(|_| Error::bad_database("Invalid state in userroomid_leftstate."))?;
+ let state = serde_json::from_slice(&state).map_err(|_| {
+ Error::bad_database("Invalid state in userroomid_leftstate.")
+ })?;
- Ok((room_id, state))
- }))
+ Ok((room_id, state))
+ }),
+ )
}
#[tracing::instrument(skip(self))]
diff --git a/src/database/key_value/rooms/state_compressor.rs b/src/database/key_value/rooms/state_compressor.rs
index aee1890..d0a9be4 100644
--- a/src/database/key_value/rooms/state_compressor.rs
+++ b/src/database/key_value/rooms/state_compressor.rs
@@ -1,6 +1,10 @@
use std::{collections::HashSet, mem::size_of};
-use crate::{service::{self, rooms::state_compressor::data::StateDiff}, database::KeyValueDatabase, Error, utils, Result};
+use crate::{
+ database::KeyValueDatabase,
+ service::{self, rooms::state_compressor::data::StateDiff},
+ utils, Error, Result,
+};
impl service::rooms::state_compressor::Data for KeyValueDatabase {
fn get_statediff(&self, shortstatehash: u64) -> Result<StateDiff> {
@@ -10,11 +14,7 @@ impl service::rooms::state_compressor::Data for KeyValueDatabase {
.ok_or_else(|| Error::bad_database("State hash does not exist"))?;
let parent =
utils::u64_from_bytes(&value[0..size_of::<u64>()]).expect("bytes have right length");
- let parent = if parent != 0 {
- Some(parent)
- } else {
- None
- };
+ let parent = if parent != 0 { Some(parent) } else { None };
let mut add_mode = true;
let mut added = HashSet::new();
@@ -35,7 +35,11 @@ impl service::rooms::state_compressor::Data for KeyValueDatabase {
i += 2 * size_of::<u64>();
}
- Ok(StateDiff { parent, added, removed })
+ Ok(StateDiff {
+ parent,
+ added,
+ removed,
+ })
}
fn save_statediff(&self, shortstatehash: u64, diff: StateDiff) -> Result<()> {
diff --git a/src/database/key_value/rooms/timeline.rs b/src/database/key_value/rooms/timeline.rs
index 1723186..5d684a1 100644
--- a/src/database/key_value/rooms/timeline.rs
+++ b/src/database/key_value/rooms/timeline.rs
@@ -1,13 +1,17 @@
use std::{collections::hash_map, mem::size_of, sync::Arc};
-use ruma::{UserId, RoomId, api::client::error::ErrorKind, EventId, signatures::CanonicalJsonObject};
+use ruma::{
+ api::client::error::ErrorKind, signatures::CanonicalJsonObject, EventId, RoomId, UserId,
+};
use tracing::error;
-use crate::{service, database::KeyValueDatabase, utils, Error, PduEvent, Result, services};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, PduEvent, Result};
impl service::rooms::timeline::Data for KeyValueDatabase {
fn first_pdu_in_room(&self, room_id: &RoomId) -> Result<Option<Arc<PduEvent>>> {
- let prefix = services().rooms.short
+ let prefix = services()
+ .rooms
+ .short
.get_shortroomid(room_id)?
.expect("room exists")
.to_be_bytes()
@@ -82,10 +86,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
}
/// Returns the json of a pdu.
- fn get_non_outlier_pdu_json(
- &self,
- event_id: &EventId,
- ) -> Result<Option<CanonicalJsonObject>> {
+ fn get_non_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
self.eventid_pduid
.get(event_id.as_bytes())?
.map(|pduid| {
@@ -187,10 +188,17 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
.map_err(|_| Error::bad_database("PDU has invalid count bytes."))
}
- fn append_pdu(&self, pdu_id: &[u8], pdu: &PduEvent, json: &CanonicalJsonObject, count: u64) -> Result<()> {
+ fn append_pdu(
+ &self,
+ pdu_id: &[u8],
+ pdu: &PduEvent,
+ json: &CanonicalJsonObject,
+ count: u64,
+ ) -> Result<()> {
self.pduid_pdu.insert(
pdu_id,
- &serde_json::to_vec(json).expect("CanonicalJsonObject is always a valid"))?;
+ &serde_json::to_vec(json).expect("CanonicalJsonObject is always a valid"),
+ )?;
self.lasttimelinecount_cache
.lock()
@@ -209,7 +217,8 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
if self.pduid_pdu.get(pdu_id)?.is_some() {
self.pduid_pdu.insert(
pdu_id,
- &serde_json::to_vec(pdu).expect("CanonicalJsonObject is always a valid"))?;
+ &serde_json::to_vec(pdu).expect("CanonicalJsonObject is always a valid"),
+ )?;
Ok(())
} else {
Err(Error::BadRequest(
@@ -227,7 +236,9 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
room_id: &RoomId,
since: u64,
) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>> {
- let prefix = services().rooms.short
+ let prefix = services()
+ .rooms
+ .short
.get_shortroomid(room_id)?
.expect("room exists")
.to_be_bytes()
@@ -239,18 +250,19 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
let user_id = user_id.to_owned();
- Ok(Box::new(self
- .pduid_pdu
- .iter_from(&first_pdu_id, false)
- .take_while(move |(k, _)| k.starts_with(&prefix))
- .map(move |(pdu_id, v)| {
- let mut pdu = serde_json::from_slice::<PduEvent>(&v)
- .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
- if pdu.sender != user_id {
- pdu.remove_transaction_id()?;
- }
- Ok((pdu_id, pdu))
- })))
+ Ok(Box::new(
+ self.pduid_pdu
+ .iter_from(&first_pdu_id, false)
+ .take_while(move |(k, _)| k.starts_with(&prefix))
+ .map(move |(pdu_id, v)| {
+ let mut pdu = serde_json::from_slice::<PduEvent>(&v)
+ .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
+ if pdu.sender != user_id {
+ pdu.remove_transaction_id()?;
+ }
+ Ok((pdu_id, pdu))
+ }),
+ ))
}
/// Returns an iterator over all events and their tokens in a room that happened before the
@@ -262,7 +274,9 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
until: u64,
) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>> {
// Create the first part of the full pdu id
- let prefix = services().rooms.short
+ let prefix = services()
+ .rooms
+ .short
.get_shortroomid(room_id)?
.expect("room exists")
.to_be_bytes()
@@ -275,18 +289,19 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
let user_id = user_id.to_owned();
- Ok(Box::new(self
- .pduid_pdu
- .iter_from(current, true)
- .take_while(move |(k, _)| k.starts_with(&prefix))
- .map(move |(pdu_id, v)| {
- let mut pdu = serde_json::from_slice::<PduEvent>(&v)
- .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
- if pdu.sender != user_id {
- pdu.remove_transaction_id()?;
- }
- Ok((pdu_id, pdu))
- })))
+ Ok(Box::new(
+ self.pduid_pdu
+ .iter_from(current, true)
+ .take_while(move |(k, _)| k.starts_with(&prefix))
+ .map(move |(pdu_id, v)| {
+ let mut pdu = serde_json::from_slice::<PduEvent>(&v)
+ .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
+ if pdu.sender != user_id {
+ pdu.remove_transaction_id()?;
+ }
+ Ok((pdu_id, pdu))
+ }),
+ ))
}
fn pdus_after<'a>(
@@ -296,7 +311,9 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
from: u64,
) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>> {
// Create the first part of the full pdu id
- let prefix = services().rooms.short
+ let prefix = services()
+ .rooms
+ .short
.get_shortroomid(room_id)?
.expect("room exists")
.to_be_bytes()
@@ -309,21 +326,27 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
let user_id = user_id.to_owned();
- Ok(Box::new(self
- .pduid_pdu
- .iter_from(current, false)
- .take_while(move |(k, _)| k.starts_with(&prefix))
- .map(move |(pdu_id, v)| {
- let mut pdu = serde_json::from_slice::<PduEvent>(&v)
- .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
- if pdu.sender != user_id {
- pdu.remove_transaction_id()?;
- }
- Ok((pdu_id, pdu))
- })))
+ Ok(Box::new(
+ self.pduid_pdu
+ .iter_from(current, false)
+ .take_while(move |(k, _)| k.starts_with(&prefix))
+ .map(move |(pdu_id, v)| {
+ let mut pdu = serde_json::from_slice::<PduEvent>(&v)
+ .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
+ if pdu.sender != user_id {
+ pdu.remove_transaction_id()?;
+ }
+ Ok((pdu_id, pdu))
+ }),
+ ))
}
- fn increment_notification_counts(&self, room_id: &RoomId, notifies: Vec<Box<UserId>>, highlights: Vec<Box<UserId>>) -> Result<()> {
+ fn increment_notification_counts(
+ &self,
+ room_id: &RoomId,
+ notifies: Vec<Box<UserId>>,
+ highlights: Vec<Box<UserId>>,
+ ) -> Result<()> {
let notifies_batch = Vec::new();
let highlights_batch = Vec::new();
for user in notifies {
diff --git a/src/database/key_value/rooms/user.rs b/src/database/key_value/rooms/user.rs
index 3759bda..78c78e1 100644
--- a/src/database/key_value/rooms/user.rs
+++ b/src/database/key_value/rooms/user.rs
@@ -1,6 +1,6 @@
-use ruma::{UserId, RoomId};
+use ruma::{RoomId, UserId};
-use crate::{service, database::KeyValueDatabase, utils, Error, Result, services};
+use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
impl service::rooms::user::Data for KeyValueDatabase {
fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
@@ -50,7 +50,11 @@ impl service::rooms::user::Data for KeyValueDatabase {
token: u64,
shortstatehash: u64,
) -> Result<()> {
- let shortroomid = services().rooms.short.get_shortroomid(room_id)?.expect("room exists");
+ let shortroomid = services()
+ .rooms
+ .short
+ .get_shortroomid(room_id)?
+ .expect("room exists");
let mut key = shortroomid.to_be_bytes().to_vec();
key.extend_from_slice(&token.to_be_bytes());
@@ -60,7 +64,11 @@ impl service::rooms::user::Data for KeyValueDatabase {
}
fn get_token_shortstatehash(&self, room_id: &RoomId, token: u64) -> Result<Option<u64>> {
- let shortroomid = services().rooms.short.get_shortroomid(room_id)?.expect("room exists");
+ let shortroomid = services()
+ .rooms
+ .short
+ .get_shortroomid(room_id)?
+ .expect("room exists");
let mut key = shortroomid.to_be_bytes().to_vec();
key.extend_from_slice(&token.to_be_bytes());
@@ -102,13 +110,15 @@ impl service::rooms::user::Data for KeyValueDatabase {
});
// We use the default compare function because keys are sorted correctly (not reversed)
- Ok(Box::new(Box::new(utils::common_elements(iterators, Ord::cmp)
- .expect("users is not empty")
- .map(|bytes| {
- RoomId::parse(utils::string_from_bytes(&*bytes).map_err(|_| {
- Error::bad_database("Invalid RoomId bytes in userroomid_joined")
- })?)
- .map_err(|_| Error::bad_database("Invalid RoomId in userroomid_joined."))
- }))))
+ Ok(Box::new(Box::new(
+ utils::common_elements(iterators, Ord::cmp)
+ .expect("users is not empty")
+ .map(|bytes| {
+ RoomId::parse(utils::string_from_bytes(&*bytes).map_err(|_| {
+ Error::bad_database("Invalid RoomId bytes in userroomid_joined")
+ })?)
+ .map_err(|_| Error::bad_database("Invalid RoomId in userroomid_joined."))
+ }),
+ )))
}
}
diff --git a/src/database/key_value/transaction_ids.rs b/src/database/key_value/transaction_ids.rs
index a63b3c5..2ea6ad4 100644
--- a/src/database/key_value/transaction_ids.rs
+++ b/src/database/key_value/transaction_ids.rs
@@ -1,6 +1,6 @@
-use ruma::{UserId, DeviceId, TransactionId};
+use ruma::{DeviceId, TransactionId, UserId};
-use crate::{service, database::KeyValueDatabase, Result};
+use crate::{database::KeyValueDatabase, service, Result};
impl service::transaction_ids::Data for KeyValueDatabase {
fn add_txnid(
diff --git a/src/database/key_value/uiaa.rs b/src/database/key_value/uiaa.rs
index cf242de..8a9f176 100644
--- a/src/database/key_value/uiaa.rs
+++ b/src/database/key_value/uiaa.rs
@@ -1,4 +1,8 @@
-use ruma::{UserId, DeviceId, signatures::CanonicalJsonValue, api::client::{uiaa::UiaaInfo, error::ErrorKind}};
+use ruma::{
+ api::client::{error::ErrorKind, uiaa::UiaaInfo},
+ signatures::CanonicalJsonValue,
+ DeviceId, UserId,
+};
use crate::{database::KeyValueDatabase, service, Error, Result};
diff --git a/src/database/key_value/users.rs b/src/database/key_value/users.rs
index 55a518d..15699a1 100644
--- a/src/database/key_value/users.rs
+++ b/src/database/key_value/users.rs
@@ -1,9 +1,20 @@
-use std::{mem::size_of, collections::BTreeMap};
-
-use ruma::{api::client::{filter::IncomingFilterDefinition, error::ErrorKind, device::Device}, UserId, RoomAliasId, MxcUri, DeviceId, MilliSecondsSinceUnixEpoch, DeviceKeyId, encryption::{OneTimeKey, CrossSigningKey, DeviceKeys}, serde::Raw, events::{AnyToDeviceEvent, StateEventType}, DeviceKeyAlgorithm, UInt};
+use std::{collections::BTreeMap, mem::size_of};
+
+use ruma::{
+ api::client::{device::Device, error::ErrorKind, filter::IncomingFilterDefinition},
+ encryption::{CrossSigningKey, DeviceKeys, OneTimeKey},
+ events::{AnyToDeviceEvent, StateEventType},
+ serde::Raw,
+ DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MilliSecondsSinceUnixEpoch, MxcUri, RoomAliasId,
+ UInt, UserId,
+};
use tracing::warn;
-use crate::{service::{self, users::clean_signatures}, database::KeyValueDatabase, Error, utils, services, Result};
+use crate::{
+ database::KeyValueDatabase,
+ service::{self, users::clean_signatures},
+ services, utils, Error, Result,
+};
impl service::users::Data for KeyValueDatabase {
/// Check if a user has an account on this homeserver.
@@ -274,18 +285,21 @@ impl service::users::Data for KeyValueDatabase {
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);
// All devices have metadata
- Box::new(self.userdeviceid_metadata
- .scan_prefix(prefix)
- .map(|(bytes, _)| {
- Ok(utils::string_from_bytes(
- bytes
- .rsplit(|&b| b == 0xff)
- .next()
- .ok_or_else(|| Error::bad_database("UserDevice ID in db is invalid."))?,
- )
- .map_err(|_| Error::bad_database("Device ID in userdeviceid_metadata is invalid."))?
- .into())
- }))
+ Box::new(
+ self.userdeviceid_metadata
+ .scan_prefix(prefix)
+ .map(|(bytes, _)| {
+ Ok(utils::string_from_bytes(
+ bytes.rsplit(|&b| b == 0xff).next().ok_or_else(|| {
+ Error::bad_database("UserDevice ID in db is invalid.")
+ })?,
+ )
+ .map_err(|_| {
+ Error::bad_database("Device ID in userdeviceid_metadata is invalid.")
+ })?
+ .into())
+ }),
+ )
}
/// Replaces the access token of one device.
@@ -341,8 +355,10 @@ impl service::users::Data for KeyValueDatabase {
&serde_json::to_vec(&one_time_key_value).expect("OneTimeKey::to_vec always works"),
)?;
- self.userid_lastonetimekeyupdate
- .insert(user_id.as_bytes(), &services().globals.next_count()?.to_be_bytes())?;
+ self.userid_lastonetimekeyupdate.insert(
+ user_id.as_bytes(),
+ &services().globals.next_count()?.to_be_bytes(),
+ )?;
Ok(())
}
@@ -372,8 +388,10 @@ impl service::users::Data for KeyValueDatabase {
prefix.extend_from_slice(key_algorithm.as_ref().as_bytes());
prefix.push(b':');
- self.userid_lastonetimekeyupdate
- .insert(user_id.as_bytes(), &services().globals.next_count()?.to_be_bytes())?;
+ self.userid_lastonetimekeyupdate.insert(
+ user_id.as_bytes(),
+ &services().globals.next_count()?.to_be_bytes(),
+ )?;
self.onetimekeyid_onetimekeys
.scan_prefix(prefix)
@@ -617,38 +635,47 @@ impl service::users::Data for KeyValueDatabase {
let to = to.unwrap_or(u64::MAX);
- Box::new(self.keychangeid_userid
- .iter_from(&start, false)
- .take_while(move |(k, _)| {
- k.starts_with(&prefix)
- && if let Some(current) = k.splitn(2, |&b| b == 0xff).nth(1) {
- if let Ok(c) = utils::u64_from_bytes(current) {
- c <= to
+ Box::new(
+ self.keychangeid_userid
+ .iter_from(&start, false)
+ .take_while(move |(k, _)| {
+ k.starts_with(&prefix)
+ && if let Some(current) = k.splitn(2, |&b| b == 0xff).nth(1) {
+ if let Ok(c) = utils::u64_from_bytes(current) {
+ c <= to
+ } else {
+ warn!("BadDatabase: Could not parse keychangeid_userid bytes");
+ false
+ }
} else {
- warn!("BadDatabase: Could not parse keychangeid_userid bytes");
+ warn!("BadDatabase: Could not parse keychangeid_userid");
false
}
- } else {
- warn!("BadDatabase: Could not parse keychangeid_userid");
- false
- }
- })
- .map(|(_, bytes)| {
- UserId::parse(utils::string_from_bytes(&bytes).map_err(|_| {
- Error::bad_database("User ID in devicekeychangeid_userid is invalid unicode.")
- })?)
- .map_err(|_| Error::bad_database("User ID in devicekeychangeid_userid is invalid."))
- }))
+ })
+ .map(|(_, bytes)| {
+ UserId::parse(utils::string_from_bytes(&bytes).map_err(|_| {
+ Error::bad_database(
+ "User ID in devicekeychangeid_userid is invalid unicode.",
+ )
+ })?)
+ .map_err(|_| {
+ Error::bad_database("User ID in devicekeychangeid_userid is invalid.")
+ })
+ }),
+ )
}
- fn mark_device_key_update(
- &self,
- user_id: &UserId,
- ) -> Result<()> {
+ fn mark_device_key_update(&self, user_id: &UserId) -> Result<()> {
let count = services().globals.next_count()?.to_be_bytes();
- for room_id in services().rooms.state_cache.rooms_joined(user_id).filter_map(|r| r.ok()) {
+ for room_id in services()
+ .rooms
+ .state_cache
+ .rooms_joined(user_id)
+ .filter_map(|r| r.ok())
+ {
// Don't send key updates to unencrypted rooms
- if services().rooms
+ if services()
+ .rooms
.state_accessor
.room_state_get(&room_id, &StateEventType::RoomEncryption, "")?
.is_none()
@@ -883,20 +910,19 @@ impl service::users::Data for KeyValueDatabase {
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
- Box::new(self.userdeviceid_metadata
- .scan_prefix(key)
- .map(|(_, bytes)| {
- serde_json::from_slice::<Device>(&bytes)
- .map_err(|_| Error::bad_database("Device in userdeviceid_metadata is invalid."))
- }))
+ Box::new(
+ self.userdeviceid_metadata
+ .scan_prefix(key)
+ .map(|(_, bytes)| {
+ serde_json::from_slice::<Device>(&bytes).map_err(|_| {
+ Error::bad_database("Device in userdeviceid_metadata is invalid.")
+ })
+ }),
+ )
}
/// Creates a new sync filter. Returns the filter id.
- fn create_filter(
- &self,
- user_id: &UserId,
- filter: &IncomingFilterDefinition,
- ) -> Result<String> {
+ fn create_filter(&self, user_id: &UserId, filter: &IncomingFilterDefinition) -> Result<String> {
let filter_id = utils::random_string(4);
let mut key = user_id.as_bytes().to_vec();
diff --git a/src/database/mod.rs b/src/database/mod.rs
index 6868467..8a7c78e 100644
--- a/src/database/mod.rs
+++ b/src/database/mod.rs
@@ -1,8 +1,16 @@
pub mod abstraction;
pub mod key_value;
-use crate::{utils, Config, Error, Result, service::{users, globals, uiaa, rooms::{self, state_compressor::CompressedStateEvent}, account_data, media, key_backups, transaction_ids, sending, appservice, pusher}, services, PduEvent, Services, SERVICES};
+use crate::{
+ service::{
+ account_data, appservice, globals, key_backups, media, pusher,
+ rooms::{self, state_compressor::CompressedStateEvent},
+ sending, transaction_ids, uiaa, users,
+ },
+ services, utils, Config, Error, PduEvent, Result, Services, SERVICES,
+};
use abstraction::KeyValueDatabaseEngine;
+use abstraction::KvTree;
use directories::ProjectDirs;
use futures_util::{stream::FuturesUnordered, StreamExt};
use lru_cache::LruCache;
@@ -12,7 +20,8 @@ use ruma::{
GlobalAccountDataEvent, GlobalAccountDataEventType, StateEventType,
},
push::Ruleset,
- DeviceId, EventId, RoomId, UserId, signatures::CanonicalJsonValue,
+ signatures::CanonicalJsonValue,
+ DeviceId, EventId, RoomId, UserId,
};
use std::{
collections::{BTreeMap, HashMap, HashSet},
@@ -25,7 +34,6 @@ use std::{
};
use tokio::sync::{mpsc, OwnedRwLockReadGuard, RwLock as TokioRwLock, Semaphore};
use tracing::{debug, error, info, warn};
-use abstraction::KvTree;
pub struct KeyValueDatabase {
_db: Arc<dyn KeyValueDatabaseEngine>,
@@ -65,9 +73,9 @@ pub struct KeyValueDatabase {
pub(super) readreceiptid_readreceipt: Arc<dyn KvTree>, // ReadReceiptId = RoomId + Count + UserId
pub(super) roomuserid_privateread: Arc<dyn KvTree>, // RoomUserId = Room + User, PrivateRead = Count
pub(super) roomuserid_lastprivatereadupdate: Arc<dyn KvTree>, // LastPrivateReadUpdate = Count
- pub(super) typingid_userid: Arc<dyn KvTree>, // TypingId = RoomId + TimeoutTime + Count
+ pub(super) typingid_userid: Arc<dyn KvTree>, // TypingId = RoomId + TimeoutTime + Count
pub(super) roomid_lasttypingupdate: Arc<dyn KvTree>, // LastRoomTypingUpdate = Count
- pub(super) presenceid_presence: Arc<dyn KvTree>, // PresenceId = RoomId + Count + UserId
+ pub(super) presenceid_presence: Arc<dyn KvTree>, // PresenceId = RoomId + Count + UserId
pub(super) userid_lastpresenceupdate: Arc<dyn KvTree>, // LastPresenceUpdate = Count
//pub rooms: rooms::Rooms,
@@ -279,127 +287,126 @@ impl KeyValueDatabase {
let db = Arc::new(Self {
_db: builder.clone(),
- userid_password: builder.open_tree("userid_password")?,
- userid_displayname: builder.open_tree("userid_displayname")?,
- userid_avatarurl: builder.open_tree("userid_avatarurl")?,
- userid_blurhash: builder.open_tree("userid_blurhash")?,
- userdeviceid_token: builder.open_tree("userdeviceid_token")?,
- userdeviceid_metadata: builder.open_tree("userdeviceid_metadata")?,
- userid_devicelistversion: builder.open_tree("userid_devicelistversion")?,
- token_userdeviceid: builder.open_tree("token_userdeviceid")?,
- onetimekeyid_onetimekeys: builder.open_tree("onetimekeyid_onetimekeys")?,
- userid_lastonetimekeyupdate: builder.open_tree("userid_lastonetimekeyupdate")?,
- keychangeid_userid: builder.open_tree("keychangeid_userid")?,
- keyid_key: builder.open_tree("keyid_key")?,
- userid_masterkeyid: builder.open_tree("userid_masterkeyid")?,
- userid_selfsigningkeyid: builder.open_tree("userid_selfsigningkeyid")?,
- userid_usersigningkeyid: builder.open_tree("userid_usersigningkeyid")?,
- userfilterid_filter: builder.open_tree("userfilterid_filter")?,
- todeviceid_events: builder.open_tree("todeviceid_events")?,
-
- userdevicesessionid_uiaainfo: builder.open_tree("userdevicesessionid_uiaainfo")?,
- userdevicesessionid_uiaarequest: RwLock::new(BTreeMap::new()),
- readreceiptid_readreceipt: builder.open_tree("readreceiptid_readreceipt")?,
- roomuserid_privateread: builder.open_tree("roomuserid_privateread")?, // "Private" read receipt
- roomuserid_lastprivatereadupdate: builder
- .open_tree("roomuserid_lastprivatereadupdate")?,
- typingid_userid: builder.open_tree("typingid_userid")?,
- roomid_lasttypingupdate: builder.open_tree("roomid_lasttypingupdate")?,
- presenceid_presence: builder.open_tree("presenceid_presence")?,
- userid_lastpresenceupdate: builder.open_tree("userid_lastpresenceupdate")?,
- pduid_pdu: builder.open_tree("pduid_pdu")?,
- eventid_pduid: builder.open_tree("eventid_pduid")?,
- roomid_pduleaves: builder.open_tree("roomid_pduleaves")?,
-
- alias_roomid: builder.open_tree("alias_roomid")?,
- aliasid_alias: builder.open_tree("aliasid_alias")?,
- publicroomids: builder.open_tree("publicroomids")?,
-
- tokenids: builder.open_tree("tokenids")?,
-
- roomserverids: builder.open_tree("roomserverids")?,
- serverroomids: builder.open_tree("serverroomids")?,
- userroomid_joined: builder.open_tree("userroomid_joined")?,
- roomuserid_joined: builder.open_tree("roomuserid_joined")?,
- roomid_joinedcount: builder.open_tree("roomid_joinedcount")?,
- roomid_invitedcount: builder.open_tree("roomid_invitedcount")?,
- roomuseroncejoinedids: builder.open_tree("roomuseroncejoinedids")?,
- userroomid_invitestate: builder.open_tree("userroomid_invitestate")?,
- roomuserid_invitecount: builder.open_tree("roomuserid_invitecount")?,
- userroomid_leftstate: builder.open_tree("userroomid_leftstate")?,
- roomuserid_leftcount: builder.open_tree("roomuserid_leftcount")?,
-
- disabledroomids: builder.open_tree("disabledroomids")?,
-
- lazyloadedids: builder.open_tree("lazyloadedids")?,
-
- userroomid_notificationcount: builder.open_tree("userroomid_notificationcount")?,
- userroomid_highlightcount: builder.open_tree("userroomid_highlightcount")?,
-
- statekey_shortstatekey: builder.open_tree("statekey_shortstatekey")?,
- shortstatekey_statekey: builder.open_tree("shortstatekey_statekey")?,
-
- shorteventid_authchain: builder.open_tree("shorteventid_authchain")?,
-
- roomid_shortroomid: builder.open_tree("roomid_shortroomid")?,
-
- shortstatehash_statediff: builder.open_tree("shortstatehash_statediff")?,
- eventid_shorteventid: builder.open_tree("eventid_shorteventid")?,
- shorteventid_eventid: builder.open_tree("shorteventid_eventid")?,
- shorteventid_shortstatehash: builder.open_tree("shorteventid_shortstatehash")?,
- roomid_shortstatehash: builder.open_tree("roomid_shortstatehash")?,
- roomsynctoken_shortstatehash: builder.open_tree("roomsynctoken_shortstatehash")?,
- statehash_shortstatehash: builder.open_tree("statehash_shortstatehash")?,
-
- eventid_outlierpdu: builder.open_tree("eventid_outlierpdu")?,
- softfailedeventids: builder.open_tree("softfailedeventids")?,
-
- referencedevents: builder.open_tree("referencedevents")?,
- roomuserdataid_accountdata: builder.open_tree("roomuserdataid_accountdata")?,
- roomusertype_roomuserdataid: builder.open_tree("roomusertype_roomuserdataid")?,
- mediaid_file: builder.open_tree("mediaid_file")?,
- backupid_algorithm: builder.open_tree("backupid_algorithm")?,
- backupid_etag: builder.open_tree("backupid_etag")?,
- backupkeyid_backup: builder.open_tree("backupkeyid_backup")?,
- userdevicetxnid_response: builder.open_tree("userdevicetxnid_response")?,
- servername_educount: builder.open_tree("servername_educount")?,
- servernameevent_data: builder.open_tree("servernameevent_data")?,
- servercurrentevent_data: builder.open_tree("servercurrentevent_data")?,
- id_appserviceregistrations: builder.open_tree("id_appserviceregistrations")?,
- senderkey_pusher: builder.open_tree("senderkey_pusher")?,
- global: builder.open_tree("global")?,
- server_signingkeys: builder.open_tree("server_signingkeys")?,
-
- cached_registrations: Arc::new(RwLock::new(HashMap::new())),
- pdu_cache: Mutex::new(LruCache::new(
- config
- .pdu_cache_capacity
- .try_into()
- .expect("pdu cache capacity fits into usize"),
- )),
- auth_chain_cache: Mutex::new(LruCache::new(
- (100_000.0 * config.conduit_cache_capacity_modifier) as usize,
- )),
- shorteventid_cache: Mutex::new(LruCache::new(
- (100_000.0 * config.conduit_cache_capacity_modifier) as usize,
- )),
- eventidshort_cache: Mutex::new(LruCache::new(
- (100_000.0 * config.conduit_cache_capacity_modifier) as usize,
- )),
- shortstatekey_cache: Mutex::new(LruCache::new(
- (100_000.0 * config.conduit_cache_capacity_modifier) as usize,
- )),
- statekeyshort_cache: Mutex::new(LruCache::new(
- (100_000.0 * config.conduit_cache_capacity_modifier) as usize,
- )),
- our_real_users_cache: RwLock::new(HashMap::new()),
- appservice_in_room_cache: RwLock::new(HashMap::new()),
- lazy_load_waiting: Mutex::new(HashMap::new()),
- stateinfo_cache: Mutex::new(LruCache::new(
- (100.0 * config.conduit_cache_capacity_modifier) as usize,
- )),
- lasttimelinecount_cache: Mutex::new(HashMap::new()),
-
+ userid_password: builder.open_tree("userid_password")?,
+ userid_displayname: builder.open_tree("userid_displayname")?,
+ userid_avatarurl: builder.open_tree("userid_avatarurl")?,
+ userid_blurhash: builder.open_tree("userid_blurhash")?,
+ userdeviceid_token: builder.open_tree("userdeviceid_token")?,
+ userdeviceid_metadata: builder.open_tree("userdeviceid_metadata")?,
+ userid_devicelistversion: builder.open_tree("userid_devicelistversion")?,
+ token_userdeviceid: builder.open_tree("token_userdeviceid")?,
+ onetimekeyid_onetimekeys: builder.open_tree("onetimekeyid_onetimekeys")?,
+ userid_lastonetimekeyupdate: builder.open_tree("userid_lastonetimekeyupdate")?,
+ keychangeid_userid: builder.open_tree("keychangeid_userid")?,
+ keyid_key: builder.open_tree("keyid_key")?,
+ userid_masterkeyid: builder.open_tree("userid_masterkeyid")?,
+ userid_selfsigningkeyid: builder.open_tree("userid_selfsigningkeyid")?,
+ userid_usersigningkeyid: builder.open_tree("userid_usersigningkeyid")?,
+ userfilterid_filter: builder.open_tree("userfilterid_filter")?,
+ todeviceid_events: builder.open_tree("todeviceid_events")?,
+
+ userdevicesessionid_uiaainfo: builder.open_tree("userdevicesessionid_uiaainfo")?,
+ userdevicesessionid_uiaarequest: RwLock::new(BTreeMap::new()),
+ readreceiptid_readreceipt: builder.open_tree("readreceiptid_readreceipt")?,
+ roomuserid_privateread: builder.open_tree("roomuserid_privateread")?, // "Private" read receipt
+ roomuserid_lastprivatereadupdate: builder
+ .open_tree("roomuserid_lastprivatereadupdate")?,
+ typingid_userid: builder.open_tree("typingid_userid")?,
+ roomid_lasttypingupdate: builder.open_tree("roomid_lasttypingupdate")?,
+ presenceid_presence: builder.open_tree("presenceid_presence")?,
+ userid_lastpresenceupdate: builder.open_tree("userid_lastpresenceupdate")?,
+ pduid_pdu: builder.open_tree("pduid_pdu")?,
+ eventid_pduid: builder.open_tree("eventid_pduid")?,
+ roomid_pduleaves: builder.open_tree("roomid_pduleaves")?,
+
+ alias_roomid: builder.open_tree("alias_roomid")?,
+ aliasid_alias: builder.open_tree("aliasid_alias")?,
+ publicroomids: builder.open_tree("publicroomids")?,
+
+ tokenids: builder.open_tree("tokenids")?,
+
+ roomserverids: builder.open_tree("roomserverids")?,
+ serverroomids: builder.open_tree("serverroomids")?,
+ userroomid_joined: builder.open_tree("userroomid_joined")?,
+ roomuserid_joined: builder.open_tree("roomuserid_joined")?,
+ roomid_joinedcount: builder.open_tree("roomid_joinedcount")?,
+ roomid_invitedcount: builder.open_tree("roomid_invitedcount")?,
+ roomuseroncejoinedids: builder.open_tree("roomuseroncejoinedids")?,
+ userroomid_invitestate: builder.open_tree("userroomid_invitestate")?,
+ roomuserid_invitecount: builder.open_tree("roomuserid_invitecount")?,
+ userroomid_leftstate: builder.open_tree("userroomid_leftstate")?,
+ roomuserid_leftcount: builder.open_tree("roomuserid_leftcount")?,
+
+ disabledroomids: builder.open_tree("disabledroomids")?,
+
+ lazyloadedids: builder.open_tree("lazyloadedids")?,
+
+ userroomid_notificationcount: builder.open_tree("userroomid_notificationcount")?,
+ userroomid_highlightcount: builder.open_tree("userroomid_highlightcount")?,
+
+ statekey_shortstatekey: builder.open_tree("statekey_shortstatekey")?,
+ shortstatekey_statekey: builder.open_tree("shortstatekey_statekey")?,
+
+ shorteventid_authchain: builder.open_tree("shorteventid_authchain")?,
+
+ roomid_shortroomid: builder.open_tree("roomid_shortroomid")?,
+
+ shortstatehash_statediff: builder.open_tree("shortstatehash_statediff")?,
+ eventid_shorteventid: builder.open_tree("eventid_shorteventid")?,
+ shorteventid_eventid: builder.open_tree("shorteventid_eventid")?,
+ shorteventid_shortstatehash: builder.open_tree("shorteventid_shortstatehash")?,
+ roomid_shortstatehash: builder.open_tree("roomid_shortstatehash")?,
+ roomsynctoken_shortstatehash: builder.open_tree("roomsynctoken_shortstatehash")?,
+ statehash_shortstatehash: builder.open_tree("statehash_shortstatehash")?,
+
+ eventid_outlierpdu: builder.open_tree("eventid_outlierpdu")?,
+ softfailedeventids: builder.open_tree("softfailedeventids")?,
+
+ referencedevents: builder.open_tree("referencedevents")?,
+ roomuserdataid_accountdata: builder.open_tree("roomuserdataid_accountdata")?,
+ roomusertype_roomuserdataid: builder.open_tree("roomusertype_roomuserdataid")?,
+ mediaid_file: builder.open_tree("mediaid_file")?,
+ backupid_algorithm: builder.open_tree("backupid_algorithm")?,
+ backupid_etag: builder.open_tree("backupid_etag")?,
+ backupkeyid_backup: builder.open_tree("backupkeyid_backup")?,
+ userdevicetxnid_response: builder.open_tree("userdevicetxnid_response")?,
+ servername_educount: builder.open_tree("servername_educount")?,
+ servernameevent_data: builder.open_tree("servernameevent_data")?,
+ servercurrentevent_data: builder.open_tree("servercurrentevent_data")?,
+ id_appserviceregistrations: builder.open_tree("id_appserviceregistrations")?,
+ senderkey_pusher: builder.open_tree("senderkey_pusher")?,
+ global: builder.open_tree("global")?,
+ server_signingkeys: builder.open_tree("server_signingkeys")?,
+
+ cached_registrations: Arc::new(RwLock::new(HashMap::new())),
+ pdu_cache: Mutex::new(LruCache::new(
+ config
+ .pdu_cache_capacity
+ .try_into()
+ .expect("pdu cache capacity fits into usize"),
+ )),
+ auth_chain_cache: Mutex::new(LruCache::new(
+ (100_000.0 * config.conduit_cache_capacity_modifier) as usize,
+ )),
+ shorteventid_cache: Mutex::new(LruCache::new(
+ (100_000.0 * config.conduit_cache_capacity_modifier) as usize,
+ )),
+ eventidshort_cache: Mutex::new(LruCache::new(
+ (100_000.0 * config.conduit_cache_capacity_modifier) as usize,
+ )),
+ shortstatekey_cache: Mutex::new(LruCache::new(
+ (100_000.0 * config.conduit_cache_capacity_modifier) as usize,
+ )),
+ statekeyshort_cache: Mutex::new(LruCache::new(
+ (100_000.0 * config.conduit_cache_capacity_modifier) as usize,
+ )),
+ our_real_users_cache: RwLock::new(HashMap::new()),
+ appservice_in_room_cache: RwLock::new(HashMap::new()),
+ lazy_load_waiting: Mutex::new(HashMap::new()),
+ stateinfo_cache: Mutex::new(LruCache::new(
+ (100.0 * config.conduit_cache_capacity_modifier) as usize,
+ )),
+ lasttimelinecount_cache: Mutex::new(HashMap::new()),
});
let services_raw = Box::new(Services::build(Arc::clone(&db), config)?);
@@ -407,7 +414,6 @@ impl KeyValueDatabase {
// This is the first and only time we initialize the SERVICE static
*SERVICES.write().unwrap() = Some(Box::leak(services_raw));
-
// Matrix resource ownership is based on the server name; changing it
// requires recreating the database from scratch.
if services().users.count()? > 0 {
@@ -570,7 +576,10 @@ impl KeyValueDatabase {
let states_parents = last_roomsstatehash.map_or_else(
|| Ok(Vec::new()),
|&last_roomsstatehash| {
- services().rooms.state_compressor.load_shortstatehash_info(dbg!(last_roomsstatehash))
+ services()
+ .rooms
+ .state_compressor
+ .load_shortstatehash_info(dbg!(last_roomsstatehash))
},
)?;
@@ -643,14 +652,15 @@ impl KeyValueDatabase {
current_state = HashSet::new();
current_sstatehash = Some(sstatehash);
- let event_id = db
- .shorteventid_eventid
- .get(&seventid)
- .unwrap()
- .unwrap();
+ let event_id = db.shorteventid_eventid.get(&seventid).unwrap().unwrap();
let string = utils::string_from_bytes(&event_id).unwrap();
let event_id = <&EventId>::try_from(string.as_str()).unwrap();
- let pdu = services().rooms.timeline.get_pdu(event_id).unwrap().unwrap();
+ let pdu = services()
+ .rooms
+ .timeline
+ .get_pdu(event_id)
+ .unwrap()
+ .unwrap();
if Some(&pdu.room_id) != current_room.as_ref() {
current_room = Some(pdu.room_id.clone());
@@ -764,8 +774,7 @@ impl KeyValueDatabase {
.peekable();
while iter.peek().is_some() {
- db.tokenids
- .insert_batch(&mut iter.by_ref().take(1000))?;
+ db.tokenids.insert_batch(&mut iter.by_ref().take(1000))?;
println!("smaller batch done");
}
@@ -803,8 +812,7 @@ impl KeyValueDatabase {
// Force E2EE device list updates so we can send them over federation
for user_id in services().users.iter().filter_map(|r| r.ok()) {
- services().users
- .mark_device_key_update(&user_id)?;
+ services().users.mark_device_key_update(&user_id)?;
}
services().globals.bump_database_version(10)?;
@@ -825,7 +833,8 @@ impl KeyValueDatabase {
info!(
"Loaded {} database with version {}",
- services().globals.config.database_backend, latest_database_version
+ services().globals.config.database_backend,
+ latest_database_version
);
} else {
services()
@@ -837,7 +846,8 @@ impl KeyValueDatabase {
warn!(
"Created new {} database with version {}",
- services().globals.config.database_backend, latest_database_version
+ services().globals.config.database_backend,
+ latest_database_version
);
}
@@ -862,9 +872,7 @@ impl KeyValueDatabase {
}
};
- services()
- .sending
- .start_handler(sending_receiver);
+ services().sending.start_handler(sending_receiver);
Self::start_cleanup_task().await;
@@ -898,7 +906,8 @@ impl KeyValueDatabase {
use std::time::{Duration, Instant};
- let timer_interval = Duration::from_secs(services().globals.config.cleanup_second_interval as u64);
+ let timer_interval =
+ Duration::from_secs(services().globals.config.cleanup_second_interval as u64);
tokio::spawn(async move {
let mut i = interval(timer_interval);
@@ -937,8 +946,10 @@ fn set_emergency_access() -> Result<bool> {
let conduit_user = UserId::parse_with_server_name("conduit", services().globals.server_name())
.expect("@conduit:server_name is a valid UserId");
- services().users
- .set_password(&conduit_user, services().globals.emergency_password().as_deref())?;
+ services().users.set_password(
+ &conduit_user,
+ services().globals.emergency_password().as_deref(),
+ )?;
let (ruleset, res) = match services().globals.emergency_password() {
Some(_) => (Ruleset::server_default(&conduit_user), Ok(true)),
@@ -951,7 +962,8 @@ fn set_emergency_access() -> Result<bool> {
GlobalAccountDataEventType::PushRules.to_string().into(),
&serde_json::to_value(&GlobalAccountDataEvent {
content: PushRulesEventContent { global: ruleset },
- }).expect("to json value always works"),
+ })
+ .expect("to json value always works"),
)?;
res
diff --git a/src/lib.rs b/src/lib.rs
index c103d52..e6421e8 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,22 +7,27 @@
#![allow(clippy::suspicious_else_formatting)]
#![deny(clippy::dbg_macro)]
+pub mod api;
mod config;
mod database;
mod service;
-pub mod api;
mod utils;
-use std::{cell::Cell, sync::{RwLock, Arc}};
+use std::{
+ cell::Cell,
+ sync::{Arc, RwLock},
+};
+pub use api::ruma_wrapper::{Ruma, RumaResponse};
pub use config::Config;
+pub use service::{pdu::PduEvent, Services};
pub use utils::error::{Error, Result};
-pub use service::{Services, pdu::PduEvent};
-pub use api::ruma_wrapper::{Ruma, RumaResponse};
pub static SERVICES: RwLock<Option<&'static Services>> = RwLock::new(None);
pub fn services<'a>() -> &'static Services {
- &SERVICES.read().unwrap().expect("SERVICES should be initialized when this is called")
+ &SERVICES
+ .read()
+ .unwrap()
+ .expect("SERVICES should be initialized when this is called")
}
-
diff --git a/src/service/account_data/data.rs b/src/service/account_data/data.rs
index 65780a6..c7c9298 100644
--- a/src/service/account_data/data.rs
+++ b/src/service/account_data/data.rs
@@ -1,7 +1,11 @@
use std::collections::HashMap;
-use ruma::{UserId, RoomId, events::{RoomAccountDataEventType, AnyEphemeralRoomEvent}, serde::Raw};
use crate::Result;
+use ruma::{
+ events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
+ serde::Raw,
+ RoomId, UserId,
+};
pub trait Data: Send + Sync {
/// Places one event in the account data of the user and removes the previous entry.
diff --git a/src/service/account_data/mod.rs b/src/service/account_data/mod.rs
index 1289f7a..5bf167d 100644
--- a/src/service/account_data/mod.rs
+++ b/src/service/account_data/mod.rs
@@ -3,9 +3,7 @@ mod data;
pub use data::Data;
use ruma::{
- api::client::{
- error::ErrorKind,
- },
+ api::client::error::ErrorKind,
events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
serde::Raw,
signatures::CanonicalJsonValue,
diff --git a/src/service/admin/mod.rs b/src/service/admin/mod.rs
index 0b14314..db596a3 100644
--- a/src/service/admin/mod.rs
+++ b/src/service/admin/mod.rs
@@ -28,7 +28,15 @@ use ruma::{
use serde_json::value::to_raw_value;
use tokio::sync::{mpsc, MutexGuard, RwLock, RwLockReadGuard};
-use crate::{Result, services, Error, api::{server_server, client_server::{AUTO_GEN_PASSWORD_LENGTH, leave_all_rooms}}, PduEvent, utils::{HtmlEscape, self}};
+use crate::{
+ api::{
+ client_server::{leave_all_rooms, AUTO_GEN_PASSWORD_LENGTH},
+ server_server,
+ },
+ services,
+ utils::{self, HtmlEscape},
+ Error, PduEvent, Result,
+};
use super::pdu::PduBuilder;
@@ -153,7 +161,6 @@ enum AdminCommand {
EnableRoom { room_id: Box<RoomId> },
}
-
#[derive(Debug)]
pub enum AdminRoomEvent {
ProcessMessage(String),
@@ -166,16 +173,14 @@ pub struct Service {
}
impl Service {
- pub fn start_handler(
- &self,
- mut receiver: mpsc::UnboundedReceiver<AdminRoomEvent>,
- ) {
+ pub fn start_handler(&self, mut receiver: mpsc::UnboundedReceiver<AdminRoomEvent>) {
tokio::spawn(async move {
// TODO: Use futures when we have long admin commands
//let mut futures = FuturesUnordered::new();
- let conduit_user = UserId::parse(format!("@conduit:{}", services().globals.server_name()))
- .expect("@conduit:server_name is valid");
+ let conduit_user =
+ UserId::parse(format!("@conduit:{}", services().globals.server_name()))
+ .expect("@conduit:server_name is valid");
let conduit_room = services()
.rooms
@@ -193,7 +198,8 @@ impl Service {
mutex_lock: &MutexGuard<'_, ()>| {
services()
.rooms
- .timeline.build_and_append_pdu(
+ .timeline
+ .build_and_append_pdu(
PduBuilder {
event_type: RoomEventType::RoomMessage,
content: to_raw_value(&message)
@@ -316,9 +322,11 @@ impl Service {
) -> Result<RoomMessageEventContent> {
let reply_message_content = match command {
AdminCommand::RegisterAppservice => {
- if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```" {
+ if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```"
+ {
let appservice_config = body[1..body.len() - 1].join("\n");
- let parsed_config = serde_yaml::from_str::<serde_yaml::Value>(&appservice_config);
+ let parsed_config =
+ serde_yaml::from_str::<serde_yaml::Value>(&appservice_config);
match parsed_config {
Ok(yaml) => match services().appservice.register_appservice(yaml) {
Ok(id) => RoomMessageEventContent::text_plain(format!(
@@ -343,7 +351,10 @@ impl Service {
}
AdminCommand::UnregisterAppservice {
appservice_identifier,
- } => match services().appservice.unregister_appservice(&appservice_identifier) {
+ } => match services()
+ .appservice
+ .unregister_appservice(&appservice_identifier)
+ {
Ok(()) => RoomMessageEventContent::text_plain("Appservice unregistered."),
Err(e) => RoomMessageEventContent::text_plain(format!(
"Failed to unregister appservice: {}",
@@ -351,7 +362,11 @@ impl Service {
)),
},
AdminCommand::ListAppservices => {
- if let Ok(appservices) = services().appservice.iter_ids().map(|ids| ids.collect::<Vec<_>>()) {
+ if let Ok(appservices) = services()
+ .appservice
+ .iter_ids()
+ .map(|ids| ids.collect::<Vec<_>>())
+ {
let count = appservices.len();
let output = format!(
"Appservices ({}): {}",
@@ -399,7 +414,11 @@ impl Service {
Err(e) => RoomMessageEventContent::text_plain(e.to_string()),
},
AdminCommand::IncomingFederation => {
- let map = services().globals.roomid_federationhandletime.read().unwrap();
+ let map = services()
+ .globals
+ .roomid_federationhandletime
+ .read()
+ .unwrap();
let mut msg: String = format!("Handling {} incoming pdus:\n", map.len());
for (r, (e, i)) in map.iter() {
@@ -426,7 +445,10 @@ impl Service {
Error::bad_database("Invalid room id field in event in database")
})?;
let start = Instant::now();
- let count = services().rooms.auth_chain.get_auth_chain(room_id, vec![event_id])
+ let count = services()
+ .rooms
+ .auth_chain
+ .get_auth_chain(room_id, vec![event_id])
.await?
.count();
let elapsed = start.elapsed();
@@ -439,7 +461,8 @@ impl Service {
}
}
AdminCommand::ParsePdu => {
- if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```" {
+ if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```"
+ {
let string = body[1..body.len() - 1].join("\n");
match serde_json::from_str(&string) {
Ok(value) => {
@@ -477,15 +500,18 @@ impl Service {
}
AdminCommand::GetPdu { event_id } => {
let mut outlier = false;
- let mut pdu_json = services().rooms.timeline.get_non_outlier_pdu_json(&event_id)?;
+ let mut pdu_json = services()
+ .rooms
+ .timeline
+ .get_non_outlier_pdu_json(&event_id)?;
if pdu_json.is_none() {
outlier = true;
pdu_json = services().rooms.timeline.get_pdu_json(&event_id)?;
}
match pdu_json {
Some(json) => {
- let json_text =
- serde_json::to_string_pretty(&json).expect("canonical json is valid json");
+ let json_text = serde_json::to_string_pretty(&json)
+ .expect("canonical json is valid json");
RoomMessageEventContent::text_html(
format!(
"{}\n```json\n{}\n```",
@@ -539,8 +565,11 @@ impl Service {
if !services().users.exists(&user_id)?
|| services().users.is_deactivated(&user_id)?
|| user_id
- == UserId::parse_with_server_name("conduit", services().globals.server_name())
- .expect("conduit user exists")
+ == UserId::parse_with_server_name(
+ "conduit",
+ services().globals.server_name(),
+ )
+ .expect("conduit user exists")
{
return Ok(RoomMessageEventContent::text_plain(
"The specified user does not exist or is deactivated!",
@@ -549,7 +578,10 @@ impl Service {
let new_password = utils::random_string(AUTO_GEN_PASSWORD_LENGTH);
- match services().users.set_password(&user_id, Some(new_password.as_str())) {
+ match services()
+ .users
+ .set_password(&user_id, Some(new_password.as_str()))
+ {
Ok(()) => RoomMessageEventContent::text_plain(format!(
"Successfully reset the password for user {}: {}",
user_id, new_password
@@ -590,7 +622,8 @@ impl Service {
// Default to pretty displayname
let displayname = format!("{} ⚡️", user_id.localpart());
- services().users
+ services()
+ .users
.set_displayname(&user_id, Some(displayname.clone()))?;
// Initial account data
@@ -604,7 +637,8 @@ impl Service {
content: ruma::events::push_rules::PushRulesEventContent {
global: ruma::push::Ruleset::server_default(&user_id),
},
- }).expect("to json value always works"),
+ })
+ .expect("to json value always works"),
)?;
// we don't add a device since we're not the user, just the creator
@@ -651,7 +685,8 @@ impl Service {
}
}
AdminCommand::DeactivateAll { leave_rooms, force } => {
- if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```" {
+ if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```"
+ {
let usernames = body.clone().drain(1..body.len() - 1).collect::<Vec<_>>();
let mut user_ids: Vec<&UserId> = Vec::new();
@@ -672,17 +707,15 @@ impl Service {
let mut admins = Vec::new();
if !force {
- user_ids.retain(|&user_id| {
- match services().users.is_admin(user_id) {
- Ok(is_admin) => match is_admin {
- true => {
- admins.push(user_id.localpart());
- false
- }
- false => true,
- },
- Err(_) => false,
- }
+ user_ids.retain(|&user_id| match services().users.is_admin(user_id) {
+ Ok(is_admin) => match is_admin {
+ true => {
+ admins.push(user_id.localpart());
+ false
+ }
+ false => true,
+ },
+ Err(_) => false,
})
}
@@ -783,8 +816,8 @@ impl Service {
} else {
// Wrap the usage line in a code block, and add a yaml block example
// This makes the usage of e.g. `register-appservice` more accurate
- let re =
- Regex::new("(?m)^USAGE:\n (.*?)\n\n").expect("Regex compilation should not fail");
+ let re = Regex::new("(?m)^USAGE:\n (.*?)\n\n")
+ .expect("Regex compilation should not fail");
re.replace_all(&text, "USAGE:\n<pre>$1[nobr]\n[commandbodyblock]</pre>")
.replace("[commandbodyblock]", &command_body)
};
@@ -808,7 +841,8 @@ impl Service {
services().rooms.short.get_or_create_shortroomid(&room_id)?;
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -818,8 +852,9 @@ impl Service {
let state_lock = mutex_state.lock().await;
// Create a user for the server
- let conduit_user = UserId::parse_with_server_name("conduit", services().globals.server_name())
- .expect("@conduit:server_name is valid");
+ let conduit_user =
+ UserId::parse_with_server_name("conduit", services().globals.server_name())
+ .expect("@conduit:server_name is valid");
services().users.create(&conduit_user, None)?;
@@ -1002,9 +1037,10 @@ impl Service {
user_id: &UserId,
displayname: String,
) -> Result<()> {
- let admin_room_alias: Box<RoomAliasId> = format!("#admins:{}", services().globals.server_name())
- .try_into()
- .expect("#admins:server_name is a valid alias name");
+ let admin_room_alias: Box<RoomAliasId> =
+ format!("#admins:{}", services().globals.server_name())
+ .try_into()
+ .expect("#admins:server_name is a valid alias name");
let room_id = services()
.rooms
.alias
@@ -1012,7 +1048,8 @@ impl Service {
.expect("Admin room must exist");
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -1022,8 +1059,9 @@ impl Service {
let state_lock = mutex_state.lock().await;
// Use the server user to grant the new admin's power level
- let conduit_user = UserId::parse_with_server_name("conduit", services().globals.server_name())
- .expect("@conduit:server_name is valid");
+ let conduit_user =
+ UserId::parse_with_server_name("conduit", services().globals.server_name())
+ .expect("@conduit:server_name is valid");
// Invite and join the real user
services().rooms.timeline.build_and_append_pdu(
diff --git a/src/service/globals/data.rs b/src/service/globals/data.rs
index 0f74b2a..407ff1c 100644
--- a/src/service/globals/data.rs
+++ b/src/service/globals/data.rs
@@ -1,7 +1,11 @@
use std::collections::BTreeMap;
use async_trait::async_trait;
-use ruma::{signatures::Ed25519KeyPair, DeviceId, UserId, ServerName, api::federation::discovery::{ServerSigningKeys, VerifyKey}, ServerSigningKeyId};
+use ruma::{
+ api::federation::discovery::{ServerSigningKeys, VerifyKey},
+ signatures::Ed25519KeyPair,
+ DeviceId, ServerName, ServerSigningKeyId, UserId,
+};
use crate::Result;
diff --git a/src/service/globals/mod.rs b/src/service/globals/mod.rs
index de8d1aa..23a6159 100644
--- a/src/service/globals/mod.rs
+++ b/src/service/globals/mod.rs
@@ -4,7 +4,7 @@ pub use data::Data;
use crate::api::server_server::FedDest;
use crate::service::*;
-use crate::{Config, utils, Error, Result};
+use crate::{utils, Config, Error, Result};
use ruma::{
api::{
client::sync::sync_events,
@@ -89,12 +89,8 @@ impl Default for RotationHandler {
}
}
-
impl Service {
- pub fn load(
- db: Arc<dyn Data>,
- config: Config,
- ) -> Result<Self> {
+ pub fn load(db: Arc<dyn Data>, config: Config) -> Result<Self> {
let keypair = db.load_keypair();
let keypair = match keypair {
diff --git a/src/service/key_backups/data.rs b/src/service/key_backups/data.rs
index 226b1e1..f711e5d 100644
--- a/src/service/key_backups/data.rs
+++ b/src/service/key_backups/data.rs
@@ -1,7 +1,11 @@
use std::collections::BTreeMap;
-use ruma::{api::client::backup::{BackupAlgorithm, RoomKeyBackup, KeyBackupData}, serde::Raw, UserId, RoomId};
use crate::Result;
+use ruma::{
+ api::client::backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup},
+ serde::Raw,
+ RoomId, UserId,
+};
pub trait Data: Send + Sync {
fn create_backup(
@@ -21,16 +25,10 @@ pub trait Data: Send + Sync {
fn get_latest_backup_version(&self, user_id: &UserId) -> Result<Option<String>>;
- fn get_latest_backup(
- &self,
- user_id: &UserId,
- ) -> Result<Option<(String, Raw<BackupAlgorithm>)>>;
+ fn get_latest_backup(&self, user_id: &UserId)
+ -> Result<Option<(String, Raw<BackupAlgorithm>)>>;
- fn get_backup(
- &self,
- user_id: &UserId,
- version: &str,
- ) -> Result<Option<Raw<BackupAlgorithm>>>;
+ fn get_backup(&self, user_id: &UserId, version: &str) -> Result<Option<Raw<BackupAlgorithm>>>;
fn add_key(
&self,
@@ -68,12 +66,7 @@ pub trait Data: Send + Sync {
fn delete_all_keys(&self, user_id: &UserId, version: &str) -> Result<()>;
- fn delete_room_keys(
- &self,
- user_id: &UserId,
- version: &str,
- room_id: &RoomId,
- ) -> Result<()>;
+ fn delete_room_keys(&self, user_id: &UserId, version: &str, room_id: &RoomId) -> Result<()>;
fn delete_room_key(
&self,
diff --git a/src/service/key_backups/mod.rs b/src/service/key_backups/mod.rs
index a3bed71..41ec1c1 100644
--- a/src/service/key_backups/mod.rs
+++ b/src/service/key_backups/mod.rs
@@ -1,7 +1,7 @@
mod data;
pub use data::Data;
-use crate::{utils, Error, Result, services};
+use crate::{services, utils, Error, Result};
use ruma::{
api::client::{
backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup},
@@ -65,7 +65,8 @@ impl Service {
session_id: &str,
key_data: &Raw<KeyBackupData>,
) -> Result<()> {
- self.db.add_key(user_id, version, room_id, session_id, key_data)
+ self.db
+ .add_key(user_id, version, room_id, session_id, key_data)
}
pub fn count_keys(&self, user_id: &UserId, version: &str) -> Result<usize> {
@@ -123,6 +124,7 @@ impl Service {
room_id: &RoomId,
session_id: &str,
) -> Result<()> {
- self.db.delete_room_key(user_id, version, room_id, session_id)
+ self.db
+ .delete_room_key(user_id, version, room_id, session_id)
}
}
diff --git a/src/service/media/data.rs b/src/service/media/data.rs
index 2e24049..75a682c 100644
--- a/src/service/media/data.rs
+++ b/src/service/media/data.rs
@@ -1,8 +1,20 @@
use crate::Result;
pub trait Data: Send + Sync {
- fn create_file_metadata(&self, mxc: String, width: u32, height: u32, content_disposition: Option<&str>, content_type: Option<&str>) -> Result<Vec<u8>>;
+ fn create_file_metadata(
+ &self,
+ mxc: String,
+ width: u32,
+ height: u32,
+ content_disposition: Option<&str>,
+ content_type: Option<&str>,
+ ) -> Result<Vec<u8>>;
/// Returns content_disposition, content_type and the metadata key.
- fn search_file_metadata(&self, mxc: String, width: u32, height: u32) -> Result<(Option<String>, Option<String>, Vec<u8>)>;
+ fn search_file_metadata(
+ &self,
+ mxc: String,
+ width: u32,
+ height: u32,
+ ) -> Result<(Option<String>, Option<String>, Vec<u8>)>;
}
diff --git a/src/service/media/mod.rs b/src/service/media/mod.rs
index d3dd2bd..ea276c0 100644
--- a/src/service/media/mod.rs
+++ b/src/service/media/mod.rs
@@ -1,8 +1,8 @@
mod data;
pub use data::Data;
+use crate::{services, utils, Error, Result};
use image::{imageops::FilterType, GenericImageView};
-use crate::{utils, Error, Result, services};
use std::{mem, sync::Arc};
use tokio::{
fs::File,
@@ -29,7 +29,9 @@ impl Service {
file: &[u8],
) -> Result<()> {
// Width, Height = 0 if it's not a thumbnail
- let key = self.db.create_file_metadata(mxc, 0, 0, content_disposition, content_type)?;
+ let key = self
+ .db
+ .create_file_metadata(mxc, 0, 0, content_disposition, content_type)?;
let path = services().globals.get_media_file(&key);
let mut f = File::create(path).await?;
@@ -48,7 +50,9 @@ impl Service {
height: u32,
file: &[u8],
) -> Result<()> {
- let key = self.db.create_file_metadata(mxc, width, height, content_disposition, content_type)?;
+ let key =
+ self.db
+ .create_file_metadata(mxc, width, height, content_disposition, content_type)?;
let path = services().globals.get_media_file(&key);
let mut f = File::create(path).await?;
@@ -59,12 +63,13 @@ impl Service {
/// Downloads a file.
pub async fn get(&self, mxc: String) -> Result<Option<FileMeta>> {
- if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc, 0, 0) {
+ if let Ok((content_disposition, content_type, key)) =
+ self.db.search_file_metadata(mxc, 0, 0)
+ {
let path = services().globals.get_media_file(&key);
let mut file = Vec::new();
File::open(path).await?.read_to_end(&mut file).await?;
-
Ok(Some(FileMeta {
content_disposition,
content_type,
@@ -108,7 +113,9 @@ impl Service {
.thumbnail_properties(width, height)
.unwrap_or((0, 0, false)); // 0, 0 because that's the original file
- if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc.clone(), width, height) {
+ if let Ok((content_disposition, content_type, key)) =
+ self.db.search_file_metadata(mxc.clone(), width, height)
+ {
// Using saved thumbnail
let path = services().globals.get_media_file(&key);
let mut file = Vec::new();
@@ -119,7 +126,9 @@ impl Service {
content_type,
file: file.to_vec(),
}))
- } else if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc.clone(), 0, 0) {
+ } else if let Ok((content_disposition, content_type, key)) =
+ self.db.search_file_metadata(mxc.clone(), 0, 0)
+ {
// Generate a thumbnail
let path = services().globals.get_media_file(&key);
let mut file = Vec::new();
@@ -180,7 +189,13 @@ impl Service {
thumbnail.write_to(&mut thumbnail_bytes, image::ImageOutputFormat::Png)?;
// Save thumbnail in database so we don't have to generate it again next time
- let thumbnail_key = self.db.create_file_metadata(mxc, width, height, content_disposition.as_deref(), content_type.as_deref())?;
+ let thumbnail_key = self.db.create_file_metadata(
+ mxc,
+ width,
+ height,
+ content_disposition.as_deref(),
+ content_type.as_deref(),
+ )?;
let path = services().globals.get_media_file(&thumbnail_key);
let mut f = File::create(path).await?;
diff --git a/src/service/mod.rs b/src/service/mod.rs
index daf4329..dbddf40 100644
--- a/src/service/mod.rs
+++ b/src/service/mod.rs
@@ -5,7 +5,7 @@ use std::{
use lru_cache::LruCache;
-use crate::{Result, Config};
+use crate::{Config, Result};
pub mod account_data;
pub mod admin;
@@ -49,7 +49,8 @@ impl Services {
+ key_backups::Data
+ media::Data,
>(
- db: Arc<D>, config: Config
+ db: Arc<D>,
+ config: Config,
) -> Result<Self> {
Ok(Self {
appservice: appservice::Service { db: db.clone() },
@@ -76,30 +77,26 @@ impl Services {
state: rooms::state::Service { db: db.clone() },
state_accessor: rooms::state_accessor::Service { db: db.clone() },
state_cache: rooms::state_cache::Service { db: db.clone() },
- state_compressor: rooms::state_compressor::Service { db: db.clone(), stateinfo_cache: Mutex::new(LruCache::new((100.0 * config.conduit_cache_capacity_modifier) as usize,)) },
- timeline: rooms::timeline::Service { db: db.clone(), lasttimelinecount_cache: Mutex::new(HashMap::new()) },
+ state_compressor: rooms::state_compressor::Service {
+ db: db.clone(),
+ stateinfo_cache: Mutex::new(LruCache::new(
+ (100.0 * config.conduit_cache_capacity_modifier) as usize,
+ )),
+ },
+ timeline: rooms::timeline::Service {
+ db: db.clone(),
+ lasttimelinecount_cache: Mutex::new(HashMap::new()),
+ },
user: rooms::user::Service { db: db.clone() },
},
- transaction_ids: transaction_ids::Service {
- db: db.clone()
- },
- uiaa: uiaa::Service {
- db: db.clone()
- },
- users: users::Service {
- db: db.clone()
- },
- account_data: account_data::Service {
- db: db.clone()
- },
+ transaction_ids: transaction_ids::Service { db: db.clone() },
+ uiaa: uiaa::Service { db: db.clone() },
+ users: users::Service { db: db.clone() },
+ account_data: account_data::Service { db: db.clone() },
admin: admin::Service { sender: todo!() },
globals: globals::Service::load(db.clone(), config)?,
- key_backups: key_backups::Service {
- db: db.clone()
- },
- media: media::Service {
- db: db.clone()
- },
+ key_backups: key_backups::Service { db: db.clone() },
+ media: media::Service { db: db.clone() },
sending: sending::Service {
maximum_requests: todo!(),
sender: todo!(),
diff --git a/src/service/pdu.rs b/src/service/pdu.rs
index 3be3300..724b2b2 100644
--- a/src/service/pdu.rs
+++ b/src/service/pdu.rs
@@ -1,4 +1,4 @@
-use crate::{Error, services};
+use crate::{services, Error};
use ruma::{
events::{
room::member::RoomMemberEventContent, AnyEphemeralRoomEvent, AnyRoomEvent, AnyStateEvent,
diff --git a/src/service/pusher/data.rs b/src/service/pusher/data.rs
index 305a538..243b77f 100644
--- a/src/service/pusher/data.rs
+++ b/src/service/pusher/data.rs
@@ -1,5 +1,8 @@
-use ruma::{UserId, api::client::push::{set_pusher, get_pushers}};
use crate::Result;
+use ruma::{
+ api::client::push::{get_pushers, set_pusher},
+ UserId,
+};
pub trait Data: Send + Sync {
fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::Pusher) -> Result<()>;
@@ -8,8 +11,5 @@ pub trait Data: Send + Sync {
fn get_pushers(&self, sender: &UserId) -> Result<Vec<get_pushers::v3::Pusher>>;
- fn get_pusher_senderkeys<'a>(
- &'a self,
- sender: &UserId,
- ) -> Box<dyn Iterator<Item = Vec<u8>>>;
+ fn get_pusher_senderkeys<'a>(&'a self, sender: &UserId) -> Box<dyn Iterator<Item = Vec<u8>>>;
}
diff --git a/src/service/pusher/mod.rs b/src/service/pusher/mod.rs
index e65c57a..78d5f26 100644
--- a/src/service/pusher/mod.rs
+++ b/src/service/pusher/mod.rs
@@ -79,7 +79,11 @@ impl Service {
//*reqwest_request.timeout_mut() = Some(Duration::from_secs(5));
let url = reqwest_request.url().clone();
- let response = services().globals.default_client().execute(reqwest_request).await;
+ let response = services()
+ .globals
+ .default_client()
+ .execute(reqwest_request)
+ .await;
match response {
Ok(mut response) => {
@@ -196,7 +200,8 @@ impl Service {
let ctx = PushConditionRoomCtx {
room_id: room_id.to_owned(),
member_count: 10_u32.into(), // TODO: get member count efficiently
- user_display_name: services().users
+ user_display_name: services()
+ .users
.displayname(user)?
.unwrap_or_else(|| user.localpart().to_owned()),
users_power_levels: power_levels.users.clone(),
@@ -276,10 +281,10 @@ impl Service {
let user_name = services().users.displayname(&event.sender)?;
notifi.sender_display_name = user_name.as_deref();
- let room_name = if let Some(room_name_pdu) =
- services().rooms
+ let room_name = if let Some(room_name_pdu) = services()
+ .rooms
.state_accessor
- .room_state_get(&event.room_id, &StateEventType::RoomName, "")?
+ .room_state_get(&event.room_id, &StateEventType::RoomName, "")?
{
serde_json::from_str::<RoomNameEventContent>(room_name_pdu.content.get())
.map_err(|_| Error::bad_database("Invalid room name event in database."))?
@@ -290,11 +295,8 @@ impl Service {
notifi.room_name = room_name.as_deref();
- self.send_request(
- url,
- send_event_notification::v1::Request::new(notifi),
- )
- .await?;
+ self.send_request(url, send_event_notification::v1::Request::new(notifi))
+ .await?;
}
// TODO: email
diff --git a/src/service/rooms/alias/data.rs b/src/service/rooms/alias/data.rs
index 26bffae..90205f9 100644
--- a/src/service/rooms/alias/data.rs
+++ b/src/service/rooms/alias/data.rs
@@ -1,25 +1,15 @@
-use ruma::{RoomId, RoomAliasId};
use crate::Result;
+use ruma::{RoomAliasId, RoomId};
pub trait Data: Send + Sync {
/// Creates or updates the alias to the given room id.
- fn set_alias(
- &self,
- alias: &RoomAliasId,
- room_id: &RoomId
- ) -> Result<()>;
+ fn set_alias(&self, alias: &RoomAliasId, room_id: &RoomId) -> Result<()>;
/// Forgets about an alias. Returns an error if the alias did not exist.
- fn remove_alias(
- &self,
- alias: &RoomAliasId,
- ) -> Result<()>;
+ fn remove_alias(&self, alias: &RoomAliasId) -> Result<()>;
/// Looks up the roomid for the given alias.
- fn resolve_local_alias(
- &self,
- alias: &RoomAliasId,
- ) -> Result<Option<Box<RoomId>>>;
+ fn resolve_local_alias(&self, alias: &RoomAliasId) -> Result<Option<Box<RoomId>>>;
/// Returns all local aliases that point to the given room
fn local_aliases_for_room(
diff --git a/src/service/rooms/alias/mod.rs b/src/service/rooms/alias/mod.rs
index 65fb367..6a3cf4e 100644
--- a/src/service/rooms/alias/mod.rs
+++ b/src/service/rooms/alias/mod.rs
@@ -3,8 +3,8 @@ use std::sync::Arc;
pub use data::Data;
-use ruma::{RoomAliasId, RoomId};
use crate::Result;
+use ruma::{RoomAliasId, RoomId};
pub struct Service {
db: Arc<dyn Data>,
@@ -12,19 +12,12 @@ pub struct Service {
impl Service {
#[tracing::instrument(skip(self))]
- pub fn set_alias(
- &self,
- alias: &RoomAliasId,
- room_id: &RoomId,
- ) -> Result<()> {
+ pub fn set_alias(&self, alias: &RoomAliasId, room_id: &RoomId) -> Result<()> {
self.db.set_alias(alias, room_id)
}
#[tracing::instrument(skip(self))]
- pub fn remove_alias(
- &self,
- alias: &RoomAliasId,
- ) -> Result<()> {
+ pub fn remove_alias(&self, alias: &RoomAliasId) -> Result<()> {
self.db.remove_alias(alias)
}
diff --git a/src/service/rooms/auth_chain/data.rs b/src/service/rooms/auth_chain/data.rs
index 13fac2d..e8c379f 100644
--- a/src/service/rooms/auth_chain/data.rs
+++ b/src/service/rooms/auth_chain/data.rs
@@ -1,7 +1,11 @@
-use std::{collections::HashSet, sync::Arc};
use crate::Result;
+use std::{collections::HashSet, sync::Arc};
pub trait Data: Send + Sync {
- fn get_cached_eventid_authchain(&self, shorteventid: &[u64]) -> Result<Option<Arc<HashSet<u64>>>>;
- fn cache_auth_chain(&self, shorteventid: Vec<u64>, auth_chain: Arc<HashSet<u64>>) -> Result<()>;
+ fn get_cached_eventid_authchain(
+ &self,
+ shorteventid: &[u64],
+ ) -> Result<Option<Arc<HashSet<u64>>>>;
+ fn cache_auth_chain(&self, shorteventid: Vec<u64>, auth_chain: Arc<HashSet<u64>>)
+ -> Result<()>;
}
diff --git a/src/service/rooms/auth_chain/mod.rs b/src/service/rooms/auth_chain/mod.rs
index e35094b..ed06385 100644
--- a/src/service/rooms/auth_chain/mod.rs
+++ b/src/service/rooms/auth_chain/mod.rs
@@ -1,11 +1,14 @@
mod data;
-use std::{sync::Arc, collections::{HashSet, BTreeSet}};
+use std::{
+ collections::{BTreeSet, HashSet},
+ sync::Arc,
+};
pub use data::Data;
-use ruma::{RoomId, EventId, api::client::error::ErrorKind};
+use ruma::{api::client::error::ErrorKind, EventId, RoomId};
use tracing::log::warn;
-use crate::{Result, services, Error};
+use crate::{services, Error, Result};
pub struct Service {
db: Arc<dyn Data>,
@@ -56,7 +59,11 @@ impl Service {
}
let chunk_key: Vec<u64> = chunk.iter().map(|(short, _)| short).copied().collect();
- if let Some(cached) = services().rooms.auth_chain.get_cached_eventid_authchain(&chunk_key)? {
+ if let Some(cached) = services()
+ .rooms
+ .auth_chain
+ .get_cached_eventid_authchain(&chunk_key)?
+ {
hits += 1;
full_auth_chain.extend(cached.iter().copied());
continue;
@@ -68,13 +75,18 @@ impl Service {
let mut misses2 = 0;
let mut i = 0;
for (sevent_id, event_id) in chunk {
- if let Some(cached) = services().rooms.auth_chain.get_cached_eventid_authchain(&[sevent_id])? {
+ if let Some(cached) = services()
+ .rooms
+ .auth_chain
+ .get_cached_eventid_authchain(&[sevent_id])?
+ {
hits2 += 1;
chunk_cache.extend(cached.iter().copied());
} else {
misses2 += 1;
let auth_chain = Arc::new(self.get_auth_chain_inner(room_id, &event_id)?);
- services().rooms
+ services()
+ .rooms
.auth_chain
.cache_auth_chain(vec![sevent_id], Arc::clone(&auth_chain))?;
println!(
@@ -97,8 +109,10 @@ impl Service {
misses2
);
let chunk_cache = Arc::new(chunk_cache);
- services().rooms
- .auth_chain.cache_auth_chain(chunk_key, Arc::clone(&chunk_cache))?;
+ services()
+ .rooms
+ .auth_chain
+ .cache_auth_chain(chunk_key, Arc::clone(&chunk_cache))?;
full_auth_chain.extend(chunk_cache.iter());
}
@@ -115,11 +129,7 @@ impl Service {
}
#[tracing::instrument(skip(self, event_id))]
- fn get_auth_chain_inner(
- &self,
- room_id: &RoomId,
- event_id: &EventId,
- ) -> Result<HashSet<u64>> {
+ fn get_auth_chain_inner(&self, room_id: &RoomId, event_id: &EventId) -> Result<HashSet<u64>> {
let mut todo = vec![Arc::from(event_id)];
let mut found = HashSet::new();
@@ -131,7 +141,8 @@ impl Service {
}
for auth_event in &pdu.auth_events {
let sauthevent = services()
- .rooms.short
+ .rooms
+ .short
.get_or_create_shorteventid(auth_event)?;
if !found.contains(&sauthevent) {
diff --git a/src/service/rooms/directory/data.rs b/src/service/rooms/directory/data.rs
index b4e020d..fb523cf 100644
--- a/src/service/rooms/directory/data.rs
+++ b/src/service/rooms/directory/data.rs
@@ -1,5 +1,5 @@
-use ruma::RoomId;
use crate::Result;
+use ruma::RoomId;
pub trait Data: Send + Sync {
/// Adds the room to the public room directory
diff --git a/src/service/rooms/edus/presence/data.rs b/src/service/rooms/edus/presence/data.rs
index f759255..f378404 100644
--- a/src/service/rooms/edus/presence/data.rs
+++ b/src/service/rooms/edus/presence/data.rs
@@ -1,7 +1,7 @@
use std::collections::HashMap;
-use ruma::{UserId, RoomId, events::presence::PresenceEvent};
use crate::Result;
+use ruma::{events::presence::PresenceEvent, RoomId, UserId};
pub trait Data: Send + Sync {
/// Adds a presence event which will be saved until a new event replaces it.
diff --git a/src/service/rooms/edus/presence/mod.rs b/src/service/rooms/edus/presence/mod.rs
index d657897..636bd91 100644
--- a/src/service/rooms/edus/presence/mod.rs
+++ b/src/service/rooms/edus/presence/mod.rs
@@ -2,7 +2,7 @@ mod data;
use std::{collections::HashMap, sync::Arc};
pub use data::Data;
-use ruma::{RoomId, UserId, events::presence::PresenceEvent};
+use ruma::{events::presence::PresenceEvent, RoomId, UserId};
use crate::Result;
diff --git a/src/service/rooms/edus/read_receipt/data.rs b/src/service/rooms/edus/read_receipt/data.rs
index 5ebd89d..734c68d 100644
--- a/src/service/rooms/edus/read_receipt/data.rs
+++ b/src/service/rooms/edus/read_receipt/data.rs
@@ -1,5 +1,5 @@
-use ruma::{RoomId, events::receipt::ReceiptEvent, UserId, serde::Raw};
use crate::Result;
+use ruma::{events::receipt::ReceiptEvent, serde::Raw, RoomId, UserId};
pub trait Data: Send + Sync {
/// Replaces the previous read receipt.
@@ -15,13 +15,15 @@ pub trait Data: Send + Sync {
&self,
room_id: &RoomId,
since: u64,
- ) -> Box<dyn Iterator<
- Item = Result<(
- Box<UserId>,
- u64,
- Raw<ruma::events::AnySyncEphemeralRoomEvent>,
- )>,
- >>;
+ ) -> Box<
+ dyn Iterator<
+ Item = Result<(
+ Box<UserId>,
+ u64,
+ Raw<ruma::events::AnySyncEphemeralRoomEvent>,
+ )>,
+ >,
+ >;
/// Sets a private read marker at `count`.
fn private_read_set(&self, room_id: &RoomId, user_id: &UserId, count: u64) -> Result<()>;
diff --git a/src/service/rooms/edus/read_receipt/mod.rs b/src/service/rooms/edus/read_receipt/mod.rs
index 1770877..35fee1a 100644
--- a/src/service/rooms/edus/read_receipt/mod.rs
+++ b/src/service/rooms/edus/read_receipt/mod.rs
@@ -3,8 +3,8 @@ use std::sync::Arc;
pub use data::Data;
-use ruma::{RoomId, UserId, events::receipt::ReceiptEvent, serde::Raw};
use crate::Result;
+use ruma::{events::receipt::ReceiptEvent, serde::Raw, RoomId, UserId};
pub struct Service {
db: Arc<dyn Data>,
diff --git a/src/service/rooms/edus/typing/data.rs b/src/service/rooms/edus/typing/data.rs
index 426d4e0..50b6d13 100644
--- a/src/service/rooms/edus/typing/data.rs
+++ b/src/service/rooms/edus/typing/data.rs
@@ -1,6 +1,6 @@
-use std::collections::HashSet;
use crate::Result;
-use ruma::{UserId, RoomId};
+use ruma::{RoomId, UserId};
+use std::collections::HashSet;
pub trait Data: Send + Sync {
/// Sets a user as typing until the timeout timestamp is reached or roomtyping_remove is
diff --git a/src/service/rooms/edus/typing/mod.rs b/src/service/rooms/edus/typing/mod.rs
index 3752056..91892df 100644
--- a/src/service/rooms/edus/typing/mod.rs
+++ b/src/service/rooms/edus/typing/mod.rs
@@ -2,7 +2,7 @@ mod data;
use std::sync::Arc;
pub use data::Data;
-use ruma::{UserId, RoomId, events::SyncEphemeralRoomEvent};
+use ruma::{events::SyncEphemeralRoomEvent, RoomId, UserId};
use crate::Result;
diff --git a/src/service/rooms/event_handler/mod.rs b/src/service/rooms/event_handler/mod.rs
index d6ec8e9..689f678 100644
--- a/src/service/rooms/event_handler/mod.rs
+++ b/src/service/rooms/event_handler/mod.rs
@@ -1,22 +1,33 @@
/// An async function that can recursively call itself.
type AsyncRecursiveType<'a, T> = Pin<Box<dyn Future<Output = T> + 'a + Send>>;
-use ruma::{RoomVersionId, signatures::CanonicalJsonObject, api::federation::discovery::{get_server_keys, get_remote_server_keys}};
-use tokio::sync::Semaphore;
+use ruma::{
+ api::federation::discovery::{get_remote_server_keys, get_server_keys},
+ signatures::CanonicalJsonObject,
+ RoomVersionId,
+};
use std::{
collections::{btree_map, hash_map, BTreeMap, HashMap, HashSet},
pin::Pin,
sync::{Arc, RwLock, RwLockWriteGuard},
time::{Duration, Instant, SystemTime},
};
+use tokio::sync::Semaphore;
-use futures_util::{Future, stream::FuturesUnordered, StreamExt};
+use futures_util::{stream::FuturesUnordered, Future, StreamExt};
use ruma::{
api::{
client::error::ErrorKind,
- federation::{event::{get_event, get_room_state_ids}, membership::create_join_event, discovery::get_remote_server_keys_batch::{v2::QueryCriteria, self}},
+ federation::{
+ discovery::get_remote_server_keys_batch::{self, v2::QueryCriteria},
+ event::{get_event, get_room_state_ids},
+ membership::create_join_event,
+ },
+ },
+ events::{
+ room::{create::RoomCreateEventContent, server_acl::RoomServerAclEventContent},
+ StateEventType,
},
- events::{room::{create::RoomCreateEventContent, server_acl::RoomServerAclEventContent}, StateEventType},
int,
serde::Base64,
signatures::CanonicalJsonValue,
@@ -24,9 +35,9 @@ use ruma::{
uint, EventId, MilliSecondsSinceUnixEpoch, RoomId, ServerName, ServerSigningKeyId,
};
use serde_json::value::{to_raw_value, RawValue as RawJsonValue};
-use tracing::{error, info, trace, warn, debug};
+use tracing::{debug, error, info, trace, warn};
-use crate::{service::*, services, Result, Error, PduEvent};
+use crate::{service::*, services, Error, PduEvent, Result};
pub struct Service;
@@ -72,10 +83,7 @@ impl Service {
));
}
- if services()
- .rooms
- .metadata
- .is_disabled(room_id)? {
+ if services().rooms.metadata.is_disabled(room_id)? {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"Federation of this room is currently disabled on this server.",
@@ -94,7 +102,8 @@ impl Service {
.ok_or_else(|| Error::bad_database("Failed to find create event in db."))?;
let first_pdu_in_room = services()
- .rooms.timeline
+ .rooms
+ .timeline
.first_pdu_in_room(room_id)?
.ok_or_else(|| Error::bad_database("Failed to find first pdu in db."))?;
@@ -113,21 +122,20 @@ impl Service {
}
// 9. Fetch any missing prev events doing all checks listed here starting at 1. These are timeline events
- let (sorted_prev_events, mut eventid_info) = self.fetch_unknown_prev_events(
- origin,
- &create_event,
- room_id,
- pub_key_map,
- incoming_pdu.prev_events.clone(),
- ).await?;
+ let (sorted_prev_events, mut eventid_info) = self
+ .fetch_unknown_prev_events(
+ origin,
+ &create_event,
+ room_id,
+ pub_key_map,
+ incoming_pdu.prev_events.clone(),
+ )
+ .await?;
let mut errors = 0;
for prev_id in dbg!(sorted_prev_events) {
// Check for disabled again because it might have changed
- if services()
- .rooms
- .metadata
- .is_disabled(room_id)? {
+ if services().rooms.metadata.is_disabled(room_id)? {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"Federation of this room is currently disabled on this server.",
@@ -224,15 +232,18 @@ impl Service {
.write()
.unwrap()
.insert(room_id.to_owned(), (event_id.to_owned(), start_time));
- let r = services().rooms.event_handler.upgrade_outlier_to_timeline_pdu(
- incoming_pdu,
- val,
- &create_event,
- origin,
- room_id,
- pub_key_map,
- )
- .await;
+ let r = services()
+ .rooms
+ .event_handler
+ .upgrade_outlier_to_timeline_pdu(
+ incoming_pdu,
+ val,
+ &create_event,
+ origin,
+ room_id,
+ pub_key_map,
+ )
+ .await;
services()
.globals
.roomid_federationhandletime
@@ -252,8 +263,7 @@ impl Service {
room_id: &'a RoomId,
value: BTreeMap<String, CanonicalJsonValue>,
pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
- ) -> AsyncRecursiveType<'a, Result<(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>>
- {
+ ) -> AsyncRecursiveType<'a, Result<(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>> {
Box::pin(async move {
// TODO: For RoomVersion6 we must check that Raw<..> is canonical do we anywhere?: https://matrix.org/docs/spec/rooms/v6#canonical-json
@@ -282,14 +292,22 @@ impl Service {
Err(e) => {
// Drop
warn!("Dropping bad event {}: {}", event_id, e);
- return Err(Error::BadRequest(ErrorKind::InvalidParam, "Signature verification failed"));
+ return Err(Error::BadRequest(
+ ErrorKind::InvalidParam,
+ "Signature verification failed",
+ ));
}
Ok(ruma::signatures::Verified::Signatures) => {
// Redact
warn!("Calculated hash does not match: {}", event_id);
match ruma::signatures::redact(&value, room_version_id) {
Ok(obj) => obj,
- Err(_) => return Err(Error::BadRequest(ErrorKind::InvalidParam, "Redaction failed")),
+ Err(_) => {
+ return Err(Error::BadRequest(
+ ErrorKind::InvalidParam,
+ "Redaction failed",
+ ))
+ }
}
}
Ok(ruma::signatures::Verified::All) => value,
@@ -376,7 +394,8 @@ impl Service {
&incoming_pdu,
None::<PduEvent>, // TODO: third party invite
|k, s| auth_events.get(&(k.to_string().into(), s.to_owned())),
- ).map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed"))?
+ )
+ .map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed"))?
{
return Err(Error::BadRequest(
ErrorKind::InvalidParam,
@@ -415,9 +434,13 @@ impl Service {
if services()
.rooms
- .pdu_metadata.is_event_soft_failed(&incoming_pdu.event_id)?
+ .pdu_metadata
+ .is_event_soft_failed(&incoming_pdu.event_id)?
{
- return Err(Error::BadRequest(ErrorKind::InvalidParam, "Event has been soft failed"));
+ return Err(Error::BadRequest(
+ ErrorKind::InvalidParam,
+ "Event has been soft failed",
+ ));
}
info!("Upgrading {} to timeline pdu", incoming_pdu.event_id);
@@ -448,7 +471,13 @@ impl Service {
.pdu_shortstatehash(prev_event)?;
let state = if let Some(shortstatehash) = prev_event_sstatehash {
- Some(services().rooms.state_accessor.state_full_ids(shortstatehash).await)
+ Some(
+ services()
+ .rooms
+ .state_accessor
+ .state_full_ids(shortstatehash)
+ .await,
+ )
} else {
None
};
@@ -466,10 +495,10 @@ impl Service {
})?;
if let Some(state_key) = &prev_pdu.state_key {
- let shortstatekey = services()
- .rooms
- .short
- .get_or_create_shortstatekey(&prev_pdu.kind.to_string().into(), state_key)?;
+ let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
+ &prev_pdu.kind.to_string().into(),
+ state_key,
+ )?;
state.insert(shortstatekey, Arc::from(prev_event));
// Now it's the state after the pdu
@@ -483,21 +512,25 @@ impl Service {
let mut okay = true;
for prev_eventid in &incoming_pdu.prev_events {
- let prev_event = if let Ok(Some(pdu)) = services().rooms.timeline.get_pdu(prev_eventid) {
- pdu
- } else {
- okay = false;
- break;
- };
-
- let sstatehash =
- if let Ok(Some(s)) = services().rooms.state_accessor.pdu_shortstatehash(prev_eventid) {
- s
+ let prev_event =
+ if let Ok(Some(pdu)) = services().rooms.timeline.get_pdu(prev_eventid) {
+ pdu
} else {
okay = false;
break;
};
+ let sstatehash = if let Ok(Some(s)) = services()
+ .rooms
+ .state_accessor
+ .pdu_shortstatehash(prev_eventid)
+ {
+ s
+ } else {
+ okay = false;
+ break;
+ };
+
extremity_sstatehashes.insert(sstatehash, prev_event);
}
@@ -513,13 +546,10 @@ impl Service {
.await?;
if let Some(state_key) = &prev_event.state_key {
- let shortstatekey = services()
- .rooms
- .short
- .get_or_create_shortstatekey(
- &prev_event.kind.to_string().into(),
- state_key,
- )?;
+ let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
+ &prev_event.kind.to_string().into(),
+ state_key,
+ )?;
leaf_state.insert(shortstatekey, Arc::from(&*prev_event.event_id));
// Now it's the state after the pdu
}
@@ -528,7 +558,8 @@ impl Service {
let mut starting_events = Vec::with_capacity(leaf_state.len());
for (k, id) in leaf_state {
- if let Ok((ty, st_key)) = services().rooms.short.get_statekey_from_short(k) {
+ if let Ok((ty, st_key)) = services().rooms.short.get_statekey_from_short(k)
+ {
// FIXME: Undo .to_string().into() when StateMap
// is updated to use StateEventType
state.insert((ty.to_string().into(), st_key), id.clone());
@@ -567,10 +598,8 @@ impl Service {
new_state
.into_iter()
.map(|((event_type, state_key), event_id)| {
- let shortstatekey = services()
- .rooms
- .short
- .get_or_create_shortstatekey(
+ let shortstatekey =
+ services().rooms.short.get_or_create_shortstatekey(
&event_type.to_string().into(),
&state_key,
)?;
@@ -618,15 +647,14 @@ impl Service {
let mut state: BTreeMap<_, Arc<EventId>> = BTreeMap::new();
for (pdu, _) in state_vec {
- let state_key = pdu
- .state_key
- .clone()
- .ok_or_else(|| Error::bad_database("Found non-state pdu in state events."))?;
+ let state_key = pdu.state_key.clone().ok_or_else(|| {
+ Error::bad_database("Found non-state pdu in state events.")
+ })?;
- let shortstatekey = services()
- .rooms
- .short
- .get_or_create_shortstatekey(&pdu.kind.to_string().into(), &state_key)?;
+ let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
+ &pdu.kind.to_string().into(),
+ &state_key,
+ )?;
match state.entry(shortstatekey) {
btree_map::Entry::Vacant(v) => {
@@ -648,7 +676,9 @@ impl Service {
if state.get(&create_shortstatekey).map(|id| id.as_ref())
!= Some(&create_event.event_id)
{
- return Err(Error::bad_database("Incoming event refers to wrong create event."));
+ return Err(Error::bad_database(
+ "Incoming event refers to wrong create event.",
+ ));
}
state_at_incoming_event = Some(state);
@@ -683,7 +713,9 @@ impl Service {
.map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed."))?;
if !check_result {
- return Err(Error::bad_database("Event has failed auth check with state at the event."));
+ return Err(Error::bad_database(
+ "Event has failed auth check with state at the event.",
+ ));
}
info!("Auth check succeeded");
@@ -703,10 +735,7 @@ impl Service {
// Now we calculate the set of extremities this room has after the incoming event has been
// applied. We start with the previous extremities (aka leaves)
info!("Calculating extremities");
- let mut extremities = services()
- .rooms
- .state
- .get_forward_extremities(room_id)?;
+ let mut extremities = services().rooms.state.get_forward_extremities(room_id)?;
// Remove any forward extremities that are referenced by this incoming event's prev_events
for prev_event in &incoming_pdu.prev_events {
@@ -716,8 +745,15 @@ impl Service {
}
// Only keep those extremities that were not referenced yet
- extremities
- .retain(|id| !matches!(services().rooms.pdu_metadata.is_event_referenced(room_id, id), Ok(true)));
+ extremities.retain(|id| {
+ !matches!(
+ services()
+ .rooms
+ .pdu_metadata
+ .is_event_referenced(room_id, id),
+ Ok(true)
+ )
+ });
info!("Compressing state at event");
let state_ids_compressed = state_at_incoming_event
@@ -733,23 +769,21 @@ impl Service {
// 13. Check if the event passes auth based on the "current state" of the room, if not "soft fail" it
info!("Starting soft fail auth check");
- let auth_events = services()
- .rooms
- .state
- .get_auth_events(
- room_id,
- &incoming_pdu.kind,
- &incoming_pdu.sender,
- incoming_pdu.state_key.as_deref(),
- &incoming_pdu.content,
- )?;
+ let auth_events = services().rooms.state.get_auth_events(
+ room_id,
+ &incoming_pdu.kind,
+ &incoming_pdu.sender,
+ incoming_pdu.state_key.as_deref(),
+ &incoming_pdu.content,
+ )?;
let soft_fail = !state_res::event_auth::auth_check(
&room_version,
&incoming_pdu,
None::<PduEvent>,
|k, s| auth_events.get(&(k.clone(), s.to_owned())),
- ).map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed."))?;
+ )
+ .map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed."))?;
if soft_fail {
services().rooms.timeline.append_incoming_pdu(
@@ -767,7 +801,10 @@ impl Service {
.rooms
.pdu_metadata
.mark_event_soft_failed(&incoming_pdu.event_id)?;
- return Err(Error::BadRequest(ErrorKind::InvalidParam, "Event has been soft failed"));
+ return Err(Error::BadRequest(
+ ErrorKind::InvalidParam,
+ "Event has been soft failed",
+ ));
}
if incoming_pdu.state_key.is_some() {
@@ -789,15 +826,12 @@ impl Service {
info!("Loading extremities");
for id in dbg!(&extremities) {
- match services()
- .rooms
- .timeline
- .get_pdu(id)?
- {
+ match services().rooms.timeline.get_pdu(id)? {
Some(leaf_pdu) => {
extremity_sstatehashes.insert(
services()
- .rooms.state_accessor
+ .rooms
+ .state_accessor
.pdu_shortstatehash(&leaf_pdu.event_id)?
.ok_or_else(|| {
error!(
@@ -829,10 +863,10 @@ impl Service {
// We also add the state after the incoming event to the fork states
let mut state_after = state_at_incoming_event.clone();
if let Some(state_key) = &incoming_pdu.state_key {
- let shortstatekey = services()
- .rooms
- .short
- .get_or_create_shortstatekey(&incoming_pdu.kind.to_string().into(), state_key)?;
+ let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
+ &incoming_pdu.kind.to_string().into(),
+ state_key,
+ )?;
state_after.insert(shortstatekey, Arc::from(&*incoming_pdu.event_id));
}
@@ -921,10 +955,10 @@ impl Service {
state
.into_iter()
.map(|((event_type, state_key), event_id)| {
- let shortstatekey = services()
- .rooms
- .short
- .get_or_create_shortstatekey(&event_type.to_string().into(), &state_key)?;
+ let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
+ &event_type.to_string().into(),
+ &state_key,
+ )?;
services()
.rooms
.state_compressor
@@ -936,7 +970,10 @@ impl Service {
// Set the new room state to the resolved state
if update_state {
info!("Forcing new room state");
- let sstatehash = services().rooms.state_compressor.save_state(room_id, new_room_state)?;
+ let sstatehash = services()
+ .rooms
+ .state_compressor
+ .save_state(room_id, new_room_state)?;
services()
.rooms
.state
@@ -951,15 +988,14 @@ impl Service {
// We use the `state_at_event` instead of `state_after` so we accurately
// represent the state for this event.
- let pdu_id = services().rooms.timeline
- .append_incoming_pdu(
- &incoming_pdu,
- val,
- extremities.iter().map(|e| (**e).to_owned()).collect(),
- state_ids_compressed,
- soft_fail,
- &state_lock,
- )?;
+ let pdu_id = services().rooms.timeline.append_incoming_pdu(
+ &incoming_pdu,
+ val,
+ extremities.iter().map(|e| (**e).to_owned()).collect(),
+ state_ids_compressed,
+ soft_fail,
+ &state_lock,
+ )?;
info!("Appended incoming pdu");
@@ -1141,8 +1177,10 @@ impl Service {
room_id: &RoomId,
pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
initial_set: Vec<Arc<EventId>>,
- ) -> Result<(Vec<Arc<EventId>>, HashMap<Arc<EventId>,
-(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>)> {
+ ) -> Result<(
+ Vec<Arc<EventId>>,
+ HashMap<Arc<EventId>, (Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>,
+ )> {
let mut graph: HashMap<Arc<EventId>, _> = HashMap::new();
let mut eventid_info = HashMap::new();
let mut todo_outlier_stack: Vec<Arc<EventId>> = initial_set;
@@ -1223,7 +1261,8 @@ impl Service {
.map_or_else(|| uint!(0), |info| info.0.origin_server_ts),
),
))
- }).map_err(|_| Error::bad_database("Error sorting prev events"))?;
+ })
+ .map_err(|_| Error::bad_database("Error sorting prev events"))?;
Ok((sorted, eventid_info))
}
@@ -1253,13 +1292,16 @@ impl Service {
let signature_ids = signature_object.keys().cloned().collect::<Vec<_>>();
- let fetch_res = self.fetch_signing_keys(
- signature_server.as_str().try_into().map_err(|_| {
- Error::BadServerResponse("Invalid servername in signatures of server response pdu.")
- })?,
- signature_ids,
- )
- .await;
+ let fetch_res = self
+ .fetch_signing_keys(
+ signature_server.as_str().try_into().map_err(|_| {
+ Error::BadServerResponse(
+ "Invalid servername in signatures of server response pdu.",
+ )
+ })?,
+ signature_ids,
+ )
+ .await;
let keys = match fetch_res {
Ok(keys) => keys,
@@ -1336,8 +1378,9 @@ impl Service {
let signature_ids = signature_object.keys().cloned().collect::<Vec<_>>();
- let contains_all_ids =
- |keys: &BTreeMap<String, Base64>| signature_ids.iter().all(|id| keys.contains_key(id));
+ let contains_all_ids = |keys: &BTreeMap<String, Base64>| {
+ signature_ids.iter().all(|id| keys.contains_key(id))
+ };
let origin = <&ServerName>::try_from(signature_server.as_str()).map_err(|_| {
Error::BadServerResponse("Invalid servername in signatures of server response pdu.")
@@ -1373,8 +1416,10 @@ impl Service {
room_version: &RoomVersionId,
pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
) -> Result<()> {
- let mut servers: BTreeMap<Box<ServerName>, BTreeMap<Box<ServerSigningKeyId>, QueryCriteria>> =
- BTreeMap::new();
+ let mut servers: BTreeMap<
+ Box<ServerName>,
+ BTreeMap<Box<ServerSigningKeyId>, QueryCriteria>,
+ > = BTreeMap::new();
{
let mut pkm = pub_key_map
@@ -1440,11 +1485,9 @@ impl Service {
.into_iter()
.map(|(server, _)| async move {
(
- services().sending
- .send_federation_request(
- &server,
- get_server_keys::v2::Request::new(),
- )
+ services()
+ .sending
+ .send_federation_request(&server, get_server_keys::v2::Request::new())
.await,
server,
)
@@ -1472,10 +1515,11 @@ impl Service {
/// Returns Ok if the acl allows the server
pub fn acl_check(&self, server_name: &ServerName, room_id: &RoomId) -> Result<()> {
- let acl_event = match services()
- .rooms.state_accessor
- .room_state_get(room_id, &StateEventType::RoomServerAcl, "")?
- {
+ let acl_event = match services().rooms.state_accessor.room_state_get(
+ room_id,
+ &StateEventType::RoomServerAcl,
+ "",
+ )? {
Some(acl) => acl,
None => return Ok(()),
};
@@ -1587,7 +1631,9 @@ impl Service {
.ok()
.and_then(|resp| resp.server_key.deserialize().ok())
{
- services().globals.add_signing_key(origin, server_key.clone())?;
+ services()
+ .globals
+ .add_signing_key(origin, server_key.clone())?;
result.extend(
server_key
diff --git a/src/service/rooms/lazy_loading/data.rs b/src/service/rooms/lazy_loading/data.rs
index 524071c..9af8e21 100644
--- a/src/service/rooms/lazy_loading/data.rs
+++ b/src/service/rooms/lazy_loading/data.rs
@@ -1,5 +1,5 @@
-use ruma::{RoomId, DeviceId, UserId};
use crate::Result;
+use ruma::{DeviceId, RoomId, UserId};
pub trait Data: Send + Sync {
fn lazy_load_was_sent_before(
@@ -15,7 +15,7 @@ pub trait Data: Send + Sync {
user_id: &UserId,
device_id: &DeviceId,
room_id: &RoomId,
- confirmed_user_ids: &mut dyn Iterator<Item=&UserId>,
+ confirmed_user_ids: &mut dyn Iterator<Item = &UserId>,
) -> Result<()>;
fn lazy_load_reset(
diff --git a/src/service/rooms/lazy_loading/mod.rs b/src/service/rooms/lazy_loading/mod.rs
index 760fffe..a01ce9b 100644
--- a/src/service/rooms/lazy_loading/mod.rs
+++ b/src/service/rooms/lazy_loading/mod.rs
@@ -1,15 +1,19 @@
mod data;
-use std::{collections::{HashSet, HashMap}, sync::{Mutex, Arc}};
+use std::{
+ collections::{HashMap, HashSet},
+ sync::{Arc, Mutex},
+};
pub use data::Data;
-use ruma::{DeviceId, UserId, RoomId};
+use ruma::{DeviceId, RoomId, UserId};
use crate::Result;
pub struct Service {
db: Arc<dyn Data>,
- lazy_load_waiting: Mutex<HashMap<(Box<UserId>, Box<DeviceId>, Box<RoomId>, u64), HashSet<Box<UserId>>>>,
+ lazy_load_waiting:
+ Mutex<HashMap<(Box<UserId>, Box<DeviceId>, Box<RoomId>, u64), HashSet<Box<UserId>>>>,
}
impl Service {
@@ -21,7 +25,8 @@ impl Service {
room_id: &RoomId,
ll_user: &UserId,
) -> Result<bool> {
- self.db.lazy_load_was_sent_before(user_id, device_id, room_id, ll_user)
+ self.db
+ .lazy_load_was_sent_before(user_id, device_id, room_id, ll_user)
}
#[tracing::instrument(skip(self))]
@@ -58,7 +63,12 @@ impl Service {
room_id.to_owned(),
since,
)) {
- self.db.lazy_load_confirm_delivery(user_id, device_id, room_id, &mut user_ids.iter().map(|&u| &*u))?;
+ self.db.lazy_load_confirm_delivery(
+ user_id,
+ device_id,
+ room_id,
+ &mut user_ids.iter().map(|&u| &*u),
+ )?;
} else {
// Ignore
}
diff --git a/src/service/rooms/metadata/data.rs b/src/service/rooms/metadata/data.rs
index bc31ee8..27e7eb9 100644
--- a/src/service/rooms/metadata/data.rs
+++ b/src/service/rooms/metadata/data.rs
@@ -1,5 +1,5 @@
-use ruma::RoomId;
use crate::Result;
+use ruma::RoomId;
pub trait Data: Send + Sync {
fn exists(&self, room_id: &RoomId) -> Result<bool>;
diff --git a/src/service/rooms/mod.rs b/src/service/rooms/mod.rs
index f1b0bad..8956e4d 100644
--- a/src/service/rooms/mod.rs
+++ b/src/service/rooms/mod.rs
@@ -16,7 +16,25 @@ pub mod state_compressor;
pub mod timeline;
pub mod user;
-pub trait Data: alias::Data + auth_chain::Data + directory::Data + edus::Data + lazy_loading::Data + metadata::Data + outlier::Data + pdu_metadata::Data + search::Data + short::Data + state::Data + state_accessor::Data + state_cache::Data + state_compressor::Data + timeline::Data + user::Data {}
+pub trait Data:
+ alias::Data
+ + auth_chain::Data
+ + directory::Data
+ + edus::Data
+ + lazy_loading::Data
+ + metadata::Data
+ + outlier::Data
+ + pdu_metadata::Data
+ + search::Data
+ + short::Data
+ + state::Data
+ + state_accessor::Data
+ + state_cache::Data
+ + state_compressor::Data
+ + timeline::Data
+ + user::Data
+{
+}
pub struct Service {
pub alias: alias::Service,
diff --git a/src/service/rooms/outlier/mod.rs b/src/service/rooms/outlier/mod.rs
index d36adc4..6404d8a 100644
--- a/src/service/rooms/outlier/mod.rs
+++ b/src/service/rooms/outlier/mod.rs
@@ -2,9 +2,9 @@ mod data;
use std::sync::Arc;
pub use data::Data;
-use ruma::{EventId, signatures::CanonicalJsonObject};
+use ruma::{signatures::CanonicalJsonObject, EventId};
-use crate::{Result, PduEvent};
+use crate::{PduEvent, Result};
pub struct Service {
db: Arc<dyn Data>,
diff --git a/src/service/rooms/pdu_metadata/data.rs b/src/service/rooms/pdu_metadata/data.rs
index 9bc49cf..b157938 100644
--- a/src/service/rooms/pdu_metadata/data.rs
+++ b/src/service/rooms/pdu_metadata/data.rs
@@ -1,7 +1,7 @@
use std::sync::Arc;
-use ruma::{EventId, RoomId};
use crate::Result;
+use ruma::{EventId, RoomId};
pub trait Data: Send + Sync {
fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()>;
diff --git a/src/service/rooms/pdu_metadata/mod.rs b/src/service/rooms/pdu_metadata/mod.rs
index 4724f85..7044338 100644
--- a/src/service/rooms/pdu_metadata/mod.rs
+++ b/src/service/rooms/pdu_metadata/mod.rs
@@ -2,7 +2,7 @@ mod data;
use std::sync::Arc;
pub use data::Data;
-use ruma::{RoomId, EventId};
+use ruma::{EventId, RoomId};
use crate::Result;
diff --git a/src/service/rooms/search/data.rs b/src/service/rooms/search/data.rs
index 0c14ffe..59652e0 100644
--- a/src/service/rooms/search/data.rs
+++ b/src/service/rooms/search/data.rs
@@ -1,5 +1,5 @@
-use ruma::RoomId;
use crate::Result;
+use ruma::RoomId;
pub trait Data: Send + Sync {
fn index_pdu<'a>(&self, shortroomid: u64, pdu_id: &[u8], message_body: String) -> Result<()>;
diff --git a/src/service/rooms/search/mod.rs b/src/service/rooms/search/mod.rs
index ec1ad53..0ef9634 100644
--- a/src/service/rooms/search/mod.rs
+++ b/src/service/rooms/search/mod.rs
@@ -12,7 +12,12 @@ pub struct Service {
impl Service {
#[tracing::instrument(skip(self))]
- pub fn index_pdu<'a>(&self, shortroomid: u64, pdu_id: &[u8], message_body: String) -> Result<()> {
+ pub fn index_pdu<'a>(
+ &self,
+ shortroomid: u64,
+ pdu_id: &[u8],
+ message_body: String,
+ ) -> Result<()> {
self.db.index_pdu(shortroomid, pdu_id, message_body)
}
diff --git a/src/service/rooms/short/data.rs b/src/service/rooms/short/data.rs
index 07a2712..652c525 100644
--- a/src/service/rooms/short/data.rs
+++ b/src/service/rooms/short/data.rs
@@ -1,13 +1,10 @@
use std::sync::Arc;
-use ruma::{EventId, events::StateEventType, RoomId};
use crate::Result;
+use ruma::{events::StateEventType, EventId, RoomId};
pub trait Data: Send + Sync {
- fn get_or_create_shorteventid(
- &self,
- event_id: &EventId,
- ) -> Result<u64>;
+ fn get_or_create_shorteventid(&self, event_id: &EventId) -> Result<u64>;
fn get_shortstatekey(
&self,
@@ -26,15 +23,9 @@ pub trait Data: Send + Sync {
fn get_statekey_from_short(&self, shortstatekey: u64) -> Result<(StateEventType, String)>;
/// Returns (shortstatehash, already_existed)
- fn get_or_create_shortstatehash(
- &self,
- state_hash: &[u8],
- ) -> Result<(u64, bool)>;
+ fn get_or_create_shortstatehash(&self, state_hash: &[u8]) -> Result<(u64, bool)>;
fn get_shortroomid(&self, room_id: &RoomId) -> Result<Option<u64>>;
- fn get_or_create_shortroomid(
- &self,
- room_id: &RoomId,
- ) -> Result<u64>;
+ fn get_or_create_shortroomid(&self, room_id: &RoomId) -> Result<u64>;
}
diff --git a/src/service/rooms/short/mod.rs b/src/service/rooms/short/mod.rs
index 08ce5c5..1d2e040 100644
--- a/src/service/rooms/short/mod.rs
+++ b/src/service/rooms/short/mod.rs
@@ -2,19 +2,16 @@ mod data;
use std::sync::Arc;
pub use data::Data;
-use ruma::{EventId, events::StateEventType, RoomId};
+use ruma::{events::StateEventType, EventId, RoomId};
-use crate::{Result, Error, utils, services};
+use crate::{services, utils, Error, Result};
pub struct Service {
db: Arc<dyn Data>,
}
impl Service {
- pub fn get_or_create_shorteventid(
- &self,
- event_id: &EventId,
- ) -> Result<u64> {
+ pub fn get_or_create_shorteventid(&self, event_id: &EventId) -> Result<u64> {
self.db.get_or_create_shorteventid(event_id)
}
@@ -43,10 +40,7 @@ impl Service {
}
/// Returns (shortstatehash, already_existed)
- pub fn get_or_create_shortstatehash(
- &self,
- state_hash: &[u8],
- ) -> Result<(u64, bool)> {
+ pub fn get_or_create_shortstatehash(&self, state_hash: &[u8]) -> Result<(u64, bool)> {
self.db.get_or_create_shortstatehash(state_hash)
}
@@ -54,10 +48,7 @@ impl Service {
self.db.get_shortroomid(room_id)
}
- pub fn get_or_create_shortroomid(
- &self,
- room_id: &RoomId,
- ) -> Result<u64> {
+ pub fn get_or_create_shortroomid(&self, room_id: &RoomId) -> Result<u64> {
self.db.get_or_create_shortroomid(room_id)
}
}
diff --git a/src/service/rooms/state/data.rs b/src/service/rooms/state/data.rs
index 8eca21d..3aa4914 100644
--- a/src/service/rooms/state/data.rs
+++ b/src/service/rooms/state/data.rs
@@ -1,7 +1,7 @@
-use std::sync::Arc;
-use std::collections::HashSet;
use crate::Result;
use ruma::{EventId, RoomId};
+use std::collections::HashSet;
+use std::sync::Arc;
use tokio::sync::MutexGuard;
pub trait Data: Send + Sync {
@@ -9,7 +9,10 @@ pub trait Data: Send + Sync {
fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>>;
/// Update the current state of the room.
- fn set_room_state(&self, room_id: &RoomId, new_shortstatehash: u64,
+ fn set_room_state(
+ &self,
+ room_id: &RoomId,
+ new_shortstatehash: u64,
_mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
) -> Result<()>;
@@ -20,7 +23,8 @@ pub trait Data: Send + Sync {
fn get_forward_extremities(&self, room_id: &RoomId) -> Result<HashSet<Arc<EventId>>>;
/// Replace the forward extremities of the room.
- fn set_forward_extremities<'a>(&self,
+ fn set_forward_extremities<'a>(
+ &self,
room_id: &RoomId,
event_ids: Vec<Box<EventId>>,
_mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
diff --git a/src/service/rooms/state/mod.rs b/src/service/rooms/state/mod.rs
index 57a0e77..2dff4b7 100644
--- a/src/service/rooms/state/mod.rs
+++ b/src/service/rooms/state/mod.rs
@@ -1,13 +1,24 @@
mod data;
-use std::{collections::{HashSet, HashMap}, sync::Arc};
+use std::{
+ collections::{HashMap, HashSet},
+ sync::Arc,
+};
pub use data::Data;
-use ruma::{RoomId, events::{room::{member::MembershipState, create::RoomCreateEventContent}, AnyStrippedStateEvent, StateEventType, RoomEventType}, UserId, EventId, serde::Raw, RoomVersionId, state_res::{StateMap, self}};
+use ruma::{
+ events::{
+ room::{create::RoomCreateEventContent, member::MembershipState},
+ AnyStrippedStateEvent, RoomEventType, StateEventType,
+ },
+ serde::Raw,
+ state_res::{self, StateMap},
+ EventId, RoomId, RoomVersionId, UserId,
+};
use serde::Deserialize;
use tokio::sync::MutexGuard;
use tracing::warn;
-use crate::{Result, services, PduEvent, Error, utils::calculate_hash};
+use crate::{services, utils::calculate_hash, Error, PduEvent, Result};
use super::state_compressor::CompressedStateEvent;
@@ -25,7 +36,8 @@ impl Service {
statediffremoved: HashSet<CompressedStateEvent>,
) -> Result<()> {
let mutex_state = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_state
.write()
.unwrap()
@@ -35,7 +47,10 @@ impl Service {
let state_lock = mutex_state.lock().await;
for event_id in statediffnew.into_iter().filter_map(|new| {
- services().rooms.state_compressor.parse_compressed_state_event(new)
+ services()
+ .rooms
+ .state_compressor
+ .parse_compressed_state_event(new)
.ok()
.map(|(_, id)| id)
}) {
@@ -75,7 +90,14 @@ impl Service {
Err(_) => continue,
};
- services().rooms.state_cache.update_membership(room_id, &user_id, membership, &pdu.sender, None, false)?;
+ services().rooms.state_cache.update_membership(
+ room_id,
+ &user_id,
+ membership,
+ &pdu.sender,
+ None,
+ false,
+ )?;
}
services().rooms.state_cache.update_joined_count(room_id)?;
@@ -98,7 +120,10 @@ impl Service {
room_id: &RoomId,
state_ids_compressed: HashSet<CompressedStateEvent>,
) -> Result<u64> {
- let shorteventid = services().rooms.short.get_or_create_shorteventid(event_id)?;
+ let shorteventid = services()
+ .rooms
+ .short
+ .get_or_create_shorteventid(event_id)?;
let previous_shortstatehash = self.db.get_room_shortstatehash(room_id)?;
@@ -109,12 +134,21 @@ impl Service {
.collect::<Vec<_>>(),
);
- let (shortstatehash, already_existed) =
- services().rooms.short.get_or_create_shortstatehash(&state_hash)?;
+ let (shortstatehash, already_existed) = services()
+ .rooms
+ .short
+ .get_or_create_shortstatehash(&state_hash)?;
if !already_existed {
- let states_parents = previous_shortstatehash
- .map_or_else(|| Ok(Vec::new()), |p| services().rooms.state_compressor.load_shortstatehash_info(p))?;
+ let states_parents = previous_shortstatehash.map_or_else(
+ || Ok(Vec::new()),
+ |p| {
+ services()
+ .rooms
+ .state_compressor
+ .load_shortstatehash_info(p)
+ },
+ )?;
let (statediffnew, statediffremoved) =
if let Some(parent_stateinfo) = states_parents.last() {
@@ -152,11 +186,11 @@ impl Service {
/// This adds all current state events (not including the incoming event)
/// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
#[tracing::instrument(skip(self, new_pdu))]
- pub fn append_to_state(
- &self,
- new_pdu: &PduEvent,
- ) -> Result<u64> {
- let shorteventid = services().rooms.short.get_or_create_shorteventid(&new_pdu.event_id)?;
+ pub fn append_to_state(&self, new_pdu: &PduEvent) -> Result<u64> {
+ let shorteventid = services()
+ .rooms
+ .short
+ .get_or_create_shorteventid(&new_pdu.event_id)?;
let previous_shortstatehash = self.get_room_shortstatehash(&new_pdu.room_id)?;
@@ -165,15 +199,25 @@ impl Service {
}
if let Some(state_key) = &new_pdu.state_key {
- let states_parents = previous_shortstatehash
- .map_or_else(|| Ok(Vec::new()), |p| services().rooms.state_compressor.load_shortstatehash_info(p))?;
-
- let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
- &new_pdu.kind.to_string().into(),
- state_key,
+ let states_parents = previous_shortstatehash.map_or_else(
+ || Ok(Vec::new()),
+ |p| {
+ services()
+ .rooms
+ .state_compressor
+ .load_shortstatehash_info(p)
+ },
)?;
- let new = services().rooms.state_compressor.compress_state_event(shortstatekey, &new_pdu.event_id)?;
+ let shortstatekey = services()
+ .rooms
+ .short
+ .get_or_create_shortstatekey(&new_pdu.kind.to_string().into(), state_key)?;
+
+ let new = services()
+ .rooms
+ .state_compressor
+ .compress_state_event(shortstatekey, &new_pdu.event_id)?;
let replaces = states_parents
.last()
@@ -220,14 +264,18 @@ impl Service {
) -> Result<Vec<Raw<AnyStrippedStateEvent>>> {
let mut state = Vec::new();
// Add recommended events
- if let Some(e) =
- services().rooms.state_accessor.room_state_get(&invite_event.room_id, &StateEventType::RoomCreate, "")?
- {
+ if let Some(e) = services().rooms.state_accessor.room_state_get(
+ &invite_event.room_id,
+ &StateEventType::RoomCreate,
+ "",
+ )? {
state.push(e.to_stripped_state_event());
}
- if let Some(e) =
- services().rooms.state_accessor.room_state_get(&invite_event.room_id, &StateEventType::RoomJoinRules, "")?
- {
+ if let Some(e) = services().rooms.state_accessor.room_state_get(
+ &invite_event.room_id,
+ &StateEventType::RoomJoinRules,
+ "",
+ )? {
state.push(e.to_stripped_state_event());
}
if let Some(e) = services().rooms.state_accessor.room_state_get(
@@ -237,14 +285,18 @@ impl Service {
)? {
state.push(e.to_stripped_state_event());
}
- if let Some(e) =
- services().rooms.state_accessor.room_state_get(&invite_event.room_id, &StateEventType::RoomAvatar, "")?
- {
+ if let Some(e) = services().rooms.state_accessor.room_state_get(
+ &invite_event.room_id,
+ &StateEventType::RoomAvatar,
+ "",
+ )? {
state.push(e.to_stripped_state_event());
}
- if let Some(e) =
- services().rooms.state_accessor.room_state_get(&invite_event.room_id, &StateEventType::RoomName, "")?
- {
+ if let Some(e) = services().rooms.state_accessor.room_state_get(
+ &invite_event.room_id,
+ &StateEventType::RoomName,
+ "",
+ )? {
state.push(e.to_stripped_state_event());
}
if let Some(e) = services().rooms.state_accessor.room_state_get(
@@ -260,16 +312,23 @@ impl Service {
}
#[tracing::instrument(skip(self))]
- pub fn set_room_state(&self, room_id: &RoomId, shortstatehash: u64,
+ pub fn set_room_state(
+ &self,
+ room_id: &RoomId,
+ shortstatehash: u64,
mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
- ) -> Result<()> {
+ ) -> Result<()> {
self.db.set_room_state(room_id, shortstatehash, mutex_lock)
}
/// Returns the room's version.
#[tracing::instrument(skip(self))]
pub fn get_room_version(&self, room_id: &RoomId) -> Result<RoomVersionId> {
- let create_event = services().rooms.state_accessor.room_state_get(room_id, &StateEventType::RoomCreate, "")?;
+ let create_event = services().rooms.state_accessor.room_state_get(
+ room_id,
+ &StateEventType::RoomCreate,
+ "",
+ )?;
let create_event_content: Option<RoomCreateEventContent> = create_event
.as_ref()
@@ -294,12 +353,14 @@ impl Service {
self.db.get_forward_extremities(room_id)
}
- pub fn set_forward_extremities<'a>(&self,
+ pub fn set_forward_extremities<'a>(
+ &self,
room_id: &RoomId,
event_ids: Vec<Box<EventId>>,
state_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
) -> Result<()> {
- self.db.set_forward_extremities(room_id, event_ids, state_lock)
+ self.db
+ .set_forward_extremities(room_id, event_ids, state_lock)
}
/// This fetches auth events from the current state.
@@ -312,12 +373,13 @@ impl Service {
state_key: Option<&str>,
content: &serde_json::value::RawValue,
) -> Result<StateMap<Arc<PduEvent>>> {
- let shortstatehash =
- if let Some(current_shortstatehash) = services().rooms.state.get_room_shortstatehash(room_id)? {
- current_shortstatehash
- } else {
- return Ok(HashMap::new());
- };
+ let shortstatehash = if let Some(current_shortstatehash) =
+ services().rooms.state.get_room_shortstatehash(room_id)?
+ {
+ current_shortstatehash
+ } else {
+ return Ok(HashMap::new());
+ };
let auth_events = state_res::auth_types_for_event(kind, sender, state_key, content)
.expect("content is a valid JSON object");
@@ -325,14 +387,19 @@ impl Service {
let mut sauthevents = auth_events
.into_iter()
.filter_map(|(event_type, state_key)| {
- services().rooms.short.get_shortstatekey(&event_type.to_string().into(), &state_key)
+ services()
+ .rooms
+ .short
+ .get_shortstatekey(&event_type.to_string().into(), &state_key)
.ok()
.flatten()
.map(|s| (s, (event_type, state_key)))
})
.collect::<HashMap<_, _>>();
- let full_state = services().rooms.state_compressor
+ let full_state = services()
+ .rooms
+ .state_compressor
.load_shortstatehash_info(shortstatehash)?
.pop()
.expect("there is always one layer")
@@ -340,11 +407,25 @@ impl Service {
Ok(full_state
.into_iter()
- .filter_map(|compressed| services().rooms.state_compressor.parse_compressed_state_event(compressed).ok())
+ .filter_map(|compressed| {
+ services()
+ .rooms
+ .state_compressor
+ .parse_compressed_state_event(compressed)
+ .ok()
+ })
.filter_map(|(shortstatekey, event_id)| {
sauthevents.remove(&shortstatekey).map(|k| (k, event_id))
})
- .filter_map(|(k, event_id)| services().rooms.timeline.get_pdu(&event_id).ok().flatten().map(|pdu| (k, pdu)))
+ .filter_map(|(k, event_id)| {
+ services()
+ .rooms
+ .timeline
+ .get_pdu(&event_id)
+ .ok()
+ .flatten()
+ .map(|pdu| (k, pdu))
+ })
.collect())
}
}
diff --git a/src/service/rooms/state_accessor/data.rs b/src/service/rooms/state_accessor/data.rs
index 14f96bc..340b19c 100644
--- a/src/service/rooms/state_accessor/data.rs
+++ b/src/service/rooms/state_accessor/data.rs
@@ -1,9 +1,12 @@
-use std::{sync::Arc, collections::{HashMap, BTreeMap}};
+use std::{
+ collections::{BTreeMap, HashMap},
+ sync::Arc,
+};
use async_trait::async_trait;
-use ruma::{EventId, events::StateEventType, RoomId};
+use ruma::{events::StateEventType, EventId, RoomId};
-use crate::{Result, PduEvent};
+use crate::{PduEvent, Result};
#[async_trait]
pub trait Data: Send + Sync {
diff --git a/src/service/rooms/state_accessor/mod.rs b/src/service/rooms/state_accessor/mod.rs
index a0f5523..e179d70 100644
--- a/src/service/rooms/state_accessor/mod.rs
+++ b/src/service/rooms/state_accessor/mod.rs
@@ -1,10 +1,13 @@
mod data;
-use std::{sync::Arc, collections::{HashMap, BTreeMap}};
+use std::{
+ collections::{BTreeMap, HashMap},
+ sync::Arc,
+};
pub use data::Data;
-use ruma::{events::StateEventType, RoomId, EventId};
+use ruma::{events::StateEventType, EventId, RoomId};
-use crate::{Result, PduEvent};
+use crate::{PduEvent, Result};
pub struct Service {
db: Arc<dyn Data>,
diff --git a/src/service/rooms/state_cache/data.rs b/src/service/rooms/state_cache/data.rs
index 950143f..a6b06a5 100644
--- a/src/service/rooms/state_cache/data.rs
+++ b/src/service/rooms/state_cache/data.rs
@@ -1,12 +1,21 @@
use std::{collections::HashSet, sync::Arc};
-use ruma::{UserId, RoomId, serde::Raw, events::{AnyStrippedStateEvent, AnySyncStateEvent}, ServerName};
use crate::Result;
+use ruma::{
+ events::{AnyStrippedStateEvent, AnySyncStateEvent},
+ serde::Raw,
+ RoomId, ServerName, UserId,
+};
pub trait Data: Send + Sync {
fn mark_as_once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
fn mark_as_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
- fn mark_as_invited(&self, user_id: &UserId, room_id: &RoomId, last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>) -> Result<()>;
+ fn mark_as_invited(
+ &self,
+ user_id: &UserId,
+ room_id: &RoomId,
+ last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>,
+ ) -> Result<()>;
fn mark_as_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
fn update_joined_count(&self, room_id: &RoomId) -> Result<()>;
diff --git a/src/service/rooms/state_cache/mod.rs b/src/service/rooms/state_cache/mod.rs
index 69bd832..04eb9af 100644
--- a/src/service/rooms/state_cache/mod.rs
+++ b/src/service/rooms/state_cache/mod.rs
@@ -9,8 +9,8 @@ use ruma::{
ignored_user_list::IgnoredUserListEvent,
room::{create::RoomCreateEventContent, member::MembershipState},
tag::{TagEvent, TagEventContent},
- AnyStrippedStateEvent, AnySyncStateEvent, GlobalAccountDataEventType,
- RoomAccountDataEventType, StateEventType, RoomAccountDataEvent, RoomAccountDataEventContent,
+ AnyStrippedStateEvent, AnySyncStateEvent, GlobalAccountDataEventType, RoomAccountDataEvent,
+ RoomAccountDataEventContent, RoomAccountDataEventType, StateEventType,
},
serde::Raw,
RoomId, ServerName, UserId,
@@ -97,8 +97,9 @@ impl Service {
RoomAccountDataEventType::Tag,
)?
.map(|event| {
- serde_json::from_str(event.get())
- .map_err(|_| Error::bad_database("Invalid account data event in db."))
+ serde_json::from_str(event.get()).map_err(|_| {
+ Error::bad_database("Invalid account data event in db.")
+ })
})
{
services()
@@ -113,16 +114,19 @@ impl Service {
};
// Copy direct chat flag
- if let Some(mut direct_event) = services().account_data.get(
- None,
- user_id,
- GlobalAccountDataEventType::Direct.to_string().into(),
- )?
+ if let Some(mut direct_event) = services()
+ .account_data
+ .get(
+ None,
+ user_id,
+ GlobalAccountDataEventType::Direct.to_string().into(),
+ )?
.map(|event| {
- serde_json::from_str::<DirectEvent>(event.get())
- .map_err(|_| Error::bad_database("Invalid account data event in db."))
+ serde_json::from_str::<DirectEvent>(event.get()).map_err(|_| {
+ Error::bad_database("Invalid account data event in db.")
+ })
})
- {
+ {
let direct_event = direct_event?;
let mut room_ids_updated = false;
@@ -138,7 +142,8 @@ impl Service {
None,
user_id,
GlobalAccountDataEventType::Direct.to_string().into(),
- &serde_json::to_value(&direct_event).expect("to json always works"),
+ &serde_json::to_value(&direct_event)
+ .expect("to json always works"),
)?;
}
};
@@ -158,10 +163,11 @@ impl Service {
.to_string()
.into(),
)?
- .map(|event| {
- serde_json::from_str::<IgnoredUserListEvent>(event.get())
- .map_err(|_| Error::bad_database("Invalid account data event in db."))
- }).transpose()?
+ .map(|event| {
+ serde_json::from_str::<IgnoredUserListEvent>(event.get())
+ .map_err(|_| Error::bad_database("Invalid account data event in db."))
+ })
+ .transpose()?
.map_or(false, |ignored| {
ignored
.content
diff --git a/src/service/rooms/state_compressor/mod.rs b/src/service/rooms/state_compressor/mod.rs
index 5f2cf02..f7c6dba 100644
--- a/src/service/rooms/state_compressor/mod.rs
+++ b/src/service/rooms/state_compressor/mod.rs
@@ -1,11 +1,15 @@
pub mod data;
-use std::{mem::size_of, sync::{Arc, Mutex}, collections::HashSet};
+use std::{
+ collections::HashSet,
+ mem::size_of,
+ sync::{Arc, Mutex},
+};
pub use data::Data;
use lru_cache::LruCache;
use ruma::{EventId, RoomId};
-use crate::{Result, utils, services};
+use crate::{services, utils, Result};
use self::data::StateDiff;
@@ -23,7 +27,6 @@ pub struct Service {
)>,
>,
>,
-
}
pub type CompressedStateEvent = [u8; 2 * size_of::<u64>()];
@@ -51,7 +54,11 @@ impl Service {
return Ok(r.clone());
}
- let StateDiff { parent, added, removed } = self.db.get_statediff(shortstatehash)?;
+ let StateDiff {
+ parent,
+ added,
+ removed,
+ } = self.db.get_statediff(shortstatehash)?;
if let Some(parent) = parent {
let mut response = self.load_shortstatehash_info(parent)?;
@@ -81,7 +88,9 @@ impl Service {
) -> Result<CompressedStateEvent> {
let mut v = shortstatekey.to_be_bytes().to_vec();
v.extend_from_slice(
- &services().rooms.short
+ &services()
+ .rooms
+ .short
.get_or_create_shorteventid(event_id)?
.to_be_bytes(),
);
@@ -175,7 +184,14 @@ impl Service {
if parent_states.is_empty() {
// There is no parent layer, create a new state
- self.db.save_statediff(shortstatehash, StateDiff { parent: None, added: statediffnew, removed: statediffremoved })?;
+ self.db.save_statediff(
+ shortstatehash,
+ StateDiff {
+ parent: None,
+ added: statediffnew,
+ removed: statediffremoved,
+ },
+ )?;
return Ok(());
};
@@ -217,7 +233,14 @@ impl Service {
)?;
} else {
// Diff small enough, we add diff as layer on top of parent
- self.db.save_statediff(shortstatehash, StateDiff { parent: Some(parent.0), added: statediffnew, removed: statediffremoved })?;
+ self.db.save_statediff(
+ shortstatehash,
+ StateDiff {
+ parent: Some(parent.0),
+ added: statediffnew,
+ removed: statediffremoved,
+ },
+ )?;
}
Ok(())
@@ -228,8 +251,7 @@ impl Service {
&self,
room_id: &RoomId,
new_state_ids_compressed: HashSet<CompressedStateEvent>,
- ) -> Result<u64>
- {
+ ) -> Result<u64> {
let previous_shortstatehash = services().rooms.state.get_room_shortstatehash(room_id)?;
let state_hash = utils::calculate_hash(
@@ -239,8 +261,10 @@ impl Service {
.collect::<Vec<_>>(),
);
- let (new_shortstatehash, already_existed) =
- services().rooms.short.get_or_create_shortstatehash(&state_hash)?;
+ let (new_shortstatehash, already_existed) = services()
+ .rooms
+ .short
+ .get_or_create_shortstatehash(&state_hash)?;
if Some(new_shortstatehash) == previous_shortstatehash {
return Ok(new_shortstatehash);
diff --git a/src/service/rooms/timeline/data.rs b/src/service/rooms/timeline/data.rs
index 20eae7f..4ae8ce9 100644
--- a/src/service/rooms/timeline/data.rs
+++ b/src/service/rooms/timeline/data.rs
@@ -1,8 +1,8 @@
use std::sync::Arc;
-use ruma::{signatures::CanonicalJsonObject, EventId, UserId, RoomId};
+use ruma::{signatures::CanonicalJsonObject, EventId, RoomId, UserId};
-use crate::{Result, PduEvent};
+use crate::{PduEvent, Result};
pub trait Data: Send + Sync {
fn first_pdu_in_room(&self, room_id: &RoomId) -> Result<Option<Arc<PduEvent>>>;
@@ -15,10 +15,7 @@ pub trait Data: Send + Sync {
fn get_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>>;
/// Returns the json of a pdu.
- fn get_non_outlier_pdu_json(
- &self,
- event_id: &EventId,
- ) -> Result<Option<CanonicalJsonObject>>;
+ fn get_non_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>>;
/// Returns the pdu's id.
fn get_pdu_id(&self, event_id: &EventId) -> Result<Option<Vec<u8>>>;
@@ -45,7 +42,13 @@ pub trait Data: Send + Sync {
fn pdu_count(&self, pdu_id: &[u8]) -> Result<u64>;
/// Adds a new pdu to the timeline
- fn append_pdu(&self, pdu_id: &[u8], pdu: &PduEvent, json: &CanonicalJsonObject, count: u64) -> Result<()>;
+ fn append_pdu(
+ &self,
+ pdu_id: &[u8],
+ pdu: &PduEvent,
+ json: &CanonicalJsonObject,
+ count: u64,
+ ) -> Result<()>;
/// Removes a pdu and creates a new one with the same id.
fn replace_pdu(&self, pdu_id: &[u8], pdu: &PduEvent) -> Result<()>;
@@ -75,5 +78,10 @@ pub trait Data: Send + Sync {
from: u64,
) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>>;
- fn increment_notification_counts(&self, room_id: &RoomId, notifies: Vec<Box<UserId>>, highlights: Vec<Box<UserId>>) -> Result<()>;
+ fn increment_notification_counts(
+ &self,
+ room_id: &RoomId,
+ notifies: Vec<Box<UserId>>,
+ highlights: Vec<Box<UserId>>,
+ ) -> Result<()>;
}
diff --git a/src/service/rooms/timeline/mod.rs b/src/service/rooms/timeline/mod.rs
index f25550d..b71dacb 100644
--- a/src/service/rooms/timeline/mod.rs
+++ b/src/service/rooms/timeline/mod.rs
@@ -1,9 +1,9 @@
mod data;
use std::borrow::Cow;
use std::collections::HashMap;
-use std::sync::{Arc, Mutex};
-use std::{iter, collections::HashSet};
use std::fmt::Debug;
+use std::sync::{Arc, Mutex};
+use std::{collections::HashSet, iter};
pub use data::Data;
use regex::Regex;
@@ -11,13 +11,27 @@ use ruma::events::room::power_levels::RoomPowerLevelsEventContent;
use ruma::push::Ruleset;
use ruma::signatures::CanonicalJsonValue;
use ruma::state_res::RoomVersion;
-use ruma::{EventId, signatures::CanonicalJsonObject, push::{Action, Tweak}, events::{push_rules::PushRulesEvent, GlobalAccountDataEventType, RoomEventType, room::{member::MembershipState, create::RoomCreateEventContent}, StateEventType}, UserId, RoomAliasId, RoomId, uint, state_res, api::client::error::ErrorKind, serde::to_canonical_value, ServerName};
+use ruma::{
+ api::client::error::ErrorKind,
+ events::{
+ push_rules::PushRulesEvent,
+ room::{create::RoomCreateEventContent, member::MembershipState},
+ GlobalAccountDataEventType, RoomEventType, StateEventType,
+ },
+ push::{Action, Tweak},
+ serde::to_canonical_value,
+ signatures::CanonicalJsonObject,
+ state_res, uint, EventId, RoomAliasId, RoomId, ServerName, UserId,
+};
use serde::Deserialize;
use serde_json::value::to_raw_value;
use tokio::sync::MutexGuard;
-use tracing::{warn, error};
+use tracing::{error, warn};
-use crate::{services, Result, service::pdu::{PduBuilder, EventHash}, Error, PduEvent, utils};
+use crate::{
+ service::pdu::{EventHash, PduBuilder},
+ services, utils, Error, PduEvent, Result,
+};
use super::state_compressor::CompressedStateEvent;
@@ -135,7 +149,11 @@ impl Service {
leaves: Vec<Box<EventId>>,
state_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
) -> Result<Vec<u8>> {
- let shortroomid = services().rooms.short.get_shortroomid(&pdu.room_id)?.expect("room exists");
+ let shortroomid = services()
+ .rooms
+ .short
+ .get_shortroomid(&pdu.room_id)?
+ .expect("room exists");
// Make unsigned fields correct. This is not properly documented in the spec, but state
// events need to have previous content in the unsigned field, so clients can easily
@@ -145,8 +163,15 @@ impl Service {
.entry("unsigned".to_owned())
.or_insert_with(|| CanonicalJsonValue::Object(Default::default()))
{
- if let Some(shortstatehash) = services().rooms.state_accessor.pdu_shortstatehash(&pdu.event_id).unwrap() {
- if let Some(prev_state) = services().rooms.state_accessor
+ if let Some(shortstatehash) = services()
+ .rooms
+ .state_accessor
+ .pdu_shortstatehash(&pdu.event_id)
+ .unwrap()
+ {
+ if let Some(prev_state) = services()
+ .rooms
+ .state_accessor
.state_get(shortstatehash, &pdu.kind.to_string().into(), state_key)
.unwrap()
{
@@ -165,11 +190,18 @@ impl Service {
}
// We must keep track of all events that have been referenced.
- services().rooms.pdu_metadata.mark_as_referenced(&pdu.room_id, &pdu.prev_events)?;
- services().rooms.state.set_forward_extremities(&pdu.room_id, leaves, state_lock)?;
+ services()
+ .rooms
+ .pdu_metadata
+ .mark_as_referenced(&pdu.room_id, &pdu.prev_events)?;
+ services()
+ .rooms
+ .state
+ .set_forward_extremities(&pdu.room_id, leaves, state_lock)?;
let mutex_insert = Arc::clone(
- services().globals
+ services()
+ .globals
.roomid_mutex_insert
.write()
.unwrap()
@@ -181,9 +213,15 @@ impl Service {
let count1 = services().globals.next_count()?;
// Mark as read first so the sending client doesn't get a notification even if appending
// fails
- services().rooms.edus.read_receipt
+ services()
+ .rooms
+ .edus
+ .read_receipt
.private_read_set(&pdu.room_id, &pdu.sender, count1)?;
- services().rooms.user.reset_notification_counts(&pdu.sender, &pdu.room_id)?;
+ services()
+ .rooms
+ .user
+ .reset_notification_counts(&pdu.sender, &pdu.room_id)?;
let count2 = services().globals.next_count()?;
let mut pdu_id = shortroomid.to_be_bytes().to_vec();
@@ -211,7 +249,12 @@ impl Service {
let mut notifies = Vec::new();
let mut highlights = Vec::new();
- for user in services().rooms.state_cache.get_our_real_users(&pdu.room_id)?.into_iter() {
+ for user in services()
+ .rooms
+ .state_cache
+ .get_our_real_users(&pdu.room_id)?
+ .into_iter()
+ {
// Don't notify the user of their own events
if &user == &pdu.sender {
continue;
@@ -224,8 +267,11 @@ impl Service {
&user,
GlobalAccountDataEventType::PushRules.to_string().into(),
)?
- .map(|event| serde_json::from_str::<PushRulesEvent>(event.get())
- .map_err(|_| Error::bad_database("Invalid push rules event in db."))).transpose()?
+ .map(|event| {
+ serde_json::from_str::<PushRulesEvent>(event.get())
+ .map_err(|_| Error::bad_database("Invalid push rules event in db."))
+ })
+ .transpose()?
.map(|ev: PushRulesEvent| ev.content.global)
.unwrap_or_else(|| Ruleset::server_default(&user));
@@ -263,7 +309,8 @@ impl Service {
}
}
- self.db.increment_notification_counts(&pdu.room_id, notifies, highlights);
+ self.db
+ .increment_notification_counts(&pdu.room_id, notifies, highlights);
match pdu.kind {
RoomEventType::RoomRedaction => {
@@ -315,7 +362,10 @@ impl Service {
.map_err(|_| Error::bad_database("Invalid content in pdu."))?;
if let Some(body) = content.body {
- services().rooms.search.index_pdu(shortroomid, &pdu_id, body)?;
+ services()
+ .rooms
+ .search
+ .index_pdu(shortroomid, &pdu_id, body)?;
let admin_room = services().rooms.alias.resolve_local_alias(
<&RoomAliasId>::try_from(
@@ -329,8 +379,8 @@ impl Service {
// This will evaluate to false if the emergency password is set up so that
// the administrator can execute commands as conduit
- let from_conduit =
- pdu.sender == server_user && services().globals.emergency_password().is_none();
+ let from_conduit = pdu.sender == server_user
+ && services().globals.emergency_password().is_none();
if to_conduit && !from_conduit && admin_room.as_ref() == Some(&pdu.room_id) {
services().admin.process_message(body.to_string());
@@ -341,8 +391,14 @@ impl Service {
}
for appservice in services().appservice.all()? {
- if services().rooms.state_cache.appservice_in_room(&pdu.room_id, &appservice)? {
- services().sending.send_pdu_appservice(&appservice.0, &pdu_id)?;
+ if services()
+ .rooms
+ .state_cache
+ .appservice_in_room(&pdu.room_id, &appservice)?
+ {
+ services()
+ .sending
+ .send_pdu_appservice(&appservice.0, &pdu_id)?;
continue;
}
@@ -359,11 +415,14 @@ impl Service {
.get("sender_localpart")
.and_then(|string| string.as_str())
.and_then(|string| {
- UserId::parse_with_server_name(string, services().globals.server_name()).ok()
+ UserId::parse_with_server_name(string, services().globals.server_name())
+ .ok()
})
{
if state_key_uid == &appservice_uid {
- services().sending.send_pdu_appservice(&appservice.0, &pdu_id)?;
+ services()
+ .sending
+ .send_pdu_appservice(&appservice.0, &pdu_id)?;
continue;
}
}
@@ -402,7 +461,10 @@ impl Service {
.map_or(false, |state_key| users.is_match(state_key))
};
let matching_aliases = |aliases: &Regex| {
- services().rooms.alias.local_aliases_for_room(&pdu.room_id)
+ services()
+ .rooms
+ .alias
+ .local_aliases_for_room(&pdu.room_id)
.filter_map(|r| r.ok())
.any(|room_alias| aliases.is_match(room_alias.as_str()))
};
@@ -411,21 +473,22 @@ impl Service {
|| rooms.map_or(false, |rooms| rooms.contains(&pdu.room_id.as_str().into()))
|| users.iter().any(matching_users)
{
- services().sending.send_pdu_appservice(&appservice.0, &pdu_id)?;
+ services()
+ .sending
+ .send_pdu_appservice(&appservice.0, &pdu_id)?;
}
}
}
-
Ok(pdu_id)
}
pub fn create_hash_and_sign_event(
- &self,
- pdu_builder: PduBuilder,
- sender: &UserId,
- room_id: &RoomId,
- _mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
+ &self,
+ pdu_builder: PduBuilder,
+ sender: &UserId,
+ room_id: &RoomId,
+ _mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
) -> Result<(PduEvent, CanonicalJsonObject)> {
let PduBuilder {
event_type,
@@ -443,10 +506,11 @@ impl Service {
.take(20)
.collect();
- let create_event = services()
- .rooms
- .state_accessor
- .room_state_get(room_id, &StateEventType::RoomCreate, "")?;
+ let create_event = services().rooms.state_accessor.room_state_get(
+ room_id,
+ &StateEventType::RoomCreate,
+ "",
+ )?;
let create_event_content: Option<RoomCreateEventContent> = create_event
.as_ref()
@@ -464,11 +528,15 @@ impl Service {
.map_or(services().globals.default_room_version(), |create_event| {
create_event.room_version
});
- let room_version =
- RoomVersion::new(&room_version_id).expect("room version is supported");
-
- let auth_events =
- services().rooms.state.get_auth_events(room_id, &event_type, sender, state_key.as_deref(), &content)?;
+ let room_version = RoomVersion::new(&room_version_id).expect("room version is supported");
+
+ let auth_events = services().rooms.state.get_auth_events(
+ room_id,
+ &event_type,
+ sender,
+ state_key.as_deref(),
+ &content,
+ )?;
// Our depth is the maximum depth of prev_events + 1
let depth = prev_events
@@ -481,9 +549,11 @@ impl Service {
let mut unsigned = unsigned.unwrap_or_default();
if let Some(state_key) = &state_key {
- if let Some(prev_pdu) =
- services().rooms.state_accessor.room_state_get(room_id, &event_type.to_string().into(), state_key)?
- {
+ if let Some(prev_pdu) = services().rooms.state_accessor.room_state_get(
+ room_id,
+ &event_type.to_string().into(),
+ state_key,
+ )? {
unsigned.insert(
"prev_content".to_owned(),
serde_json::from_str(prev_pdu.content.get()).expect("string is valid json"),
@@ -589,7 +659,10 @@ impl Service {
);
// Generate short event id
- let _shorteventid = services().rooms.short.get_or_create_shorteventid(&pdu.event_id)?;
+ let _shorteventid = services()
+ .rooms
+ .short
+ .get_or_create_shorteventid(&pdu.event_id)?;
Ok((pdu, pdu_json))
}
@@ -604,7 +677,8 @@ impl Service {
room_id: &RoomId,
state_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
) -> Result<Arc<EventId>> {
- let (pdu, pdu_json) = self.create_hash_and_sign_event(pdu_builder, sender, room_id, &state_lock)?;
+ let (pdu, pdu_json) =
+ self.create_hash_and_sign_event(pdu_builder, sender, room_id, &state_lock)?;
// We append to state before appending the pdu, so we don't have a moment in time with the
// pdu without its state. This is okay because append_pdu can't fail.
@@ -621,10 +695,17 @@ impl Service {
// We set the room state after inserting the pdu, so that we never have a moment in time
// where events in the current room state do not exist
- services().rooms.state.set_room_state(room_id, statehashid, state_lock)?;
+ services()
+ .rooms
+ .state
+ .set_room_state(room_id, statehashid, state_lock)?;
- let mut servers: HashSet<Box<ServerName>> =
- services().rooms.state_cache.room_servers(room_id).filter_map(|r| r.ok()).collect();
+ let mut servers: HashSet<Box<ServerName>> = services()
+ .rooms
+ .state_cache
+ .room_servers(room_id)
+ .filter_map(|r| r.ok())
+ .collect();
// In case we are kicking or banning a user, we need to inform their server of the change
if pdu.kind == RoomEventType::RoomMember {
@@ -666,13 +747,23 @@ impl Service {
)?;
if soft_fail {
- services().rooms.pdu_metadata
+ services()
+ .rooms
+ .pdu_metadata
.mark_as_referenced(&pdu.room_id, &pdu.prev_events)?;
- services().rooms.state.set_forward_extremities(&pdu.room_id, new_room_leaves, state_lock)?;
+ services().rooms.state.set_forward_extremities(
+ &pdu.room_id,
+ new_room_leaves,
+ state_lock,
+ )?;
return Ok(None);
}
- let pdu_id = services().rooms.timeline.append_pdu(pdu, pdu_json, new_room_leaves, state_lock)?;
+ let pdu_id =
+ services()
+ .rooms
+ .timeline
+ .append_pdu(pdu, pdu_json, new_room_leaves, state_lock)?;
Ok(Some(pdu_id))
}
diff --git a/src/service/rooms/user/data.rs b/src/service/rooms/user/data.rs
index 6b7ebc7..fcaff5a 100644
--- a/src/service/rooms/user/data.rs
+++ b/src/service/rooms/user/data.rs
@@ -1,5 +1,5 @@
-use ruma::{UserId, RoomId};
use crate::Result;
+use ruma::{RoomId, UserId};
pub trait Data: Send + Sync {
fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
diff --git a/src/service/rooms/user/mod.rs b/src/service/rooms/user/mod.rs
index 394a550..1caa4b3 100644
--- a/src/service/rooms/user/mod.rs
+++ b/src/service/rooms/user/mod.rs
@@ -29,7 +29,8 @@ impl Service {
token: u64,
shortstatehash: u64,
) -> Result<()> {
- self.db.associate_token_shortstatehash(room_id, token, shortstatehash)
+ self.db
+ .associate_token_shortstatehash(room_id, token, shortstatehash)
}
pub fn get_token_shortstatehash(&self, room_id: &RoomId, token: u64) -> Result<Option<u64>> {
diff --git a/src/service/sending/mod.rs b/src/service/sending/mod.rs
index b335095..e09d423 100644
--- a/src/service/sending/mod.rs
+++ b/src/service/sending/mod.rs
@@ -6,7 +6,10 @@ use std::{
};
use crate::{
- utils::{self, calculate_hash}, Error, PduEvent, Result, services, api::{server_server, appservice_server},
+ api::{appservice_server, server_server},
+ services,
+ utils::{self, calculate_hash},
+ Error, PduEvent, Result,
};
use federation::transactions::send_transaction_message;
use futures_util::{stream::FuturesUnordered, StreamExt};
@@ -88,10 +91,7 @@ enum TransactionStatus {
}
impl Service {
- pub fn start_handler(
- &self,
- mut receiver: mpsc::UnboundedReceiver<(Vec<u8>, Vec<u8>)>,
- ) {
+ pub fn start_handler(&self, mut receiver: mpsc::UnboundedReceiver<(Vec<u8>, Vec<u8>)>) {
tokio::spawn(async move {
let mut futures = FuturesUnordered::new();
@@ -119,7 +119,11 @@ impl Service {
"Dropping some current events: {:?} {:?} {:?}",
key, outgoing_kind, event
);
- services().sending.servercurrentevent_data.remove(&key).unwrap();
+ services()
+ .sending
+ .servercurrentevent_data
+ .remove(&key)
+ .unwrap();
continue;
}
@@ -129,10 +133,7 @@ impl Service {
for (outgoing_kind, events) in initial_transactions {
current_transaction_status
.insert(outgoing_kind.get_prefix(), TransactionStatus::Running);
- futures.push(Self::handle_events(
- outgoing_kind.clone(),
- events,
- ));
+ futures.push(Self::handle_events(outgoing_kind.clone(), events));
}
loop {
@@ -246,7 +247,11 @@ impl Service {
if retry {
// We retry the previous transaction
- for (key, value) in services().sending.servercurrentevent_data.scan_prefix(prefix) {
+ for (key, value) in services()
+ .sending
+ .servercurrentevent_data
+ .scan_prefix(prefix)
+ {
if let Ok((_, e)) = Self::parse_servercurrentevent(&key, value) {
events.push(e);
}
@@ -258,7 +263,8 @@ impl Service {
} else {
&[][..]
};
- services().sending
+ services()
+ .sending
.servercurrentevent_data
.insert(&full_key, value)?;
@@ -273,7 +279,8 @@ impl Service {
if let Ok((select_edus, last_count)) = Self::select_edus(server_name) {
events.extend(select_edus.into_iter().map(SendingEventType::Edu));
- services().sending
+ services()
+ .sending
.servername_educount
.insert(server_name.as_bytes(), &last_count.to_be_bytes())?;
}
@@ -302,7 +309,8 @@ impl Service {
let room_id = room_id?;
// Look for device list updates in this room
device_list_changes.extend(
- services().users
+ services()
+ .users
.keys_changed(&room_id.to_string(), since, None)
.filter_map(|r| r.ok())
.filter(|user_id| user_id.server_name() == services().globals.server_name()),
@@ -502,7 +510,8 @@ impl Service {
let permit = services().sending.maximum_requests.acquire().await;
let response = appservice_server::send_request(
- services().appservice
+ services()
+ .appservice
.get_registration(&id)
.map_err(|e| (kind.clone(), e))?
.ok_or_else(|| {
@@ -621,16 +630,12 @@ impl Service {
let permit = services().sending.maximum_requests.acquire().await;
- let _response = services().pusher.send_push_notice(
- &userid,
- unread,
- &pusher,
- rules_for_user,
- &pdu,
- )
- .await
- .map(|_response| kind.clone())
- .map_err(|e| (kind.clone(), e));
+ let _response = services()
+ .pusher
+ .send_push_notice(&userid, unread, &pusher, rules_for_user, &pdu)
+ .await
+ .map(|_response| kind.clone())
+ .map_err(|e| (kind.clone(), e));
drop(permit);
}
diff --git a/src/service/transaction_ids/data.rs b/src/service/transaction_ids/data.rs
index c5ff05c..7485531 100644
--- a/src/service/transaction_ids/data.rs
+++ b/src/service/transaction_ids/data.rs
@@ -1,5 +1,5 @@
-use ruma::{DeviceId, UserId, TransactionId};
use crate::Result;
+use ruma::{DeviceId, TransactionId, UserId};
pub trait Data: Send + Sync {
fn add_txnid(
diff --git a/src/service/transaction_ids/mod.rs b/src/service/transaction_ids/mod.rs
index 8d5fd0a..a473e2b 100644
--- a/src/service/transaction_ids/mod.rs
+++ b/src/service/transaction_ids/mod.rs
@@ -3,8 +3,8 @@ use std::sync::Arc;
pub use data::Data;
-use ruma::{UserId, DeviceId, TransactionId};
use crate::Result;
+use ruma::{DeviceId, TransactionId, UserId};
pub struct Service {
db: Arc<dyn Data>,
diff --git a/src/service/uiaa/data.rs b/src/service/uiaa/data.rs
index 091f064..3b7eb2b 100644
--- a/src/service/uiaa/data.rs
+++ b/src/service/uiaa/data.rs
@@ -1,5 +1,5 @@
-use ruma::{api::client::uiaa::UiaaInfo, DeviceId, UserId, signatures::CanonicalJsonValue};
use crate::Result;
+use ruma::{api::client::uiaa::UiaaInfo, signatures::CanonicalJsonValue, DeviceId, UserId};
pub trait Data: Send + Sync {
fn set_uiaa_request(
diff --git a/src/service/uiaa/mod.rs b/src/service/uiaa/mod.rs
index 5444118..8f3b3b8 100644
--- a/src/service/uiaa/mod.rs
+++ b/src/service/uiaa/mod.rs
@@ -3,10 +3,17 @@ use std::sync::Arc;
pub use data::Data;
-use ruma::{api::client::{uiaa::{UiaaInfo, IncomingAuthData, IncomingPassword, AuthType, IncomingUserIdentifier}, error::ErrorKind}, DeviceId, UserId, signatures::CanonicalJsonValue};
+use ruma::{
+ api::client::{
+ error::ErrorKind,
+ uiaa::{AuthType, IncomingAuthData, IncomingPassword, IncomingUserIdentifier, UiaaInfo},
+ },
+ signatures::CanonicalJsonValue,
+ DeviceId, UserId,
+};
use tracing::error;
-use crate::{Result, utils, Error, services, api::client_server::SESSION_ID_LENGTH};
+use crate::{api::client_server::SESSION_ID_LENGTH, services, utils, Error, Result};
pub struct Service {
db: Arc<dyn Data>,
@@ -68,11 +75,11 @@ impl Service {
}
};
- let user_id =
- UserId::parse_with_server_name(username.clone(), services().globals.server_name())
- .map_err(|_| {
- Error::BadRequest(ErrorKind::InvalidParam, "User ID is invalid.")
- })?;
+ let user_id = UserId::parse_with_server_name(
+ username.clone(),
+ services().globals.server_name(),
+ )
+ .map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "User ID is invalid."))?;
// Check if password is correct
if let Some(hash) = services().users.password_hash(&user_id)? {
diff --git a/src/service/users/data.rs b/src/service/users/data.rs
index 7eb0ceb..9f315d3 100644
--- a/src/service/users/data.rs
+++ b/src/service/users/data.rs
@@ -1,6 +1,12 @@
-use std::collections::BTreeMap;
use crate::Result;
-use ruma::{UserId, DeviceId, DeviceKeyAlgorithm, DeviceKeyId, serde::Raw, encryption::{OneTimeKey, DeviceKeys, CrossSigningKey}, UInt, events::AnyToDeviceEvent, api::client::{device::Device, filter::IncomingFilterDefinition}, MxcUri};
+use ruma::{
+ api::client::{device::Device, filter::IncomingFilterDefinition},
+ encryption::{CrossSigningKey, DeviceKeys, OneTimeKey},
+ events::AnyToDeviceEvent,
+ serde::Raw,
+ DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MxcUri, UInt, UserId,
+};
+use std::collections::BTreeMap;
pub trait Data: Send + Sync {
/// Check if a user has an account on this homeserver.
@@ -127,10 +133,7 @@ pub trait Data: Send + Sync {
to: Option<u64>,
) -> Box<dyn Iterator<Item = Result<Box<UserId>>>>;
- fn mark_device_key_update(
- &self,
- user_id: &UserId,
- ) -> Result<()>;
+ fn mark_device_key_update(&self, user_id: &UserId) -> Result<()>;
fn get_device_keys(
&self,
@@ -182,11 +185,8 @@ pub trait Data: Send + Sync {
) -> Result<()>;
/// Get device metadata.
- fn get_device_metadata(
- &self,
- user_id: &UserId,
- device_id: &DeviceId,
- ) -> Result<Option<Device>>;
+ fn get_device_metadata(&self, user_id: &UserId, device_id: &DeviceId)
+ -> Result<Option<Device>>;
fn get_devicelist_version(&self, user_id: &UserId) -> Result<Option<u64>>;
@@ -196,11 +196,7 @@ pub trait Data: Send + Sync {
) -> Box<dyn Iterator<Item = Result<Device>>>;
/// Creates a new sync filter. Returns the filter id.
- fn create_filter(
- &self,
- user_id: &UserId,
- filter: &IncomingFilterDefinition,
- ) -> Result<String>;
+ fn create_filter(&self, user_id: &UserId, filter: &IncomingFilterDefinition) -> Result<String>;
fn get_filter(
&self,
diff --git a/src/service/users/mod.rs b/src/service/users/mod.rs
index 826e049..0b83460 100644
--- a/src/service/users/mod.rs
+++ b/src/service/users/mod.rs
@@ -2,9 +2,15 @@ mod data;
use std::{collections::BTreeMap, mem, sync::Arc};
pub use data::Data;
-use ruma::{UserId, MxcUri, DeviceId, DeviceKeyId, serde::Raw, encryption::{OneTimeKey, CrossSigningKey, DeviceKeys}, DeviceKeyAlgorithm, UInt, events::AnyToDeviceEvent, api::client::{device::Device, filter::IncomingFilterDefinition, error::ErrorKind}, RoomAliasId};
+use ruma::{
+ api::client::{device::Device, error::ErrorKind, filter::IncomingFilterDefinition},
+ encryption::{CrossSigningKey, DeviceKeys, OneTimeKey},
+ events::AnyToDeviceEvent,
+ serde::Raw,
+ DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MxcUri, RoomAliasId, UInt, UserId,
+};
-use crate::{Result, Error, services};
+use crate::{services, Error, Result};
pub struct Service {
db: Arc<dyn Data>,
@@ -22,15 +28,20 @@ impl Service {
}
/// Check if a user is an admin
- pub fn is_admin(
- &self,
- user_id: &UserId,
- ) -> Result<bool> {
- let admin_room_alias_id = RoomAliasId::parse(format!("#admins:{}", services().globals.server_name()))
- .map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid alias."))?;
- let admin_room_id = services().rooms.alias.resolve_local_alias(&admin_room_alias_id)?.unwrap();
-
- services().rooms.state_cache.is_joined(user_id, &admin_room_id)
+ pub fn is_admin(&self, user_id: &UserId) -> Result<bool> {
+ let admin_room_alias_id =
+ RoomAliasId::parse(format!("#admins:{}", services().globals.server_name()))
+ .map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid alias."))?;
+ let admin_room_id = services()
+ .rooms
+ .alias
+ .resolve_local_alias(&admin_room_alias_id)?
+ .unwrap();
+
+ services()
+ .rooms
+ .state_cache
+ .is_joined(user_id, &admin_room_id)
}
/// Create a new user account on this homeserver.
@@ -39,7 +50,6 @@ impl Service {
Ok(())
}
-
/// Returns the number of users registered on this server.
pub fn count(&self) -> Result<usize> {
self.db.count()
@@ -118,7 +128,8 @@ impl Service {
token: &str,
initial_device_display_name: Option<String>,
) -> Result<()> {
- self.db.create_device(user_id, device_id, token, initial_device_display_name)
+ self.db
+ .create_device(user_id, device_id, token, initial_device_display_name)
}
/// Removes a device from a user.
@@ -146,7 +157,8 @@ impl Service {
one_time_key_key: &DeviceKeyId,
one_time_key_value: &Raw<OneTimeKey>,
) -> Result<()> {
- self.db.add_one_time_key(user_id, device_id, one_time_key_key, one_time_key_value)
+ self.db
+ .add_one_time_key(user_id, device_id, one_time_key_key, one_time_key_value)
}
pub fn last_one_time_keys_update(&self, user_id: &UserId) -> Result<u64> {
@@ -186,7 +198,8 @@ impl Service {
self_signing_key: &Option<Raw<CrossSigningKey>>,
user_signing_key: &Option<Raw<CrossSigningKey>>,
) -> Result<()> {
- self.db.add_cross_signing_keys(user_id, master_key, self_signing_key, user_signing_key)
+ self.db
+ .add_cross_signing_keys(user_id, master_key, self_signing_key, user_signing_key)
}
pub fn sign_key(
@@ -208,10 +221,7 @@ impl Service {
self.db.keys_changed(user_or_room_id, from, to)
}
- pub fn mark_device_key_update(
- &self,
- user_id: &UserId,
- ) -> Result<()> {
+ pub fn mark_device_key_update(&self, user_id: &UserId) -> Result<()> {
self.db.mark_device_key_update(user_id)
}
@@ -251,7 +261,13 @@ impl Service {
event_type: &str,
content: serde_json::Value,
) -> Result<()> {
- self.db.add_to_device_event(sender, target_user_id, target_device_id, event_type, content)
+ self.db.add_to_device_event(
+ sender,
+ target_user_id,
+ target_device_id,
+ event_type,
+ content,
+ )
}
pub fn get_to_device_events(
diff --git a/src/utils/mod.rs b/src/utils/mod.rs
index 0ee3ae8..9202eeb 100644
--- a/src/utils/mod.rs
+++ b/src/utils/mod.rs
@@ -78,7 +78,6 @@ pub fn calculate_hash(keys: &[&[u8]]) -> Vec<u8> {
hash.as_ref().to_owned()
}
-
pub fn common_elements(
mut iterators: impl Iterator<Item = impl Iterator<Item = Vec<u8>>>,
check_order: impl Fn(&[u8], &[u8]) -> Ordering,