Services refactor

Replace structs of Dep<Service> with OnceServices, so each service has a Services reference

Remove service name => Service map

Flatten Services.rooms

Make reqwest Clients lazily initialized (client service)
This commit is contained in:
dasha_uwu
2025-08-22 20:15:54 +05:00
parent 26b3a84b88
commit b5890b9664
118 changed files with 457 additions and 1923 deletions

View File

@@ -45,7 +45,6 @@ pub(super) async fn echo(&self, message: Vec<String>) -> Result {
pub(super) async fn get_auth_chain(&self, event_id: OwnedEventId) -> Result {
let Ok(event) = self
.services
.rooms
.timeline
.get_pdu_json(&event_id)
.await
@@ -64,7 +63,6 @@ pub(super) async fn get_auth_chain(&self, event_id: OwnedEventId) -> Result {
let start = Instant::now();
let count = self
.services
.rooms
.auth_chain
.event_ids_iter(room_id, once(event_id.as_ref()))
.ready_filter_map(Result::ok)
@@ -111,7 +109,6 @@ pub(super) async fn get_pdu(&self, event_id: OwnedEventId) -> Result {
let mut outlier = false;
let mut pdu_json = self
.services
.rooms
.timeline
.get_non_outlier_pdu_json(&event_id)
.await;
@@ -120,7 +117,6 @@ pub(super) async fn get_pdu(&self, event_id: OwnedEventId) -> Result {
outlier = true;
pdu_json = self
.services
.rooms
.timeline
.get_pdu_json(&event_id)
.await;
@@ -155,7 +151,6 @@ pub(super) async fn get_short_pdu(
let pdu_json = self
.services
.rooms
.timeline
.get_pdu_json_from_id(&pdu_id)
.await;
@@ -278,7 +273,6 @@ pub(super) async fn get_remote_pdu(
let _parsed_pdu = {
let parsed_result = self
.services
.rooms
.event_handler
.parse_incoming_pdu(&response.pdu)
.boxed()
@@ -298,7 +292,6 @@ pub(super) async fn get_remote_pdu(
info!("Attempting to handle event ID {event_id} as backfilled PDU");
self.services
.rooms
.timeline
.backfill_pdu(&server, response.pdu)
.await?;
@@ -313,10 +306,9 @@ pub(super) async fn get_remote_pdu(
#[admin_command]
pub(super) async fn get_room_state(&self, room: OwnedRoomOrAliasId) -> Result {
let room_id = self.services.rooms.alias.resolve(&room).await?;
let room_id = self.services.alias.resolve(&room).await?;
let room_state: Vec<Raw<AnyStateEvent>> = self
.services
.rooms
.state_accessor
.room_state_full_pdus(&room_id)
.map_ok(Event::into_format)
@@ -494,7 +486,6 @@ pub(super) async fn verify_pdu(&self, event_id: OwnedEventId) -> Result {
let mut event = self
.services
.rooms
.timeline
.get_pdu_json(&event_id)
.await?;
@@ -519,7 +510,6 @@ pub(super) async fn verify_pdu(&self, event_id: OwnedEventId) -> Result {
pub(super) async fn first_pdu_in_room(&self, room_id: OwnedRoomId) -> Result {
if !self
.services
.rooms
.state_cache
.server_in_room(&self.services.server.name, &room_id)
.await
@@ -529,7 +519,6 @@ pub(super) async fn first_pdu_in_room(&self, room_id: OwnedRoomId) -> Result {
let first_pdu = self
.services
.rooms
.timeline
.first_pdu_in_room(&room_id)
.await
@@ -544,7 +533,6 @@ pub(super) async fn first_pdu_in_room(&self, room_id: OwnedRoomId) -> Result {
pub(super) async fn latest_pdu_in_room(&self, room_id: OwnedRoomId) -> Result {
if !self
.services
.rooms
.state_cache
.server_in_room(&self.services.server.name, &room_id)
.await
@@ -554,7 +542,6 @@ pub(super) async fn latest_pdu_in_room(&self, room_id: OwnedRoomId) -> Result {
let latest_pdu = self
.services
.rooms
.timeline
.latest_pdu_in_room(&room_id)
.await
@@ -573,7 +560,6 @@ pub(super) async fn force_set_room_state_from_server(
) -> Result {
if !self
.services
.rooms
.state_cache
.server_in_room(&self.services.server.name, &room_id)
.await
@@ -583,7 +569,6 @@ pub(super) async fn force_set_room_state_from_server(
let first_pdu = self
.services
.rooms
.timeline
.latest_pdu_in_room(&room_id)
.await
@@ -591,7 +576,6 @@ pub(super) async fn force_set_room_state_from_server(
let room_version = self
.services
.rooms
.state
.get_room_version(&room_id)
.await?;
@@ -610,7 +594,6 @@ pub(super) async fn force_set_room_state_from_server(
for pdu in remote_state_response.pdus.clone() {
match self
.services
.rooms
.event_handler
.parse_incoming_pdu(&pdu)
.await
@@ -639,14 +622,12 @@ pub(super) async fn force_set_room_state_from_server(
})?;
self.services
.rooms
.timeline
.add_pdu_outlier(&event_id, &value);
if let Some(state_key) = &pdu.state_key {
let shortstatekey = self
.services
.rooms
.short
.get_or_create_shortstatekey(&pdu.kind.to_string().into(), state_key)
.await;
@@ -669,14 +650,12 @@ pub(super) async fn force_set_room_state_from_server(
};
self.services
.rooms
.timeline
.add_pdu_outlier(&event_id, &value);
}
let new_room_state = self
.services
.rooms
.event_handler
.resolve_state(&room_id, &room_version, state)
.await?;
@@ -688,21 +667,13 @@ pub(super) async fn force_set_room_state_from_server(
removed,
} = self
.services
.rooms
.state_compressor
.save_state(room_id.clone().as_ref(), new_room_state)
.await?;
let state_lock = self
.services
.rooms
.state
.mutex
.lock(&*room_id)
.await;
let state_lock = self.services.state.mutex.lock(&*room_id).await;
self.services
.rooms
.state
.force_state(room_id.clone().as_ref(), short_state_hash, added, removed, &state_lock)
.await?;
@@ -712,7 +683,6 @@ pub(super) async fn force_set_room_state_from_server(
the room's m.room.member state"
);
self.services
.rooms
.state_cache
.update_joined_count(&room_id)
.await;

View File

@@ -7,7 +7,6 @@ use crate::{admin_command, get_room_info};
#[admin_command]
pub(super) async fn disable_room(&self, room_id: OwnedRoomId) -> Result {
self.services
.rooms
.metadata
.disable_room(&room_id, true);
self.write_str("Room disabled.").await
@@ -16,7 +15,6 @@ pub(super) async fn disable_room(&self, room_id: OwnedRoomId) -> Result {
#[admin_command]
pub(super) async fn enable_room(&self, room_id: OwnedRoomId) -> Result {
self.services
.rooms
.metadata
.disable_room(&room_id, false);
self.write_str("Room enabled.").await
@@ -82,7 +80,6 @@ pub(super) async fn remote_user_in_rooms(&self, user_id: OwnedUserId) -> Result
let mut rooms: Vec<(OwnedRoomId, u64, String)> = self
.services
.rooms
.state_cache
.rooms_joined(&user_id)
.then(|room_id| get_room_info(self.services, room_id))

View File

@@ -37,7 +37,6 @@ pub(super) async fn delete(
// parsing the PDU for any MXC URLs begins here
match self
.services
.rooms
.timeline
.get_pdu_json(&event_id)
.await

View File

@@ -30,11 +30,7 @@ pub(super) async fn process(subcommand: RoomAliasCommand, context: &Context<'_>)
match subcommand {
| RoomAliasCommand::ResolveLocalAlias { alias } => {
let timer = tokio::time::Instant::now();
let results = services
.rooms
.alias
.resolve_local_alias(&alias)
.await;
let results = services.alias.resolve_local_alias(&alias).await;
let query_time = timer.elapsed();
write!(context, "Query completed in {query_time:?}:\n\n```rs\n{results:#?}\n```")
@@ -42,7 +38,6 @@ pub(super) async fn process(subcommand: RoomAliasCommand, context: &Context<'_>)
| RoomAliasCommand::LocalAliasesForRoom { room_id } => {
let timer = tokio::time::Instant::now();
let aliases: Vec<_> = services
.rooms
.alias
.local_aliases_for_room(&room_id)
.map(ToOwned::to_owned)
@@ -55,7 +50,6 @@ pub(super) async fn process(subcommand: RoomAliasCommand, context: &Context<'_>)
| RoomAliasCommand::AllLocalAliases => {
let timer = tokio::time::Instant::now();
let aliases = services
.rooms
.alias
.all_local_aliases()
.map(|(room_id, alias)| (room_id.to_owned(), alias.to_owned()))

View File

@@ -83,7 +83,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::ServerInRoom { server, room_id } => {
let timer = tokio::time::Instant::now();
let result = services
.rooms
.state_cache
.server_in_room(&server, &room_id)
.await;
@@ -98,7 +97,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::RoomServers { room_id } => {
let timer = tokio::time::Instant::now();
let results: Vec<_> = services
.rooms
.state_cache
.room_servers(&room_id)
.map(ToOwned::to_owned)
@@ -115,7 +113,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::ServerRooms { server } => {
let timer = tokio::time::Instant::now();
let results: Vec<_> = services
.rooms
.state_cache
.server_rooms(&server)
.map(ToOwned::to_owned)
@@ -132,7 +129,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::RoomMembers { room_id } => {
let timer = tokio::time::Instant::now();
let results: Vec<_> = services
.rooms
.state_cache
.room_members(&room_id)
.map(ToOwned::to_owned)
@@ -149,7 +145,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::LocalUsersInRoom { room_id } => {
let timer = tokio::time::Instant::now();
let results: Vec<_> = services
.rooms
.state_cache
.local_users_in_room(&room_id)
.map(ToOwned::to_owned)
@@ -166,7 +161,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::ActiveLocalUsersInRoom { room_id } => {
let timer = tokio::time::Instant::now();
let results: Vec<_> = services
.rooms
.state_cache
.active_local_users_in_room(&room_id)
.map(ToOwned::to_owned)
@@ -183,7 +177,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::RoomJoinedCount { room_id } => {
let timer = tokio::time::Instant::now();
let results = services
.rooms
.state_cache
.room_joined_count(&room_id)
.await;
@@ -198,7 +191,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::RoomInvitedCount { room_id } => {
let timer = tokio::time::Instant::now();
let results = services
.rooms
.state_cache
.room_invited_count(&room_id)
.await;
@@ -213,7 +205,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::RoomUserOnceJoined { room_id } => {
let timer = tokio::time::Instant::now();
let results: Vec<_> = services
.rooms
.state_cache
.room_useroncejoined(&room_id)
.map(ToOwned::to_owned)
@@ -230,7 +221,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::RoomMembersInvited { room_id } => {
let timer = tokio::time::Instant::now();
let results: Vec<_> = services
.rooms
.state_cache
.room_members_invited(&room_id)
.map(ToOwned::to_owned)
@@ -247,7 +237,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::GetInviteCount { room_id, user_id } => {
let timer = tokio::time::Instant::now();
let results = services
.rooms
.state_cache
.get_invite_count(&room_id, &user_id)
.await;
@@ -262,7 +251,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::GetLeftCount { room_id, user_id } => {
let timer = tokio::time::Instant::now();
let results = services
.rooms
.state_cache
.get_left_count(&room_id, &user_id)
.await;
@@ -277,7 +265,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::RoomsJoined { user_id } => {
let timer = tokio::time::Instant::now();
let results: Vec<_> = services
.rooms
.state_cache
.rooms_joined(&user_id)
.map(ToOwned::to_owned)
@@ -294,7 +281,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::RoomsInvited { user_id } => {
let timer = tokio::time::Instant::now();
let results: Vec<_> = services
.rooms
.state_cache
.rooms_invited(&user_id)
.collect()
@@ -310,7 +296,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::RoomsLeft { user_id } => {
let timer = tokio::time::Instant::now();
let results: Vec<_> = services
.rooms
.state_cache
.rooms_left(&user_id)
.collect()
@@ -326,7 +311,6 @@ pub(super) async fn process(subcommand: RoomStateCacheCommand, context: &Context
| RoomStateCacheCommand::InviteState { user_id, room_id } => {
let timer = tokio::time::Instant::now();
let results = services
.rooms
.state_cache
.invite_state(&user_id, &room_id)
.await;

View File

@@ -25,16 +25,10 @@ pub(crate) enum RoomTimelineCommand {
#[admin_command]
pub(super) async fn last(&self, room_id: OwnedRoomOrAliasId) -> Result {
let room_id = self
.services
.rooms
.alias
.resolve(&room_id)
.await?;
let room_id = self.services.alias.resolve(&room_id).await?;
let result = self
.services
.rooms
.timeline
.last_timeline_count(None, &room_id, None)
.await?;
@@ -49,18 +43,12 @@ pub(super) async fn pdus(
from: Option<String>,
limit: Option<usize>,
) -> Result {
let room_id = self
.services
.rooms
.alias
.resolve(&room_id)
.await?;
let room_id = self.services.alias.resolve(&room_id).await?;
let from: Option<PduCount> = from.as_deref().map(str::parse).transpose()?;
let result: Vec<_> = self
.services
.rooms
.timeline
.pdus_rev(None, &room_id, from)
.try_take(limit.unwrap_or(3))

View File

@@ -21,7 +21,6 @@ pub(crate) enum ShortCommand {
pub(super) async fn short_event_id(&self, event_id: OwnedEventId) -> Result {
let shortid = self
.services
.rooms
.short
.get_shorteventid(&event_id)
.await?;
@@ -31,16 +30,10 @@ pub(super) async fn short_event_id(&self, event_id: OwnedEventId) -> Result {
#[admin_command]
pub(super) async fn short_room_id(&self, room_id: OwnedRoomOrAliasId) -> Result {
let room_id = self
.services
.rooms
.alias
.resolve(&room_id)
.await?;
let room_id = self.services.alias.resolve(&room_id).await?;
let shortid = self
.services
.rooms
.short
.get_shortroomid(&room_id)
.await?;

View File

@@ -134,7 +134,6 @@ async fn get_shared_rooms(&self, user_a: OwnedUserId, user_b: OwnedUserId) -> Re
let timer = tokio::time::Instant::now();
let result: Vec<_> = self
.services
.rooms
.state_cache
.get_shared_rooms(&user_a, &user_b)
.map(ToOwned::to_owned)

View File

@@ -63,17 +63,15 @@ pub(super) async fn process(command: RoomAliasCommand, context: &Context<'_>) ->
match (
force,
services
.rooms
.alias
.resolve_local_alias(&room_alias)
.await,
) {
| (true, Ok(id)) => {
match services.rooms.alias.set_alias(
&room_alias,
&room_id,
server_user,
) {
match services
.alias
.set_alias(&room_alias, &room_id, server_user)
{
| Err(err) => Err!("Failed to remove alias: {err}"),
| Ok(()) =>
context
@@ -88,11 +86,10 @@ pub(super) async fn process(command: RoomAliasCommand, context: &Context<'_>) ->
overwrite"
),
| (_, Err(_)) => {
match services.rooms.alias.set_alias(
&room_alias,
&room_id,
server_user,
) {
match services
.alias
.set_alias(&room_alias, &room_id, server_user)
{
| Err(err) => Err!("Failed to remove alias: {err}"),
| Ok(()) => context.write_str("Successfully set alias").await,
}
@@ -101,14 +98,12 @@ pub(super) async fn process(command: RoomAliasCommand, context: &Context<'_>) ->
},
| RoomAliasCommand::Remove { .. } => {
match services
.rooms
.alias
.resolve_local_alias(&room_alias)
.await
{
| Err(_) => Err!("Alias isn't in use."),
| Ok(id) => match services
.rooms
.alias
.remove_alias(&room_alias, server_user)
.await
@@ -123,7 +118,6 @@ pub(super) async fn process(command: RoomAliasCommand, context: &Context<'_>) ->
},
| RoomAliasCommand::Which { .. } => {
match services
.rooms
.alias
.resolve_local_alias(&room_alias)
.await
@@ -141,7 +135,6 @@ pub(super) async fn process(command: RoomAliasCommand, context: &Context<'_>) ->
| RoomAliasCommand::List { room_id } =>
if let Some(room_id) = room_id {
let aliases: Vec<OwnedRoomAliasId> = services
.rooms
.alias
.local_aliases_for_room(&room_id)
.map(Into::into)
@@ -160,7 +153,6 @@ pub(super) async fn process(command: RoomAliasCommand, context: &Context<'_>) ->
context.write_str(&plain).await
} else {
let aliases = services
.rooms
.alias
.all_local_aliases()
.map(|(room_id, localpart)| (room_id.into(), localpart.into()))

View File

@@ -16,27 +16,14 @@ pub(super) async fn list_rooms(
let page = page.unwrap_or(1);
let mut rooms = self
.services
.rooms
.metadata
.iter_ids()
.filter_map(async |room_id| {
(!exclude_disabled
|| !self
.services
.rooms
.metadata
.is_disabled(room_id)
.await)
(!exclude_disabled || !self.services.metadata.is_disabled(room_id).await)
.then_some(room_id)
})
.filter_map(async |room_id| {
(!exclude_banned
|| !self
.services
.rooms
.metadata
.is_banned(room_id)
.await)
(!exclude_banned || !self.services.metadata.is_banned(room_id).await)
.then_some(room_id)
})
.then(|room_id| get_room_info(self.services, room_id))
@@ -74,12 +61,7 @@ pub(super) async fn list_rooms(
#[admin_command]
pub(super) async fn exists(&self, room_id: OwnedRoomId) -> Result {
let result = self
.services
.rooms
.metadata
.exists(&room_id)
.await;
let result = self.services.metadata.exists(&room_id).await;
self.write_str(&format!("{result}")).await
}

View File

@@ -29,18 +29,17 @@ pub(super) async fn process(command: RoomDirectoryCommand, context: &Context<'_>
let services = context.services;
match command {
| RoomDirectoryCommand::Publish { room_id } => {
services.rooms.directory.set_public(&room_id);
services.directory.set_public(&room_id);
context.write_str("Room published").await
},
| RoomDirectoryCommand::Unpublish { room_id } => {
services.rooms.directory.set_not_public(&room_id);
services.directory.set_not_public(&room_id);
context.write_str("Room unpublished").await
},
| RoomDirectoryCommand::List { page } => {
// TODO: i know there's a way to do this with clap, but i can't seem to find it
let page = page.unwrap_or(1);
let mut rooms: Vec<_> = services
.rooms
.directory
.public_rooms()
.then(|room_id| get_room_info(services, room_id))

View File

@@ -30,7 +30,6 @@ pub(crate) enum RoomInfoCommand {
async fn list_joined_members(&self, room_id: OwnedRoomId, local_only: bool) -> Result {
let room_name = self
.services
.rooms
.state_accessor
.get_name(&room_id)
.await
@@ -38,7 +37,6 @@ async fn list_joined_members(&self, room_id: OwnedRoomId, local_only: bool) -> R
let member_info: Vec<_> = self
.services
.rooms
.state_cache
.room_members(&room_id)
.ready_filter(|user_id| {
@@ -75,7 +73,6 @@ async fn list_joined_members(&self, room_id: OwnedRoomId, local_only: bool) -> R
async fn view_room_topic(&self, room_id: OwnedRoomId) -> Result {
let Ok(room_topic) = self
.services
.rooms
.state_accessor
.get_room_topic(&room_id)
.await

View File

@@ -70,10 +70,7 @@ async fn ban_room(&self, room: OwnedRoomOrAliasId) -> Result {
};
debug!("Room specified is a room ID, banning room ID");
self.services
.rooms
.metadata
.ban_room(room_id, true);
self.services.metadata.ban_room(room_id, true);
room_id.to_owned()
} else if room.is_room_alias_id() {
@@ -95,7 +92,6 @@ async fn ban_room(&self, room: OwnedRoomOrAliasId) -> Result {
let room_id = match self
.services
.rooms
.alias
.resolve_local_alias(room_alias)
.await
@@ -109,7 +105,6 @@ async fn ban_room(&self, room: OwnedRoomOrAliasId) -> Result {
match self
.services
.rooms
.alias
.resolve_alias(room_alias, None)
.await
@@ -131,10 +126,7 @@ async fn ban_room(&self, room: OwnedRoomOrAliasId) -> Result {
},
};
self.services
.rooms
.metadata
.ban_room(&room_id, true);
self.services.metadata.ban_room(&room_id, true);
room_id
} else {
@@ -148,7 +140,6 @@ async fn ban_room(&self, room: OwnedRoomOrAliasId) -> Result {
debug!("Making all users leave the room {room_id} and forgetting it");
let mut users = self
.services
.rooms
.state_cache
.room_members(&room_id)
.map(ToOwned::to_owned)
@@ -169,19 +160,16 @@ async fn ban_room(&self, room: OwnedRoomOrAliasId) -> Result {
}
self.services
.rooms
.state_cache
.forget(&room_id, user_id);
}
self.services
.rooms
.alias
.local_aliases_for_room(&room_id)
.map(ToOwned::to_owned)
.for_each(async |local_alias| {
self.services
.rooms
.alias
.remove_alias(&local_alias, &self.services.globals.server_user)
.await
@@ -190,13 +178,9 @@ async fn ban_room(&self, room: OwnedRoomOrAliasId) -> Result {
.await;
// unpublish from room directory
self.services
.rooms
.directory
.set_not_public(&room_id);
self.services.directory.set_not_public(&room_id);
self.services
.rooms
.metadata
.disable_room(&room_id, true);
@@ -258,7 +242,6 @@ async fn ban_list_of_rooms(&self) -> Result {
| Ok(room_alias) => {
let room_id = match self
.services
.rooms
.alias
.resolve_local_alias(room_alias)
.await
@@ -272,7 +255,6 @@ async fn ban_list_of_rooms(&self) -> Result {
match self
.services
.rooms
.alias
.resolve_alias(room_alias, None)
.await
@@ -320,10 +302,7 @@ async fn ban_list_of_rooms(&self) -> Result {
}
for room_id in room_ids {
self.services
.rooms
.metadata
.ban_room(&room_id, true);
self.services.metadata.ban_room(&room_id, true);
debug!("Banned {room_id} successfully");
room_ban_count = room_ban_count.saturating_add(1);
@@ -331,7 +310,6 @@ async fn ban_list_of_rooms(&self) -> Result {
debug!("Making all users leave the room {room_id} and forgetting it");
let mut users = self
.services
.rooms
.state_cache
.room_members(&room_id)
.map(ToOwned::to_owned)
@@ -352,20 +330,17 @@ async fn ban_list_of_rooms(&self) -> Result {
}
self.services
.rooms
.state_cache
.forget(&room_id, user_id);
}
// remove any local aliases, ignore errors
self.services
.rooms
.alias
.local_aliases_for_room(&room_id)
.map(ToOwned::to_owned)
.for_each(async |local_alias| {
self.services
.rooms
.alias
.remove_alias(&local_alias, &self.services.globals.server_user)
.await
@@ -374,13 +349,9 @@ async fn ban_list_of_rooms(&self) -> Result {
.await;
// unpublish from room directory, ignore errors
self.services
.rooms
.directory
.set_not_public(&room_id);
self.services.directory.set_not_public(&room_id);
self.services
.rooms
.metadata
.disable_room(&room_id, true);
}
@@ -407,10 +378,7 @@ async fn unban_room(&self, room: OwnedRoomOrAliasId) -> Result {
};
debug!("Room specified is a room ID, unbanning room ID");
self.services
.rooms
.metadata
.ban_room(room_id, false);
self.services.metadata.ban_room(room_id, false);
room_id.to_owned()
} else if room.is_room_alias_id() {
@@ -432,7 +400,6 @@ async fn unban_room(&self, room: OwnedRoomOrAliasId) -> Result {
let room_id = match self
.services
.rooms
.alias
.resolve_local_alias(room_alias)
.await
@@ -446,7 +413,6 @@ async fn unban_room(&self, room: OwnedRoomOrAliasId) -> Result {
match self
.services
.rooms
.alias
.resolve_alias(room_alias, None)
.await
@@ -466,10 +432,7 @@ async fn unban_room(&self, room: OwnedRoomOrAliasId) -> Result {
},
};
self.services
.rooms
.metadata
.ban_room(&room_id, false);
self.services.metadata.ban_room(&room_id, false);
room_id
} else {
@@ -481,7 +444,6 @@ async fn unban_room(&self, room: OwnedRoomOrAliasId) -> Result {
};
self.services
.rooms
.metadata
.disable_room(&room_id, false);
self.write_str("Room unbanned and federation re-enabled.")
@@ -492,7 +454,6 @@ async fn unban_room(&self, room: OwnedRoomOrAliasId) -> Result {
async fn list_banned_rooms(&self, no_details: bool) -> Result {
let room_ids: Vec<OwnedRoomId> = self
.services
.rooms
.metadata
.list_banned_rooms()
.map(Into::into)

View File

@@ -122,7 +122,7 @@ pub(super) async fn create_user(&self, username: String, password: Option<String
.is_empty()
{
for room in &self.services.server.config.auto_join_rooms {
let Ok(room_id) = self.services.rooms.alias.resolve(room).await else {
let Ok(room_id) = self.services.alias.resolve(room).await else {
error!(
%user_id,
"Failed to resolve room alias to room ID when attempting to auto join {room}, skipping"
@@ -132,7 +132,6 @@ pub(super) async fn create_user(&self, username: String, password: Option<String
if !self
.services
.rooms
.state_cache
.server_in_room(self.services.globals.server_name(), &room_id)
.await
@@ -186,7 +185,6 @@ pub(super) async fn create_user(&self, username: String, password: Option<String
if let Ok(admin_room) = self.services.admin.get_admin_room().await {
if self
.services
.rooms
.state_cache
.room_joined_count(&admin_room)
.await
@@ -230,7 +228,6 @@ pub(super) async fn deactivate(&self, no_leave_rooms: bool, user_id: String) ->
let all_joined_rooms: Vec<OwnedRoomId> = self
.services
.rooms
.state_cache
.rooms_joined(&user_id)
.map(Into::into)
@@ -355,7 +352,6 @@ pub(super) async fn deactivate_all(&self, no_leave_rooms: bool, force: bool) ->
info!("Forcing user {user_id} to leave all rooms apart of deactivate-all");
let all_joined_rooms: Vec<OwnedRoomId> = self
.services
.rooms
.state_cache
.rooms_joined(&user_id)
.map(Into::into)
@@ -395,7 +391,6 @@ pub(super) async fn list_joined_rooms(&self, user_id: String) -> Result {
let mut rooms: Vec<(OwnedRoomId, u64, String)> = self
.services
.rooms
.state_cache
.rooms_joined(&user_id)
.then(|room_id| get_room_info(self.services, room_id))
@@ -445,14 +440,12 @@ pub(super) async fn force_join_list_of_local_users(
let (room_id, servers) = self
.services
.rooms
.alias
.resolve_with_servers(&room_id, None)
.await?;
if !self
.services
.rooms
.state_cache
.server_in_room(self.services.globals.server_name(), &room_id)
.await
@@ -462,7 +455,6 @@ pub(super) async fn force_join_list_of_local_users(
let server_admins: Vec<_> = self
.services
.rooms
.state_cache
.active_local_users_in_room(&admin_room)
.map(ToOwned::to_owned)
@@ -471,7 +463,6 @@ pub(super) async fn force_join_list_of_local_users(
if !self
.services
.rooms
.state_cache
.room_members(&room_id)
.ready_any(|user_id| server_admins.contains(&user_id.to_owned()))
@@ -567,14 +558,12 @@ pub(super) async fn force_join_all_local_users(
let (room_id, servers) = self
.services
.rooms
.alias
.resolve_with_servers(&room_id, None)
.await?;
if !self
.services
.rooms
.state_cache
.server_in_room(self.services.globals.server_name(), &room_id)
.await
@@ -584,7 +573,6 @@ pub(super) async fn force_join_all_local_users(
let server_admins: Vec<_> = self
.services
.rooms
.state_cache
.active_local_users_in_room(&admin_room)
.map(ToOwned::to_owned)
@@ -593,7 +581,6 @@ pub(super) async fn force_join_all_local_users(
if !self
.services
.rooms
.state_cache
.room_members(&room_id)
.ready_any(|user_id| server_admins.contains(&user_id.to_owned()))
@@ -650,7 +637,6 @@ pub(super) async fn force_join_room(
let user_id = parse_local_user_id(self.services, &user_id)?;
let (room_id, servers) = self
.services
.rooms
.alias
.resolve_with_servers(&room_id, None)
.await?;
@@ -673,12 +659,7 @@ pub(super) async fn force_leave_room(
room_id: OwnedRoomOrAliasId,
) -> Result {
let user_id = parse_local_user_id(self.services, &user_id)?;
let room_id = self
.services
.rooms
.alias
.resolve(&room_id)
.await?;
let room_id = self.services.alias.resolve(&room_id).await?;
assert!(
self.services.globals.user_is_local(&user_id),
@@ -687,7 +668,6 @@ pub(super) async fn force_leave_room(
if !self
.services
.rooms
.state_cache
.is_joined(&user_id, &room_id)
.await
@@ -706,29 +686,17 @@ pub(super) async fn force_leave_room(
#[admin_command]
pub(super) async fn force_demote(&self, user_id: String, room_id: OwnedRoomOrAliasId) -> Result {
let user_id = parse_local_user_id(self.services, &user_id)?;
let room_id = self
.services
.rooms
.alias
.resolve(&room_id)
.await?;
let room_id = self.services.alias.resolve(&room_id).await?;
assert!(
self.services.globals.user_is_local(&user_id),
"Parsed user_id must be a local user"
);
let state_lock = self
.services
.rooms
.state
.mutex
.lock(&room_id)
.await;
let state_lock = self.services.state.mutex.lock(&room_id).await;
let room_power_levels: Option<RoomPowerLevels> = self
.services
.rooms
.state_accessor
.get_power_levels(&room_id)
.await
@@ -743,7 +711,6 @@ pub(super) async fn force_demote(&self, user_id: String, room_id: OwnedRoomOrAli
let user_can_demote_self = user_can_change_self
|| self
.services
.rooms
.state_accessor
.room_state_get(&room_id, &StateEventType::RoomCreate, "")
.await
@@ -762,7 +729,6 @@ pub(super) async fn force_demote(&self, user_id: String, room_id: OwnedRoomOrAli
let event_id = self
.services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &power_levels_content),
@@ -897,7 +863,6 @@ pub(super) async fn get_room_tags(&self, user_id: String, room_id: OwnedRoomId)
pub(super) async fn redact_event(&self, event_id: OwnedEventId) -> Result {
let Ok(event) = self
.services
.rooms
.timeline
.get_non_outlier_pdu(&event_id)
.await
@@ -925,14 +890,12 @@ pub(super) async fn redact_event(&self, event_id: OwnedEventId) -> Result {
let redaction_event_id = {
let state_lock = self
.services
.rooms
.state
.mutex
.lock(event.room_id())
.await;
self.services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder {

View File

@@ -17,13 +17,11 @@ pub(crate) async fn get_room_info(
(
room_id.into(),
services
.rooms
.state_cache
.room_joined_count(room_id)
.await
.unwrap_or(0),
services
.rooms
.state_accessor
.get_name(room_id)
.await

View File

@@ -215,7 +215,6 @@ pub(crate) async fn deactivate_route(
// Remove profile pictures and display name
let all_joined_rooms: Vec<OwnedRoomId> = services
.rooms
.state_cache
.rooms_joined(sender_user)
.map(Into::into)
@@ -317,10 +316,9 @@ pub async fn full_user_deactivate(
.await;
for room_id in all_joined_rooms {
let state_lock = services.rooms.state.mutex.lock(room_id).await;
let state_lock = services.state.mutex.lock(room_id).await;
let room_power_levels = services
.rooms
.state_accessor
.get_power_levels(room_id)
.await
@@ -334,7 +332,6 @@ pub async fn full_user_deactivate(
let user_can_demote_self = user_can_change_self
|| services
.rooms
.state_accessor
.room_state_get(room_id, &StateEventType::RoomCreate, "")
.await
@@ -350,7 +347,6 @@ pub async fn full_user_deactivate(
// ignore errors so deactivation doesn't fail
match services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &power_levels_content),

View File

@@ -19,7 +19,6 @@ pub(crate) async fn create_alias_route(
) -> Result<create_alias::v3::Response> {
let sender_user = body.sender_user();
services
.rooms
.alias
.appservice_checks(&body.room_alias, &body.appservice_info)
.await?;
@@ -35,7 +34,6 @@ pub(crate) async fn create_alias_route(
}
if services
.rooms
.alias
.resolve_local_alias(&body.room_alias)
.await
@@ -45,7 +43,6 @@ pub(crate) async fn create_alias_route(
}
services
.rooms
.alias
.set_alias(&body.room_alias, &body.room_id, sender_user)?;
@@ -63,13 +60,11 @@ pub(crate) async fn delete_alias_route(
) -> Result<delete_alias::v3::Response> {
let sender_user = body.sender_user();
services
.rooms
.alias
.appservice_checks(&body.room_alias, &body.appservice_info)
.await?;
services
.rooms
.alias
.remove_alias(&body.room_alias, sender_user)
.await?;
@@ -89,7 +84,6 @@ pub(crate) async fn get_alias_route(
let room_alias = body.body.room_alias;
let Ok((room_id, servers)) = services
.rooms
.alias
.resolve_alias(&room_alias, None)
.await
@@ -111,7 +105,6 @@ async fn room_available_servers(
) -> Vec<OwnedServerName> {
// find active servers in room state cache to suggest
let mut servers: Vec<OwnedServerName> = services
.rooms
.state_cache
.room_servers(room_id)
.map(ToOwned::to_owned)

View File

@@ -38,7 +38,7 @@ pub(crate) async fn get_context_route(
let event_id = &body.event_id;
let filter = &body.filter;
if !services.rooms.metadata.exists(room_id).await {
if !services.metadata.exists(room_id).await {
return Err!(Request(Forbidden("Room does not exist to this server")));
}
@@ -50,19 +50,16 @@ pub(crate) async fn get_context_route(
.min(LIMIT_MAX);
let base_id = services
.rooms
.timeline
.get_pdu_id(event_id)
.map_err(|_| err!(Request(NotFound("Event not found."))));
let base_pdu = services
.rooms
.timeline
.get_pdu(event_id)
.map_err(|_| err!(Request(NotFound("Base event not found."))));
let visible = services
.rooms
.state_accessor
.user_can_see_event(sender_user, room_id, event_id)
.map(Ok);
@@ -83,7 +80,6 @@ pub(crate) async fn get_context_route(
let base_event = ignored_filter(&services, (base_count, base_pdu), sender_user);
let events_before = services
.rooms
.timeline
.pdus_rev(Some(sender_user), room_id, Some(base_count))
.ignore_err()
@@ -94,7 +90,6 @@ pub(crate) async fn get_context_route(
.collect();
let events_after = services
.rooms
.timeline
.pdus(Some(sender_user), room_id, Some(base_count))
.ignore_err()
@@ -135,18 +130,11 @@ pub(crate) async fn get_context_route(
.map_or_else(|| body.event_id.as_ref(), |pdu| pdu.event_id.as_ref());
let state_ids = services
.rooms
.state_accessor
.pdu_shortstatehash(state_at)
.or_else(|_| {
services
.rooms
.state
.get_room_shortstatehash(room_id)
})
.or_else(|_| services.state.get_room_shortstatehash(room_id))
.map_ok(|shortstatehash| {
services
.rooms
.state_accessor
.state_full_ids(shortstatehash)
.map(Ok)
@@ -163,7 +151,6 @@ pub(crate) async fn get_context_route(
let shorteventids = state_ids.iter().map(ref_at!(1)).stream();
let lazy_loading_witnessed = lazy_loading_witnessed.unwrap_or_default();
let state: Vec<_> = services
.rooms
.short
.multi_get_statekey_from_short(shortstatekeys)
.zip(shorteventids)
@@ -182,11 +169,7 @@ pub(crate) async fn get_context_route(
Some(event_id)
})
.broad_filter_map(|event_id: &OwnedEventId| {
services
.rooms
.timeline
.get_pdu(event_id.as_ref())
.ok()
services.timeline.get_pdu(event_id.as_ref()).ok()
})
.map(Event::into_format)
.collect()

View File

@@ -111,12 +111,7 @@ pub(crate) async fn set_room_visibility_route(
) -> Result<set_room_visibility::v3::Response> {
let sender_user = body.sender_user();
if !services
.rooms
.metadata
.exists(&body.room_id)
.await
{
if !services.metadata.exists(&body.room_id).await {
// Return 404 if the room doesn't exist
return Err!(Request(NotFound("Room not found")));
}
@@ -166,7 +161,7 @@ pub(crate) async fn set_room_visibility_route(
)));
}
services.rooms.directory.set_public(&body.room_id);
services.directory.set_public(&body.room_id);
if services.server.config.admin_room_notices {
services
@@ -179,10 +174,7 @@ pub(crate) async fn set_room_visibility_route(
}
info!("{sender_user} made {0} public to the room directory", body.room_id);
},
| room::Visibility::Private => services
.rooms
.directory
.set_not_public(&body.room_id),
| room::Visibility::Private => services.directory.set_not_public(&body.room_id),
| _ => {
return Err!(Request(InvalidParam("Room visibility type is not supported.",)));
},
@@ -198,19 +190,13 @@ pub(crate) async fn get_room_visibility_route(
State(services): State<crate::State>,
body: Ruma<get_room_visibility::v3::Request>,
) -> Result<get_room_visibility::v3::Response> {
if !services
.rooms
.metadata
.exists(&body.room_id)
.await
{
if !services.metadata.exists(&body.room_id).await {
// Return 404 if the room doesn't exist
return Err!(Request(NotFound("Room not found")));
}
Ok(get_room_visibility::v3::Response {
visibility: if services
.rooms
.directory
.is_public_room(&body.room_id)
.await
@@ -295,13 +281,12 @@ pub(crate) async fn get_public_rooms_filtered_helper(
let meta_public_rooms = search_room_id
.filter(|_| services.config.allow_unlisted_room_search_by_id)
.map(|prefix| services.rooms.metadata.public_ids_prefix(prefix))
.map(|prefix| services.metadata.public_ids_prefix(prefix))
.into_iter()
.stream()
.flatten();
let mut all_rooms: Vec<PublicRoomsChunk> = services
.rooms
.directory
.public_rooms()
.map(ToOwned::to_owned)
@@ -388,7 +373,6 @@ async fn user_can_publish_room(
room_id: &RoomId,
) -> Result<bool> {
match services
.rooms
.state_accessor
.get_power_levels(room_id)
.await
@@ -397,7 +381,6 @@ async fn user_can_publish_room(
Ok(power_levels.user_can_send_state(user_id, StateEventType::RoomHistoryVisibility)),
| _ => {
match services
.rooms
.state_accessor
.room_state_get(room_id, &StateEventType::RoomCreate, "")
.await
@@ -410,39 +393,30 @@ async fn user_can_publish_room(
}
async fn public_rooms_chunk(services: &Services, room_id: OwnedRoomId) -> PublicRoomsChunk {
let name = services
.rooms
.state_accessor
.get_name(&room_id)
.ok();
let name = services.state_accessor.get_name(&room_id).ok();
let room_type = services
.rooms
.state_accessor
.get_room_type(&room_id)
.ok();
let canonical_alias = services
.rooms
.state_accessor
.get_canonical_alias(&room_id)
.ok();
let avatar_url = services.rooms.state_accessor.get_avatar(&room_id);
let avatar_url = services.state_accessor.get_avatar(&room_id);
let topic = services
.rooms
.state_accessor
.get_room_topic(&room_id)
.ok();
let world_readable = services
.rooms
.state_accessor
.is_world_readable(&room_id);
let join_rule = services
.rooms
.state_accessor
.room_state_get_content(&room_id, &StateEventType::RoomJoinRules, "")
.map_ok(|c: RoomJoinRulesEventContent| match c.join_rule {
@@ -452,15 +426,9 @@ async fn public_rooms_chunk(services: &Services, room_id: OwnedRoomId) -> Public
| _ => "invite".into(),
});
let guest_can_join = services
.rooms
.state_accessor
.guest_can_join(&room_id);
let guest_can_join = services.state_accessor.guest_can_join(&room_id);
let num_joined_members = services
.rooms
.state_cache
.room_joined_count(&room_id);
let num_joined_members = services.state_cache.room_joined_count(&room_id);
let (
(avatar_url, canonical_alias, guest_can_join, join_rule, name),

View File

@@ -394,7 +394,6 @@ pub(crate) async fn get_key_changes_route(
);
let mut rooms_joined = services
.rooms
.state_cache
.rooms_joined(sender_user)
.boxed();

View File

@@ -20,22 +20,15 @@ pub(crate) async fn ban_user_route(
return Err!(Request(Forbidden("You cannot ban yourself.")));
}
let state_lock = services
.rooms
.state
.mutex
.lock(&body.room_id)
.await;
let state_lock = services.state.mutex.lock(&body.room_id).await;
let current_member_content = services
.rooms
.state_accessor
.get_member(&body.room_id, &body.user_id)
.await
.unwrap_or_else(|_| RoomMemberEventContent::new(MembershipState::Ban));
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(body.user_id.to_string(), &RoomMemberEventContent {

View File

@@ -21,18 +21,9 @@ pub(crate) async fn forget_room_route(
let user_id = body.sender_user();
let room_id = &body.room_id;
let joined = services
.rooms
.state_cache
.is_joined(user_id, room_id);
let knocked = services
.rooms
.state_cache
.is_knocked(user_id, room_id);
let invited = services
.rooms
.state_cache
.is_invited(user_id, room_id);
let joined = services.state_cache.is_joined(user_id, room_id);
let knocked = services.state_cache.is_knocked(user_id, room_id);
let invited = services.state_cache.is_invited(user_id, room_id);
pin_mut!(joined, knocked, invited);
if joined.or(knocked).or(invited).await {
@@ -40,7 +31,6 @@ pub(crate) async fn forget_room_route(
}
let membership = services
.rooms
.state_accessor
.get_member(room_id, user_id)
.await;
@@ -55,15 +45,11 @@ pub(crate) async fn forget_room_route(
if non_membership
|| services
.rooms
.state_cache
.is_left(user_id, room_id)
.await
{
services
.rooms
.state_cache
.forget(room_id, user_id);
services.state_cache.forget(room_id, user_id);
}
Ok(forget_room::v3::Response::new())

View File

@@ -61,7 +61,6 @@ pub(crate) async fn invite_user_route(
}
if let Ok(target_user_membership) = services
.rooms
.state_accessor
.get_member(&body.room_id, user_id)
.await
@@ -111,7 +110,7 @@ pub(crate) async fn invite_helper(
if !services.globals.user_is_local(user_id) {
let (pdu, pdu_json, invite_room_state) = {
let state_lock = services.rooms.state.mutex.lock(room_id).await;
let state_lock = services.state.mutex.lock(room_id).await;
let content = RoomMemberEventContent {
avatar_url: services.users.avatar_url(user_id).await.ok(),
@@ -121,7 +120,6 @@ pub(crate) async fn invite_helper(
};
let (pdu, pdu_json) = services
.rooms
.timeline
.create_hash_and_sign_event(
PduBuilder::state(user_id.to_string(), &content),
@@ -131,18 +129,14 @@ pub(crate) async fn invite_helper(
)
.await?;
let invite_room_state = services.rooms.state.summary_stripped(&pdu).await;
let invite_room_state = services.state.summary_stripped(&pdu).await;
drop(state_lock);
(pdu, pdu_json, invite_room_state)
};
let room_version_id = services
.rooms
.state
.get_room_version(room_id)
.await?;
let room_version_id = services.state.get_room_version(room_id).await?;
let response = services
.sending
@@ -159,7 +153,6 @@ pub(crate) async fn invite_helper(
.map(Into::into)
.collect(),
via: services
.rooms
.state_cache
.servers_route_via(room_id)
.await
@@ -192,7 +185,6 @@ pub(crate) async fn invite_helper(
})?;
let pdu_id = services
.rooms
.event_handler
.handle_incoming_pdu(&origin, room_id, &event_id, value, true)
.boxed()
@@ -208,7 +200,6 @@ pub(crate) async fn invite_helper(
}
if !services
.rooms
.state_cache
.is_joined(sender_user, room_id)
.await
@@ -218,7 +209,7 @@ pub(crate) async fn invite_helper(
)));
}
let state_lock = services.rooms.state.mutex.lock(room_id).await;
let state_lock = services.state.mutex.lock(room_id).await;
let content = RoomMemberEventContent {
displayname: services.users.displayname(user_id).await.ok(),
@@ -230,7 +221,6 @@ pub(crate) async fn invite_helper(
};
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(user_id.to_string(), &content),

View File

@@ -76,7 +76,6 @@ pub(crate) async fn join_room_by_id_route(
// There is no body.server_name for /roomId/join
let mut servers: Vec<_> = services
.rooms
.state_cache
.servers_invite_via(&body.room_id)
.map(ToOwned::to_owned)
@@ -85,7 +84,6 @@ pub(crate) async fn join_room_by_id_route(
servers.extend(
services
.rooms
.state_cache
.invite_state(sender_user, &body.room_id)
.await
@@ -151,7 +149,6 @@ pub(crate) async fn join_room_by_id_or_alias_route(
let mut servers = body.via.clone();
servers.extend(
services
.rooms
.state_cache
.servers_invite_via(&room_id)
.map(ToOwned::to_owned)
@@ -161,7 +158,6 @@ pub(crate) async fn join_room_by_id_or_alias_route(
servers.extend(
services
.rooms
.state_cache
.invite_state(sender_user, &room_id)
.await
@@ -184,7 +180,6 @@ pub(crate) async fn join_room_by_id_or_alias_route(
},
| Err(room_alias) => {
let (room_id, mut servers) = services
.rooms
.alias
.resolve_alias(&room_alias, Some(body.via.clone()))
.await?;
@@ -199,13 +194,11 @@ pub(crate) async fn join_room_by_id_or_alias_route(
.await?;
let addl_via_servers = services
.rooms
.state_cache
.servers_invite_via(&room_id)
.map(ToOwned::to_owned);
let addl_state_servers = services
.rooms
.state_cache
.invite_state(sender_user, &room_id)
.await
@@ -254,7 +247,7 @@ pub async fn join_room_by_id_helper(
third_party_signed: Option<&ThirdPartySigned>,
appservice_info: &Option<RegistrationInfo>,
) -> Result<join_room_by_id::v3::Response> {
let state_lock = services.rooms.state.mutex.lock(room_id).await;
let state_lock = services.state.mutex.lock(room_id).await;
let user_is_guest = services
.users
@@ -265,7 +258,6 @@ pub async fn join_room_by_id_helper(
if user_is_guest
&& !services
.rooms
.state_accessor
.guest_can_join(room_id)
.await
@@ -274,7 +266,6 @@ pub async fn join_room_by_id_helper(
}
if services
.rooms
.state_cache
.is_joined(sender_user, room_id)
.await
@@ -284,7 +275,6 @@ pub async fn join_room_by_id_helper(
}
if let Ok(membership) = services
.rooms
.state_accessor
.get_member(room_id, sender_user)
.await
@@ -296,7 +286,6 @@ pub async fn join_room_by_id_helper(
}
let server_in_room = services
.rooms
.state_cache
.server_in_room(services.globals.server_name(), room_id)
.await;
@@ -518,7 +507,6 @@ async fn join_room_by_id_helper_remote(
}
services
.rooms
.short
.get_or_create_shortroomid(room_id)
.await;
@@ -565,13 +553,11 @@ async fn join_room_by_id_helper_remote(
};
services
.rooms
.timeline
.add_pdu_outlier(&event_id, &value);
if let Some(state_key) = &pdu.state_key {
let shortstatekey = services
.rooms
.short
.get_or_create_shortstatekey(&pdu.kind.to_string().into(), state_key)
.await;
@@ -600,7 +586,6 @@ async fn join_room_by_id_helper_remote(
.ready_filter_map(Result::ok)
.ready_for_each(|(event_id, value)| {
services
.rooms
.timeline
.add_pdu_outlier(&event_id, &value);
})
@@ -612,10 +597,9 @@ async fn join_room_by_id_helper_remote(
state_res::auth_check(
&room_version::rules(&room_version_id)?,
&parsed_join_pdu,
&async |event_id| services.rooms.timeline.get_pdu(&event_id).await,
&async |event_id| services.timeline.get_pdu(&event_id).await,
&async |event_type, state_key| {
let shortstatekey = services
.rooms
.short
.get_shortstatekey(&event_type, state_key.as_str())
.await?;
@@ -624,7 +608,7 @@ async fn join_room_by_id_helper_remote(
err!(Request(NotFound("Missing fetch_state {shortstatekey:?}")))
})?;
services.rooms.timeline.get_pdu(event_id).await
services.timeline.get_pdu(event_id).await
},
)
.boxed()
@@ -632,7 +616,6 @@ async fn join_room_by_id_helper_remote(
info!("Compressing state from send_join");
let compressed: CompressedState = services
.rooms
.state_compressor
.compress_state_events(state.iter().map(|(ssk, eid)| (ssk, eid.borrow())))
.collect()
@@ -644,21 +627,18 @@ async fn join_room_by_id_helper_remote(
added,
removed,
} = services
.rooms
.state_compressor
.save_state(room_id, Arc::new(compressed))
.await?;
debug!("Forcing state for new room");
services
.rooms
.state
.force_state(room_id, statehash_before_join, added, removed, &state_lock)
.await?;
info!("Updating joined counts for new room");
services
.rooms
.state_cache
.update_joined_count(room_id)
.await;
@@ -667,14 +647,12 @@ async fn join_room_by_id_helper_remote(
// time with the pdu without it's state. This is okay because append_pdu can't
// fail.
let statehash_after_join = services
.rooms
.state
.append_to_state(&parsed_join_pdu)
.await?;
info!("Appending new room join event");
services
.rooms
.timeline
.append_pdu(
&parsed_join_pdu,
@@ -688,7 +666,6 @@ async fn join_room_by_id_helper_remote(
// We set the room state after inserting the pdu, so that we never have a moment
// in time where events in the current room state do not exist
services
.rooms
.state
.set_room_state(room_id, statehash_after_join, &state_lock);
@@ -708,7 +685,6 @@ async fn join_room_by_id_helper_local(
debug_info!("We can join locally");
let join_rules_event_content = services
.rooms
.state_accessor
.room_state_get_content::<RoomJoinRulesEventContent>(
room_id,
@@ -737,18 +713,16 @@ async fn join_room_by_id_helper_local(
.stream()
.any(|restriction_room_id| {
services
.rooms
.state_cache
.is_joined(sender_user, restriction_room_id)
})
.await
{
let users = services
.rooms
.state_cache
.local_users_in_room(room_id)
.filter(|user| {
services.rooms.state_accessor.user_can_invite(
services.state_accessor.user_can_invite(
room_id,
user,
sender_user,
@@ -775,7 +749,6 @@ async fn join_room_by_id_helper_local(
// Try normal join first
let Err(error) = services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(sender_user.to_string(), &content),
@@ -912,7 +885,6 @@ async fn join_room_by_id_helper_local(
drop(state_lock);
services
.rooms
.event_handler
.handle_incoming_pdu(&remote_server, room_id, &signed_event_id, signed_value, true)
.boxed()

View File

@@ -14,15 +14,9 @@ pub(crate) async fn kick_user_route(
State(services): State<crate::State>,
body: Ruma<kick_user::v3::Request>,
) -> Result<kick_user::v3::Response> {
let state_lock = services
.rooms
.state
.mutex
.lock(&body.room_id)
.await;
let state_lock = services.state.mutex.lock(&body.room_id).await;
let Ok(event) = services
.rooms
.state_accessor
.get_member(&body.room_id, &body.user_id)
.await
@@ -43,7 +37,6 @@ pub(crate) async fn kick_user_route(
}
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(body.user_id.to_string(), &RoomMemberEventContent {

View File

@@ -67,7 +67,6 @@ pub(crate) async fn knock_room_route(
let mut servers = body.via.clone();
servers.extend(
services
.rooms
.state_cache
.servers_invite_via(&room_id)
.map(ToOwned::to_owned)
@@ -77,7 +76,6 @@ pub(crate) async fn knock_room_route(
servers.extend(
services
.rooms
.state_cache
.invite_state(sender_user, &room_id)
.await
@@ -100,7 +98,6 @@ pub(crate) async fn knock_room_route(
},
| Err(room_alias) => {
let (room_id, mut servers) = services
.rooms
.alias
.resolve_alias(&room_alias, Some(body.via.clone()))
.await?;
@@ -115,13 +112,11 @@ pub(crate) async fn knock_room_route(
.await?;
let addl_via_servers = services
.rooms
.state_cache
.servers_invite_via(&room_id)
.map(ToOwned::to_owned);
let addl_state_servers = services
.rooms
.state_cache
.invite_state(sender_user, &room_id)
.await
@@ -158,10 +153,9 @@ async fn knock_room_by_id_helper(
reason: Option<String>,
servers: &[OwnedServerName],
) -> Result<knock_room::v3::Response> {
let state_lock = services.rooms.state.mutex.lock(room_id).await;
let state_lock = services.state.mutex.lock(room_id).await;
if services
.rooms
.state_cache
.is_invited(sender_user, room_id)
.await
@@ -173,7 +167,6 @@ async fn knock_room_by_id_helper(
}
if services
.rooms
.state_cache
.is_joined(sender_user, room_id)
.await
@@ -183,7 +176,6 @@ async fn knock_room_by_id_helper(
}
if services
.rooms
.state_cache
.is_knocked(sender_user, room_id)
.await
@@ -193,7 +185,6 @@ async fn knock_room_by_id_helper(
}
if let Ok(membership) = services
.rooms
.state_accessor
.get_member(room_id, sender_user)
.await
@@ -205,7 +196,6 @@ async fn knock_room_by_id_helper(
}
let server_in_room = services
.rooms
.state_cache
.server_in_room(services.globals.server_name(), room_id)
.await;
@@ -237,11 +227,7 @@ async fn knock_room_helper_local(
) -> Result {
debug_info!("We can knock locally");
let room_version_id = services
.rooms
.state
.get_room_version(room_id)
.await?;
let room_version_id = services.state.get_room_version(room_id).await?;
if matches!(
room_version_id,
@@ -265,7 +251,6 @@ async fn knock_room_helper_local(
// Try normal knock first
let Err(error) = services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(sender_user.to_string(), &content),
@@ -366,7 +351,6 @@ async fn knock_room_helper_local(
info!("send_knock finished");
services
.rooms
.short
.get_or_create_shortroomid(room_id)
.await;
@@ -378,7 +362,6 @@ async fn knock_room_helper_local(
info!("Updating membership locally to knock state with provided stripped state events");
services
.rooms
.state_cache
.update_membership(
room_id,
@@ -401,7 +384,6 @@ async fn knock_room_helper_local(
info!("Appending room knock event locally");
services
.rooms
.timeline
.append_pdu(
&parsed_knock_pdu,
@@ -503,7 +485,6 @@ async fn knock_room_helper_remote(
info!("send_knock finished");
services
.rooms
.short
.get_or_create_shortroomid(room_id)
.await;
@@ -550,13 +531,11 @@ async fn knock_room_helper_remote(
let event_id = gen_event_id(&event, &room_version_id)?;
let shortstatekey = services
.rooms
.short
.get_or_create_shortstatekey(&event_type, &state_key)
.await;
services
.rooms
.timeline
.add_pdu_outlier(&event_id, &event);
@@ -565,7 +544,6 @@ async fn knock_room_helper_remote(
info!("Compressing state from send_knock");
let compressed: CompressedState = services
.rooms
.state_compressor
.compress_state_events(
state_map
@@ -581,27 +559,23 @@ async fn knock_room_helper_remote(
added,
removed,
} = services
.rooms
.state_compressor
.save_state(room_id, Arc::new(compressed))
.await?;
debug!("Forcing state for new room");
services
.rooms
.state
.force_state(room_id, statehash_before_knock, added, removed, &state_lock)
.await?;
let statehash_after_knock = services
.rooms
.state
.append_to_state(&parsed_knock_pdu)
.await?;
info!("Updating membership locally to knock state with provided stripped state events");
services
.rooms
.state_cache
.update_membership(
room_id,
@@ -624,7 +598,6 @@ async fn knock_room_helper_remote(
info!("Appending room knock event locally");
services
.rooms
.timeline
.append_pdu(
&parsed_knock_pdu,
@@ -638,7 +611,6 @@ async fn knock_room_helper_remote(
// We set the room state after inserting the pdu, so that we never have a moment
// in time where events in the current room state do not exist
services
.rooms
.state
.set_room_state(room_id, statehash_after_knock, &state_lock);

View File

@@ -42,19 +42,16 @@ pub(crate) async fn leave_room_route(
// and ignores errors
pub async fn leave_all_rooms(services: &Services, user_id: &UserId) {
let rooms_joined = services
.rooms
.state_cache
.rooms_joined(user_id)
.map(ToOwned::to_owned);
let rooms_invited = services
.rooms
.state_cache
.rooms_invited(user_id)
.map(|(r, _)| r);
let rooms_knocked = services
.rooms
.state_cache
.rooms_knocked(user_id)
.map(|(r, _)| r);
@@ -74,10 +71,7 @@ pub async fn leave_all_rooms(services: &Services, user_id: &UserId) {
warn!(%user_id, "Failed to leave {room_id} remotely: {e}");
}
services
.rooms
.state_cache
.forget(&room_id, user_id);
services.state_cache.forget(&room_id, user_id);
}
}
@@ -98,15 +92,14 @@ pub async fn leave_room(
blurhash: None,
};
let is_banned = services.rooms.metadata.is_banned(room_id);
let is_disabled = services.rooms.metadata.is_disabled(room_id);
let is_banned = services.metadata.is_banned(room_id);
let is_disabled = services.metadata.is_disabled(room_id);
pin_mut!(is_banned, is_disabled);
if is_banned.or(is_disabled).await {
// the room is banned/disabled, the room must be rejected locally since we
// cant/dont want to federate with this server
services
.rooms
.state_cache
.update_membership(
room_id,
@@ -123,13 +116,11 @@ pub async fn leave_room(
}
let dont_have_room = services
.rooms
.state_cache
.server_in_room(services.globals.server_name(), room_id)
.eq(&false);
let not_knocked = services
.rooms
.state_cache
.is_knocked(user_id, room_id)
.eq(&false);
@@ -145,27 +136,15 @@ pub async fn leave_room(
}
let last_state = services
.rooms
.state_cache
.invite_state(user_id, room_id)
.or_else(|_| {
services
.rooms
.state_cache
.knock_state(user_id, room_id)
})
.or_else(|_| {
services
.rooms
.state_cache
.left_state(user_id, room_id)
})
.or_else(|_| services.state_cache.knock_state(user_id, room_id))
.or_else(|_| services.state_cache.left_state(user_id, room_id))
.await
.ok();
// We always drop the invite, we can't rely on other servers
services
.rooms
.state_cache
.update_membership(
room_id,
@@ -178,10 +157,9 @@ pub async fn leave_room(
)
.await?;
} else {
let state_lock = services.rooms.state.mutex.lock(room_id).await;
let state_lock = services.state.mutex.lock(room_id).await;
let Ok(event) = services
.rooms
.state_accessor
.room_state_get_content::<RoomMemberEventContent>(
room_id,
@@ -195,7 +173,6 @@ pub async fn leave_room(
);
return services
.rooms
.state_cache
.update_membership(
room_id,
@@ -210,7 +187,6 @@ pub async fn leave_room(
};
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(user_id.to_string(), &RoomMemberEventContent {
@@ -235,7 +211,6 @@ async fn remote_leave_room(services: &Services, user_id: &UserId, room_id: &Room
Err!(BadServerResponse("No remote server available to assist in leaving {room_id}."));
let mut servers: HashSet<OwnedServerName> = services
.rooms
.state_cache
.servers_invite_via(room_id)
.map(ToOwned::to_owned)
@@ -243,7 +218,6 @@ async fn remote_leave_room(services: &Services, user_id: &UserId, room_id: &Room
.await;
match services
.rooms
.state_cache
.invite_state(user_id, room_id)
.await
@@ -259,7 +233,6 @@ async fn remote_leave_room(services: &Services, user_id: &UserId, room_id: &Room
},
| _ => {
match services
.rooms
.state_cache
.knock_state(user_id, room_id)
.await

View File

@@ -25,7 +25,6 @@ pub(crate) async fn get_member_events_route(
body: Ruma<get_member_events::v3::Request>,
) -> Result<get_member_events::v3::Response> {
if !services
.rooms
.state_accessor
.user_can_see_state_events(body.sender_user(), &body.room_id)
.await
@@ -37,7 +36,6 @@ pub(crate) async fn get_member_events_route(
let not_membership = body.not_membership.as_ref();
Ok(get_member_events::v3::Response {
chunk: services
.rooms
.state_accessor
.room_state_full(&body.room_id)
.ready_filter_map(Result::ok)
@@ -62,7 +60,6 @@ pub(crate) async fn joined_members_route(
body: Ruma<joined_members::v3::Request>,
) -> Result<joined_members::v3::Response> {
if !services
.rooms
.state_accessor
.user_can_see_state_events(body.sender_user(), &body.room_id)
.await
@@ -72,7 +69,6 @@ pub(crate) async fn joined_members_route(
Ok(joined_members::v3::Response {
joined: services
.rooms
.state_accessor
.room_state_full(&body.room_id)
.ready_filter_map(Result::ok)

View File

@@ -42,7 +42,6 @@ pub(crate) async fn joined_rooms_route(
) -> Result<joined_rooms::v3::Response> {
Ok(joined_rooms::v3::Response {
joined_rooms: services
.rooms
.state_cache
.rooms_joined(body.sender_user())
.map(ToOwned::to_owned)
@@ -69,7 +68,7 @@ pub(crate) async fn banned_room_check(
}
if let Some(room_id) = room_id {
if services.rooms.metadata.is_banned(room_id).await
if services.metadata.is_banned(room_id).await
|| (room_id.server_name().is_some()
&& services
.config
@@ -105,7 +104,6 @@ pub(crate) async fn banned_room_check(
}
let all_joined_rooms: Vec<OwnedRoomId> = services
.rooms
.state_cache
.rooms_joined(user_id)
.map(Into::into)
@@ -150,7 +148,6 @@ pub(crate) async fn banned_room_check(
}
let all_joined_rooms: Vec<OwnedRoomId> = services
.rooms
.state_cache
.rooms_joined(user_id)
.map(Into::into)

View File

@@ -14,15 +14,9 @@ pub(crate) async fn unban_user_route(
State(services): State<crate::State>,
body: Ruma<unban_user::v3::Request>,
) -> Result<unban_user::v3::Response> {
let state_lock = services
.rooms
.state
.mutex
.lock(&body.room_id)
.await;
let state_lock = services.state.mutex.lock(&body.room_id).await;
let current_member_content = services
.rooms
.state_accessor
.get_member(&body.room_id, &body.user_id)
.await
@@ -36,7 +30,6 @@ pub(crate) async fn unban_user_route(
}
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(body.user_id.to_string(), &RoomMemberEventContent {

View File

@@ -73,7 +73,7 @@ pub(crate) async fn get_message_events_route(
let room_id = &body.room_id;
let filter = &body.filter;
if !services.rooms.metadata.exists(room_id).await {
if !services.metadata.exists(room_id).await {
return Err!(Request(Forbidden("Room does not exist to this server")));
}
@@ -97,7 +97,6 @@ pub(crate) async fn get_message_events_route(
if matches!(body.dir, Direction::Backward) {
services
.rooms
.timeline
.backfill_if_required(room_id, from)
.await
@@ -107,14 +106,12 @@ pub(crate) async fn get_message_events_route(
let it = match body.dir {
| Direction::Forward => services
.rooms
.timeline
.pdus(Some(sender_user), room_id, Some(from))
.ignore_err()
.boxed(),
| Direction::Backward => services
.rooms
.timeline
.pdus_rev(Some(sender_user), room_id, Some(from))
.ignore_err()
@@ -192,7 +189,7 @@ where
.max()
.unwrap_or_else(PduCount::max);
let receipts = services.rooms.read_receipt.readreceipts_since(
let receipts = services.read_receipt.readreceipts_since(
lazy_loading_context.room_id,
oldest.into_unsigned(),
Some(newest.into_unsigned()),
@@ -213,7 +210,6 @@ where
.await;
services
.rooms
.lazy_loading
.witness_retain(witness, lazy_loading_context)
.await
@@ -225,7 +221,6 @@ async fn get_member_event(
user_id: &UserId,
) -> Option<Raw<AnyStateEvent>> {
services
.rooms
.state_accessor
.room_state_get(room_id, &StateEventType::RoomMember, user_id.as_str())
.map_ok(Event::into_format)
@@ -293,7 +288,6 @@ pub(crate) async fn visibility_filter(
let (_, pdu) = &item;
services
.rooms
.state_accessor
.user_can_see_event(user_id, pdu.room_id(), pdu.event_id())
.await

View File

@@ -44,7 +44,6 @@ pub(crate) async fn get_presence_route(
let mut presence_event = None;
let has_shared_rooms = services
.rooms
.state_cache
.user_sees_user(body.sender_user(), &body.user_id)
.await;

View File

@@ -42,7 +42,6 @@ pub(crate) async fn set_displayname_route(
}
let all_joined_rooms: Vec<OwnedRoomId> = services
.rooms
.state_cache
.rooms_joined(&body.user_id)
.map(ToOwned::to_owned)
@@ -138,7 +137,6 @@ pub(crate) async fn set_avatar_url_route(
}
let all_joined_rooms: Vec<OwnedRoomId> = services
.rooms
.state_cache
.rooms_joined(&body.user_id)
.map(ToOwned::to_owned)
@@ -448,9 +446,8 @@ async fn update_all_rooms(
user_id: &UserId,
) {
for (pdu_builder, room_id) in all_joined_rooms {
let state_lock = services.rooms.state.mutex.lock(room_id).await;
let state_lock = services.state.mutex.lock(room_id).await;
if let Err(e) = services
.rooms
.timeline
.build_and_append_pdu(pdu_builder, user_id, room_id, &state_lock)
.await

View File

@@ -44,7 +44,6 @@ pub(crate) async fn set_read_marker_route(
if body.private_read_receipt.is_some() || body.read_receipt.is_some() {
services
.rooms
.user
.reset_notification_counts(sender_user, &body.room_id);
}
@@ -70,7 +69,6 @@ pub(crate) async fn set_read_marker_route(
)]);
services
.rooms
.read_receipt
.readreceipt_update(
sender_user,
@@ -85,7 +83,6 @@ pub(crate) async fn set_read_marker_route(
if let Some(event) = &body.private_read_receipt {
let count = services
.rooms
.timeline
.get_pdu_count(event)
.await
@@ -98,7 +95,6 @@ pub(crate) async fn set_read_marker_route(
};
services
.rooms
.read_receipt
.private_read_set(&body.room_id, sender_user, count);
}
@@ -120,7 +116,6 @@ pub(crate) async fn create_receipt_route(
create_receipt::v3::ReceiptType::Read | create_receipt::v3::ReceiptType::ReadPrivate
) {
services
.rooms
.user
.reset_notification_counts(sender_user, &body.room_id);
}
@@ -166,7 +161,6 @@ pub(crate) async fn create_receipt_route(
)]);
services
.rooms
.read_receipt
.readreceipt_update(
sender_user,
@@ -180,7 +174,6 @@ pub(crate) async fn create_receipt_route(
},
| create_receipt::v3::ReceiptType::ReadPrivate => {
let count = services
.rooms
.timeline
.get_pdu_count(&body.event_id)
.await
@@ -193,7 +186,6 @@ pub(crate) async fn create_receipt_route(
};
services
.rooms
.read_receipt
.private_read_set(&body.room_id, sender_user, count);
},

View File

@@ -18,15 +18,9 @@ pub(crate) async fn redact_event_route(
let sender_user = body.sender_user();
let body = &body.body;
let state_lock = services
.rooms
.state
.mutex
.lock(&body.room_id)
.await;
let state_lock = services.state.mutex.lock(&body.room_id).await;
let event_id = services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder {

View File

@@ -517,7 +517,6 @@ pub(crate) async fn register_route(
&& services.config.grant_admin_to_first_user
&& let Ok(admin_room) = services.admin.get_admin_room().await
&& services
.rooms
.state_cache
.room_joined_count(&admin_room)
.await
@@ -536,7 +535,7 @@ pub(crate) async fn register_route(
&& (services.config.allow_guests_auto_join_rooms || !is_guest)
{
for room in &services.server.config.auto_join_rooms {
let Ok(room_id) = services.rooms.alias.resolve(room).await else {
let Ok(room_id) = services.alias.resolve(room).await else {
error!(
"Failed to resolve room alias to room ID when attempting to auto join \
{room}, skipping"
@@ -545,7 +544,6 @@ pub(crate) async fn register_route(
};
if !services
.rooms
.state_cache
.server_in_room(services.globals.server_name(), &room_id)
.await

View File

@@ -133,7 +133,6 @@ async fn paginate_relations_with_filter(
let depth: u8 = if recurse { 3 } else { 1 };
let events: Vec<_> = services
.rooms
.pdu_metadata
.get_relations(sender_user, room_id, target, start, limit, depth, dir)
.await
@@ -183,7 +182,6 @@ async fn visibility_filter<Pdu: Event>(
let (_, pdu) = &item;
services
.rooms
.state_accessor
.user_can_see_event(sender_user, pdu.room_id(), pdu.event_id())
.await

View File

@@ -43,7 +43,6 @@ pub(crate) async fn report_room_route(
delay_response().await;
if !services
.rooms
.state_cache
.server_in_room(&services.server.name, &body.room_id)
.await
@@ -92,12 +91,7 @@ pub(crate) async fn report_event_route(
delay_response().await;
// check if we know about the reported event ID or if it's invalid
let Ok(pdu) = services
.rooms
.timeline
.get_pdu(&body.event_id)
.await
else {
let Ok(pdu) = services.timeline.get_pdu(&body.event_id).await else {
return Err!(Request(NotFound("Event ID is not known to us or Event ID is invalid")));
};
@@ -167,7 +161,6 @@ async fn is_event_report_valid(
}
if !services
.rooms
.state_cache
.room_members(room_id)
.ready_any(|user_id| user_id == sender_user)

View File

@@ -18,7 +18,6 @@ pub(crate) async fn get_room_aliases_route(
let sender_user = body.sender_user();
if !services
.rooms
.state_accessor
.user_can_see_state_events(sender_user, &body.room_id)
.await
@@ -28,7 +27,6 @@ pub(crate) async fn get_room_aliases_route(
Ok(aliases::v3::Response {
aliases: services
.rooms
.alias
.local_aliases_for_room(&body.room_id)
.map(ToOwned::to_owned)

View File

@@ -114,7 +114,6 @@ pub(crate) async fn create_room_route(
// 2. Let the room creator join
let sender_user = body.sender_user();
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(sender_user.to_string(), &RoomMemberEventContent {
@@ -176,7 +175,6 @@ pub(crate) async fn create_room_route(
)?;
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder {
@@ -195,7 +193,6 @@ pub(crate) async fn create_room_route(
// 4. Canonical room alias
if let Some(room_alias_id) = &alias {
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomCanonicalAliasEventContent {
@@ -214,7 +211,6 @@ pub(crate) async fn create_room_route(
// 5.1 Join Rules
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(
@@ -234,7 +230,6 @@ pub(crate) async fn create_room_route(
// 5.2 History Visibility
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(
@@ -250,7 +245,6 @@ pub(crate) async fn create_room_route(
// 5.3 Guest Access
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(
@@ -299,7 +293,6 @@ pub(crate) async fn create_room_route(
}
services
.rooms
.timeline
.build_and_append_pdu(pdu_builder, sender_user, &room_id, &state_lock)
.boxed()
@@ -309,7 +302,6 @@ pub(crate) async fn create_room_route(
// 7. Events implied by name and topic
if let Some(name) = &body.name {
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomNameEventContent::new(name.clone())),
@@ -323,7 +315,6 @@ pub(crate) async fn create_room_route(
if let Some(topic) = &body.topic {
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomTopicEventContent {
@@ -371,13 +362,12 @@ pub(crate) async fn create_room_route(
// Homeserver specific stuff
if let Some(alias) = alias {
services
.rooms
.alias
.set_alias(&alias, &room_id, sender_user)?;
}
if body.visibility == room::Visibility::Public {
services.rooms.directory.set_public(&room_id);
services.directory.set_public(&room_id);
if services.server.config.admin_room_notices {
services
@@ -463,9 +453,8 @@ async fn create_create_event(
// 1. The room create event, using a placeholder room_id
let room_id = ruma::room_id!("!thiswillbereplaced").to_owned();
let state_lock = services.rooms.state.mutex.lock(&room_id).await;
let state_lock = services.state.mutex.lock(&room_id).await;
let create_event_id = services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder {
@@ -485,7 +474,7 @@ async fn create_create_event(
// The real room_id is now the event_id.
let room_id = OwnedRoomId::from_parts('!', create_event_id.localpart(), None)?;
let state_lock = services.rooms.state.mutex.lock(&room_id).await;
let state_lock = services.state.mutex.lock(&room_id).await;
Ok((room_id, state_lock))
}
@@ -501,10 +490,9 @@ async fn create_create_event_legacy(
| Some(custom_id) => custom_room_id_check(services, custom_id).await?,
};
let state_lock = services.rooms.state.mutex.lock(&room_id).await;
let state_lock = services.state.mutex.lock(&room_id).await;
let _short_id = services
.rooms
.short
.get_or_create_shortroomid(&room_id)
.await;
@@ -567,7 +555,6 @@ async fn create_create_event_legacy(
// 1. The room create event
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder {
@@ -681,7 +668,6 @@ async fn room_alias_check(
})?;
if services
.rooms
.alias
.resolve_local_alias(&full_room_alias)
.await
@@ -740,7 +726,6 @@ async fn custom_room_id_check(services: &Services, custom_room_id: &str) -> Resu
// check if room ID doesn't already exist instead of erroring on auth check
if services
.rooms
.short
.get_shortroomid(&room_id)
.await

View File

@@ -16,13 +16,11 @@ pub(crate) async fn get_room_event_route(
let room_id = &body.room_id;
let event = services
.rooms
.timeline
.get_pdu(event_id)
.map_err(|_| err!(Request(NotFound("Event {} not found.", event_id))));
let visible = services
.rooms
.state_accessor
.user_can_see_event(body.sender_user(), room_id, event_id)
.map(Ok);

View File

@@ -17,7 +17,6 @@ pub(crate) async fn room_initial_sync_route(
let room_id = &body.room_id;
if !services
.rooms
.state_accessor
.user_can_see_state_events(body.sender_user(), room_id)
.await
@@ -26,19 +25,13 @@ pub(crate) async fn room_initial_sync_route(
}
let membership = services
.rooms
.state_cache
.user_membership(body.sender_user(), room_id)
.map(Ok);
let visibility = services
.rooms
.directory
.visibility(room_id)
.map(Ok);
let visibility = services.directory.visibility(room_id).map(Ok);
let state = services
.rooms
.state_accessor
.room_state_full_pdus(room_id)
.map_ok(Event::into_format)
@@ -46,7 +39,6 @@ pub(crate) async fn room_initial_sync_route(
let limit = LIMIT_MAX;
let events = services
.rooms
.timeline
.pdus_rev(None, room_id, None)
.try_take(limit)

View File

@@ -54,12 +54,11 @@ pub(crate) async fn get_room_summary(
body: Ruma<get_summary::v1::Request>,
) -> Result<get_summary::v1::Response> {
let (room_id, servers) = services
.rooms
.alias
.resolve_with_servers(&body.room_id_or_alias, Some(body.via.clone()))
.await?;
if services.rooms.metadata.is_banned(&room_id).await {
if services.metadata.is_banned(&room_id).await {
return Err!(Request(Forbidden("This room is banned on this homeserver.")));
}
@@ -75,7 +74,6 @@ async fn room_summary_response(
sender_user: Option<&UserId>,
) -> Result<get_summary::v1::Response> {
if services
.rooms
.state_cache
.server_in_room(services.globals.server_name(), room_id)
.await
@@ -103,20 +101,11 @@ async fn local_room_summary_response(
sender_user: Option<&UserId>,
) -> Result<get_summary::v1::Response> {
trace!(?sender_user, "Sending local room summary response for {room_id:?}");
let join_rule = services
.rooms
.state_accessor
.get_join_rules(room_id);
let join_rule = services.state_accessor.get_join_rules(room_id);
let world_readable = services
.rooms
.state_accessor
.is_world_readable(room_id);
let world_readable = services.state_accessor.is_world_readable(room_id);
let guest_can_join = services
.rooms
.state_accessor
.guest_can_join(room_id);
let guest_can_join = services.state_accessor.guest_can_join(room_id);
let (join_rule, world_readable, guest_can_join) =
join3(join_rule, world_readable, guest_can_join).await;
@@ -134,49 +123,35 @@ async fn local_room_summary_response(
.await?;
let canonical_alias = services
.rooms
.state_accessor
.get_canonical_alias(room_id)
.ok();
let name = services
.rooms
.state_accessor
.get_name(room_id)
.ok();
let name = services.state_accessor.get_name(room_id).ok();
let topic = services
.rooms
.state_accessor
.get_room_topic(room_id)
.ok();
let room_type = services
.rooms
.state_accessor
.get_room_type(room_id)
.ok();
let avatar_url = services
.rooms
.state_accessor
.get_avatar(room_id)
.map(|res| res.into_option().unwrap_or_default().url);
let room_version = services
.rooms
.state
.get_room_version(room_id)
.ok();
let room_version = services.state.get_room_version(room_id).ok();
let encryption = services
.rooms
.state_accessor
.get_room_encryption(room_id)
.ok();
let num_joined_members = services
.rooms
.state_cache
.room_joined_count(room_id)
.unwrap_or(0);
@@ -184,7 +159,6 @@ async fn local_room_summary_response(
let membership: OptionFuture<_> = sender_user
.map(|sender_user| {
services
.rooms
.state_accessor
.get_member(room_id, sender_user)
.map_ok_or(MembershipState::Leave, |content| content.membership)
@@ -244,7 +218,7 @@ async fn remote_room_summary_hierarchy_response(
return Err!(Request(Forbidden("Federation is disabled.")));
}
if services.rooms.metadata.is_disabled(room_id).await {
if services.metadata.is_disabled(room_id).await {
return Err!(Request(Forbidden(
"Federaton of room {room_id} is currently disabled on this server."
)));
@@ -313,7 +287,6 @@ where
match sender_user {
| Some(sender_user) => {
let user_can_see_state_events = services
.rooms
.state_accessor
.user_can_see_state_events(sender_user, room_id);
@@ -322,12 +295,9 @@ where
.is_deactivated(sender_user)
.unwrap_or(false);
let user_in_allowed_restricted_room = allowed_room_ids.stream().any(|room| {
services
.rooms
.state_cache
.is_joined(sender_user, room)
});
let user_in_allowed_restricted_room = allowed_room_ids
.stream()
.any(|room| services.state_cache.is_joined(sender_user, room));
let (user_can_see_state_events, is_guest, user_in_allowed_restricted_room) =
join3(user_can_see_state_events, is_guest, user_in_allowed_restricted_room)

View File

@@ -81,23 +81,16 @@ pub(crate) async fn upgrade_room_route(
let replacement_room = RoomId::new_v1(services.globals.server_name());
let _short_id = services
.rooms
.short
.get_or_create_shortroomid(&replacement_room)
.await;
let state_lock = services
.rooms
.state
.mutex
.lock(&body.room_id)
.await;
let state_lock = services.state.mutex.lock(&body.room_id).await;
// Send a m.room.tombstone event to the old room to indicate that it is not
// intended to be used any further Fail if the sender does not have the required
// permissions
let tombstone_event_id = services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(StateKey::new(), &RoomTombstoneEventContent {
@@ -112,16 +105,10 @@ pub(crate) async fn upgrade_room_route(
// Change lock to replacement room
drop(state_lock);
let state_lock = services
.rooms
.state
.mutex
.lock(&replacement_room)
.await;
let state_lock = services.state.mutex.lock(&replacement_room).await;
// Get the old room creation event
let mut create_event_content: CanonicalJsonObject = services
.rooms
.state_accessor
.room_state_get_content(&body.room_id, &StateEventType::RoomCreate, "")
.await
@@ -174,7 +161,6 @@ pub(crate) async fn upgrade_room_route(
}
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder {
@@ -193,7 +179,6 @@ pub(crate) async fn upgrade_room_route(
// Join the new room
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder {
@@ -222,7 +207,6 @@ pub(crate) async fn upgrade_room_route(
// Replicate transferable state events to the new room
for event_type in TRANSFERABLE_STATE_EVENTS {
let event_content = match services
.rooms
.state_accessor
.room_state_get(&body.room_id, event_type, "")
.await
@@ -232,7 +216,6 @@ pub(crate) async fn upgrade_room_route(
};
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder {
@@ -250,27 +233,23 @@ pub(crate) async fn upgrade_room_route(
// Moves any local aliases to the new room
let mut local_aliases = services
.rooms
.alias
.local_aliases_for_room(&body.room_id)
.boxed();
while let Some(alias) = local_aliases.next().await {
services
.rooms
.alias
.remove_alias(alias, sender_user)
.await?;
services
.rooms
.alias
.set_alias(alias, &replacement_room, sender_user)?;
}
// Get the old room power levels
let power_levels_event_content: RoomPowerLevelsEventContent = services
.rooms
.state_accessor
.room_state_get_content(&body.room_id, &StateEventType::RoomPowerLevels, "")
.await
@@ -290,7 +269,6 @@ pub(crate) async fn upgrade_room_route(
// Modify the power levels in the old room to prevent sending of events and
// inviting new users
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(StateKey::new(), &RoomPowerLevelsEventContent {

View File

@@ -87,7 +87,6 @@ async fn category_room_events(
.map(StreamExt::boxed)
.unwrap_or_else(|| {
services
.rooms
.state_cache
.rooms_joined(sender_user)
.map(ToOwned::to_owned)
@@ -110,12 +109,7 @@ async fn category_room_events(
limit,
};
let (count, results) = services
.rooms
.search
.search_pdus(&query)
.await
.ok()?;
let (count, results) = services.search.search_pdus(&query).await.ok()?;
results
.collect::<Vec<_>>()
@@ -187,7 +181,6 @@ async fn category_room_events(
async fn procure_room_state(services: &Services, room_id: &RoomId) -> Result<RoomState> {
let state = services
.rooms
.state_accessor
.room_state_full_pdus(room_id)
.map_ok(Event::into_format)
@@ -208,14 +201,12 @@ async fn check_room_visible(
let is_joined = !check_visible
|| services
.rooms
.state_cache
.is_joined(user_id, room_id)
.await;
let state_visible = !check_state
|| services
.rooms
.state_accessor
.user_can_see_state_events(user_id, room_id)
.await;

View File

@@ -30,16 +30,10 @@ pub(crate) async fn send_message_event_route(
return Err!(Request(Forbidden("Encryption has been disabled")));
}
let state_lock = services
.rooms
.state
.mutex
.lock(&body.room_id)
.await;
let state_lock = services.state.mutex.lock(&body.room_id).await;
if body.event_type == MessageLikeEventType::CallInvite
&& services
.rooms
.directory
.is_public_room(&body.room_id)
.await
@@ -75,7 +69,6 @@ pub(crate) async fn send_message_event_route(
.map_err(|e| err!(Request(BadJson("Invalid JSON body: {e}"))))?;
let event_id = services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder {

View File

@@ -90,7 +90,6 @@ where
.collect::<Vec<_>>();
let summary = services
.rooms
.spaces
.get_summary_and_children_client(room_id, suggested_only, sender_user, &via)
.await;
@@ -139,7 +138,6 @@ where
.stream()
.skip_while(|(room, _)| {
services
.rooms
.short
.get_shortroomid(room)
.map_ok(|short| {
@@ -172,7 +170,6 @@ where
.into_iter()
.map(|(room_id, via)| async move {
let summary = services
.rooms
.spaces
.get_summary_and_children_client(
&room_id,
@@ -199,7 +196,7 @@ where
let next_short_room_ids: Vec<_> = parents
.iter()
.stream()
.filter_map(|room_id| services.rooms.short.get_shortroomid(room_id).ok())
.filter_map(|room_id| services.short.get_shortroomid(room_id).ok())
.collect()
.await;

View File

@@ -78,7 +78,6 @@ pub(crate) async fn get_state_events_route(
let sender_user = body.sender_user();
if !services
.rooms
.state_accessor
.user_can_see_state_events(sender_user, &body.room_id)
.await
@@ -88,7 +87,6 @@ pub(crate) async fn get_state_events_route(
Ok(get_state_events::v3::Response {
room_state: services
.rooms
.state_accessor
.room_state_full_pdus(&body.room_id)
.map_ok(Event::into_format)
@@ -112,7 +110,6 @@ pub(crate) async fn get_state_events_for_key_route(
let sender_user = body.sender_user();
if !services
.rooms
.state_accessor
.user_can_see_state_events(sender_user, &body.room_id)
.await
@@ -123,7 +120,6 @@ pub(crate) async fn get_state_events_for_key_route(
}
let event = services
.rooms
.state_accessor
.room_state_get(&body.room_id, &body.event_type, &body.state_key)
.await
@@ -184,9 +180,8 @@ async fn send_state_event_for_key_helper(
timestamp: Option<ruma::MilliSecondsSinceUnixEpoch>,
) -> Result<OwnedEventId> {
allowed_to_send_state_event(services, room_id, event_type, state_key, json).await?;
let state_lock = services.rooms.state.mutex.lock(room_id).await;
let state_lock = services.state.mutex.lock(room_id).await;
let event_id = services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder {
@@ -332,7 +327,6 @@ async fn allowed_to_send_state_event(
for alias in aliases {
let (alias_room_id, _servers) = services
.rooms
.alias
.resolve_alias(&alias, None)
.await
@@ -380,7 +374,6 @@ async fn allowed_to_send_state_event(
}
services
.rooms
.state_cache
.is_joined(&authorising_user, room_id)
.map(is_false!())

View File

@@ -29,7 +29,6 @@ async fn load_timeline(
limit: usize,
) -> Result<(Vec<(PduCount, PduEvent)>, bool, PduCount), Error> {
let last_timeline_count = services
.rooms
.timeline
.last_timeline_count(Some(sender_user), room_id, next_batch)
.await?;
@@ -39,7 +38,6 @@ async fn load_timeline(
}
let non_timeline_pdus = services
.rooms
.timeline
.pdus_rev(Some(sender_user), room_id, None)
.ready_filter_map(Result::ok)
@@ -70,14 +68,12 @@ async fn share_encrypted_room(
ignore_room: Option<&RoomId>,
) -> bool {
services
.rooms
.state_cache
.get_shared_rooms(sender_user, user_id)
.ready_filter(|&room_id| Some(room_id) != ignore_room)
.map(ToOwned::to_owned)
.broad_any(async |other_room_id| {
services
.rooms
.state_accessor
.is_encrypted_room(&other_room_id)
.await

View File

@@ -225,7 +225,6 @@ async fn build_sync_events(
};
let joined_rooms = services
.rooms
.state_cache
.rooms_joined(sender_user)
.map(ToOwned::to_owned)
@@ -258,7 +257,6 @@ async fn build_sync_events(
);
let left_rooms = services
.rooms
.state_cache
.rooms_left(sender_user)
.broad_filter_map(|(room_id, _)| {
@@ -278,12 +276,10 @@ async fn build_sync_events(
.collect();
let invited_rooms = services
.rooms
.state_cache
.rooms_invited(sender_user)
.fold_default(async |mut invited_rooms: BTreeMap<_, _>, (room_id, invite_state)| {
let invite_count = services
.rooms
.state_cache
.get_invite_count(&room_id, sender_user)
.await
@@ -303,12 +299,10 @@ async fn build_sync_events(
});
let knocked_rooms = services
.rooms
.state_cache
.rooms_knocked(sender_user)
.fold_default(async |mut knocked_rooms: BTreeMap<_, _>, (room_id, knock_state)| {
let knock_count = services
.rooms
.state_cache
.get_knock_count(&room_id, sender_user)
.await
@@ -439,7 +433,6 @@ async fn process_presence_updates(
.presence_since(since, Some(next_batch))
.filter(|(user_id, ..)| {
services
.rooms
.state_cache
.user_sees_user(syncing_user, user_id)
})
@@ -475,7 +468,6 @@ async fn handle_left_room(
filter: &FilterDefinition,
) -> Result<Option<LeftRoom>> {
let left_count = services
.rooms
.state_cache
.get_left_count(room_id, sender_user)
.await
@@ -504,11 +496,11 @@ async fn handle_left_room(
return Ok(None);
}
let is_not_found = services.rooms.metadata.exists(room_id).eq(&false);
let is_not_found = services.metadata.exists(room_id).eq(&false);
let is_disabled = services.rooms.metadata.is_disabled(room_id);
let is_disabled = services.metadata.is_disabled(room_id);
let is_banned = services.rooms.metadata.is_banned(room_id);
let is_banned = services.metadata.is_banned(room_id);
pin_mut!(is_not_found, is_disabled, is_banned);
if is_not_found.or(is_disabled).or(is_banned).await {
@@ -547,14 +539,12 @@ async fn handle_left_room(
let mut left_state_events = Vec::new();
let since_shortstatehash = services
.rooms
.user
.get_token_shortstatehash(room_id, since);
let since_state_ids: HashMap<_, OwnedEventId> = since_shortstatehash
.map_ok(|since_shortstatehash| {
services
.rooms
.state_accessor
.state_full_ids(since_shortstatehash)
.map(Ok)
@@ -565,7 +555,6 @@ async fn handle_left_room(
.unwrap_or_default();
let Ok(left_event_id): Result<OwnedEventId> = services
.rooms
.state_accessor
.room_state_get_id(room_id, &StateEventType::RoomMember, sender_user.as_str())
.await
@@ -575,7 +564,6 @@ async fn handle_left_room(
};
let Ok(left_shortstatehash) = services
.rooms
.state_accessor
.pdu_shortstatehash(&left_event_id)
.await
@@ -585,14 +573,12 @@ async fn handle_left_room(
};
let mut left_state_ids: HashMap<_, _> = services
.rooms
.state_accessor
.state_full_ids(left_shortstatehash)
.collect()
.await;
let leave_shortstatekey = services
.rooms
.short
.get_or_create_shortstatekey(&StateEventType::RoomMember, sender_user.as_str())
.await;
@@ -602,7 +588,6 @@ async fn handle_left_room(
for (shortstatekey, event_id) in left_state_ids {
if full_state || since_state_ids.get(&shortstatekey) != Some(&event_id) {
let (event_type, state_key) = services
.rooms
.short
.get_statekey_from_short(shortstatekey)
.await?;
@@ -618,7 +603,7 @@ async fn handle_left_room(
continue;
}
let Ok(pdu) = services.rooms.timeline.get_pdu(&event_id).await else {
let Ok(pdu) = services.timeline.get_pdu(&event_id).await else {
error!("Pdu in state not found: {event_id}");
continue;
};
@@ -659,7 +644,6 @@ async fn load_joined_room(
filter: &FilterDefinition,
) -> Result<(JoinedRoom, HashSet<OwnedUserId>, HashSet<OwnedUserId>)> {
let since_shortstatehash = services
.rooms
.user
.get_token_shortstatehash(room_id, since)
.ok()
@@ -682,7 +666,6 @@ async fn load_joined_room(
);
let receipt_events = services
.rooms
.read_receipt
.readreceipts_since(room_id, since, Some(next_batch))
.filter_map(async |(read_user, _, edu)| {
@@ -706,7 +689,6 @@ async fn load_joined_room(
.map(PduCount::into_unsigned)
.map(|shorteventid| {
services
.rooms
.state_accessor
.get_shortstatehash(shorteventid)
})
@@ -714,7 +696,6 @@ async fn load_joined_room(
.into();
let current_shortstatehash = services
.rooms
.state_accessor
.get_shortstatehash(last_timeline_count.into_unsigned())
.or_else(|_| services.state.get_room_shortstatehash(room_id));
@@ -727,10 +708,10 @@ async fn load_joined_room(
let current_shortstatehash = current_shortstatehash
.map_err(|_| err!(Database(error!("Room {room_id} has no state"))))?;
let associate_token = services
.rooms
.user
.associate_token_shortstatehash(room_id, next_batch, current_shortstatehash);
let associate_token =
services
.user
.associate_token_shortstatehash(room_id, next_batch, current_shortstatehash);
let lazy_loading_enabled = filter.room.state.lazy_load_options.is_enabled()
|| filter
@@ -751,12 +732,7 @@ async fn load_joined_room(
// Reset lazy loading because this is an initial sync
let lazy_load_reset: OptionFuture<_> = initial
.then(|| {
services
.rooms
.lazy_loading
.reset(lazy_loading_context)
})
.then(|| services.lazy_loading.reset(lazy_loading_context))
.into();
lazy_load_reset.await;
@@ -771,7 +747,6 @@ async fn load_joined_room(
.collect();
services
.rooms
.lazy_loading
.witness_retain(witness, lazy_loading_context)
})
@@ -781,7 +756,6 @@ async fn load_joined_room(
.is_empty()
.then(|| {
services
.rooms
.user
.last_notification_read(sender_user, room_id)
})
@@ -790,20 +764,15 @@ async fn load_joined_room(
let since_sender_member: OptionFuture<_> = since_shortstatehash
.map(|short| {
services
.rooms
.state_accessor
.state_get_content(short, &StateEventType::RoomMember, sender_user.as_str())
.ok()
})
.into();
let encrypted_room = services
.rooms
.state_accessor
.is_encrypted_room(room_id);
let encrypted_room = services.state_accessor.is_encrypted_room(room_id);
let last_privateread_update = services
.rooms
.read_receipt
.last_privateread_update(sender_user, room_id);
@@ -867,7 +836,6 @@ async fn load_joined_room(
let notification_count: OptionFuture<_> = send_notification_counts
.then(|| {
services
.rooms
.user
.notification_count(sender_user, room_id)
.map(TryInto::try_into)
@@ -878,7 +846,6 @@ async fn load_joined_room(
let highlight_count: OptionFuture<_> = send_notification_counts
.then(|| {
services
.rooms
.user
.highlight_count(sender_user, room_id)
.map(TryInto::try_into)
@@ -890,7 +857,6 @@ async fn load_joined_room(
.gt(&since)
.then(|| {
services
.rooms
.read_receipt
.private_read_get(room_id, sender_user)
.map(Result::ok)
@@ -898,7 +864,6 @@ async fn load_joined_room(
.into();
let typing_events = services
.rooms
.typing
.last_typing_update(room_id)
.and_then(async |count| {
@@ -1086,7 +1051,6 @@ async fn calculate_state_changes<'a>(
let state_get_shorteventid = |user_id: &'a UserId| {
services
.rooms
.state_accessor
.state_get_shortid(
horizon_shortstatehash,
@@ -1112,7 +1076,6 @@ async fn calculate_state_changes<'a>(
.then(|| {
StreamExt::into_future(
services
.rooms
.state_accessor
.state_added((since_shortstatehash, horizon_shortstatehash))
.boxed(),
@@ -1124,7 +1087,6 @@ async fn calculate_state_changes<'a>(
.then(|| {
StreamExt::into_future(
services
.rooms
.state_accessor
.state_full_shortids(horizon_shortstatehash)
.expect_ok(),
@@ -1145,18 +1107,12 @@ async fn calculate_state_changes<'a>(
.chain(lazy_state_ids.stream())
.broad_filter_map(|shorteventid| {
services
.rooms
.short
.get_eventid_from_short(shorteventid)
.ok()
})
.broad_filter_map(async |event_id: OwnedEventId| {
services
.rooms
.timeline
.get_pdu(&event_id)
.ok()
.await
services.timeline.get_pdu(&event_id).ok().await
})
.collect::<Vec<_>>()
.boxed()
@@ -1188,7 +1144,6 @@ async fn lazy_filter(
shorteventid: ShortEventId,
) -> Option<ShortEventId> {
let (event_type, state_key) = services
.rooms
.short
.get_statekey_from_short(shortstatekey)
.await
@@ -1204,13 +1159,11 @@ async fn calculate_counts(
sender_user: &UserId,
) -> (Option<u64>, Option<u64>, Option<Vec<OwnedUserId>>) {
let joined_member_count = services
.rooms
.state_cache
.room_joined_count(room_id)
.unwrap_or(0);
let invited_member_count = services
.rooms
.state_cache
.room_invited_count(room_id)
.unwrap_or(0);
@@ -1233,7 +1186,6 @@ async fn calculate_heroes(
sender_user: &UserId,
) -> Vec<OwnedUserId> {
services
.rooms
.timeline
.all_pdus(sender_user, room_id)
.ready_filter(|(_, pdu)| pdu.kind == RoomMember)
@@ -1277,14 +1229,8 @@ async fn fold_hero(
}
let (is_invited, is_joined) = join(
services
.rooms
.state_cache
.is_invited(user_id, room_id),
services
.rooms
.state_cache
.is_joined(user_id, room_id),
services.state_cache.is_invited(user_id, room_id),
services.state_cache.is_joined(user_id, room_id),
)
.await;
@@ -1304,7 +1250,6 @@ async fn typings_event_for_user(
Ok(SyncEphemeralRoomEvent {
content: TypingEventContent {
user_ids: services
.rooms
.typing
.typing_users_for_user(room_id, sender_user)
.await?,

View File

@@ -98,21 +98,18 @@ pub(crate) async fn sync_events_v5_route(
.update_snake_sync_request_with_cache(&snake_key, &mut cached);
let all_joined_rooms = services
.rooms
.state_cache
.rooms_joined(sender_user)
.map(ToOwned::to_owned)
.collect::<Vec<OwnedRoomId>>();
let all_invited_rooms = services
.rooms
.state_cache
.rooms_invited(sender_user)
.map(|r| r.0)
.collect::<Vec<OwnedRoomId>>();
let all_knocked_rooms = services
.rooms
.state_cache
.rooms_knocked(sender_user)
.map(|r| r.0)
@@ -234,11 +231,11 @@ async fn fetch_subscriptions(
) {
let mut known_subscription_rooms = BTreeSet::new();
for (room_id, room) in &body.room_subscriptions {
let not_exists = services.rooms.metadata.exists(room_id).eq(&false);
let not_exists = services.metadata.exists(room_id).eq(&false);
let is_disabled = services.rooms.metadata.is_disabled(room_id);
let is_disabled = services.metadata.is_disabled(room_id);
let is_banned = services.rooms.metadata.is_banned(room_id);
let is_banned = services.metadata.is_banned(room_id);
pin_mut!(not_exists, is_disabled, is_banned);
if not_exists.or(is_disabled).or(is_banned).await {
@@ -414,7 +411,6 @@ where
{
// TODO: figure out a timestamp we can use for remote invites
invite_state = services
.rooms
.state_cache
.invite_state(sender_user, room_id)
.await
@@ -453,7 +449,6 @@ where
}
let last_privateread_update = services
.rooms
.read_receipt
.last_privateread_update(sender_user, room_id)
.await;
@@ -461,7 +456,6 @@ where
let private_read_event: OptionFuture<_> = (last_privateread_update > *roomsince)
.then(|| {
services
.rooms
.read_receipt
.private_read_get(room_id, sender_user)
.ok()
@@ -469,7 +463,6 @@ where
.into();
let mut receipts: Vec<Raw<AnySyncEphemeralRoomEvent>> = services
.rooms
.read_receipt
.readreceipts_since(room_id, *roomsince, Some(next_batch))
.filter_map(async |(read_user, _ts, v)| {
@@ -552,7 +545,6 @@ where
.stream()
.filter_map(async |state| {
services
.rooms
.state_accessor
.room_state_get(room_id, &state.0, &state.1)
.await
@@ -563,7 +555,6 @@ where
.await;
let room_name = services
.rooms
.state_accessor
.get_name(room_id)
.await
@@ -572,13 +563,11 @@ where
// Heroes
let heroes: Vec<_> = if room_name.is_none() {
services
.rooms
.state_cache
.room_members(room_id)
.ready_filter(|member| *member != sender_user)
.filter_map(|user_id| {
services
.rooms
.state_accessor
.get_member(room_id, user_id)
.map_ok(|memberevent| sync_events::v5::response::Hero {
@@ -629,12 +618,7 @@ where
None
};
let room_avatar = match services
.rooms
.state_accessor
.get_avatar(room_id)
.await
{
let room_avatar = match services.state_accessor.get_avatar(room_id).await {
| ruma::JsOption::Some(avatar) => ruma::JsOption::from_option(avatar.url),
| ruma::JsOption::Null => ruma::JsOption::Null,
| ruma::JsOption::Undefined => ruma::JsOption::Undefined,
@@ -653,7 +637,6 @@ where
unread_notifications: UnreadNotificationsCount {
highlight_count: Some(
services
.rooms
.user
.highlight_count(sender_user, room_id)
.await
@@ -662,7 +645,6 @@ where
),
notification_count: Some(
services
.rooms
.user
.notification_count(sender_user, room_id)
.await
@@ -676,7 +658,6 @@ where
limited,
joined_count: Some(
services
.rooms
.state_cache
.room_joined_count(room_id)
.await
@@ -686,7 +667,6 @@ where
),
invited_count: Some(
services
.rooms
.state_cache
.room_invited_count(room_id)
.await
@@ -780,7 +760,6 @@ where
for room_id in all_joined_rooms {
let Ok(current_shortstatehash) = services
.rooms
.state
.get_room_shortstatehash(room_id)
.await
@@ -790,14 +769,12 @@ where
};
let since_shortstatehash = services
.rooms
.user
.get_token_shortstatehash(room_id, globalsince)
.await
.ok();
let encrypted_room = services
.rooms
.state_accessor
.state_get(current_shortstatehash, &StateEventType::RoomEncryption, "")
.await
@@ -810,13 +787,11 @@ where
}
let since_encryption = services
.rooms
.state_accessor
.state_get(since_shortstatehash, &StateEventType::RoomEncryption, "")
.await;
let since_sender_member: Option<RoomMemberEventContent> = services
.rooms
.state_accessor
.state_get_content(
since_shortstatehash,
@@ -834,14 +809,12 @@ where
if encrypted_room {
let current_state_ids: HashMap<_, OwnedEventId> = services
.rooms
.state_accessor
.state_full_ids(current_shortstatehash)
.collect()
.await;
let since_state_ids: HashMap<_, _> = services
.rooms
.state_accessor
.state_full_ids(since_shortstatehash)
.collect()
@@ -852,7 +825,7 @@ where
continue;
}
let Ok(pdu) = services.rooms.timeline.get_pdu(&id).await else {
let Ok(pdu) = services.timeline.get_pdu(&id).await else {
error!("Pdu in state not found: {id}");
continue;
};
@@ -897,7 +870,6 @@ where
// If the user is in a new encrypted room, give them all joined users
device_list_changes.extend(
services
.rooms
.state_cache
.room_members(room_id)
// Don't send key updates from the sender to the sender
@@ -995,7 +967,6 @@ where
.stream()
.filter_map(async |room_id| {
services
.rooms
.typing
.typing_users_for_user(room_id, sender_user)
.inspect_err(|e| warn!(%room_id, "Failed to get typing events for room: {e}"))
@@ -1037,7 +1008,6 @@ where
{
rooms.filter_map(async |room_id| {
let room_type = services
.rooms
.state_accessor
.get_room_type(room_id)
.await;

View File

@@ -32,13 +32,11 @@ pub(crate) async fn get_threads_route(
.unwrap_or_else(PduCount::max);
let threads: Vec<(PduCount, PduEvent)> = services
.rooms
.threads
.threads_until(body.sender_user(), &body.room_id, from, &body.include)
.take(limit)
.try_filter_map(async |(count, pdu)| {
Ok(services
.rooms
.state_accessor
.user_can_see_event(body.sender_user(), &body.room_id, &pdu.event_id)
.await

View File

@@ -19,7 +19,6 @@ pub(crate) async fn create_typing_event_route(
}
if !services
.rooms
.state_cache
.is_joined(sender_user, &body.room_id)
.await
@@ -46,7 +45,6 @@ pub(crate) async fn create_typing_event_route(
.try_mul(1000)?,
);
services
.rooms
.typing
.typing_add(
sender_user,
@@ -59,7 +57,6 @@ pub(crate) async fn create_typing_event_route(
},
| _ => {
services
.rooms
.typing
.typing_remove(sender_user, &body.room_id)
.await?;

View File

@@ -45,7 +45,6 @@ pub(crate) async fn get_mutual_rooms_route(
}
let mutual_rooms: Vec<OwnedRoomId> = services
.rooms
.state_cache
.get_shared_rooms(sender_user, &body.user_id)
.map(ToOwned::to_owned)
@@ -137,7 +136,6 @@ pub(crate) async fn set_profile_field_route(
if body.value.field_name() == ProfileFieldName::DisplayName {
let all_joined_rooms: Vec<OwnedRoomId> = services
.rooms
.state_cache
.rooms_joined(&body.user_id)
.map(Into::into)
@@ -155,7 +153,6 @@ pub(crate) async fn set_profile_field_route(
let mxc = ruma::OwnedMxcUri::from(body.value.value().to_string());
let all_joined_rooms: Vec<OwnedRoomId> = services
.rooms
.state_cache
.rooms_joined(&body.user_id)
.map(Into::into)
@@ -199,7 +196,6 @@ pub(crate) async fn delete_profile_field_route(
if body.field == ProfileFieldName::DisplayName {
let all_joined_rooms: Vec<OwnedRoomId> = services
.rooms
.state_cache
.rooms_joined(&body.user_id)
.map(Into::into)
@@ -209,7 +205,6 @@ pub(crate) async fn delete_profile_field_route(
update_displayname(&services, &body.user_id, None, &all_joined_rooms).await;
} else if body.field == ProfileFieldName::AvatarUrl {
let all_joined_rooms: Vec<OwnedRoomId> = services
.rooms
.state_cache
.rooms_joined(&body.user_id)
.map(Into::into)

View File

@@ -57,13 +57,11 @@ pub(crate) async fn search_users_route(
}
let user_in_public_room = services
.rooms
.state_cache
.rooms_joined(&user_id)
.map(ToOwned::to_owned)
.broad_any(async |room_id| {
services
.rooms
.state_accessor
.get_join_rules(&room_id)
.map(|rule| matches!(rule, JoinRule::Public))
@@ -71,7 +69,6 @@ pub(crate) async fn search_users_route(
});
let user_sees_user = services
.rooms
.state_cache
.user_sees_user(sender_user, &user_id);

View File

@@ -46,7 +46,6 @@ pub(crate) async fn get_backfill_route(
.stream()
.filter_map(|event_id| {
services
.rooms
.timeline
.get_pdu_count(event_id)
.map(Result::ok)
@@ -60,13 +59,11 @@ pub(crate) async fn get_backfill_route(
origin: services.globals.server_name().to_owned(),
pdus: services
.rooms
.timeline
.pdus_rev(None, &body.room_id, Some(from.saturating_add(1)))
.try_take(limit)
.try_filter_map(async |(_, pdu)| {
Ok(services
.rooms
.state_accessor
.server_can_see_event(body.origin(), &pdu.room_id, &pdu.event_id)
.await
@@ -74,7 +71,6 @@ pub(crate) async fn get_backfill_route(
})
.try_filter_map(async |pdu| {
Ok(services
.rooms
.timeline
.get_pdu_json(&pdu.event_id)
.await

View File

@@ -16,7 +16,6 @@ pub(crate) async fn get_event_route(
body: Ruma<get_event::v1::Request>,
) -> Result<get_event::v1::Response> {
let event = services
.rooms
.timeline
.get_pdu_json(&body.event_id)
.await

View File

@@ -30,7 +30,6 @@ pub(crate) async fn get_event_authorization_route(
.await?;
let event = services
.rooms
.timeline
.get_pdu_json(&body.event_id)
.await
@@ -45,18 +44,10 @@ pub(crate) async fn get_event_authorization_route(
.map_err(|_| Error::bad_database("Invalid room_id in event in database."))?;
let auth_chain = services
.rooms
.auth_chain
.event_ids_iter(room_id, once(body.event_id.borrow()))
.ready_filter_map(Result::ok)
.filter_map(async |id| {
services
.rooms
.timeline
.get_pdu_json(&id)
.await
.ok()
})
.filter_map(async |id| services.timeline.get_pdu_json(&id).await.ok())
.then(|pdu| {
services
.sending

View File

@@ -38,12 +38,7 @@ pub(crate) async fn get_missing_events_route(
let mut i: usize = 0;
while i < queued_events.len() && events.len() < limit {
let Ok(pdu) = services
.rooms
.timeline
.get_pdu(&queued_events[i])
.await
else {
let Ok(pdu) = services.timeline.get_pdu(&queued_events[i]).await else {
debug!(
?body.origin,
"Event {} does not exist locally, skipping", &queued_events[i]
@@ -58,7 +53,6 @@ pub(crate) async fn get_missing_events_route(
}
if !services
.rooms
.state_accessor
.server_can_see_event(body.origin(), &body.room_id, &queued_events[i])
.await

View File

@@ -17,12 +17,7 @@ pub(crate) async fn get_hierarchy_route(
State(services): State<crate::State>,
body: Ruma<get_hierarchy::v1::Request>,
) -> Result<get_hierarchy::v1::Response> {
if !services
.rooms
.metadata
.exists(&body.room_id)
.await
{
if !services.metadata.exists(&body.room_id).await {
return Err!(Request(NotFound("Room does not exist.")));
}
@@ -30,7 +25,6 @@ pub(crate) async fn get_hierarchy_route(
let suggested_only = body.suggested_only;
let ref identifier = Identifier::ServerName(body.origin());
match services
.rooms
.spaces
.get_summary_and_children_local(room_id, identifier)
.await?
@@ -47,7 +41,6 @@ pub(crate) async fn get_hierarchy_route(
.stream()
.broad_filter_map(async |(child, _via)| {
match services
.rooms
.spaces
.get_summary_and_children_local(&child, identifier)
.await

View File

@@ -31,7 +31,6 @@ pub(crate) async fn create_invite_route(
) -> Result<create_invite::v2::Response> {
// ACL check origin
services
.rooms
.event_handler
.acl_check(body.origin(), &body.room_id)
.await?;
@@ -88,7 +87,6 @@ pub(crate) async fn create_invite_route(
// Make sure we're not ACL'ed from their room.
services
.rooms
.event_handler
.acl_check(invited_user.server_name(), &body.room_id)
.await?;
@@ -109,11 +107,8 @@ pub(crate) async fn create_invite_route(
.try_into()
.map_err(|e| err!(Request(InvalidParam("Invalid sender property: {e}"))))?;
if services
.rooms
.metadata
.is_banned(&body.room_id)
.await && !services.users.is_admin(&invited_user).await
if services.metadata.is_banned(&body.room_id).await
&& !services.users.is_admin(&invited_user).await
{
return Err!(Request(Forbidden("This room is banned on this homeserver.")));
}
@@ -144,13 +139,11 @@ pub(crate) async fn create_invite_route(
// record the invited state for client /sync through update_membership(), and
// send the invite PDU to the relevant appservices.
if !services
.rooms
.state_cache
.server_in_room(services.globals.server_name(), &body.room_id)
.await
{
services
.rooms
.state_cache
.update_membership(
&body.room_id,

View File

@@ -26,12 +26,7 @@ pub(crate) async fn create_join_event_template_route(
State(services): State<crate::State>,
body: Ruma<prepare_join_event::v1::Request>,
) -> Result<prepare_join_event::v1::Response> {
if !services
.rooms
.metadata
.exists(&body.room_id)
.await
{
if !services.metadata.exists(&body.room_id).await {
return Err!(Request(NotFound("Room is unknown to this server.")));
}
@@ -41,7 +36,6 @@ pub(crate) async fn create_join_event_template_route(
// ACL check origin server
services
.rooms
.event_handler
.acl_check(body.origin(), &body.room_id)
.await?;
@@ -74,7 +68,6 @@ pub(crate) async fn create_join_event_template_route(
}
let room_version_id = services
.rooms
.state
.get_room_version(&body.room_id)
.await?;
@@ -85,12 +78,7 @@ pub(crate) async fn create_join_event_template_route(
));
}
let state_lock = services
.rooms
.state
.mutex
.lock(&body.room_id)
.await;
let state_lock = services.state.mutex.lock(&body.room_id).await;
let join_authorized_via_users_server: Option<OwnedUserId> = {
use RoomVersionId::*;
@@ -106,11 +94,10 @@ pub(crate) async fn create_join_event_template_route(
.await?
{
let users = services
.rooms
.state_cache
.local_users_in_room(&body.room_id)
.filter(|user| {
services.rooms.state_accessor.user_can_invite(
services.state_accessor.user_can_invite(
&body.room_id,
user,
&body.user_id,
@@ -133,7 +120,6 @@ pub(crate) async fn create_join_event_template_route(
};
let (_pdu, mut pdu_json) = services
.rooms
.timeline
.create_hash_and_sign_event(
PduBuilder::state(body.user_id.to_string(), &RoomMemberEventContent {
@@ -172,7 +158,6 @@ pub(crate) async fn user_can_perform_restricted_join(
}
if services
.rooms
.state_cache
.is_joined(user_id, room_id)
.await
@@ -182,7 +167,6 @@ pub(crate) async fn user_can_perform_restricted_join(
}
let Ok(join_rules_event_content) = services
.rooms
.state_accessor
.room_state_get_content::<RoomJoinRulesEventContent>(
room_id,
@@ -217,7 +201,6 @@ pub(crate) async fn user_can_perform_restricted_join(
.stream()
.any(|m| {
services
.rooms
.state_cache
.is_joined(user_id, &m.room_id)
})

View File

@@ -17,12 +17,7 @@ pub(crate) async fn create_knock_event_template_route(
State(services): State<crate::State>,
body: Ruma<prepare_knock_event::v1::Request>,
) -> Result<prepare_knock_event::v1::Response> {
if !services
.rooms
.metadata
.exists(&body.room_id)
.await
{
if !services.metadata.exists(&body.room_id).await {
return Err!(Request(NotFound("Room is unknown to this server.")));
}
@@ -32,7 +27,6 @@ pub(crate) async fn create_knock_event_template_route(
// ACL check origin server
services
.rooms
.event_handler
.acl_check(body.origin(), &body.room_id)
.await?;
@@ -63,7 +57,6 @@ pub(crate) async fn create_knock_event_template_route(
}
let room_version_id = services
.rooms
.state
.get_room_version(&body.room_id)
.await?;
@@ -82,15 +75,9 @@ pub(crate) async fn create_knock_event_template_route(
));
}
let state_lock = services
.rooms
.state
.mutex
.lock(&body.room_id)
.await;
let state_lock = services.state.mutex.lock(&body.room_id).await;
if let Ok(membership) = services
.rooms
.state_accessor
.get_member(&body.room_id, &body.user_id)
.await
@@ -106,7 +93,6 @@ pub(crate) async fn create_knock_event_template_route(
}
let (_pdu, mut pdu_json) = services
.rooms
.timeline
.create_hash_and_sign_event(
PduBuilder::state(

View File

@@ -16,12 +16,7 @@ pub(crate) async fn create_leave_event_template_route(
State(services): State<crate::State>,
body: Ruma<prepare_leave_event::v1::Request>,
) -> Result<prepare_leave_event::v1::Response> {
if !services
.rooms
.metadata
.exists(&body.room_id)
.await
{
if !services.metadata.exists(&body.room_id).await {
return Err!(Request(NotFound("Room is unknown to this server.")));
}
@@ -33,25 +28,17 @@ pub(crate) async fn create_leave_event_template_route(
// ACL check origin
services
.rooms
.event_handler
.acl_check(body.origin(), &body.room_id)
.await?;
let room_version_id = services
.rooms
.state
.get_room_version(&body.room_id)
.await?;
let state_lock = services
.rooms
.state
.mutex
.lock(&body.room_id)
.await;
let state_lock = services.state.mutex.lock(&body.room_id).await;
let (_pdu, mut pdu_json) = services
.rooms
.timeline
.create_hash_and_sign_event(
PduBuilder::state(

View File

@@ -23,14 +23,12 @@ pub(crate) async fn get_room_information_route(
body: Ruma<get_room_information::v1::Request>,
) -> Result<get_room_information::v1::Response> {
let room_id = services
.rooms
.alias
.resolve_local_alias(&body.room_alias)
.await
.map_err(|_| err!(Request(NotFound("Room alias not found."))))?;
let mut servers: Vec<OwnedServerName> = services
.rooms
.state_cache
.room_servers(&room_id)
.map(ToOwned::to_owned)

View File

@@ -92,12 +92,7 @@ pub(crate) async fn send_transaction_message_route(
.pdus
.iter()
.stream()
.broad_then(|pdu| {
services
.rooms
.event_handler
.parse_incoming_pdu(pdu)
})
.broad_then(|pdu| services.event_handler.parse_incoming_pdu(pdu))
.inspect_err(|e| debug_warn!("Could not parse PDU: {e}"))
.ready_filter_map(Result::ok);
@@ -186,7 +181,6 @@ async fn handle_room(
pdus: impl Iterator<Item = Pdu> + Send,
) -> Result<ResolvedMap> {
let _room_lock = services
.rooms
.event_handler
.mutex_federation
.lock(room_id)
@@ -197,7 +191,6 @@ async fn handle_room(
services.server.check_running()?;
let pdu_start_time = Instant::now();
let result = services
.rooms
.event_handler
.handle_incoming_pdu(origin, &room_id, &event_id, value, true)
.map_ok(|_| ())
@@ -311,7 +304,6 @@ async fn handle_edu_receipt_room(
room_updates: ReceiptMap,
) {
if services
.rooms
.event_handler
.acl_check(origin, &room_id)
.await
@@ -351,7 +343,6 @@ async fn handle_edu_receipt_room_user(
}
if !services
.rooms
.state_cache
.server_in_room(origin, room_id)
.await
@@ -373,7 +364,6 @@ async fn handle_edu_receipt_room_user(
let receipts = [(ReceiptType::Read, BTreeMap::from(user_data))];
let content = [(event_id.clone(), BTreeMap::from(receipts))];
services
.rooms
.read_receipt
.readreceipt_update(user_id, room_id, &ReceiptEvent {
content: ReceiptEventContent(content.into()),
@@ -399,7 +389,6 @@ async fn handle_edu_typing(
}
if services
.rooms
.event_handler
.acl_check(typing.user_id.server_name(), &typing.room_id)
.await
@@ -413,7 +402,6 @@ async fn handle_edu_typing(
}
if !services
.rooms
.state_cache
.is_joined(&typing.user_id, &typing.room_id)
.await
@@ -430,7 +418,6 @@ async fn handle_edu_typing(
let timeout = millis_since_unix_epoch().saturating_add(secs.saturating_mul(1000));
services
.rooms
.typing
.typing_add(&typing.user_id, &typing.room_id, timeout)
.await
@@ -438,7 +425,6 @@ async fn handle_edu_typing(
.ok();
} else {
services
.rooms
.typing
.typing_remove(&typing.user_id, &typing.room_id)
.await

View File

@@ -31,13 +31,12 @@ async fn create_join_event(
room_id: &RoomId,
pdu: &RawJsonValue,
) -> Result<create_join_event::v1::RoomState> {
if !services.rooms.metadata.exists(room_id).await {
if !services.metadata.exists(room_id).await {
return Err!(Request(NotFound("Room is unknown to this server.")));
}
// ACL check origin server
services
.rooms
.event_handler
.acl_check(origin, room_id)
.await?;
@@ -45,7 +44,6 @@ async fn create_join_event(
// We need to return the state prior to joining, let's keep a reference to that
// here
let shortstatehash = services
.rooms
.state
.get_room_shortstatehash(room_id)
.await
@@ -53,11 +51,7 @@ async fn create_join_event(
// We do not add the event_id field to the pdu here because of signature and
// hashes checks
let room_version_id = services
.rooms
.state
.get_room_version(room_id)
.await?;
let room_version_id = services.state.get_room_version(room_id).await?;
let Ok((event_id, mut value)) = gen_event_id_canonical_json(pdu, &room_version_id) else {
// Event could not be converted to canonical json
@@ -118,7 +112,6 @@ async fn create_join_event(
.map_err(|e| err!(Request(BadJson(warn!("sender property is not a valid user ID: {e}")))))?;
services
.rooms
.event_handler
.acl_check(sender.server_name(), room_id)
.await?;
@@ -159,7 +152,6 @@ async fn create_join_event(
}
if !services
.rooms
.state_cache
.is_joined(&authorising_user, room_id)
.await
@@ -199,14 +191,12 @@ async fn create_join_event(
.map_err(|e| err!(Request(BadJson("Event has an invalid origin server name: {e}"))))?;
let mutex_lock = services
.rooms
.event_handler
.mutex_federation
.lock(room_id)
.await;
let pdu_id = services
.rooms
.event_handler
.handle_incoming_pdu(&origin, room_id, &event_id, value.clone(), true)
.boxed()
@@ -216,7 +206,6 @@ async fn create_join_event(
drop(mutex_lock);
let state_ids: Vec<OwnedEventId> = services
.rooms
.state_accessor
.state_full_ids(shortstatehash)
.map(at!(1))
@@ -226,7 +215,7 @@ async fn create_join_event(
let state = state_ids
.iter()
.try_stream()
.broad_and_then(|event_id| services.rooms.timeline.get_pdu_json(event_id))
.broad_and_then(|event_id| services.timeline.get_pdu_json(event_id))
.broad_and_then(|pdu| {
services
.sending
@@ -239,16 +228,9 @@ async fn create_join_event(
let starting_events = state_ids.iter().map(Borrow::borrow);
let auth_chain = services
.rooms
.auth_chain
.event_ids_iter(room_id, starting_events)
.broad_and_then(async |event_id| {
services
.rooms
.timeline
.get_pdu_json(&event_id)
.await
})
.broad_and_then(async |event_id| services.timeline.get_pdu_json(&event_id).await)
.broad_and_then(|pdu| {
services
.sending

View File

@@ -55,24 +55,17 @@ pub(crate) async fn create_knock_event_v1_route(
}
}
if !services
.rooms
.metadata
.exists(&body.room_id)
.await
{
if !services.metadata.exists(&body.room_id).await {
return Err!(Request(NotFound("Room is unknown to this server.")));
}
// ACL check origin server
services
.rooms
.event_handler
.acl_check(body.origin(), &body.room_id)
.await?;
let room_version_id = services
.rooms
.state
.get_room_version(&body.room_id)
.await?;
@@ -127,7 +120,6 @@ pub(crate) async fn create_knock_event_v1_route(
.map_err(|e| err!(Request(BadJson("Event sender is not a valid user ID: {e}"))))?;
services
.rooms
.event_handler
.acl_check(sender.server_name(), &body.room_id)
.await?;
@@ -168,14 +160,12 @@ pub(crate) async fn create_knock_event_v1_route(
.map_err(|e| err!(Request(InvalidParam("Invalid knock event PDU: {e}"))))?;
let mutex_lock = services
.rooms
.event_handler
.mutex_federation
.lock(&body.room_id)
.await;
let pdu_id = services
.rooms
.event_handler
.handle_incoming_pdu(&origin, &body.room_id, &event_id, value.clone(), true)
.boxed()
@@ -191,7 +181,6 @@ pub(crate) async fn create_knock_event_v1_route(
Ok(create_knock_event::v1::Response {
knock_room_state: services
.rooms
.state
.summary_stripped(&pdu)
.await

View File

@@ -46,24 +46,19 @@ async fn create_leave_event(
room_id: &RoomId,
pdu: &RawJsonValue,
) -> Result {
if !services.rooms.metadata.exists(room_id).await {
if !services.metadata.exists(room_id).await {
return Err!(Request(NotFound("Room is unknown to this server.")));
}
// ACL check origin
services
.rooms
.event_handler
.acl_check(origin, room_id)
.await?;
// We do not add the event_id field to the pdu here because of signature and
// hashes checks
let room_version_id = services
.rooms
.state
.get_room_version(room_id)
.await?;
let room_version_id = services.state.get_room_version(room_id).await?;
let Ok((event_id, value)) = gen_event_id_canonical_json(pdu, &room_version_id) else {
// Event could not be converted to canonical json
return Err!(Request(BadJson("Could not convert event to canonical json.")));
@@ -124,7 +119,6 @@ async fn create_leave_event(
.map_err(|e| err!(Request(BadJson(warn!("sender property is not a valid user ID: {e}")))))?;
services
.rooms
.event_handler
.acl_check(sender.server_name(), room_id)
.await?;
@@ -147,14 +141,12 @@ async fn create_leave_event(
}
let mutex_lock = services
.rooms
.event_handler
.mutex_federation
.lock(room_id)
.await;
let pdu_id = services
.rooms
.event_handler
.handle_incoming_pdu(origin, room_id, &event_id, value, true)
.boxed()

View File

@@ -25,14 +25,12 @@ pub(crate) async fn get_room_state_route(
.await?;
let shortstatehash = services
.rooms
.state_accessor
.pdu_shortstatehash(&body.event_id)
.await
.map_err(|_| err!(Request(NotFound("PDU state not found."))))?;
let state_ids: Vec<OwnedEventId> = services
.rooms
.state_accessor
.state_full_ids(shortstatehash)
.map(at!(1))
@@ -42,7 +40,7 @@ pub(crate) async fn get_room_state_route(
let pdus = state_ids
.iter()
.try_stream()
.and_then(|id| services.rooms.timeline.get_pdu_json(id))
.and_then(|id| services.timeline.get_pdu_json(id))
.and_then(|pdu| {
services
.sending
@@ -53,10 +51,9 @@ pub(crate) async fn get_room_state_route(
.await?;
let auth_chain = services
.rooms
.auth_chain
.event_ids_iter(&body.room_id, once(body.event_id.borrow()))
.and_then(async |id| services.rooms.timeline.get_pdu_json(&id).await)
.and_then(async |id| services.timeline.get_pdu_json(&id).await)
.and_then(|pdu| {
services
.sending

View File

@@ -26,14 +26,12 @@ pub(crate) async fn get_room_state_ids_route(
.await?;
let shortstatehash = services
.rooms
.state_accessor
.pdu_shortstatehash(&body.event_id)
.await
.map_err(|_| err!(Request(NotFound("Pdu state not found."))))?;
let pdu_ids: Vec<OwnedEventId> = services
.rooms
.state_accessor
.state_full_ids(shortstatehash)
.map(at!(1))
@@ -41,7 +39,6 @@ pub(crate) async fn get_room_state_ids_route(
.await;
let auth_chain_ids = services
.rooms
.auth_chain
.event_ids_iter(&body.room_id, once(body.event_id.borrow()))
.try_collect()

View File

@@ -14,20 +14,17 @@ pub(super) struct AccessCheck<'a> {
pub(super) async fn check(&self) -> Result {
let acl_check = self
.services
.rooms
.event_handler
.acl_check(self.origin, self.room_id)
.map(|result| result.is_ok());
let world_readable = self
.services
.rooms
.state_accessor
.is_world_readable(self.room_id);
let server_in_room = self
.services
.rooms
.state_cache
.server_in_room(self.origin, self.room_id);
@@ -35,7 +32,6 @@ pub(super) async fn check(&self) -> Result {
// acknowledge bans or leaves
let user_is_knocking = self
.services
.rooms
.state_cache
.room_members_knocked(self.room_id)
.count();
@@ -44,7 +40,6 @@ pub(super) async fn check(&self) -> Result {
.event_id
.map(|event_id| {
self.services
.rooms
.state_accessor
.server_can_see_event(self.origin, self.room_id, event_id)
})

View File

@@ -16,10 +16,8 @@ use tuwunel_core::{
};
use tuwunel_database::{Deserialized, Handle, Ignore, Json, Map};
use crate::{Dep, globals};
pub struct Service {
services: Services,
services: Arc<crate::services::OnceServices>,
db: Data,
}
@@ -28,16 +26,10 @@ struct Data {
roomusertype_roomuserdataid: Arc<Map>,
}
struct Services {
globals: Dep<globals::Service>,
}
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
services: Services {
globals: args.depend::<globals::Service>("globals"),
},
services: args.services.clone(),
db: Data {
roomuserdataid_accountdata: args.db["roomuserdataid_accountdata"].clone(),
roomusertype_roomuserdataid: args.db["roomusertype_roomuserdataid"].clone(),

View File

@@ -12,11 +12,9 @@ use termimad::MadSkin;
use tokio::task::JoinHandle;
use tuwunel_core::{Server, debug, defer, error, log, log::is_systemd_mode};
use crate::{Dep, admin};
pub struct Console {
server: Arc<Server>,
admin: Dep<admin::Service>,
services: Arc<crate::services::OnceServices>,
worker_join: Mutex<Option<JoinHandle<()>>>,
input_abort: Mutex<Option<AbortHandle>>,
command_abort: Mutex<Option<AbortHandle>>,
@@ -31,7 +29,7 @@ impl Console {
pub(super) fn new(args: &crate::Args<'_>) -> Arc<Self> {
Arc::new(Self {
server: args.server.clone(),
admin: args.depend::<admin::Service>("admin"),
services: args.services.clone(),
worker_join: None.into(),
input_abort: None.into(),
command_abort: None.into(),
@@ -177,7 +175,12 @@ impl Console {
}
async fn process(self: Arc<Self>, line: String) {
match self.admin.command_in_place(line, None).await {
match self
.services
.admin
.command_in_place(line, None)
.await
{
| Ok(Some(ref content)) => self.output(content),
| Err(ref content) => self.output_err(content),
| _ => unreachable!(),
@@ -213,7 +216,8 @@ impl Console {
}
fn tab_complete(&self, line: &str) -> String {
self.admin
self.services
.admin
.complete_command(line)
.unwrap_or_else(|| line.to_owned())
}

View File

@@ -45,12 +45,11 @@ pub async fn create_admin_room(services: &Services) -> Result {
let room_version = RoomVersionId::V11;
let _short_id = services
.rooms
.short
.get_or_create_shortroomid(&room_id)
.await;
let state_lock = services.rooms.state.mutex.lock(&room_id).await;
let state_lock = services.state.mutex.lock(&room_id).await;
// Create a user for the server
let server_user = services.globals.server_user.as_ref();
@@ -69,7 +68,6 @@ pub async fn create_admin_room(services: &Services) -> Result {
// 1. The room create event
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomCreateEventContent {
@@ -87,7 +85,6 @@ pub async fn create_admin_room(services: &Services) -> Result {
// 2. Make server user/bot join
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(
@@ -105,7 +102,6 @@ pub async fn create_admin_room(services: &Services) -> Result {
let users = BTreeMap::from_iter([(server_user.into(), 69420.into())]);
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomPowerLevelsEventContent {
@@ -121,7 +117,6 @@ pub async fn create_admin_room(services: &Services) -> Result {
// 4.1 Join Rules
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomJoinRulesEventContent::new(JoinRule::Invite)),
@@ -134,7 +129,6 @@ pub async fn create_admin_room(services: &Services) -> Result {
// 4.2 History Visibility
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(
@@ -150,7 +144,6 @@ pub async fn create_admin_room(services: &Services) -> Result {
// 4.3 Guest Access
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(
@@ -167,7 +160,6 @@ pub async fn create_admin_room(services: &Services) -> Result {
// 5. Events implied by name and topic
let room_name = format!("{} Admin Room", services.config.server_name);
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomNameEventContent::new(room_name)),
@@ -179,7 +171,6 @@ pub async fn create_admin_room(services: &Services) -> Result {
.await?;
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomTopicEventContent {
@@ -197,7 +188,6 @@ pub async fn create_admin_room(services: &Services) -> Result {
let alias = &services.globals.admin_alias;
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomCanonicalAliasEventContent {
@@ -212,13 +202,11 @@ pub async fn create_admin_room(services: &Services) -> Result {
.await?;
services
.rooms
.alias
.set_alias(alias, &room_id, server_user)?;
// 7. (ad-hoc) Disable room URL previews for everyone by default
services
.rooms
.timeline
.build_and_append_pdu(
PduBuilder::state(String::new(), &RoomPreviewUrlsEventContent { disabled: true }),

View File

@@ -5,7 +5,7 @@ mod grant;
use std::{
pin::Pin,
sync::{Arc, RwLock as StdRwLock, Weak},
sync::{Arc, RwLock as StdRwLock},
};
use async_trait::async_trait;
@@ -17,13 +17,13 @@ use ruma::{
};
use tokio::sync::{RwLock, mpsc};
use tuwunel_core::{
Err, Error, Event, Result, Server, debug, err, error, error::default_log, pdu::PduBuilder,
Err, Error, Event, Result, debug, err, error, error::default_log, pdu::PduBuilder,
};
use crate::{Dep, account_data, globals, rooms, rooms::state::RoomMutexGuard};
use crate::rooms::state::RoomMutexGuard;
pub struct Service {
services: Services,
services: Arc<crate::services::OnceServices>,
channel: StdRwLock<Option<mpsc::Sender<CommandInput>>>,
pub handle: RwLock<Option<Processor>>,
pub complete: StdRwLock<Option<Completer>>,
@@ -31,18 +31,6 @@ pub struct Service {
pub console: Arc<console::Console>,
}
struct Services {
server: Arc<Server>,
globals: Dep<globals::Service>,
alias: Dep<rooms::alias::Service>,
timeline: Dep<rooms::timeline::Service>,
state: Dep<rooms::state::Service>,
state_cache: Dep<rooms::state_cache::Service>,
state_accessor: Dep<rooms::state_accessor::Service>,
account_data: Dep<account_data::Service>,
services: StdRwLock<Option<Weak<crate::Services>>>,
}
/// Inputs to a command are a multi-line string and optional reply_id.
#[derive(Clone, Debug, Default)]
pub struct CommandInput {
@@ -77,18 +65,7 @@ const COMMAND_QUEUE_LIMIT: usize = 512;
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
services: Services {
server: args.server.clone(),
globals: args.depend::<globals::Service>("globals"),
alias: args.depend::<rooms::alias::Service>("rooms::alias"),
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
state: args.depend::<rooms::state::Service>("rooms::state"),
state_cache: args.depend::<rooms::state_cache::Service>("rooms::state_cache"),
state_accessor: args
.depend::<rooms::state_accessor::Service>("rooms::state_accessor"),
account_data: args.depend::<account_data::Service>("account_data"),
services: None.into(),
},
services: args.services.clone(),
channel: StdRwLock::new(None),
handle: RwLock::new(None),
complete: StdRwLock::new(None),
@@ -234,16 +211,7 @@ impl Service {
.await
.expect("Admin module is not loaded");
let services = self
.services
.services
.read()
.expect("locked")
.as_ref()
.and_then(Weak::upgrade)
.expect("Services self-reference not initialized.");
handle(services, command).await
handle(Arc::clone(self.services.get_services()), command).await
}
/// Checks whether a given user is an admin of this server
@@ -423,17 +391,4 @@ impl Service {
.await
.unwrap_or(false)
}
/// Sets the self-reference to crate::Services which will provide context to
/// the admin commands.
pub(super) fn set_services(&self, services: Option<&Arc<crate::Services>>) {
let receiver = &mut *self
.services
.services
.write()
.expect("locked for writing");
let weak = services.map(Arc::downgrade);
*receiver = weak;
}
}

View File

@@ -11,23 +11,17 @@ use async_trait::async_trait;
use futures::{Future, FutureExt, Stream, StreamExt, TryStreamExt};
use ruma::{RoomAliasId, RoomId, UserId, api::appservice::Registration};
use tokio::sync::{RwLock, RwLockReadGuard};
use tuwunel_core::{Err, Result, Server, debug, err, utils::stream::IterStream};
use tuwunel_core::{Err, Result, debug, err, utils::stream::IterStream};
use tuwunel_database::Map;
pub use self::{namespace_regex::NamespaceRegex, registration_info::RegistrationInfo};
use crate::{Dep, sending};
pub struct Service {
registration_info: RwLock<Registrations>,
services: Services,
services: Arc<crate::services::OnceServices>,
db: Data,
}
struct Services {
sending: Dep<sending::Service>,
server: Arc<Server>,
}
struct Data {
id_appserviceregistrations: Arc<Map>,
}
@@ -39,10 +33,7 @@ impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
registration_info: RwLock::new(BTreeMap::new()),
services: Services {
sending: args.depend::<sending::Service>("sending"),
server: args.server.clone(),
},
services: args.services.clone(),
db: Data {
id_appserviceregistrations: args.db["id_appserviceregistrations"].clone(),
},

View File

@@ -1,22 +1,26 @@
use std::{sync::Arc, time::Duration};
use std::{
sync::{Arc, LazyLock},
time::Duration,
};
use either::Either;
use ipaddress::IPAddress;
use reqwest::redirect;
use tuwunel_core::{Config, Result, err, implement, trace};
use crate::{resolver, service};
use crate::{service, services::OnceServices};
type ClientLazylock = LazyLock<reqwest::Client, Box<dyn FnOnce() -> reqwest::Client + Send>>;
pub struct Service {
pub default: reqwest::Client,
pub url_preview: reqwest::Client,
pub extern_media: reqwest::Client,
pub well_known: reqwest::Client,
pub federation: reqwest::Client,
pub synapse: reqwest::Client,
pub sender: reqwest::Client,
pub appservice: reqwest::Client,
pub pusher: reqwest::Client,
pub default: ClientLazylock,
pub url_preview: ClientLazylock,
pub extern_media: ClientLazylock,
pub well_known: ClientLazylock,
pub federation: ClientLazylock,
pub synapse: ClientLazylock,
pub sender: ClientLazylock,
pub appservice: ClientLazylock,
pub pusher: ClientLazylock,
pub cidr_range_denylist: Vec<IPAddress>,
}
@@ -24,86 +28,89 @@ pub struct Service {
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
let config = &args.server.config;
let resolver = args.require::<resolver::Service>("resolver");
let url_preview_bind_addr = config
.url_preview_bound_interface
.clone()
.and_then(Either::left);
let url_preview_bind_iface = config
.url_preview_bound_interface
.clone()
.and_then(Either::right);
macro_rules! create_client {
($config:ident, $services:ident; $expr:expr) => {{
fn make($services: Arc<OnceServices>) -> Result<reqwest::Client> {
let $config = &$services.server.config;
Ok($expr.build()?)
}
let services = Arc::clone(args.services);
LazyLock::new(Box::new(|| make(services).unwrap()))
}};
}
Ok(Arc::new(Self {
default: base(config)?
.dns_resolver(resolver.resolver.clone())
.build()?,
default: create_client!(config, services; base(config)?
.dns_resolver(Arc::clone(&services.resolver.resolver))),
url_preview: base(config)
url_preview: create_client!(config, services; {
let url_preview_bind_addr = config
.url_preview_bound_interface
.clone()
.and_then(Either::left);
let url_preview_bind_iface = config
.url_preview_bound_interface
.clone()
.and_then(Either::right);
base(config)
.and_then(|builder| {
builder_interface(builder, url_preview_bind_iface.as_deref())
})?
.local_address(url_preview_bind_addr)
.dns_resolver(resolver.resolver.clone())
.dns_resolver(Arc::clone(&services.resolver.resolver))
.redirect(redirect::Policy::limited(3))
.build()?,
}),
extern_media: base(config)?
.dns_resolver(resolver.resolver.clone())
.redirect(redirect::Policy::limited(3))
.build()?,
extern_media: create_client!(config, services; base(config)?
.dns_resolver(Arc::clone(&services.resolver.resolver))
.redirect(redirect::Policy::limited(3))),
well_known: base(config)?
.dns_resolver(resolver.resolver.clone())
well_known: create_client!(config, services; base(config)?
.dns_resolver(Arc::clone(&services.resolver.resolver))
.connect_timeout(Duration::from_secs(config.well_known_conn_timeout))
.read_timeout(Duration::from_secs(config.well_known_timeout))
.timeout(Duration::from_secs(config.well_known_timeout))
.pool_max_idle_per_host(0)
.redirect(redirect::Policy::limited(4))
.build()?,
.redirect(redirect::Policy::limited(4))),
federation: base(config)?
.dns_resolver(resolver.resolver.hooked.clone())
federation: create_client!(config, services; base(config)?
.dns_resolver(Arc::clone(&services.resolver.resolver.hooked))
.read_timeout(Duration::from_secs(config.federation_timeout))
.pool_max_idle_per_host(config.federation_idle_per_host.into())
.pool_idle_timeout(Duration::from_secs(config.federation_idle_timeout))
.redirect(redirect::Policy::limited(3))
.build()?,
.redirect(redirect::Policy::limited(3))),
synapse: base(config)?
.dns_resolver(resolver.resolver.hooked.clone())
synapse: create_client!(config, services; base(config)?
.dns_resolver(Arc::clone(&services.resolver.resolver.hooked))
.read_timeout(Duration::from_secs(305))
.pool_max_idle_per_host(0)
.redirect(redirect::Policy::limited(3))
.build()?,
.redirect(redirect::Policy::limited(3))),
sender: base(config)?
.dns_resolver(resolver.resolver.hooked.clone())
sender: create_client!(config, services; base(config)?
.dns_resolver(Arc::clone(&services.resolver.resolver.hooked))
.read_timeout(Duration::from_secs(config.sender_timeout))
.timeout(Duration::from_secs(config.sender_timeout))
.pool_max_idle_per_host(1)
.pool_idle_timeout(Duration::from_secs(config.sender_idle_timeout))
.redirect(redirect::Policy::limited(2))
.build()?,
.redirect(redirect::Policy::limited(2))),
appservice: base(config)?
.dns_resolver(resolver.resolver.clone())
appservice: create_client!(config, services; base(config)?
.dns_resolver(Arc::clone(&services.resolver.resolver))
.connect_timeout(Duration::from_secs(5))
.read_timeout(Duration::from_secs(config.appservice_timeout))
.timeout(Duration::from_secs(config.appservice_timeout))
.pool_max_idle_per_host(1)
.pool_idle_timeout(Duration::from_secs(config.appservice_idle_timeout))
.redirect(redirect::Policy::limited(2))
.build()?,
.redirect(redirect::Policy::limited(2))),
pusher: base(config)?
.dns_resolver(resolver.resolver.clone())
pusher: create_client!(config, services; base(config)?
.dns_resolver(Arc::clone(&services.resolver.resolver))
.pool_max_idle_per_host(1)
.pool_idle_timeout(Duration::from_secs(config.pusher_idle_timeout))
.redirect(redirect::Policy::limited(2))
.build()?,
.redirect(redirect::Policy::limited(2))),
cidr_range_denylist: config
.ip_range_denylist

View File

@@ -9,31 +9,14 @@ use ruma::{
};
use tuwunel_core::{Result, debug_warn, error, warn};
use crate::{Dep, account_data, config, globals, users};
pub struct Service {
services: Services,
}
struct Services {
account_data: Dep<account_data::Service>,
config: Dep<config::Service>,
globals: Dep<globals::Service>,
users: Dep<users::Service>,
services: Arc<crate::services::OnceServices>,
}
#[async_trait]
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
services: Services {
account_data: args.depend::<account_data::Service>("account_data"),
config: args.depend::<config::Service>("config"),
globals: args.depend::<globals::Service>("globals"),
users: args.depend::<users::Service>("users"),
},
}))
Ok(Arc::new(Self { services: args.services.clone() }))
}
async fn worker(self: Arc<Self>) -> Result {

View File

@@ -2,31 +2,17 @@ mod execute;
use std::sync::Arc;
use tuwunel_core::{Result, Server};
use tuwunel_core::Result;
use crate::{Dep, client, resolver, server_keys};
use crate::services::OnceServices;
pub struct Service {
services: Services,
}
struct Services {
server: Arc<Server>,
client: Dep<client::Service>,
resolver: Dep<resolver::Service>,
server_keys: Dep<server_keys::Service>,
services: Arc<OnceServices>,
}
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
services: Services {
server: args.server.clone(),
client: args.depend::<client::Service>("client"),
resolver: args.depend::<resolver::Service>("resolver"),
server_keys: args.depend::<server_keys::Service>("server_keys"),
},
}))
Ok(Arc::new(Self { services: args.services.clone() }))
}
fn name(&self) -> &str { crate::service::make_name(std::module_path!()) }

View File

@@ -12,11 +12,9 @@ use tuwunel_core::{
};
use tuwunel_database::{Deserialized, Ignore, Interfix, Json, Map};
use crate::{Dep, globals};
pub struct Service {
db: Data,
services: Services,
services: Arc<crate::services::OnceServices>,
}
struct Data {
@@ -25,10 +23,6 @@ struct Data {
backupkeyid_backup: Arc<Map>,
}
struct Services {
globals: Dep<globals::Service>,
}
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
@@ -37,9 +31,7 @@ impl crate::Service for Service {
backupid_etag: args.db["backupid_etag"].clone(),
backupkeyid_backup: args.db["backupkeyid_backup"].clone(),
},
services: Services {
globals: args.depend::<globals::Service>("globals"),
},
services: args.services.clone(),
}))
}

View File

@@ -10,13 +10,13 @@ use tuwunel_core::{
Err, Error, Result, Server, debug, debug_warn, error, trace, utils::time, warn,
};
use crate::{Services, service, service::Service};
use crate::{Services, service::Service};
pub(crate) struct Manager {
manager: Mutex<Option<JoinHandle<Result>>>,
workers: Mutex<Workers>,
server: Arc<Server>,
service: Arc<service::Map>,
services: Arc<Services>,
}
type Workers = JoinSet<WorkerResult>;
@@ -26,12 +26,12 @@ type WorkersLocked<'a> = MutexGuard<'a, Workers>;
const RESTART_DELAY_MS: u64 = 2500;
impl Manager {
pub(super) fn new(services: &Services) -> Arc<Self> {
pub(super) fn new(services: &Arc<Services>) -> Arc<Self> {
Arc::new(Self {
manager: Mutex::new(None),
workers: Mutex::new(JoinSet::new()),
server: services.server.clone(),
service: services.service.clone(),
services: services.clone(),
})
}
@@ -55,19 +55,8 @@ impl Manager {
.spawn(async move { self_.worker().await }),
);
// we can't hold the lock during the iteration with start_worker so the values
// are snapshotted here
let services: Vec<Arc<dyn Service>> = self
.service
.read()
.expect("locked for reading")
.values()
.map(|val| val.0.upgrade())
.map(|arc| arc.expect("services available for manager startup"))
.collect();
debug!("Starting service workers...");
for service in services {
for service in self.services.services() {
self.start_worker(&mut workers, &service).await?;
}

View File

@@ -15,14 +15,13 @@ use tokio::{
io::{AsyncReadExt, AsyncWriteExt, BufReader},
};
use tuwunel_core::{
Err, Result, Server, debug, debug_error, debug_info, debug_warn, err, error, trace,
Err, Result, debug, debug_error, debug_info, debug_warn, err, error, trace,
utils::{self, MutexMap},
warn,
};
use self::data::{Data, Metadata};
pub use self::thumbnail::Dim;
use crate::{Dep, client, globals, sending};
#[derive(Debug)]
pub struct FileMeta {
@@ -34,14 +33,7 @@ pub struct FileMeta {
pub struct Service {
url_preview_mutex: MutexMap<String, ()>,
pub(super) db: Data,
services: Services,
}
struct Services {
server: Arc<Server>,
client: Dep<client::Service>,
globals: Dep<globals::Service>,
sending: Dep<sending::Service>,
services: Arc<crate::services::OnceServices>,
}
/// generated MXC ID (`media-id`) length
@@ -59,12 +51,7 @@ impl crate::Service for Service {
Ok(Arc::new(Self {
url_preview_mutex: MutexMap::new(),
db: Data::new(args.db),
services: Services {
server: args.server.clone(),
client: args.depend::<client::Service>("client"),
globals: args.depend::<globals::Service>("globals"),
sending: args.depend::<sending::Service>("sending"),
},
services: args.services.clone(),
}))
}

View File

@@ -186,7 +186,6 @@ async fn migrate(services: &Services) -> Result {
let patterns = services.globals.forbidden_alias_names();
if !patterns.is_empty() {
for room_id in services
.rooms
.metadata
.iter_ids()
.map(ToOwned::to_owned)
@@ -194,7 +193,6 @@ async fn migrate(services: &Services) -> Result {
.await
{
services
.rooms
.alias
.local_aliases_for_room(&room_id)
.ready_for_each(|room_alias| {
@@ -406,7 +404,6 @@ async fn retroactively_fix_bad_data_from_roomuserid_joined(services: &Services)
let _cork = db.cork_and_sync();
let room_ids = services
.rooms
.metadata
.iter_ids()
.map(ToOwned::to_owned)
@@ -417,7 +414,6 @@ async fn retroactively_fix_bad_data_from_roomuserid_joined(services: &Services)
debug_info!("Fixing room {room_id}");
let users_in_room: Vec<OwnedUserId> = services
.rooms
.state_cache
.room_members(room_id)
.map(ToOwned::to_owned)
@@ -429,7 +425,6 @@ async fn retroactively_fix_bad_data_from_roomuserid_joined(services: &Services)
.stream()
.filter(|user_id| {
services
.rooms
.state_accessor
.get_member(room_id, user_id)
.map(|member| {
@@ -444,7 +439,6 @@ async fn retroactively_fix_bad_data_from_roomuserid_joined(services: &Services)
.stream()
.filter(|user_id| {
services
.rooms
.state_accessor
.get_member(room_id, user_id)
.map(|member| {
@@ -457,7 +451,6 @@ async fn retroactively_fix_bad_data_from_roomuserid_joined(services: &Services)
for user_id in &joined_members {
debug_info!("User is joined, marking as joined");
services
.rooms
.state_cache
.mark_as_joined(user_id, room_id);
}
@@ -465,7 +458,6 @@ async fn retroactively_fix_bad_data_from_roomuserid_joined(services: &Services)
for user_id in &non_joined_members {
debug_info!("User is left or banned, marking as left");
services
.rooms
.state_cache
.mark_as_left(user_id, room_id);
}
@@ -478,7 +470,6 @@ async fn retroactively_fix_bad_data_from_roomuserid_joined(services: &Services)
);
services
.rooms
.state_cache
.update_joined_count(room_id)
.await;

View File

@@ -27,7 +27,7 @@ pub mod transaction_ids;
pub mod uiaa;
pub mod users;
pub(crate) use service::{Args, Dep, Service};
pub(crate) use service::{Args, Service};
pub use crate::services::Services;

View File

@@ -9,17 +9,11 @@ use tuwunel_core::{
use tuwunel_database::{Deserialized, Json, Map};
use super::Presence;
use crate::{Dep, globals, users};
pub(crate) struct Data {
presenceid_presence: Arc<Map>,
userid_presenceid: Arc<Map>,
services: Services,
}
struct Services {
globals: Dep<globals::Service>,
users: Dep<users::Service>,
services: Arc<crate::services::OnceServices>,
}
impl Data {
@@ -28,10 +22,7 @@ impl Data {
Self {
presenceid_presence: db["presenceid_presence"].clone(),
userid_presenceid: db["userid_presenceid"].clone(),
services: Services {
globals: args.depend::<globals::Service>("globals"),
users: args.depend::<users::Service>("users"),
},
services: args.services.clone(),
}
}

View File

@@ -8,13 +8,9 @@ use futures::{Stream, StreamExt, TryFutureExt, stream::FuturesUnordered};
use loole::{Receiver, Sender};
use ruma::{OwnedUserId, UInt, UserId, events::presence::PresenceEvent, presence::PresenceState};
use tokio::time::sleep;
use tuwunel_core::{
Error, Result, Server, checked, debug, debug_warn, error, result::LogErr, trace,
};
use tuwunel_database::Database;
use tuwunel_core::{Error, Result, checked, debug, debug_warn, error, result::LogErr, trace};
use self::{data::Data, presence::Presence};
use crate::{Dep, globals, users};
pub struct Service {
timer_channel: (Sender<TimerType>, Receiver<TimerType>),
@@ -22,14 +18,7 @@ pub struct Service {
idle_timeout: u64,
offline_timeout: u64,
db: Data,
services: Services,
}
struct Services {
server: Arc<Server>,
db: Arc<Database>,
globals: Dep<globals::Service>,
users: Dep<users::Service>,
services: Arc<crate::services::OnceServices>,
}
type TimerType = (OwnedUserId, Duration);
@@ -46,12 +35,7 @@ impl crate::Service for Service {
idle_timeout: checked!(idle_timeout_s * 1_000)?,
offline_timeout: checked!(offline_timeout_s * 1_000)?,
db: Data::new(&args),
services: Services {
server: args.server.clone(),
db: args.db.clone(),
globals: args.depend::<globals::Service>("globals"),
users: args.depend::<users::Service>("users"),
},
services: args.services.clone(),
}))
}

View File

@@ -29,20 +29,9 @@ use tuwunel_core::{
};
use tuwunel_database::{Deserialized, Ignore, Interfix, Json, Map};
use crate::{Dep, client, globals, rooms, sending, users};
pub struct Service {
db: Data,
services: Services,
}
struct Services {
globals: Dep<globals::Service>,
client: Dep<client::Service>,
state_accessor: Dep<rooms::state_accessor::Service>,
state_cache: Dep<rooms::state_cache::Service>,
users: Dep<users::Service>,
sending: Dep<sending::Service>,
services: Arc<crate::services::OnceServices>,
}
struct Data {
@@ -57,15 +46,7 @@ impl crate::Service for Service {
senderkey_pusher: args.db["senderkey_pusher"].clone(),
pushkey_deviceid: args.db["pushkey_deviceid"].clone(),
},
services: Services {
globals: args.depend::<globals::Service>("globals"),
client: args.depend::<client::Service>("client"),
state_accessor: args
.depend::<rooms::state_accessor::Service>("rooms::state_accessor"),
state_cache: args.depend::<rooms::state_cache::Service>("rooms::state_cache"),
users: args.depend::<users::Service>("users"),
sending: args.depend::<sending::Service>("sending"),
},
services: args.services.clone(),
}))
}

View File

@@ -9,21 +9,15 @@ mod well_known;
use std::sync::Arc;
use async_trait::async_trait;
use tuwunel_core::{Result, Server, arrayvec::ArrayString, utils::MutexMap};
use tuwunel_core::{Result, arrayvec::ArrayString, utils::MutexMap};
use self::{cache::Cache, dns::Resolver};
use crate::{Dep, client};
pub struct Service {
pub cache: Arc<Cache>,
pub resolver: Arc<Resolver>,
resolving: Resolving,
services: Services,
}
struct Services {
server: Arc<Server>,
client: Dep<client::Service>,
services: Arc<crate::services::OnceServices>,
}
type Resolving = MutexMap<NameBuf, ()>;
@@ -42,10 +36,7 @@ impl crate::Service for Service {
cache: cache.clone(),
resolver: Resolver::build(args.server, cache)?,
resolving: MutexMap::new(),
services: Services {
server: args.server.clone(),
client: args.depend::<client::Service>("client"),
},
services: args.services.clone(),
}))
}

View File

@@ -8,17 +8,17 @@ use ruma::{
events::StateEventType,
};
use tuwunel_core::{
Err, Result, Server, err,
Err, Result, err,
matrix::Event,
utils::{ReadyExt, stream::TryIgnore},
};
use tuwunel_database::{Deserialized, Ignore, Interfix, Map};
use crate::{Dep, admin, appservice, appservice::RegistrationInfo, globals, rooms, sending};
use crate::appservice::RegistrationInfo;
pub struct Service {
db: Data,
services: Services,
services: Arc<crate::services::OnceServices>,
}
struct Data {
@@ -27,15 +27,6 @@ struct Data {
aliasid_alias: Arc<Map>,
}
struct Services {
server: Arc<Server>,
admin: Dep<admin::Service>,
appservice: Dep<appservice::Service>,
globals: Dep<globals::Service>,
sending: Dep<sending::Service>,
state_accessor: Dep<rooms::state_accessor::Service>,
}
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
@@ -44,15 +35,7 @@ impl crate::Service for Service {
alias_roomid: args.db["alias_roomid"].clone(),
aliasid_alias: args.db["aliasid_alias"].clone(),
},
services: Services {
server: args.server.clone(),
admin: args.depend::<admin::Service>("admin"),
appservice: args.depend::<appservice::Service>("appservice"),
globals: args.depend::<globals::Service>("globals"),
sending: args.depend::<sending::Service>("sending"),
state_accessor: args
.depend::<rooms::state_accessor::Service>("rooms::state_accessor"),
},
services: args.services.clone(),
}))
}

View File

@@ -21,29 +21,19 @@ use tuwunel_core::{
};
use self::data::Data;
use crate::{Dep, rooms, rooms::short::ShortEventId};
use crate::rooms::short::ShortEventId;
pub struct Service {
services: Services,
services: Arc<crate::services::OnceServices>,
db: Data,
}
struct Services {
short: Dep<rooms::short::Service>,
state: Dep<rooms::state::Service>,
timeline: Dep<rooms::timeline::Service>,
}
type Bucket<'a> = BTreeSet<(u64, &'a EventId)>;
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
services: Services {
short: args.depend::<rooms::short::Service>("rooms::short"),
state: args.depend::<rooms::state::Service>("rooms::state"),
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
},
services: args.services.clone(),
db: Data::new(&args),
}))
}

View File

@@ -21,31 +21,14 @@ use std::{
use async_trait::async_trait;
use ruma::{EventId, OwnedRoomId, RoomId};
use tuwunel_core::{
Err, Result, Server, implement,
Err, Result, implement,
matrix::{Event, PduEvent},
utils::{MutexMap, continue_exponential_backoff},
};
use crate::{Dep, globals, rooms, sending, server_keys};
pub struct Service {
pub mutex_federation: RoomMutexMap,
services: Services,
}
struct Services {
globals: Dep<globals::Service>,
sending: Dep<sending::Service>,
auth_chain: Dep<rooms::auth_chain::Service>,
metadata: Dep<rooms::metadata::Service>,
pdu_metadata: Dep<rooms::pdu_metadata::Service>,
server_keys: Dep<server_keys::Service>,
short: Dep<rooms::short::Service>,
state: Dep<rooms::state::Service>,
state_accessor: Dep<rooms::state_accessor::Service>,
state_compressor: Dep<rooms::state_compressor::Service>,
timeline: Dep<rooms::timeline::Service>,
server: Arc<Server>,
services: Arc<crate::services::OnceServices>,
}
type RoomMutexMap = MutexMap<OwnedRoomId, ()>;
@@ -55,22 +38,7 @@ impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
mutex_federation: RoomMutexMap::new(),
services: Services {
globals: args.depend::<globals::Service>("globals"),
sending: args.depend::<sending::Service>("sending"),
auth_chain: args.depend::<rooms::auth_chain::Service>("rooms::auth_chain"),
metadata: args.depend::<rooms::metadata::Service>("rooms::metadata"),
server_keys: args.depend::<server_keys::Service>("server_keys"),
pdu_metadata: args.depend::<rooms::pdu_metadata::Service>("rooms::pdu_metadata"),
short: args.depend::<rooms::short::Service>("rooms::short"),
state: args.depend::<rooms::state::Service>("rooms::state"),
state_accessor: args
.depend::<rooms::state_accessor::Service>("rooms::state_accessor"),
state_compressor: args
.depend::<rooms::state_compressor::Service>("rooms::state_compressor"),
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
server: args.server.clone(),
},
services: args.services.clone(),
}))
}

View File

@@ -11,11 +11,9 @@ use tuwunel_core::{
};
use tuwunel_database::Map;
use crate::{Dep, rooms};
pub struct Service {
db: Data,
services: Services,
services: Arc<crate::services::OnceServices>,
}
struct Data {
@@ -25,12 +23,6 @@ struct Data {
pduid_pdu: Arc<Map>,
}
struct Services {
directory: Dep<rooms::directory::Service>,
short: Dep<rooms::short::Service>,
state_accessor: Dep<rooms::state_accessor::Service>,
}
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
@@ -40,12 +32,7 @@ impl crate::Service for Service {
roomid_shortroomid: args.db["roomid_shortroomid"].clone(),
pduid_pdu: args.db["pduid_pdu"].clone(),
},
services: Services {
directory: args.depend::<rooms::directory::Service>("rooms::directory"),
short: args.depend::<rooms::short::Service>("rooms::short"),
state_accessor: args
.depend::<rooms::state_accessor::Service>("rooms::state_accessor"),
},
services: args.services.clone(),
}))
}

View File

@@ -17,27 +17,3 @@ pub mod threads;
pub mod timeline;
pub mod typing;
pub mod user;
use std::sync::Arc;
pub struct Service {
pub alias: Arc<alias::Service>,
pub auth_chain: Arc<auth_chain::Service>,
pub directory: Arc<directory::Service>,
pub event_handler: Arc<event_handler::Service>,
pub lazy_loading: Arc<lazy_loading::Service>,
pub metadata: Arc<metadata::Service>,
pub pdu_metadata: Arc<pdu_metadata::Service>,
pub read_receipt: Arc<read_receipt::Service>,
pub search: Arc<search::Service>,
pub short: Arc<short::Service>,
pub spaces: Arc<spaces::Service>,
pub state: Arc<state::Service>,
pub state_accessor: Arc<state_accessor::Service>,
pub state_cache: Arc<state_cache::Service>,
pub state_compressor: Arc<state_compressor::Service>,
pub threads: Arc<threads::Service>,
pub timeline: Arc<timeline::Service>,
pub typing: Arc<typing::Service>,
pub user: Arc<user::Service>,
}

View File

@@ -14,23 +14,16 @@ use tuwunel_core::{
};
use tuwunel_database::Map;
use crate::{
Dep, rooms,
rooms::{
short::{ShortEventId, ShortRoomId},
timeline::{PduId, RawPduId},
},
use crate::rooms::{
short::{ShortEventId, ShortRoomId},
timeline::{PduId, RawPduId},
};
pub(super) struct Data {
tofrom_relation: Arc<Map>,
referencedevents: Arc<Map>,
softfailedeventids: Arc<Map>,
services: Services,
}
struct Services {
timeline: Dep<rooms::timeline::Service>,
services: Arc<crate::services::OnceServices>,
}
impl Data {
@@ -40,9 +33,7 @@ impl Data {
tofrom_relation: db["tofrom_relation"].clone(),
referencedevents: db["referencedevents"].clone(),
softfailedeventids: db["softfailedeventids"].clone(),
services: Services {
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
},
services: args.services.clone(),
}
}

View File

@@ -9,25 +9,16 @@ use tuwunel_core::{
};
use self::data::Data;
use crate::{Dep, rooms};
pub struct Service {
services: Services,
services: Arc<crate::services::OnceServices>,
db: Data,
}
struct Services {
short: Dep<rooms::short::Service>,
timeline: Dep<rooms::timeline::Service>,
}
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
services: Services {
short: args.depend::<rooms::short::Service>("rooms::short"),
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
},
services: args.services.clone(),
db: Data::new(&args),
}))
}

View File

@@ -12,19 +12,13 @@ use tuwunel_core::{
};
use tuwunel_database::{Deserialized, Json, Map};
use crate::{Dep, globals};
pub(super) struct Data {
roomuserid_privateread: Arc<Map>,
roomuserid_lastprivatereadupdate: Arc<Map>,
services: Services,
services: Arc<crate::services::OnceServices>,
readreceiptid_readreceipt: Arc<Map>,
}
struct Services {
globals: Dep<globals::Service>,
}
pub(super) type ReceiptItem<'a> = (&'a UserId, u64, Raw<AnySyncEphemeralRoomEvent>);
impl Data {
@@ -34,9 +28,7 @@ impl Data {
roomuserid_privateread: db["roomuserid_privateread"].clone(),
roomuserid_lastprivatereadupdate: db["roomuserid_lastprivatereadupdate"].clone(),
readreceiptid_readreceipt: db["readreceiptid_readreceipt"].clone(),
services: Services {
globals: args.depend::<globals::Service>("globals"),
},
services: args.services.clone(),
}
}

View File

@@ -21,27 +21,16 @@ use tuwunel_core::{
};
use self::data::{Data, ReceiptItem};
use crate::{Dep, rooms, sending};
pub struct Service {
services: Services,
services: Arc<crate::services::OnceServices>,
db: Data,
}
struct Services {
sending: Dep<sending::Service>,
short: Dep<rooms::short::Service>,
timeline: Dep<rooms::timeline::Service>,
}
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
services: Services {
sending: args.depend::<sending::Service>("sending"),
short: args.depend::<rooms::short::Service>("rooms::short"),
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
},
services: args.services.clone(),
db: Data::new(&args),
}))
}

View File

@@ -14,29 +14,20 @@ use tuwunel_core::{
};
use tuwunel_database::{Map, keyval::Val};
use crate::{
Dep, rooms,
rooms::{
short::ShortRoomId,
timeline::{PduId, RawPduId},
},
use crate::rooms::{
short::ShortRoomId,
timeline::{PduId, RawPduId},
};
pub struct Service {
db: Data,
services: Services,
services: Arc<crate::services::OnceServices>,
}
struct Data {
tokenids: Arc<Map>,
}
struct Services {
short: Dep<rooms::short::Service>,
state_accessor: Dep<rooms::state_accessor::Service>,
timeline: Dep<rooms::timeline::Service>,
}
#[derive(Clone, Debug)]
pub struct RoomQuery<'a> {
pub room_id: &'a RoomId,
@@ -56,12 +47,7 @@ impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
db: Data { tokenids: args.db["tokenids"].clone() },
services: Services {
short: args.depend::<rooms::short::Service>("rooms::short"),
state_accessor: args
.depend::<rooms::state_accessor::Service>("rooms::state_accessor"),
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
},
services: args.services.clone(),
}))
}

View File

@@ -7,11 +7,9 @@ pub use tuwunel_core::matrix::pdu::{ShortEventId, ShortId, ShortRoomId, ShortSta
use tuwunel_core::{Result, err, implement, matrix::StateKey, utils, utils::IterStream};
use tuwunel_database::{Deserialized, Get, Map, Qry};
use crate::{Dep, globals};
pub struct Service {
db: Data,
services: Services,
services: Arc<crate::services::OnceServices>,
}
struct Data {
@@ -23,10 +21,6 @@ struct Data {
statehash_shortstatehash: Arc<Map>,
}
struct Services {
globals: Dep<globals::Service>,
}
pub type ShortStateHash = ShortId;
impl crate::Service for Service {
@@ -40,9 +34,7 @@ impl crate::Service for Service {
roomid_shortroomid: args.db["roomid_shortroomid"].clone(),
statehash_shortstatehash: args.db["statehash_shortstatehash"].clone(),
},
services: Services {
globals: args.depend::<globals::Service>("globals"),
},
services: args.services.clone(),
}))
}

Some files were not shown because too many files have changed in this diff Show More