Refactor sliding window selector. (fixes #170)

Refactor list filtering.

Signed-off-by: Jason Volk <jason@zemos.net>
This commit is contained in:
Jason Volk
2025-10-07 21:35:42 +00:00
parent ab8536d5c3
commit 46c940b863
16 changed files with 818 additions and 684 deletions

View File

@@ -4,41 +4,39 @@ use tuwunel_core::{
Result, extract_variant,
utils::{IterStream, ReadyExt, stream::BroadbandExt},
};
use tuwunel_service::Services;
use tuwunel_service::sync::Room;
use super::{KnownRooms, SyncInfo, TodoRoom, TodoRooms, extension_rooms_todo};
use super::{Connection, SyncInfo, Window, extension_rooms_selector};
#[tracing::instrument(level = "trace", skip_all, fields(globalsince, next_batch))]
#[tracing::instrument(level = "trace", skip_all)]
pub(super) async fn collect(
services: &Services,
sync_info: SyncInfo<'_>,
next_batch: u64,
known_rooms: &KnownRooms,
todo_rooms: &TodoRooms,
conn: &Connection,
window: &Window,
) -> Result<response::AccountData> {
let SyncInfo { sender_user, globalsince, request, .. } = sync_info;
let SyncInfo { services, sender_user, .. } = sync_info;
let lists = request
let implicit = conn
.extensions
.account_data
.lists
.as_deref()
.map(<[_]>::iter);
let rooms = request
let explicit = conn
.extensions
.account_data
.rooms
.as_deref()
.map(<[_]>::iter);
let rooms = extension_rooms_todo(sync_info, known_rooms, todo_rooms, lists, rooms)
let rooms = extension_rooms_selector(sync_info, conn, window, implicit, explicit)
.stream()
.broad_filter_map(async |room_id| {
let &TodoRoom { roomsince, .. } = todo_rooms.get(room_id)?;
let &Room { roomsince, .. } = conn.rooms.get(room_id)?;
let changes: Vec<_> = services
.account_data
.changes_since(Some(room_id), sender_user, roomsince, Some(next_batch))
.changes_since(Some(room_id), sender_user, roomsince, Some(conn.next_batch))
.ready_filter_map(|e| extract_variant!(e, AnyRawAccountDataEvent::Room))
.collect()
.await;
@@ -52,7 +50,7 @@ pub(super) async fn collect(
let global = services
.account_data
.changes_since(None, sender_user, globalsince, Some(next_batch))
.changes_since(None, sender_user, conn.globalsince, Some(conn.next_batch))
.ready_filter_map(|e| extract_variant!(e, AnyRawAccountDataEvent::Global))
.collect();

View File

@@ -22,23 +22,20 @@ use tuwunel_core::{
stream::BroadbandExt,
},
};
use tuwunel_service::Services;
use tuwunel_service::sync::Connection;
use super::{SyncInfo, share_encrypted_room};
#[tracing::instrument(level = "trace", skip_all, fields(globalsince, next_batch,))]
#[tracing::instrument(level = "trace", skip_all)]
pub(super) async fn collect(
services: &Services,
sync_info: SyncInfo<'_>,
next_batch: u64,
conn: &Connection,
) -> Result<response::E2EE> {
let SyncInfo {
sender_user, sender_device, globalsince, ..
} = sync_info;
let SyncInfo { services, sender_user, sender_device, .. } = sync_info;
let keys_changed = services
.users
.keys_changed(sender_user, globalsince, Some(next_batch))
.keys_changed(sender_user, conn.globalsince, Some(conn.next_batch))
.map(ToOwned::to_owned)
.collect::<HashSet<_>>()
.map(|changed| (changed, HashSet::new()));
@@ -48,11 +45,7 @@ pub(super) async fn collect(
.state_cache
.rooms_joined(sender_user)
.map(ToOwned::to_owned)
.broad_filter_map(async |room_id| {
collect_room(services, sync_info, next_batch, &room_id)
.await
.ok()
})
.broad_filter_map(async |room_id| collect_room(sync_info, conn, &room_id).await.ok())
.chain(once(keys_changed))
.ready_fold((changed, left), |(mut changed, mut left), room| {
changed.extend(room.0);
@@ -77,7 +70,7 @@ pub(super) async fn collect(
.last_one_time_keys_update(sender_user)
.then(|since| -> OptionFuture<_> {
since
.gt(&globalsince)
.gt(&conn.globalsince)
.then(|| {
services
.users
@@ -103,9 +96,8 @@ pub(super) async fn collect(
#[tracing::instrument(level = "trace", skip_all, fields(room_id))]
async fn collect_room(
services: &Services,
SyncInfo { sender_user, globalsince, .. }: SyncInfo<'_>,
next_batch: u64,
SyncInfo { services, sender_user, .. }: SyncInfo<'_>,
conn: &Connection,
room_id: &RoomId,
) -> Result<pair_of!(HashSet<OwnedUserId>)> {
let current_shortstatehash = services
@@ -115,7 +107,7 @@ async fn collect_room(
let room_keys_changed = services
.users
.room_keys_changed(room_id, globalsince, Some(next_batch))
.room_keys_changed(room_id, conn.globalsince, Some(conn.next_batch))
.map(|(user_id, _)| user_id)
.map(ToOwned::to_owned)
.collect::<HashSet<_>>();
@@ -130,13 +122,13 @@ async fn collect_room(
return Ok(lists);
};
if current_shortstatehash <= globalsince {
if current_shortstatehash <= conn.globalsince {
return Ok(lists);
}
let Ok(since_shortstatehash) = services
.timeline
.prev_shortstatehash(room_id, PduCount::Normal(globalsince).saturating_add(1))
.prev_shortstatehash(room_id, PduCount::Normal(conn.globalsince).saturating_add(1))
.await
else {
return Ok(lists);
@@ -168,7 +160,7 @@ async fn collect_room(
}
let encrypted_since_last_sync = !since_encryption;
let joined_since_last_sync = sender_joined_count.is_ok_and(|count| count > globalsince);
let joined_since_last_sync = sender_joined_count.is_ok_and(|count| count > conn.globalsince);
let joined_members_burst: OptionFuture<_> = (joined_since_last_sync
|| encrypted_since_last_sync)
.then(|| {

View File

@@ -0,0 +1,156 @@
use futures::{StreamExt, future::OptionFuture, pin_mut};
use ruma::{
RoomId, api::client::sync::sync_events::v5::request::ListFilters, directory::RoomTypeFilter,
events::room::member::MembershipState,
};
use tuwunel_core::{
is_equal_to, is_true,
utils::{
BoolExt, FutureBoolExt, IterStream, ReadyExt,
future::{OptionExt, ReadyEqExt},
},
};
use super::SyncInfo;
/// Check whether `room_id` passes every criterion of the sliding-sync list
/// `filter` for `sender_user`.
///
/// Each criterion present in `ListFilters` is evaluated as an
/// `OptionFuture`; an absent criterion yields `None` and counts as a match
/// (`is_none_or(is_true!())`). The room is admitted only when all present
/// criteria evaluate true.
///
/// * `membership` - the sender's membership in the room when already known;
///   used to answer `is_invite` without querying the state cache.
#[tracing::instrument(name = "filter", level = "trace", skip_all)]
pub(super) async fn filter_room(
	SyncInfo { services, sender_user, .. }: SyncInfo<'_>,
	filter: &ListFilters,
	room_id: &RoomId,
	membership: Option<&MembershipState>,
) -> bool {
	// is_invite: prefer the caller-supplied membership; only fall back to a
	// state_cache query when the membership is unknown.
	let match_invite: OptionFuture<_> = filter
		.is_invite
		.map(async |is_invite| match (membership, is_invite) {
			| (Some(MembershipState::Invite), true) => true,
			| (Some(MembershipState::Invite), false) => false,
			| (Some(_), true) => false,
			| (Some(_), false) => true,
			| _ =>
				services
					.state_cache
					.is_invited(sender_user, room_id)
					.await == is_invite,
		})
		.into();

	// is_dm per the sender's m.direct account data...
	let match_direct: OptionFuture<_> = filter
		.is_dm
		.map(async |is_dm| {
			services
				.account_data
				.is_direct(sender_user, room_id)
				.await == is_dm
		})
		.into();

	// ...and per the is_direct flag on the membership event; both checks
	// must agree for the room to match.
	let match_direct_member: OptionFuture<_> = filter
		.is_dm
		.map(async |is_dm| {
			services
				.state_accessor
				.is_direct(room_id, sender_user)
				.await == is_dm
		})
		.into();

	let match_encrypted: OptionFuture<_> = filter
		.is_encrypted
		.map(async |is_encrypted| {
			services
				.state_accessor
				.is_encrypted_room(room_id)
				.await == is_encrypted
		})
		.into();

	// spaces: match when the room is a child of any of the listed spaces.
	let match_space_child: OptionFuture<_> = filter
		.spaces
		.is_empty()
		.is_false()
		.then(async || {
			filter
				.spaces
				.iter()
				.stream()
				.flat_map(|room_id| services.spaces.get_space_children(room_id))
				.ready_any(is_equal_to!(room_id))
				.await
		})
		.into();

	// tags: a room matches when at least one of its tags is allowed, i.e.
	// not excluded by not_tags and (when tags is non-empty) present in
	// tags. An untagged room matches only when tags is empty. The two
	// clauses are joined with `&&`, mirroring match_room_type below.
	let fetch_tags = !filter.tags.is_empty() || !filter.not_tags.is_empty();
	let match_room_tag: OptionFuture<_> = fetch_tags
		.then(async || {
			if let Some(tags) = services
				.account_data
				.get_room_tags(sender_user, room_id)
				.await
				.ok()
				.filter(|tags| !tags.is_empty())
			{
				tags.keys().any(|tag| {
					(filter.not_tags.is_empty() || !filter.not_tags.contains(tag))
						&& (filter.tags.is_empty() || filter.tags.contains(tag))
				})
			} else {
				filter.tags.is_empty()
			}
		})
		.into();

	// room_types / not_room_types, matched against the m.room.create type.
	let fetch_room_type = !filter.room_types.is_empty() || !filter.not_room_types.is_empty();
	let match_room_type: OptionFuture<_> = fetch_room_type
		.then(async || {
			let room_type = services
				.state_accessor
				.get_room_type(room_id)
				.await
				.ok();

			let room_type = RoomTypeFilter::from(room_type);
			(filter.not_room_types.is_empty() || !filter.not_room_types.contains(&room_type))
				&& (filter.room_types.is_empty() || filter.room_types.contains(&room_type))
		})
		.into();

	// Admit the room only when every present criterion matched.
	match_encrypted
		.is_none_or(is_true!())
		.and3(
			match_invite.is_none_or(is_true!()),
			match_direct.is_none_or(is_true!()),
			match_direct_member.is_none_or(is_true!()),
		)
		.and3(
			match_space_child.is_none_or(is_true!()),
			match_room_type.is_none_or(is_true!()),
			match_room_tag.is_none_or(is_true!()),
		)
		.await
}
/// Metadata gate for a room considered for the sliding-sync window.
///
/// Returns true only when the room exists, is neither disabled nor banned
/// on this server, and `sender_user` is permitted to see its state events.
#[tracing::instrument(name = "filter_meta", level = "trace", skip_all)]
pub(super) async fn filter_room_meta(
	SyncInfo { services, sender_user, .. }: SyncInfo<'_>,
	room_id: &RoomId,
) -> bool {
	// Each check is a future resolving to `true` when the room must be
	// excluded from the window.
	let missing = services.metadata.exists(room_id).eq(&false);
	let disabled = services.metadata.is_disabled(room_id);
	let banned = services.metadata.is_banned(room_id);
	let hidden = services
		.state_accessor
		.user_can_see_state_events(sender_user, room_id)
		.eq(&false);

	pin_mut!(hidden, missing, disabled, banned);
	// Admit the room only when no exclusion condition holds.
	!hidden
		.or(missing)
		.or(disabled)
		.or(banned)
		.await
}

View File

@@ -9,58 +9,54 @@ use tuwunel_core::{
Result,
utils::{BoolExt, IterStream, stream::BroadbandExt},
};
use tuwunel_service::{Services, rooms::read_receipt::pack_receipts};
use tuwunel_service::{rooms::read_receipt::pack_receipts, sync::Room};
use super::{KnownRooms, SyncInfo, TodoRoom, TodoRooms, extension_rooms_todo};
use super::{Connection, SyncInfo, Window, extension_rooms_selector};
#[tracing::instrument(level = "trace", skip_all)]
pub(super) async fn collect(
services: &Services,
sync_info: SyncInfo<'_>,
next_batch: u64,
known_rooms: &KnownRooms,
todo_rooms: &TodoRooms,
conn: &Connection,
window: &Window,
) -> Result<response::Receipts> {
let SyncInfo { request, .. } = sync_info;
let SyncInfo { .. } = sync_info;
let lists = request
let implicit = conn
.extensions
.receipts
.lists
.as_deref()
.map(<[_]>::iter);
let rooms = request
let explicit = conn
.extensions
.receipts
.rooms
.as_deref()
.map(<[_]>::iter);
let rooms = extension_rooms_todo(sync_info, known_rooms, todo_rooms, lists, rooms)
let rooms = extension_rooms_selector(sync_info, conn, window, implicit, explicit)
.stream()
.broad_filter_map(async |room_id| {
collect_room(services, sync_info, next_batch, todo_rooms, room_id).await
})
.broad_filter_map(|room_id| collect_room(sync_info, conn, window, room_id))
.collect()
.await;
Ok(response::Receipts { rooms })
}
#[tracing::instrument(level = "trace", skip_all, fields(room_id))]
async fn collect_room(
services: &Services,
SyncInfo { sender_user, .. }: SyncInfo<'_>,
next_batch: u64,
todo_rooms: &TodoRooms,
SyncInfo { services, sender_user, .. }: SyncInfo<'_>,
conn: &Connection,
_window: &Window,
room_id: &RoomId,
) -> Option<(OwnedRoomId, Raw<SyncReceiptEvent>)> {
let &TodoRoom { roomsince, .. } = todo_rooms.get(room_id)?;
let &Room { roomsince, .. } = conn.rooms.get(room_id)?;
let private_receipt = services
.read_receipt
.last_privateread_update(sender_user, room_id)
.then(async |last_private_update| {
if last_private_update <= roomsince || last_private_update > next_batch {
if last_private_update <= roomsince || last_private_update > conn.next_batch {
return None;
}
@@ -77,7 +73,7 @@ async fn collect_room(
let receipts: Vec<Raw<AnySyncEphemeralRoomEvent>> = services
.read_receipt
.readreceipts_since(room_id, roomsince, Some(next_batch))
.readreceipts_since(room_id, roomsince, Some(conn.next_batch))
.filter_map(async |(read_user, _ts, v)| {
services
.users
@@ -92,6 +88,6 @@ async fn collect_room(
receipts
.is_empty()
.eq(&false)
.is_false()
.then(|| (room_id.to_owned(), pack_receipts(receipts.into_iter())))
}

View File

@@ -1,4 +1,4 @@
use std::cmp::Ordering;
use std::{cmp::Ordering, collections::HashSet};
use futures::{
FutureExt, StreamExt, TryFutureExt, TryStreamExt,
@@ -6,43 +6,79 @@ use futures::{
};
use ruma::{
JsOption, MxcUri, OwnedMxcUri, RoomId, UInt, UserId,
api::client::sync::sync_events::{UnreadNotificationsCount, v5::response},
events::{StateEventType, room::member::MembershipState},
api::client::sync::sync_events::{
UnreadNotificationsCount,
v5::{DisplayName, response},
},
events::{
StateEventType,
TimelineEventType::{
self, Beacon, CallInvite, PollStart, RoomEncrypted, RoomMessage, Sticker,
},
room::member::MembershipState,
},
};
use tuwunel_core::{
Result, at, debug_error, is_equal_to,
Result, at, debug_error, err, is_equal_to,
matrix::{Event, StateKey, pdu::PduCount},
ref_at,
utils::{IterStream, ReadyExt, TryFutureExtExt, result::FlatOk, stream::BroadbandExt},
utils::{
BoolExt, IterStream, ReadyExt, TryFutureExtExt, math::usize_from_ruma, result::FlatOk,
stream::BroadbandExt,
},
};
use tuwunel_service::Services;
use tuwunel_service::{Services, sync::Room};
use super::{SyncInfo, TodoRoom};
use crate::client::{DEFAULT_BUMP_TYPES, ignored_filter, sync::load_timeline};
use super::{super::load_timeline, Connection, SyncInfo, WindowRoom};
use crate::client::ignored_filter;
#[tracing::instrument(level = "debug", skip_all, fields(room_id, roomsince))]
static DEFAULT_BUMP_TYPES: [TimelineEventType; 6] =
[CallInvite, PollStart, Beacon, RoomEncrypted, RoomMessage, Sticker];
#[tracing::instrument(
name = "room",
level = "debug",
skip_all,
fields(room_id, roomsince)
)]
#[allow(clippy::too_many_arguments)]
pub(super) async fn handle(
services: &Services,
next_batch: u64,
SyncInfo { sender_user, .. }: SyncInfo<'_>,
room_id: &RoomId,
&TodoRoom {
ref membership,
ref requested_state,
timeline_limit,
roomsince,
}: &TodoRoom,
SyncInfo { services, sender_user, .. }: SyncInfo<'_>,
conn: &Connection,
WindowRoom { lists, membership, room_id, .. }: &WindowRoom,
) -> Result<Option<response::Room>> {
let timeline: OptionFuture<_> = membership
.ne(&MembershipState::Invite)
debug_assert!(DEFAULT_BUMP_TYPES.is_sorted(), "DEFAULT_BUMP_TYPES is not sorted");
let &Room { roomsince } = conn
.rooms
.get(room_id)
.ok_or_else(|| err!("Missing connection state for {room_id}"))?;
let is_invite = *membership == Some(MembershipState::Invite);
let default_details = (0_usize, HashSet::new());
let (timeline_limit, required_state) = lists
.iter()
.filter_map(|list_id| conn.lists.get(list_id))
.map(|list| &list.room_details)
.chain(conn.subscriptions.get(room_id).into_iter())
.fold(default_details, |(mut timeline_limit, mut required_state), config| {
let limit = usize_from_ruma(config.timeline_limit);
timeline_limit = timeline_limit.max(limit);
required_state.extend(config.required_state.clone());
(timeline_limit, required_state)
});
let timeline: OptionFuture<_> = is_invite
.is_false()
.then(|| {
load_timeline(
services,
sender_user,
room_id,
PduCount::Normal(roomsince),
Some(PduCount::from(next_batch)),
Some(PduCount::from(conn.next_batch)),
timeline_limit,
)
})
@@ -56,7 +92,7 @@ pub(super) async fn handle(
let (timeline_pdus, limited, _lastcount) =
timeline.unwrap_or_else(|| (Vec::new(), true, PduCount::default()));
if roomsince != 0 && timeline_pdus.is_empty() && membership.ne(&MembershipState::Invite) {
if roomsince != 0 && timeline_pdus.is_empty() && !is_invite {
return Ok(None);
}
@@ -76,7 +112,7 @@ pub(super) async fn handle(
.is_ok()
})
.fold(Option::<UInt>::None, |mut bump_stamp, (_, pdu)| {
let ts = pdu.origin_server_ts().get();
let ts = pdu.origin_server_ts().0;
if bump_stamp.is_none_or(|bump_stamp| bump_stamp < ts) {
bump_stamp.replace(ts);
}
@@ -84,7 +120,7 @@ pub(super) async fn handle(
bump_stamp
});
let lazy = requested_state
let lazy = required_state
.iter()
.any(is_equal_to!(&(StateEventType::RoomMember, "$LAZY".into())));
@@ -102,7 +138,7 @@ pub(super) async fn handle(
.map(|sender| (StateEventType::RoomMember, StateKey::from_str(sender.as_str())))
.stream();
let wildcard_state = requested_state
let wildcard_state = required_state
.iter()
.filter(|(_, state_key)| state_key == "*")
.map(|(event_type, _)| {
@@ -115,7 +151,7 @@ pub(super) async fn handle(
.stream()
.flatten();
let required_state = requested_state
let required_state = required_state
.iter()
.cloned()
.stream()
@@ -138,8 +174,7 @@ pub(super) async fn handle(
.collect();
// TODO: figure out a timestamp we can use for remote invites
let invite_state: OptionFuture<_> = membership
.eq(&MembershipState::Invite)
let invite_state: OptionFuture<_> = is_invite
.then(|| {
services
.state_cache
@@ -159,6 +194,7 @@ pub(super) async fn handle(
let room_name = services
.state_accessor
.get_name(room_id)
.map_ok(Into::into)
.map(Result::ok);
let room_avatar = services
@@ -194,12 +230,17 @@ pub(super) async fn handle(
.map_ok(Result::ok)
.map(FlatOk::flat_ok);
let meta = join(room_name, room_avatar);
let is_dm = services
.state_accessor
.is_direct(room_id, sender_user)
.map(|is_dm| is_dm.then_some(is_dm));
let meta = join3(room_name, room_avatar, is_dm);
let events = join3(timeline, required_state, invite_state);
let member_counts = join(joined_count, invited_count);
let notification_counts = join(highlight_count, notification_count);
let (
(room_name, room_avatar),
(room_name, room_avatar, is_dm),
(timeline, required_state, invite_state),
(joined_count, invited_count),
(highlight_count, notification_count),
@@ -211,7 +252,7 @@ pub(super) async fn handle(
services,
sender_user,
room_id,
room_name.as_deref(),
room_name.as_ref(),
room_avatar.as_deref(),
)
.await?;
@@ -220,14 +261,16 @@ pub(super) async fn handle(
Ok(Some(response::Room {
initial: Some(roomsince == 0),
lists: lists.clone(),
membership: membership.clone(),
name: room_name.or(hero_name),
avatar: JsOption::from_option(room_avatar.or(heroes_avatar)),
invite_state: invite_state.flatten(),
is_dm,
required_state,
timeline,
is_dm: None,
prev_batch,
invite_state: invite_state.flatten(),
prev_batch: prev_batch.as_deref().map(Into::into),
limited,
timeline,
bump_stamp,
heroes,
num_live,
@@ -237,15 +280,15 @@ pub(super) async fn handle(
}))
}
#[tracing::instrument(level = "debug", skip_all, fields(room_id, roomsince))]
#[tracing::instrument(name = "heroes", level = "trace", skip_all)]
#[allow(clippy::type_complexity)]
async fn calculate_heroes(
services: &Services,
sender_user: &UserId,
room_id: &RoomId,
room_name: Option<&str>,
room_name: Option<&DisplayName>,
room_avatar: Option<&MxcUri>,
) -> Result<(Option<Vec<response::Hero>>, Option<String>, Option<OwnedMxcUri>)> {
) -> Result<(Option<Vec<response::Hero>>, Option<DisplayName>, Option<OwnedMxcUri>)> {
const MAX_HEROES: usize = 5;
let heroes: Vec<_> = services
.state_cache
@@ -275,8 +318,10 @@ async fn calculate_heroes(
let (name, avatar) = join(name, avatar).await;
let hero = response::Hero {
user_id,
name: name.unwrap_or(content.displayname),
avatar: avatar.unwrap_or(content.avatar_url),
name: name
.unwrap_or(content.displayname)
.map(Into::into),
};
Some(hero)
@@ -291,7 +336,7 @@ async fn calculate_heroes(
heroes[0]
.name
.clone()
.unwrap_or_else(|| heroes[0].user_id.to_string()),
.unwrap_or_else(|| heroes[0].user_id.as_str().into()),
),
| Ordering::Greater => {
let firsts = heroes[1..]
@@ -299,7 +344,7 @@ async fn calculate_heroes(
.map(|h| {
h.name
.clone()
.unwrap_or_else(|| h.user_id.to_string())
.unwrap_or_else(|| h.user_id.as_str().into())
})
.collect::<Vec<_>>()
.join(", ");
@@ -307,9 +352,9 @@ async fn calculate_heroes(
let last = heroes[0]
.name
.clone()
.unwrap_or_else(|| heroes[0].user_id.to_string());
.unwrap_or_else(|| heroes[0].user_id.as_str().into());
Some(format!("{firsts} and {last}"))
Some(format!("{firsts} and {last}")).map(Into::into)
},
};

View File

@@ -0,0 +1,257 @@
use std::cmp::Ordering;
use futures::{
FutureExt, StreamExt, TryFutureExt,
future::{OptionFuture, join3},
};
use ruma::{OwnedRoomId, UInt, events::room::member::MembershipState, uint};
use tuwunel_core::{
apply, is_true,
matrix::PduCount,
trace,
utils::{
BoolExt,
future::TryExtExt,
math::usize_from_ruma,
stream::{BroadbandExt, IterStream},
},
};
use tuwunel_service::sync::Connection;
use super::{
ListIds, ResponseLists, SyncInfo, Window, WindowRoom, filter_room, filter_room_meta,
};
/// Build the sliding-sync window for this request.
///
/// Gathers every Join/Invite/Knock membership of the sender, matches each
/// room against the connection's configured lists, ranks the results, and
/// selects the visible window plus per-list response metadata.
#[tracing::instrument(level = "debug", skip_all)]
pub(super) async fn selector(
conn: &mut Connection,
sync_info: SyncInfo<'_>,
) -> (Window, ResponseLists) {
use MembershipState::*;
let SyncInfo { services, sender_user, request, .. } = sync_info;
trace!(?request);
// Candidate set: every room the sender is joined to, invited to, or
// knocking on, each annotated with the ids of the lists whose filters it
// passes (match_lists_for_room returns Some unconditionally; rooms
// matching no list carry an empty list-id set).
let mut rooms = services
.state_cache
.user_memberships(sender_user, Some(&[Join, Invite, Knock]))
.map(|(membership, room_id)| (room_id.to_owned(), Some(membership)))
.broad_filter_map(|(room_id, membership)| {
match_lists_for_room(sync_info, conn, room_id, membership)
})
.collect::<Vec<_>>()
.await;
// Sort (invites first, then most recent activity; see room_sort) and
// record each room's final position as its rank.
rooms.sort_by(room_sort);
rooms
.iter_mut()
.enumerate()
.for_each(|(i, room)| {
room.ranked = i;
});
trace!(?rooms);
// Per-list totals over all matched rooms, not just the windowed ones.
let lists = response_lists(rooms.iter());
trace!(?lists);
let window = select_window(sync_info, conn, rooms.iter(), &lists).await;
trace!(?window);
// Ensure connection state tracks every matched room so later requests
// can compute per-room deltas; a fresh entry starts with default state.
for room in &rooms {
conn.rooms
.entry(room.room_id.clone())
.or_default();
}
(window, lists)
}
/// Select the rooms visible in this response's window.
///
/// Combines (a) the requested ranges of every active list, applied over the
/// ranked `rooms`, and (b) all of the connection's explicit room
/// subscriptions that pass the metadata filter. Subscription entries carry
/// no list membership and rank `usize::MAX`.
async fn select_window<'a, Rooms>(
sync_info: SyncInfo<'_>,
conn: &Connection,
rooms: Rooms,
lists: &ResponseLists,
) -> Window
where
Rooms: Iterator<Item = &'a WindowRoom> + Clone + Send + Sync,
{
// A list that specifies no explicit ranges implicitly requests all of
// its rooms.
static FULL_RANGE: (UInt, UInt) = (UInt::MIN, UInt::MAX);
let selections = lists
.keys()
.cloned()
.filter_map(|id| conn.lists.get(&id).map(|list| (id, list)))
.flat_map(|(id, list)| {
let full_range = list
.ranges
.is_empty()
.then_some(&FULL_RANGE)
.into_iter();
list.ranges
.iter()
.chain(full_range)
.map(apply!(2, usize_from_ruma))
.map(move |range| (id.clone(), range))
})
.flat_map(|(id, (start, end))| {
// Rooms belonging to this list, in ranked order.
let list = rooms
.clone()
.filter(move |&room| room.lists.contains(&id));
// "cycled" means every room in the list has been sent on this
// connection before (roomsince != 0); rooms then become eligible
// again based on their activity counters.
let cycled = list.clone().all(|room| {
conn.rooms
.get(&room.room_id)
.is_some_and(|room| room.roomsince != 0)
});
// Skip rooms before the range start and rooms this connection is
// already up to date on (roomsince at or past the room's last
// activity), plus — until the list has cycled — any previously
// sent room. NOTE(review): roomsince is compared against
// last_count; assumes both are in the same counter domain —
// confirm.
list.enumerate()
.skip_while(move |&(i, room)| {
i < start
|| conn
.rooms
.get(&room.room_id)
.is_some_and(|conn_room| {
conn_room.roomsince >= room.last_count
|| (!cycled && conn_room.roomsince != 0)
})
})
// Ranges are inclusive on both ends.
.take(end.saturating_add(1).saturating_sub(start))
.map(|(_, room)| (room.room_id.clone(), room.clone()))
});
// Explicit subscriptions are always in the window (subject to the
// metadata gate), independent of any list range.
conn.subscriptions
.iter()
.stream()
.broad_filter_map(async |(room_id, _)| {
filter_room_meta(sync_info, room_id)
.await
.then(|| WindowRoom {
room_id: room_id.clone(),
membership: None,
lists: Default::default(),
ranked: usize::MAX,
last_count: 0,
})
})
.map(|room| (room.room_id.clone(), room))
.chain(selections.stream())
.collect()
.await
}
/// Match one room against every list configured on the connection.
///
/// Returns a `WindowRoom` carrying the ids of all lists whose filters the
/// room passes (a list without filters always matches) and the room's
/// `last_count` activity rank. Always returns `Some`; the `Option` exists
/// for use with `broad_filter_map`.
#[tracing::instrument(
name = "matcher",
level = "trace",
skip_all,
fields(?room_id, ?membership)
)]
async fn match_lists_for_room(
sync_info: SyncInfo<'_>,
conn: &Connection,
room_id: OwnedRoomId,
membership: Option<MembershipState>,
) -> Option<WindowRoom> {
let SyncInfo { services, sender_user, .. } = sync_info;
// Collect the ids of every list this room belongs to; an absent filter
// (None future) counts as a match.
let lists = conn
.lists
.iter()
.stream()
.filter_map(async |(id, list)| {
let filter: OptionFuture<_> = list
.filters
.clone()
.map(async |filters| {
filter_room(sync_info, &filters, &room_id, membership.as_ref()).await
})
.into();
filter
.await
.is_none_or(is_true!())
.then(|| id.clone())
})
.collect::<ListIds>()
.await;
// The activity counters below are only fetched when the room matched at
// least one list; otherwise each future resolves to None.
let last_timeline_count: OptionFuture<_> = lists
.is_empty()
.is_false()
.then(|| {
services
.timeline
.last_timeline_count(None, &room_id, None)
.map_ok(PduCount::into_unsigned)
.ok()
})
.into();
let last_account_count: OptionFuture<_> = lists
.is_empty()
.is_false()
.then(|| {
services
.account_data
.last_count(Some(room_id.as_ref()), sender_user, conn.next_batch)
.ok()
})
.into();
// NOTE(review): the receipt counter is bounded by conn.globalsince while
// the account-data counter uses conn.next_batch — confirm the asymmetry
// is intended.
let last_receipt_count: OptionFuture<_> = lists
.is_empty()
.is_false()
.then(|| {
services
.read_receipt
.last_receipt_count(&room_id, sender_user.into(), conn.globalsince.into())
.map(Result::ok)
})
.into();
let (last_timeline_count, last_account_count, last_receipt_count) =
join3(last_timeline_count, last_account_count, last_receipt_count).await;
Some(WindowRoom {
room_id: room_id.clone(),
membership,
lists,
// Rank is assigned later by the caller after sorting.
ranked: 0,
// last_count: the most recent activity across timeline, account data
// and read receipts; missing counters contribute 0.
last_count: [last_timeline_count, last_account_count, last_receipt_count]
.into_iter()
.map(Option::flatten)
.map(Option::unwrap_or_default)
.max()
.unwrap_or_default(),
})
}
/// Tally how many matched rooms belong to each list id, producing the
/// per-list counts reported in the response.
///
/// Panics if a list's count would overflow a `UInt` (JS integer range).
fn response_lists<'a, Rooms>(rooms: Rooms) -> ResponseLists
where
	Rooms: Iterator<Item = &'a WindowRoom>,
{
	let mut lists = ResponseLists::default();
	for id in rooms.flat_map(|room| room.lists.iter()) {
		let entry = lists.entry(id.clone()).or_default();
		entry.count = entry
			.count
			.checked_add(uint!(1))
			.expect("list count must not overflow JsInt");
	}

	lists
}
/// Window ordering: invited rooms sort before all others; within the same
/// invite class, rooms with higher activity counters come first
/// (descending `last_count`).
fn room_sort(a: &WindowRoom, b: &WindowRoom) -> Ordering {
	let a_invited = a.membership == Some(MembershipState::Invite);
	let b_invited = b.membership == Some(MembershipState::Invite);
	match (a_invited, b_invited) {
		| (true, false) => Ordering::Less,
		| (false, true) => Ordering::Greater,
		// Same invite class: most recent activity first.
		| _ => b.last_count.cmp(&a.last_count),
	}
}

View File

@@ -1,26 +1,22 @@
use futures::StreamExt;
use ruma::api::client::sync::sync_events::v5::response;
use tuwunel_core::{self, Result};
use tuwunel_service::Services;
use super::SyncInfo;
use super::{Connection, SyncInfo};
#[tracing::instrument(level = "trace", skip_all, fields(globalsince, next_batch))]
#[tracing::instrument(level = "trace", skip_all)]
pub(super) async fn collect(
services: &Services,
SyncInfo {
sender_user, sender_device, globalsince, ..
}: SyncInfo<'_>,
next_batch: u64,
SyncInfo { services, sender_user, sender_device, .. }: SyncInfo<'_>,
conn: &Connection,
) -> Result<Option<response::ToDevice>> {
services
.users
.remove_to_device_events(sender_user, sender_device, globalsince)
.remove_to_device_events(sender_user, sender_device, conn.globalsince)
.await;
let events: Vec<_> = services
.users
.get_to_device_events(sender_user, sender_device, None, Some(next_batch))
.get_to_device_events(sender_user, sender_device, None, Some(conn.next_batch))
.collect()
.await;
@@ -28,7 +24,7 @@ pub(super) async fn collect(
.is_empty()
.eq(&false)
.then(|| response::ToDevice {
next_batch: next_batch.to_string(),
next_batch: conn.next_batch.to_string().into(),
events,
});

View File

@@ -10,37 +10,34 @@ use tuwunel_core::{
Result, debug_error,
utils::{IterStream, ReadyExt},
};
use tuwunel_service::Services;
use super::{KnownRooms, SyncInfo, TodoRooms, extension_rooms_todo};
use super::{Connection, SyncInfo, Window, extension_rooms_selector};
#[tracing::instrument(level = "trace", skip_all, fields(globalsince))]
#[tracing::instrument(level = "trace", skip_all)]
pub(super) async fn collect(
services: &Services,
sync_info: SyncInfo<'_>,
_next_batch: u64,
known_rooms: &KnownRooms,
todo_rooms: &TodoRooms,
conn: &Connection,
window: &Window,
) -> Result<response::Typing> {
use response::Typing;
let SyncInfo { sender_user, request, .. } = sync_info;
let SyncInfo { services, sender_user, .. } = sync_info;
let lists = request
let implicit = conn
.extensions
.typing
.lists
.as_deref()
.map(<[_]>::iter);
let rooms = request
let explicit = conn
.extensions
.typing
.rooms
.as_deref()
.map(<[_]>::iter);
extension_rooms_todo(sync_info, known_rooms, todo_rooms, lists, rooms)
extension_rooms_selector(sync_info, conn, window, implicit, explicit)
.stream()
.filter_map(async |room_id| {
services