2022-06-25 16:12:23 +02:00
|
|
|
mod data;
|
2022-09-06 23:15:09 +02:00
|
|
|
use std::sync::Arc;
|
|
|
|
|
|
2024-10-31 23:39:20 +03:00
|
|
|
use conduit::{
|
|
|
|
|
at,
|
|
|
|
|
utils::{result::FlatOk, stream::ReadyExt, IterStream},
|
|
|
|
|
PduCount, Result,
|
|
|
|
|
};
|
|
|
|
|
use futures::{FutureExt, StreamExt};
|
2023-06-26 12:38:51 +02:00
|
|
|
use ruma::{
|
2024-03-27 15:21:46 -04:00
|
|
|
api::{client::relations::get_relating_events, Direction},
|
2023-06-26 12:38:51 +02:00
|
|
|
events::{relation::RelationType, TimelineEventType},
|
2024-10-31 23:39:20 +03:00
|
|
|
EventId, RoomId, UInt, UserId,
|
2023-06-26 12:38:51 +02:00
|
|
|
};
|
|
|
|
|
use serde::Deserialize;
|
2020-05-03 17:25:31 +02:00
|
|
|
|
2024-08-08 17:18:30 +00:00
|
|
|
use self::data::{Data, PdusIterItem};
|
2024-07-18 06:37:47 +00:00
|
|
|
use crate::{rooms, Dep};
|
2022-06-25 16:12:23 +02:00
|
|
|
|
2024-05-09 15:59:08 -07:00
|
|
|
/// Service tracking metadata about PDUs: event relations (`m.relates_to`),
/// referenced-event bookkeeping, and soft-failure flags.
pub struct Service {
	// Handles to sibling room services this service depends on.
	services: Services,
	// Persistence layer for relations / references / soft-failure state.
	db: Data,
}
|
|
|
|
|
|
2024-07-18 06:37:47 +00:00
|
|
|
/// Lazily-resolved dependencies on other room services.
struct Services {
	// Maps room ids to/from short (integer) room ids.
	short: Dep<rooms::short::Service>,
	// Used to check event visibility for a requesting user.
	state_accessor: Dep<rooms::state_accessor::Service>,
	// Used to resolve an event id to its PDU count.
	timeline: Dep<rooms::timeline::Service>,
}
|
|
|
|
|
|
2023-06-26 12:38:51 +02:00
|
|
|
/// Deserialization helper: pulls just the `rel_type` field out of an
/// `m.relates_to` object, ignoring everything else in the content.
#[derive(Clone, Debug, Deserialize)]
struct ExtractRelType {
	rel_type: RelationType,
}
|
|
|
|
|
/// Deserialization helper: extracts the `m.relates_to` object from event
/// content so its relation type can be matched against a filter.
#[derive(Clone, Debug, Deserialize)]
struct ExtractRelatesToEventId {
	#[serde(rename = "m.relates_to")]
	relates_to: ExtractRelType,
}
|
|
|
|
|
|
2024-07-04 03:26:19 +00:00
|
|
|
impl crate::Service for Service {
|
|
|
|
|
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
|
|
|
|
|
Ok(Arc::new(Self {
|
2024-07-18 06:37:47 +00:00
|
|
|
services: Services {
|
|
|
|
|
short: args.depend::<rooms::short::Service>("rooms::short"),
|
|
|
|
|
state_accessor: args.depend::<rooms::state_accessor::Service>("rooms::state_accessor"),
|
|
|
|
|
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
|
|
|
|
|
},
|
|
|
|
|
db: Data::new(&args),
|
2024-07-04 03:26:19 +00:00
|
|
|
}))
|
2024-05-27 03:17:20 +00:00
|
|
|
}
|
|
|
|
|
|
2024-07-04 03:26:19 +00:00
|
|
|
fn name(&self) -> &str { crate::service::make_name(std::module_path!()) }
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
impl Service {
	/// Record a relation between two PDUs identified by their counts.
	///
	/// Only pairs of `Normal` (non-backfilled) counts are persisted; any
	/// pair involving a backfilled count is currently dropped silently.
	#[tracing::instrument(skip(self, from, to), level = "debug")]
	pub fn add_relation(&self, from: PduCount, to: PduCount) {
		match (from, to) {
			(PduCount::Normal(f), PduCount::Normal(t)) => self.db.add_relation(f, t),
			_ => {
				// TODO: Relations with backfilled pdus
			},
		}
	}

	/// Serve a `/relations` request: page through events that relate to
	/// `target` in `room_id`, optionally filtered by event type and/or
	/// relation type, honoring pagination tokens (`from`/`to`), `limit`,
	/// iteration direction, and recursive relation lookup.
	///
	/// Events the `sender_user` is not allowed to see are filtered out.
	#[allow(clippy::too_many_arguments)]
	pub async fn paginate_relations_with_filter(
		&self, sender_user: &UserId, room_id: &RoomId, target: &EventId, filter_event_type: Option<TimelineEventType>,
		filter_rel_type: Option<RelationType>, from: Option<&str>, to: Option<&str>, limit: Option<UInt>,
		recurse: bool, dir: Direction,
	) -> Result<get_relating_events::v1::Response> {
		// Parse the start token; a malformed token is an error, while a
		// missing one defaults to the extreme end for the given direction.
		let from = from
			.map(PduCount::try_from_string)
			.transpose()?
			.unwrap_or_else(|| match dir {
				Direction::Forward => PduCount::min(),
				Direction::Backward => PduCount::max(),
			});

		// Parse the end token; an unparsable `to` becomes None (no bound).
		let to = to.map(PduCount::try_from_string).flat_ok();

		// Use limit or else 30, with maximum 100
		let limit: usize = limit
			.map(TryInto::try_into)
			.flat_ok()
			.unwrap_or(30)
			.min(100);

		// Spec (v1.10) recommends depth of at least 3
		let depth: u8 = if recurse {
			3
		} else {
			1
		};

		// Fetch candidate relations, then apply the optional type filters,
		// visibility checks, the `to` bound, and the page limit — in that
		// order, so the limit counts only events that will be returned.
		let events: Vec<PdusIterItem> = self
			.get_relations(sender_user, room_id, target, from, limit, depth, dir)
			.await
			.into_iter()
			.filter(|(_, pdu)| {
				filter_event_type
					.as_ref()
					.is_none_or(|kind| *kind == pdu.kind)
			})
			.filter(|(_, pdu)| {
				filter_rel_type.as_ref().is_none_or(|rel_type| {
					pdu.get_content()
						.map(|c: ExtractRelatesToEventId| c.relates_to.rel_type)
						.is_ok_and(|r| r == *rel_type)
				})
			})
			.stream()
			.filter_map(|item| self.visibility_filter(sender_user, item))
			.ready_take_while(|(count, _)| Some(*count) != to)
			.take(limit)
			.collect()
			.boxed()
			.await;

		// The continuation token is the count of the last event in iteration
		// order: first of the vec when walking backward, last when forward.
		let next_batch = match dir {
			Direction::Backward => events.first(),
			Direction::Forward => events.last(),
		}
		.map(at!(0))
		.map(|t| t.stringify());

		Ok(get_relating_events::v1::Response {
			next_batch,
			prev_batch: Some(from.stringify()),
			recursion_depth: recurse.then_some(depth.into()),
			chunk: events
				.into_iter()
				.map(at!(1))
				.map(|pdu| pdu.to_message_like_event())
				.collect(),
		})
	}

	/// Collect events relating to `target`, following relations-of-relations
	/// up to `max_depth` levels via an iterative, stack-based traversal.
	///
	/// Results are capped at `limit` and are NOT visibility-filtered here;
	/// callers (e.g. `paginate_relations_with_filter`) must do that.
	#[allow(clippy::too_many_arguments)]
	pub async fn get_relations(
		&self, user_id: &UserId, room_id: &RoomId, target: &EventId, until: PduCount, limit: usize, max_depth: u8,
		dir: Direction,
	) -> Vec<PdusIterItem> {
		// The db layer is keyed by short room id, not the full RoomId.
		let room_id = self.services.short.get_or_create_shortroomid(room_id).await;

		// Resolve the target event to its numeric count.
		let target = match self.services.timeline.get_pdu_count(target).await {
			Ok(PduCount::Normal(c)) => c,
			// TODO: Support backfilled relations
			_ => 0, // This will result in an empty iterator
		};

		// First level of relations: events relating directly to `target`.
		let mut pdus: Vec<_> = self
			.db
			.get_relations(user_id, room_id, target, until, dir)
			.collect()
			.await;

		// Seed the traversal stack; the second tuple element is the depth
		// at which the item was found (direct relations are depth 1).
		let mut stack: Vec<_> = pdus.iter().map(|pdu| (pdu.clone(), 1)).collect();

		'limit: while let Some(stack_pdu) = stack.pop() {
			// `stack_pdu.0 .0` is the PduCount of the item being expanded.
			let target = match stack_pdu.0 .0 {
				PduCount::Normal(c) => c,
				// TODO: Support backfilled relations
				PduCount::Backfilled(_) => 0, // This will result in an empty iterator
			};

			let relations: Vec<_> = self
				.db
				.get_relations(user_id, room_id, target, until, dir)
				.collect()
				.await;

			for relation in relations {
				// Only expand further while under the depth budget; the
				// relation itself is always included in the results.
				if stack_pdu.1 < max_depth {
					stack.push((relation.clone(), stack_pdu.1.saturating_add(1)));
				}

				pdus.push(relation);
				if pdus.len() >= limit {
					break 'limit;
				}
			}
		}

		pdus
	}

	/// Pass `item` through only if `sender_user` may see the event,
	/// per the state accessor's visibility rules.
	async fn visibility_filter(&self, sender_user: &UserId, item: PdusIterItem) -> Option<PdusIterItem> {
		let (_, pdu) = &item;

		self.services
			.state_accessor
			.user_can_see_event(sender_user, &pdu.room_id, &pdu.event_id)
			.await
			.then_some(item)
	}

	/// Mark `event_ids` as referenced (e.g. replied-to) within `room_id`.
	#[inline]
	#[tracing::instrument(skip_all, level = "debug")]
	pub fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) {
		self.db.mark_as_referenced(room_id, event_ids);
	}

	/// Whether `event_id` has been marked as referenced within `room_id`.
	#[inline]
	#[tracing::instrument(skip(self), level = "debug")]
	pub async fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> bool {
		self.db.is_event_referenced(room_id, event_id).await
	}

	/// Flag `event_id` as soft-failed.
	#[inline]
	#[tracing::instrument(skip(self), level = "debug")]
	pub fn mark_event_soft_failed(&self, event_id: &EventId) { self.db.mark_event_soft_failed(event_id) }

	/// Whether `event_id` has been flagged as soft-failed.
	#[inline]
	#[tracing::instrument(skip(self), level = "debug")]
	pub async fn is_event_soft_failed(&self, event_id: &EventId) -> bool {
		self.db.is_event_soft_failed(event_id).await
	}
}
|