Refactor join, alias services

Split knock and user registration out of the API into services

Fix autojoin not working with v12 rooms

Fix 'm.login.registration_token/validity' for reloaded registration tokens

Change join servers order

Move autojoin for LDAP
This commit is contained in:
dasha_uwu
2025-12-05 14:00:28 +05:00
committed by Jason Volk
parent 959c559bd8
commit 7115fb2796
25 changed files with 1153 additions and 1334 deletions

View File

@@ -121,26 +121,16 @@ async fn room_available_servers(
// insert our server as the very first choice if in list, else check if we can
// prefer the room alias server first
match servers
if let Some(server_index) = servers
.iter()
.position(|server_name| services.globals.server_is_ours(server_name))
{
| Some(server_index) => {
servers.swap_remove(server_index);
servers.insert(0, services.globals.server_name().to_owned());
},
| _ => {
match servers
.iter()
.position(|server| server == room_alias.server_name())
{
| Some(alias_server_index) => {
servers.swap_remove(alias_server_index);
servers.insert(0, room_alias.server_name().into());
},
| _ => {},
}
},
servers.swap(0, server_index);
} else if let Some(alias_server_index) = servers
.iter()
.position(|server| server == room_alias.server_name())
{
servers.swap(0, alias_server_index);
}
servers

View File

@@ -2,14 +2,14 @@ use axum::extract::State;
use axum_client_ip::InsecureClientIp;
use futures::StreamExt;
use ruma::{
MilliSecondsSinceUnixEpoch, OwnedDeviceId,
MilliSecondsSinceUnixEpoch,
api::client::device::{
self, delete_device, delete_devices, get_device, get_devices, update_device,
},
};
use tuwunel_core::{Err, Result, debug, err, utils, utils::string::to_small_string};
use tuwunel_core::{Err, Result, debug, err, utils::string::to_small_string};
use crate::{Ruma, client::DEVICE_ID_LENGTH, router::auth_uiaa};
use crate::{Ruma, router::auth_uiaa};
/// # `GET /_matrix/client/r0/devices`
///
@@ -94,13 +94,11 @@ pub(crate) async fn update_device_route(
appservice.registration.id
);
let device_id = OwnedDeviceId::from(utils::random_string(DEVICE_ID_LENGTH));
services
.users
.create_device(
sender_user,
&device_id,
None,
(Some(&appservice.registration.as_token), None),
None,
None,

View File

@@ -22,8 +22,7 @@ pub(crate) async fn invite_user_route(
invite_check(&services, sender_user, room_id).await?;
banned_room_check(&services, sender_user, Some(room_id), room_id.server_name(), client)
.await?;
banned_room_check(&services, sender_user, room_id, None, client).await?;
let invite_user::v3::InvitationRecipient::UserId { user_id } = &body.recipient else {
return Err!(Request(ThreepidDenied("Third party identifiers are not implemented")));

View File

@@ -2,13 +2,13 @@ use axum::extract::State;
use axum_client_ip::InsecureClientIp;
use futures::FutureExt;
use ruma::{
RoomId, RoomOrAliasId,
RoomId,
api::client::membership::{join_room_by_id, join_room_by_id_or_alias},
};
use tuwunel_core::{Result, warn};
use super::banned_room_check;
use crate::{Ruma, client::membership::get_join_params};
use crate::Ruma;
/// # `POST /_matrix/client/r0/rooms/{roomId}/join`
///
@@ -28,23 +28,20 @@ pub(crate) async fn join_room_by_id_route(
let room_id: &RoomId = &body.room_id;
banned_room_check(&services, sender_user, Some(room_id), room_id.server_name(), client)
.await?;
banned_room_check(&services, sender_user, room_id, None, client).await?;
let (room_id, servers) =
get_join_params(&services, sender_user, <&RoomOrAliasId>::from(room_id), &[]).await?;
let state_lock = services.state.mutex.lock(&room_id).await;
let state_lock = services.state.mutex.lock(room_id).await;
let mut errors = 0_usize;
while let Err(e) = services
.membership
.join(
sender_user,
&room_id,
room_id,
None,
body.reason.clone(),
&servers,
&body.appservice_info,
&[],
body.appservice_info.is_some(),
&state_lock,
)
.boxed()
@@ -62,7 +59,7 @@ pub(crate) async fn join_room_by_id_route(
drop(state_lock);
Ok(join_room_by_id::v3::Response { room_id })
Ok(join_room_by_id::v3::Response { room_id: room_id.to_owned() })
}
/// # `POST /_matrix/client/r0/join/{roomIdOrAlias}`
@@ -83,10 +80,12 @@ pub(crate) async fn join_room_by_id_or_alias_route(
let sender_user = body.sender_user();
let appservice_info = &body.appservice_info;
let (room_id, servers) =
get_join_params(&services, sender_user, &body.room_id_or_alias, &body.via).await?;
let (room_id, servers) = services
.alias
.maybe_resolve_with_servers(&body.room_id_or_alias, Some(&body.via))
.await?;
banned_room_check(&services, sender_user, Some(&room_id), room_id.server_name(), client)
banned_room_check(&services, sender_user, &room_id, Some(&body.room_id_or_alias), client)
.await?;
let state_lock = services.state.mutex.lock(&room_id).await;
@@ -97,9 +96,10 @@ pub(crate) async fn join_room_by_id_or_alias_route(
.join(
sender_user,
&room_id,
Some(&body.room_id_or_alias),
body.reason.clone(),
&servers,
appservice_info,
appservice_info.is_some(),
&state_lock,
)
.boxed()
@@ -117,5 +117,5 @@ pub(crate) async fn join_room_by_id_or_alias_route(
drop(state_lock);
Ok(join_room_by_id_or_alias::v3::Response { room_id })
Ok(join_room_by_id_or_alias::v3::Response { room_id: room_id.clone() })
}

View File

@@ -1,45 +1,10 @@
use std::{borrow::Borrow, collections::HashMap, iter::once, sync::Arc};
use axum::extract::State;
use axum_client_ip::InsecureClientIp;
use futures::{FutureExt, StreamExt};
use ruma::{
CanonicalJsonObject, CanonicalJsonValue, OwnedEventId, OwnedServerName, RoomId,
RoomVersionId, UserId,
api::{
client::knock::knock_room,
federation::{
membership::RawStrippedState,
{self},
},
},
canonical_json::to_canonical_value,
events::{
StateEventType,
room::member::{MembershipState, RoomMemberEventContent},
},
};
use tuwunel_core::{
Err, Result, debug, debug_info, debug_warn, err, extract_variant, info,
matrix::{
PduCount,
event::{Event, gen_event_id},
pdu::{PduBuilder, PduEvent},
},
trace,
utils::{self},
warn,
};
use tuwunel_service::{
Services,
rooms::{
state::RoomMutexGuard,
state_compressor::{CompressedState, HashSetCompressStateEvent},
},
};
use ruma::api::client::knock::knock_room;
use tuwunel_core::Result;
use super::banned_room_check;
use crate::{Ruma, client::membership::get_join_params};
use crate::Ruma;
/// # `POST /_matrix/client/*/knock/{roomIdOrAlias}`
///
@@ -51,542 +16,30 @@ pub(crate) async fn knock_room_route(
body: Ruma<knock_room::v3::Request>,
) -> Result<knock_room::v3::Response> {
let sender_user = body.sender_user();
let body = &body.body;
let (room_id, servers) =
get_join_params(&services, sender_user, &body.room_id_or_alias, &body.via).await?;
banned_room_check(&services, sender_user, Some(&room_id), room_id.server_name(), client)
let (room_id, servers) = services
.alias
.maybe_resolve_with_servers(&body.room_id_or_alias, Some(&body.via))
.await?;
knock_room_by_id_helper(&services, sender_user, &room_id, body.reason.clone(), &servers).await
}
async fn knock_room_by_id_helper(
services: &Services,
sender_user: &UserId,
room_id: &RoomId,
reason: Option<String>,
servers: &[OwnedServerName],
) -> Result<knock_room::v3::Response> {
let state_lock = services.state.mutex.lock(room_id).await;
if services
.state_cache
.is_invited(sender_user, room_id)
.await
{
debug_warn!("{sender_user} is already invited in {room_id} but attempted to knock");
return Err!(Request(Forbidden(
"You cannot knock on a room you are already invited/accepted to."
)));
}
if services
.state_cache
.is_joined(sender_user, room_id)
.await
{
debug_warn!("{sender_user} is already joined in {room_id} but attempted to knock");
return Err!(Request(Forbidden("You cannot knock on a room you are already joined in.")));
}
if services
.state_cache
.is_knocked(sender_user, room_id)
.await
{
debug_warn!("{sender_user} is already knocked in {room_id}");
return Ok(knock_room::v3::Response { room_id: room_id.into() });
}
if let Ok(membership) = services
.state_accessor
.get_member(room_id, sender_user)
.await
{
if membership.membership == MembershipState::Ban {
debug_warn!("{sender_user} is banned from {room_id} but attempted to knock");
return Err!(Request(Forbidden("You cannot knock on a room you are banned from.")));
}
}
let server_in_room = services
.state_cache
.server_in_room(services.globals.server_name(), room_id)
.await;
let local_knock = server_in_room
|| servers.is_empty()
|| (servers.len() == 1 && services.globals.server_is_ours(&servers[0]));
if local_knock {
knock_room_helper_local(services, sender_user, room_id, reason, servers, state_lock)
.boxed()
.await?;
} else {
knock_room_helper_remote(services, sender_user, room_id, reason, servers, state_lock)
.boxed()
.await?;
}
Ok(knock_room::v3::Response::new(room_id.to_owned()))
}
async fn knock_room_helper_local(
services: &Services,
sender_user: &UserId,
room_id: &RoomId,
reason: Option<String>,
servers: &[OwnedServerName],
state_lock: RoomMutexGuard,
) -> Result {
debug_info!("We can knock locally");
let room_version_id = services.state.get_room_version(room_id).await?;
if matches!(
room_version_id,
RoomVersionId::V1
| RoomVersionId::V2
| RoomVersionId::V3
| RoomVersionId::V4
| RoomVersionId::V5
| RoomVersionId::V6
) {
return Err!(Request(Forbidden("This room does not support knocking.")));
}
let content = RoomMemberEventContent {
displayname: services.users.displayname(sender_user).await.ok(),
avatar_url: services.users.avatar_url(sender_user).await.ok(),
blurhash: services.users.blurhash(sender_user).await.ok(),
reason: reason.clone(),
..RoomMemberEventContent::new(MembershipState::Knock)
};
// Try normal knock first
let Err(error) = services
.timeline
.build_and_append_pdu(
PduBuilder::state(sender_user.to_string(), &content),
sender_user,
room_id,
&state_lock,
)
.await
else {
return Ok(());
};
if servers.is_empty() || (servers.len() == 1 && services.globals.server_is_ours(&servers[0]))
{
return Err(error);
}
warn!("We couldn't do the knock locally, maybe federation can help to satisfy the knock");
let (make_knock_response, remote_server) =
make_knock_request(services, sender_user, room_id, servers).await?;
info!("make_knock finished");
let room_version_id = make_knock_response.room_version;
if !services
.server
.supported_room_version(&room_version_id)
{
return Err!(BadServerResponse(
"Remote room version {room_version_id} is not supported by tuwunel"
));
}
let mut knock_event_stub = serde_json::from_str::<CanonicalJsonObject>(
make_knock_response.event.get(),
)
.map_err(|e| {
err!(BadServerResponse("Invalid make_knock event json received from server: {e:?}"))
})?;
knock_event_stub.insert(
"origin".into(),
CanonicalJsonValue::String(services.globals.server_name().as_str().to_owned()),
);
knock_event_stub.insert(
"origin_server_ts".into(),
CanonicalJsonValue::Integer(
utils::millis_since_unix_epoch()
.try_into()
.expect("Timestamp is valid js_int value"),
),
);
knock_event_stub.insert(
"content".into(),
to_canonical_value(RoomMemberEventContent {
displayname: services.users.displayname(sender_user).await.ok(),
avatar_url: services.users.avatar_url(sender_user).await.ok(),
blurhash: services.users.blurhash(sender_user).await.ok(),
reason,
..RoomMemberEventContent::new(MembershipState::Knock)
})
.expect("event is valid, we just created it"),
);
// In order to create a compatible ref hash (EventID) the `hashes` field needs
// to be present
services
.server_keys
.hash_and_sign_event(&mut knock_event_stub, &room_version_id)?;
// Generate event id
let event_id = gen_event_id(&knock_event_stub, &room_version_id)?;
// Add event_id
knock_event_stub
.insert("event_id".into(), CanonicalJsonValue::String(event_id.clone().into()));
// It has enough fields to be called a proper event now
let knock_event = knock_event_stub;
info!("Asking {remote_server} for send_knock in room {room_id}");
let send_knock_request = federation::membership::create_knock_event::v1::Request {
room_id: room_id.to_owned(),
event_id: event_id.clone(),
pdu: services
.federation
.format_pdu_into(knock_event.clone(), Some(&room_version_id))
.await,
};
let send_knock_response = services
.federation
.execute(&remote_server, send_knock_request)
banned_room_check(&services, sender_user, &room_id, Some(&body.room_id_or_alias), client)
.await?;
info!("send_knock finished");
let state_lock = services.state.mutex.lock(&room_id).await;
services
.short
.get_or_create_shortroomid(room_id)
.await;
info!("Parsing knock event");
let parsed_knock_pdu = PduEvent::from_id_val(&event_id, knock_event.clone())
.map_err(|e| err!(BadServerResponse("Invalid knock event PDU: {e:?}")))?;
info!("Updating membership locally to knock state with provided stripped state events");
let count = services.globals.next_count();
services
.state_cache
.update_membership(
room_id,
.membership
.knock(
sender_user,
parsed_knock_pdu
.get_content::<RoomMemberEventContent>()
.expect("we just created this"),
sender_user,
Some(
send_knock_response
.knock_room_state
.into_iter()
.filter_map(|s| extract_variant!(s, RawStrippedState::Stripped))
.collect(),
),
None,
false,
PduCount::Normal(*count),
)
.await?;
info!("Appending room knock event locally");
services
.timeline
.append_pdu(
&parsed_knock_pdu,
knock_event,
once(parsed_knock_pdu.event_id.borrow()),
&room_id,
Some(&body.room_id_or_alias),
body.reason.clone(),
&servers,
&state_lock,
)
.await?;
Ok(())
}
async fn knock_room_helper_remote(
services: &Services,
sender_user: &UserId,
room_id: &RoomId,
reason: Option<String>,
servers: &[OwnedServerName],
state_lock: RoomMutexGuard,
) -> Result {
info!("Knocking {room_id} over federation.");
let (make_knock_response, remote_server) =
make_knock_request(services, sender_user, room_id, servers).await?;
info!("make_knock finished");
let room_version_id = make_knock_response.room_version;
if !services
.server
.supported_room_version(&room_version_id)
{
return Err!(BadServerResponse(
"Remote room version {room_version_id} is not supported by tuwunel"
));
}
let mut knock_event_stub: CanonicalJsonObject =
serde_json::from_str(make_knock_response.event.get()).map_err(|e| {
err!(BadServerResponse("Invalid make_knock event json received from server: {e:?}"))
})?;
knock_event_stub.insert(
"origin".into(),
CanonicalJsonValue::String(services.globals.server_name().as_str().to_owned()),
);
knock_event_stub.insert(
"origin_server_ts".into(),
CanonicalJsonValue::Integer(
utils::millis_since_unix_epoch()
.try_into()
.expect("Timestamp is valid js_int value"),
),
);
knock_event_stub.insert(
"content".into(),
to_canonical_value(RoomMemberEventContent {
displayname: services.users.displayname(sender_user).await.ok(),
avatar_url: services.users.avatar_url(sender_user).await.ok(),
blurhash: services.users.blurhash(sender_user).await.ok(),
reason,
..RoomMemberEventContent::new(MembershipState::Knock)
})
.expect("event is valid, we just created it"),
);
// In order to create a compatible ref hash (EventID) the `hashes` field needs
// to be present
services
.server_keys
.hash_and_sign_event(&mut knock_event_stub, &room_version_id)?;
// Generate event id
let event_id = gen_event_id(&knock_event_stub, &room_version_id)?;
// Add event_id
knock_event_stub
.insert("event_id".into(), CanonicalJsonValue::String(event_id.clone().into()));
// It has enough fields to be called a proper event now
let knock_event = knock_event_stub;
info!("Asking {remote_server} for send_knock in room {room_id}");
let send_knock_request = federation::membership::create_knock_event::v1::Request {
room_id: room_id.to_owned(),
event_id: event_id.clone(),
pdu: services
.federation
.format_pdu_into(knock_event.clone(), Some(&room_version_id))
.await,
};
let send_knock_response = services
.federation
.execute(&remote_server, send_knock_request)
.await?;
info!("send_knock finished");
services
.short
.get_or_create_shortroomid(room_id)
.await;
info!("Parsing knock event");
let parsed_knock_pdu = PduEvent::from_id_val(&event_id, knock_event.clone())
.map_err(|e| err!(BadServerResponse("Invalid knock event PDU: {e:?}")))?;
info!("Going through send_knock response knock state events");
let state = send_knock_response
.knock_room_state
.iter()
.map(|event| {
serde_json::from_str::<CanonicalJsonObject>(
extract_variant!(event.clone(), RawStrippedState::Stripped)
.expect("Raw<AnyStrippedStateEvent>")
.json()
.get(),
)
})
.filter_map(Result::ok);
let mut state_map: HashMap<u64, OwnedEventId> = HashMap::new();
for event in state {
let Some(state_key) = event.get("state_key") else {
debug_warn!("send_knock stripped state event missing state_key: {event:?}");
continue;
};
let Some(event_type) = event.get("type") else {
debug_warn!("send_knock stripped state event missing event type: {event:?}");
continue;
};
let Ok(state_key) = serde_json::from_value::<String>(state_key.clone().into()) else {
debug_warn!("send_knock stripped state event has invalid state_key: {event:?}");
continue;
};
let Ok(event_type) = serde_json::from_value::<StateEventType>(event_type.clone().into())
else {
debug_warn!("send_knock stripped state event has invalid event type: {event:?}");
continue;
};
let event_id = gen_event_id(&event, &room_version_id)?;
let shortstatekey = services
.short
.get_or_create_shortstatekey(&event_type, &state_key)
.await;
services
.timeline
.add_pdu_outlier(&event_id, &event);
state_map.insert(shortstatekey, event_id.clone());
}
info!("Compressing state from send_knock");
let compressed: CompressedState = services
.state_compressor
.compress_state_events(
state_map
.iter()
.map(|(ssk, eid)| (ssk, eid.borrow())),
)
.collect()
.await;
debug!("Saving compressed state");
let HashSetCompressStateEvent {
shortstatehash: statehash_before_knock,
added,
removed,
} = services
.state_compressor
.save_state(room_id, Arc::new(compressed))
.await?;
debug!("Forcing state for new room");
services
.state
.force_state(room_id, statehash_before_knock, added, removed, &state_lock)
.await?;
let statehash_after_knock = services
.state
.append_to_state(&parsed_knock_pdu)
.await?;
info!("Updating membership locally to knock state with provided stripped state events");
let count = services.globals.next_count();
services
.state_cache
.update_membership(
room_id,
sender_user,
parsed_knock_pdu
.get_content::<RoomMemberEventContent>()
.expect("we just created this"),
sender_user,
Some(
send_knock_response
.knock_room_state
.into_iter()
.filter_map(|s| extract_variant!(s, RawStrippedState::Stripped))
.collect(),
),
None,
false,
PduCount::Normal(*count),
)
.await?;
info!("Appending room knock event locally");
services
.timeline
.append_pdu(
&parsed_knock_pdu,
knock_event,
once(parsed_knock_pdu.event_id.borrow()),
&state_lock,
)
.await?;
info!("Setting final room state for new room");
// We set the room state after inserting the pdu, so that we never have a moment
// in time where events in the current room state do not exist
services
.state
.set_room_state(room_id, statehash_after_knock, &state_lock);
Ok(())
}
async fn make_knock_request(
services: &Services,
sender_user: &UserId,
room_id: &RoomId,
servers: &[OwnedServerName],
) -> Result<(federation::membership::prepare_knock_event::v1::Response, OwnedServerName)> {
let mut make_knock_response_and_server =
Err!(BadServerResponse("No server available to assist in knocking."));
let mut make_knock_counter: usize = 0;
for remote_server in servers {
if services.globals.server_is_ours(remote_server) {
continue;
}
info!("Asking {remote_server} for make_knock ({make_knock_counter})");
let make_knock_response = services
.federation
.execute(remote_server, federation::membership::prepare_knock_event::v1::Request {
room_id: room_id.to_owned(),
user_id: sender_user.to_owned(),
ver: services
.server
.supported_room_versions()
.collect(),
})
.await;
trace!("make_knock response: {make_knock_response:?}");
make_knock_counter = make_knock_counter.saturating_add(1);
make_knock_response_and_server = make_knock_response.map(|r| (r, remote_server.clone()));
if make_knock_response_and_server.is_ok() {
break;
}
if make_knock_counter > 40 {
warn!(
"50 servers failed to provide valid make_knock response, assuming no server can \
assist in knocking."
);
make_knock_response_and_server =
Err!(BadServerResponse("No server available to assist in knocking."));
return make_knock_response_and_server;
}
}
make_knock_response_and_server
drop(state_lock);
Ok(knock_room::v3::Response::new(room_id.clone()))
}

View File

@@ -8,15 +8,12 @@ mod leave;
mod members;
mod unban;
use std::{cmp::Ordering, net::IpAddr};
use std::net::IpAddr;
use axum::extract::State;
use futures::{FutureExt, StreamExt};
use ruma::{
OwnedRoomId, OwnedServerName, RoomId, RoomOrAliasId, ServerName, UserId,
api::client::membership::joined_rooms,
};
use tuwunel_core::{Err, Result, result::LogErr, utils::shuffle, warn};
use ruma::{RoomId, RoomOrAliasId, UserId, api::client::membership::joined_rooms};
use tuwunel_core::{Err, Result, result::LogErr, warn};
use tuwunel_service::Services;
pub(crate) use self::{
@@ -58,57 +55,42 @@ pub(crate) async fn joined_rooms_route(
pub(crate) async fn banned_room_check(
services: &Services,
user_id: &UserId,
room_id: Option<&RoomId>,
server_name: Option<&ServerName>,
room_id: &RoomId,
orig_room_id: Option<&RoomOrAliasId>,
client_ip: IpAddr,
) -> Result {
if services.users.is_admin(user_id).await {
return Ok(());
}
// TODO: weird condition
if let Some(room_id) = room_id {
if services.metadata.is_banned(room_id).await
|| (room_id.server_name().is_some()
&& services
.config
.forbidden_remote_server_names
.is_match(
room_id
.server_name()
.expect("legacy room mxid")
.host(),
)) {
warn!(
"User {user_id} who is not an admin attempted to send an invite for or \
attempted to join a banned room or banned room server name: {room_id}"
);
// room id is banned ...
if services.metadata.is_banned(room_id).await
// ... or legacy room id server is banned ...
|| room_id.server_name().is_some_and(|server_name| {
services
.config
.forbidden_remote_server_names
.is_match(server_name.host())
})
// ... or alias server is banned
|| orig_room_id.is_some_and(|orig_room_id| {
orig_room_id.server_name().is_some_and(|orig_server_name|
services
.config
.forbidden_remote_server_names
.is_match(orig_server_name.host()))
}) {
warn!(
"User {user_id} who is not an admin attempted to send an invite for or attempted to \
join a banned room or banned room server name: {room_id}"
);
maybe_deactivate(services, user_id, client_ip)
.await
.log_err()
.ok();
maybe_deactivate(services, user_id, client_ip)
.await
.log_err()
.ok();
return Err!(Request(Forbidden("This room is banned on this homeserver.")));
}
} else if let Some(server_name) = server_name {
if services
.config
.forbidden_remote_server_names
.is_match(server_name.host())
{
warn!(
"User {user_id} who is not an admin tried joining a room which has the server \
name {server_name} that is globally forbidden. Rejecting.",
);
maybe_deactivate(services, user_id, client_ip)
.await
.log_err()
.ok();
return Err!(Request(Forbidden("This remote server is banned on this homeserver.")));
}
return Err!(Request(Forbidden("This room is banned on this homeserver.")));
}
Ok(())
@@ -120,16 +102,15 @@ async fn maybe_deactivate(services: &Services, user_id: &UserId, client_ip: IpAd
.config
.auto_deactivate_banned_room_attempts
{
warn!("Automatically deactivating user {user_id} due to attempted banned room join");
let notice = format!(
"Automatically deactivating user {user_id} due to attempted banned room join from \
IP {client_ip}"
);
warn!("{notice}");
if services.server.config.admin_room_notices {
services
.admin
.send_text(&format!(
"Automatically deactivating user {user_id} due to attempted banned room \
join from IP {client_ip}"
))
.await;
services.admin.send_text(&notice).await;
}
services
@@ -141,99 +122,3 @@ async fn maybe_deactivate(services: &Services, user_id: &UserId, client_ip: IpAd
Ok(())
}
// TODO: should this be in services? banned check would have to resolve again if
// room_id is not available at callsite
async fn get_join_params(
services: &Services,
user_id: &UserId,
room_id_or_alias: &RoomOrAliasId,
via: &[OwnedServerName],
) -> Result<(OwnedRoomId, Vec<OwnedServerName>)> {
// servers tried first, additional_servers shuffled then tried after
let (room_id, mut primary_servers, mut additional_servers) =
match OwnedRoomId::try_from(room_id_or_alias.to_owned()) {
// if room id, shuffle via + room_id server_name ...
| Ok(room_id) => {
let mut additional_servers = via.to_vec();
if let Some(server) = room_id.server_name() {
additional_servers.push(server.to_owned());
}
(room_id, Vec::new(), additional_servers)
},
// ... if room alias, resolve and don't shuffle ...
| Err(room_alias) => {
let (room_id, servers) = services.alias.resolve_alias(&room_alias).await?;
(room_id, servers, Vec::new())
},
};
// either way, add invited vias
additional_servers.extend(
services
.state_cache
.servers_invite_via(&room_id)
.map(ToOwned::to_owned)
.collect::<Vec<_>>()
.await,
);
// either way, add invite senders' servers
additional_servers.extend(
services
.state_cache
.invite_state(user_id, &room_id)
.await
.unwrap_or_default()
.iter()
.filter_map(|event| event.get_field("sender").ok().flatten())
.filter_map(|sender: &str| UserId::parse(sender).ok())
.map(|user| user.server_name().to_owned()),
);
primary_servers.sort_unstable();
primary_servers.dedup();
shuffle(&mut primary_servers);
// shuffle additionals, append to base servers
additional_servers.sort_unstable();
additional_servers.dedup();
shuffle(&mut additional_servers);
let mut servers: Vec<_> = room_id_or_alias
.server_name()
.filter(|_| room_id_or_alias.is_room_alias_id())
.map(ToOwned::to_owned)
.into_iter()
.chain(primary_servers.into_iter())
.chain(additional_servers.into_iter())
.collect();
// sort deprioritized servers last
servers.sort_by(|a, b| {
let a_matches = services
.server
.config
.deprioritize_joins_through_servers
.is_match(a.host());
let b_matches = services
.server
.config
.deprioritize_joins_through_servers
.is_match(b.host());
if a_matches && !b_matches {
Ordering::Greater
} else if !a_matches && b_matches {
Ordering::Less
} else {
Ordering::Equal
}
});
Ok((room_id, servers))
}

View File

@@ -88,9 +88,6 @@ pub(super) use user_directory::*;
pub(super) use voip::*;
pub(super) use well_known::*;
/// generated device ID length
const DEVICE_ID_LENGTH: usize = 10;
/// generated user access token length
const TOKEN_LENGTH: usize = tuwunel_service::users::device::TOKEN_LENGTH;

View File

@@ -2,7 +2,6 @@ use std::fmt::Write;
use axum::extract::State;
use axum_client_ip::InsecureClientIp;
use futures::FutureExt;
use register::RegistrationKind;
use ruma::{
UserId,
@@ -13,15 +12,11 @@ use ruma::{
},
uiaa::{AuthFlow, AuthType, UiaaInfo},
},
events::GlobalAccountDataEventType,
push,
};
use tuwunel_core::{
Err, Error, Result, debug_info, debug_warn, error, info, is_equal_to, utils, warn,
};
use tuwunel_core::{Err, Error, Result, debug_info, debug_warn, info, utils};
use tuwunel_service::users::device::generate_refresh_token;
use super::{DEVICE_ID_LENGTH, SESSION_ID_LENGTH};
use super::SESSION_ID_LENGTH;
use crate::Ruma;
const RANDOM_USER_ID_LENGTH: usize = 10;
@@ -48,15 +43,10 @@ pub(crate) async fn get_register_available_route(
.appservice_info
.as_ref()
.is_some_and(|appservice| {
appservice.registration.id == "irc"
|| appservice
.registration
.id
.contains("matrix-appservice-irc")
|| appservice
.registration
.id
.contains("matrix_appservice_irc")
let id = &appservice.registration.id;
id == "irc"
|| id.contains("matrix-appservice-irc")
|| id.contains("matrix_appservice_irc")
});
if services
@@ -148,71 +138,32 @@ pub(crate) async fn register_route(
let is_guest = body.kind == RegistrationKind::Guest;
let emergency_mode_enabled = services.config.emergency_password.is_some();
let user = body.username.as_deref().unwrap_or("");
let device_name = body
.initial_device_display_name
.as_deref()
.unwrap_or("");
if !services.config.allow_registration && body.appservice_info.is_none() {
match (body.username.as_ref(), body.initial_device_display_name.as_ref()) {
| (Some(username), Some(device_display_name)) => {
info!(
%is_guest,
user = %username,
device_name = %device_display_name,
"Rejecting registration attempt as registration is disabled"
);
},
| (Some(username), _) => {
info!(
%is_guest,
user = %username,
"Rejecting registration attempt as registration is disabled"
);
},
| (_, Some(device_display_name)) => {
info!(
%is_guest,
device_name = %device_display_name,
"Rejecting registration attempt as registration is disabled"
);
},
| (None, _) => {
info!(
%is_guest,
"Rejecting registration attempt as registration is disabled"
);
},
}
info!(
%is_guest,
%user,
%device_name,
"Rejecting registration attempt as registration is disabled"
);
return Err!(Request(Forbidden("Registration has been disabled.")));
}
if is_guest && !services.config.allow_guest_registration {
let display_name = body
.initial_device_display_name
.as_deref()
.unwrap_or("");
debug_warn!(
"Guest registration disabled / registration enabled with token configured, \
rejecting guest registration attempt, initial device name: \"{display_name}\""
%device_name,
"Guest registration disabled, rejecting guest registration attempt"
);
return Err!(Request(GuestAccessForbidden("Guest registration is disabled.")));
}
// forbid guests from registering if there is not a real admin user yet. give
// generic user error.
if is_guest && services.users.count().await < 2 {
let display_name = body
.initial_device_display_name
.as_deref()
.unwrap_or("");
warn!(
"Guest account attempted to register before a real admin user has been registered, \
rejecting registration. Guest's initial device name: \"{display_name}\""
);
return Err!(Request(Forbidden("Registration is temporarily disabled.")));
}
let user_id = match (body.username.as_ref(), is_guest) {
| (Some(username), false) => {
// workaround for https://github.com/matrix-org/matrix-appservice-irc/issues/1780 due to inactivity of fixing the issue
@@ -313,7 +264,13 @@ pub(crate) async fn register_route(
// UIAA
let mut uiaainfo;
let skip_auth = if services.globals.registration_token.is_some() && !is_guest {
let skip_auth = if !services
.globals
.get_registration_tokens()
.await
.is_empty()
&& !is_guest
{
// Registration token required
uiaainfo = UiaaInfo {
flows: vec![AuthFlow {
@@ -378,45 +335,9 @@ pub(crate) async fn register_route(
let password = if is_guest { None } else { body.password.as_deref() };
// Create user
services
.users
.create(&user_id, password, None)
.await?;
// Default to pretty displayname
let mut displayname = user_id.localpart().to_owned();
// If `new_user_displayname_suffix` is set, registration will push whatever
// content is set to the user's display name with a space before it
if !services
.config
.new_user_displayname_suffix
.is_empty()
&& body.appservice_info.is_none()
{
write!(displayname, " {}", services.server.config.new_user_displayname_suffix)?;
}
services
.users
.set_displayname(&user_id, Some(displayname.clone()));
// Initial account data
services
.account_data
.update(
None,
&user_id,
GlobalAccountDataEventType::PushRules
.to_string()
.into(),
&serde_json::to_value(ruma::events::push_rules::PushRulesEvent {
content: ruma::events::push_rules::PushRulesEventContent {
global: push::Ruleset::server_default(&user_id),
},
})?,
)
.full_register(&user_id, password, None, body.appservice_info.as_ref(), is_guest, true)
.await?;
if (!is_guest && body.inhibit_login)
@@ -426,169 +347,56 @@ pub(crate) async fn register_route(
.is_some_and(|appservice| appservice.registration.device_management)
{
return Ok(register::v3::Response {
access_token: None,
user_id,
device_id: None,
access_token: None,
refresh_token: None,
expires_in: None,
});
}
// Generate new device id if the user didn't specify one
let device_id = if is_guest { None } else { body.device_id.clone() }
.unwrap_or_else(|| utils::random_string(DEVICE_ID_LENGTH).into());
let device_id = if is_guest { None } else { body.device_id.as_deref() };
// Generate new token for the device
let (access_token, expires_in) = services
.users
.generate_access_token(body.body.refresh_token);
.generate_access_token(body.refresh_token);
// Generate a new refresh_token if requested by client
let refresh_token = expires_in.is_some().then(generate_refresh_token);
// Create device for this account
services
let device_id = services
.users
.create_device(
&user_id,
&device_id,
device_id,
(Some(&access_token), expires_in),
refresh_token.as_deref(),
body.initial_device_display_name.clone(),
body.initial_device_display_name.as_deref(),
Some(client.to_string()),
)
.await?;
debug_info!(%user_id, %device_id, "User account was created");
let device_display_name = body
.initial_device_display_name
.as_deref()
.unwrap_or("");
if body.appservice_info.is_none() && (!is_guest || services.config.log_guest_registrations) {
let mut notice = String::from(if is_guest { "New guest user" } else { "New user" });
// log in conduit admin channel if a non-guest user registered
if body.appservice_info.is_none() && !is_guest {
if !device_display_name.is_empty() {
let notice = format!(
"New user \"{user_id}\" registered on this server from IP {client} and device \
display name \"{device_display_name}\""
);
write!(notice, " registered on this server from IP {client}")?;
info!("{notice}");
if services.server.config.admin_room_notices {
services.admin.notice(&notice).await;
}
} else {
let notice = format!("New user \"{user_id}\" registered on this server.");
info!("{notice}");
if services.server.config.admin_room_notices {
services.admin.notice(&notice).await;
}
if let Some(device_name) = body.initial_device_display_name.as_deref() {
write!(notice, " with device name {device_name}")?;
}
}
// log in conduit admin channel if a guest registered
if body.appservice_info.is_none() && is_guest && services.config.log_guest_registrations {
debug_info!("New guest user \"{user_id}\" registered on this server.");
if !device_display_name.is_empty() {
if services.server.config.admin_room_notices {
services
.admin
.notice(&format!(
"Guest user \"{user_id}\" with device display name \
\"{device_display_name}\" registered on this server from IP {client}"
))
.await;
}
if !is_guest {
info!("{notice}");
} else {
#[allow(clippy::collapsible_else_if)]
if services.server.config.admin_room_notices {
services
.admin
.notice(&format!(
"Guest user \"{user_id}\" with no device display name registered on \
this server from IP {client}",
))
.await;
}
debug_info!("{notice}");
}
}
// If this is the first real user, grant them admin privileges except for guest
// users
// Note: the server user is generated first
if !is_guest
&& services.config.grant_admin_to_first_user
&& let Ok(admin_room) = services.admin.get_admin_room().await
&& services
.state_cache
.room_joined_count(&admin_room)
.await
.is_ok_and(is_equal_to!(1))
{
services
.admin
.make_user_admin(&user_id)
.boxed()
.await?;
warn!("Granting {user_id} admin privileges as the first user");
}
if body.appservice_info.is_none()
&& !services.server.config.auto_join_rooms.is_empty()
&& (services.config.allow_guests_auto_join_rooms || !is_guest)
{
for room in &services.server.config.auto_join_rooms {
let Ok(room_id) = services.alias.maybe_resolve(room).await else {
error!(
"Failed to resolve room alias to room ID when attempting to auto join \
{room}, skipping"
);
continue;
};
if !services
.state_cache
.server_in_room(services.globals.server_name(), &room_id)
.await
{
warn!(
"Skipping room {room} to automatically join as we have never joined before."
);
continue;
}
if let Some(room_server_name) = room.server_name() {
let state_lock = services.state.mutex.lock(&room_id).await;
match services
.membership
.join(
&user_id,
&room_id,
Some("Automatically joining this room upon registration".to_owned()),
&[services.globals.server_name().to_owned(), room_server_name.to_owned()],
&body.appservice_info,
&state_lock,
)
.boxed()
.await
{
| Err(e) => {
// don't return this error so we don't fail registrations
error!(
"Failed to automatically join room {room} for user {user_id}: {e}"
);
},
| _ => {
info!("Automatically joined room {room} for user {user_id}");
},
}
drop(state_lock);
}
if services.server.config.admin_room_notices {
services.admin.notice(&notice).await;
}
}
@@ -611,9 +419,13 @@ pub(crate) async fn check_registration_token_validity(
State(services): State<crate::State>,
body: Ruma<check_registration_token_validity::v1::Request>,
) -> Result<check_registration_token_validity::v1::Response> {
let Some(reg_token) = services.globals.registration_token.clone() else {
return Err!(Request(Forbidden("Server does not allow token registration")));
};
let tokens = services.globals.get_registration_tokens().await;
Ok(check_registration_token_validity::v1::Response { valid: reg_token == body.token })
if tokens.is_empty() {
return Err!(Request(Forbidden("Server does not allow token registration")));
}
let valid = tokens.contains(&body.token);
Ok(check_registration_token_validity::v1::Response { valid })
}

View File

@@ -55,7 +55,7 @@ pub(crate) async fn get_room_summary(
) -> Result<get_summary::v1::Response> {
let (room_id, servers) = services
.alias
.maybe_resolve_with_servers(&body.room_id_or_alias, Some(body.via.clone()))
.maybe_resolve_with_servers(&body.room_id_or_alias, Some(&body.via))
.await?;
if services.metadata.is_banned(&room_id).await {

View File

@@ -1,6 +1,6 @@
use futures::FutureExt;
use ruma::{OwnedUserId, UserId};
use tuwunel_core::{Err, Result, debug, error, info, warn};
use tuwunel_core::{Err, Result, debug};
use tuwunel_service::Services;
use super::password_login;
@@ -51,69 +51,8 @@ pub(super) async fn ldap_login(
if !services.users.exists(lowercased_user_id).await {
services
.users
.create(lowercased_user_id, Some("*"), Some("ldap"))
.full_register(lowercased_user_id, Some("*"), Some("ldap"), None, false, false)
.await?;
// Auto-join rooms for newly created LDAP users
if !services.server.config.auto_join_rooms.is_empty() {
for room in &services.server.config.auto_join_rooms {
let Ok(room_id) = services.alias.maybe_resolve(room).await else {
error!(
"Failed to resolve room alias to room ID when attempting to auto join \
{room}, skipping"
);
continue;
};
if !services
.state_cache
.server_in_room(services.globals.server_name(), &room_id)
.await
{
warn!(
"Skipping room {room} to automatically join as we have never joined \
before."
);
continue;
}
if let Some(room_server_name) = room.server_name() {
let state_lock = services.state.mutex.lock(&room_id).await;
match services
.membership
.join(
lowercased_user_id,
&room_id,
Some("Automatically joining this room upon first login".to_owned()),
&[
services.globals.server_name().to_owned(),
room_server_name.to_owned(),
],
&None,
&state_lock,
)
.boxed()
.await
{
| Err(e) => {
// don't return this error so we don't fail logins
error!(
"Failed to automatically join room {room} for user \
{lowercased_user_id}: {e}"
);
},
| _ => {
info!(
"Automatically joined room {room} for user {lowercased_user_id}"
);
},
}
drop(state_lock);
}
}
}
}
let is_tuwunel_admin = services

View File

@@ -21,7 +21,7 @@ use ruma::api::client::session::{
v3::{DiscoveryInfo, HomeserverInfo, LoginInfo},
},
};
use tuwunel_core::{Err, Result, info, utils, utils::stream::ReadyExt};
use tuwunel_core::{Err, Result, info, utils::stream::ReadyExt};
use tuwunel_service::users::device::generate_refresh_token;
use self::{ldap::ldap_login, password::password_login};
@@ -30,7 +30,7 @@ pub(crate) use self::{
refresh::refresh_token_route,
token::login_token_route,
};
use super::{DEVICE_ID_LENGTH, TOKEN_LENGTH};
use super::TOKEN_LENGTH;
use crate::Ruma;
/// # `GET /_matrix/client/v3/login`
@@ -97,43 +97,39 @@ pub(crate) async fn login_route(
// Generate a new refresh_token if requested by client
let refresh_token = expires_in.is_some().then(generate_refresh_token);
// Generate new device id if the user didn't specify one
let device_id = body
.device_id
.clone()
.unwrap_or_else(|| utils::random_string(DEVICE_ID_LENGTH).into());
// Determine if device_id was provided and exists in the db for this user
let device_exists = services
.users
.all_device_ids(&user_id)
.ready_any(|v| v == device_id)
.await;
if !device_exists {
services
let device_id = if let Some(device_id) = &body.device_id
&& services
.users
.create_device(
&user_id,
&device_id,
(Some(&access_token), expires_in),
refresh_token.as_deref(),
body.initial_device_display_name.clone(),
Some(client.to_string()),
)
.await?;
} else {
.all_device_ids(&user_id)
.ready_any(|v| v == device_id)
.await
{
services
.users
.set_access_token(
&user_id,
&device_id,
device_id,
&access_token,
expires_in,
refresh_token.as_deref(),
)
.await?;
}
device_id.clone()
} else {
services
.users
.create_device(
&user_id,
body.device_id.as_deref(),
(Some(&access_token), expires_in),
refresh_token.as_deref(),
body.initial_device_display_name.as_deref(),
Some(client.to_string()),
)
.await?
};
info!("{user_id} logged in");

View File

@@ -27,7 +27,7 @@ pub(crate) async fn turn_server_route(
let turn_secret = &services.globals.turn_secret;
let (username, password) = if !turn_secret.is_empty() {
let (username, password) = if let Some(turn_secret) = turn_secret {
let expiry = SecondsSinceUnixEpoch::from_system_time(
SystemTime::now()
.checked_add(Duration::from_secs(services.config.turn_ttl))