feat(presence): add granular allow configuration
parent 509afc6b46
commit ae76052378
9 changed files with 133 additions and 98 deletions
@@ -11,6 +11,13 @@ use std::time::Duration;
 pub async fn set_presence_route(
     body: Ruma<set_presence::v3::Request>,
 ) -> Result<set_presence::v3::Response> {
+    if !services().globals.allow_local_presence() {
+        return Err(Error::BadRequest(
+            ErrorKind::Forbidden,
+            "Presence is disabled on this server",
+        ));
+    }
+
     let sender_user = body.sender_user.as_ref().expect("user is authenticated");
     for room_id in services().rooms.state_cache.rooms_joined(sender_user) {
         let room_id = room_id?;
@@ -36,6 +43,13 @@ pub async fn set_presence_route(
 pub async fn get_presence_route(
     body: Ruma<get_presence::v3::Request>,
 ) -> Result<get_presence::v3::Response> {
+    if !services().globals.allow_local_presence() {
+        return Err(Error::BadRequest(
+            ErrorKind::Forbidden,
+            "Presence is disabled on this server",
+        ));
+    }
+
     let sender_user = body.sender_user.as_ref().expect("user is authenticated");

     let mut presence_event = None;

@@ -92,12 +92,14 @@ pub async fn set_displayname_route(
         .await;
     }

+    if services().globals.allow_local_presence() {
         // Presence update
         services()
             .rooms
             .edus
             .presence
             .ping_presence(sender_user, PresenceState::Online)?;
+    }

     Ok(set_display_name::v3::Response {})
 }
@@ -213,12 +215,14 @@ pub async fn set_avatar_url_route(
         .await;
     }

+    if services().globals.allow_local_presence() {
         // Presence update
         services()
             .rooms
             .edus
             .presence
             .ping_presence(sender_user, PresenceState::Online)?;
+    }

     Ok(set_avatar_url::v3::Response {})
 }

@@ -18,6 +18,7 @@ use ruma::{
         uiaa::UiaaResponse,
     },
     events::{
+        presence::PresenceEvent,
         room::member::{MembershipState, RoomMemberEventContent},
         StateEventType, TimelineEventType,
     },
@@ -175,11 +176,13 @@
     // bool = caching allowed
 ) -> Result<(sync_events::v3::Response, bool), Error> {
     // Presence update
+    if services().globals.allow_local_presence() {
         services()
             .rooms
             .edus
             .presence
             .ping_presence(&sender_user, body.set_presence)?;
+    }

     // Setup watchers, so if there's no response, we can wait for them
     let watcher = services().globals.watch(&sender_user, &sender_device);
@@ -255,39 +258,8 @@
             joined_rooms.insert(room_id.clone(), joined_room);
         }

-        // Take presence updates from this room
-        for presence_data in services()
-            .rooms
-            .edus
-            .presence
-            .presence_since(&room_id, since)
-        {
-            let (user_id, _, presence_event) = presence_data?;
-
-            match presence_updates.entry(user_id) {
-                Entry::Vacant(slot) => {
-                    slot.insert(presence_event);
-                }
-                Entry::Occupied(mut slot) => {
-                    let curr_event = slot.get_mut();
-                    let curr_content = &mut curr_event.content;
-                    let new_content = presence_event.content;
-
-                    // Update existing presence event with more info
-                    curr_content.presence = new_content.presence;
-                    curr_content.status_msg =
-                        curr_content.status_msg.clone().or(new_content.status_msg);
-                    curr_content.last_active_ago =
-                        curr_content.last_active_ago.or(new_content.last_active_ago);
-                    curr_content.displayname =
-                        curr_content.displayname.clone().or(new_content.displayname);
-                    curr_content.avatar_url =
-                        curr_content.avatar_url.clone().or(new_content.avatar_url);
-                    curr_content.currently_active = curr_content
-                        .currently_active
-                        .or(new_content.currently_active);
-                }
-            }
-        }
+        if services().globals.allow_local_presence() {
+            process_room_presence_updates(&mut presence_updates, &room_id, since).await?;
+        }
         }
     }
@@ -599,6 +571,49 @@
     }
 }

+async fn process_room_presence_updates(
+    presence_updates: &mut HashMap<OwnedUserId, PresenceEvent>,
+    room_id: &RoomId,
+    since: u64,
+) -> Result<()> {
+    // Take presence updates from this room
+    for presence_data in services()
+        .rooms
+        .edus
+        .presence
+        .presence_since(room_id, since)
+    {
+        let (user_id, _, presence_event) = presence_data?;
+
+        match presence_updates.entry(user_id) {
+            Entry::Vacant(slot) => {
+                slot.insert(presence_event);
+            }
+            Entry::Occupied(mut slot) => {
+                let curr_event = slot.get_mut();
+                let curr_content = &mut curr_event.content;
+                let new_content = presence_event.content;
+
+                // Update existing presence event with more info
+                curr_content.presence = new_content.presence;
+                curr_content.status_msg =
+                    curr_content.status_msg.clone().or(new_content.status_msg);
+                curr_content.last_active_ago =
+                    curr_content.last_active_ago.or(new_content.last_active_ago);
+                curr_content.displayname =
+                    curr_content.displayname.clone().or(new_content.displayname);
+                curr_content.avatar_url =
+                    curr_content.avatar_url.clone().or(new_content.avatar_url);
+                curr_content.currently_active = curr_content
+                    .currently_active
+                    .or(new_content.currently_active);
+            }
+        }
+    }
+
+    Ok(())
+}
+
 #[allow(clippy::too_many_arguments)]
 async fn load_joined_room(
     sender_user: &UserId,
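Aside: in process_room_presence_updates above, the required presence state is simply overwritten by the newer event, while the Option-typed fields are merged with Option::or, so a field that is already Some keeps its first-seen value and only missing fields are filled in from later events. A freestanding sketch of that entry-based merge, using simplified stand-in types rather than ruma's PresenceEvent (the user ID and field set are illustrative only):

use std::collections::{hash_map::Entry, HashMap};

#[derive(Debug)]
struct Content {
    presence: &'static str,       // required field: newest value wins
    status_msg: Option<String>,   // optional field: first Some wins
    last_active_ago: Option<u64>, // optional field: first Some wins
}

fn coalesce(updates: &mut HashMap<String, Content>, user_id: String, new: Content) {
    match updates.entry(user_id) {
        Entry::Vacant(slot) => {
            // No event stored for this user yet: keep the new one as-is.
            slot.insert(new);
        }
        Entry::Occupied(mut slot) => {
            let curr = slot.get_mut();
            curr.presence = new.presence;
            curr.status_msg = curr.status_msg.clone().or(new.status_msg);
            curr.last_active_ago = curr.last_active_ago.or(new.last_active_ago);
        }
    }
}

fn main() {
    let mut updates = HashMap::new();
    let user = String::from("@alice:example.org");
    coalesce(&mut updates, user.clone(), Content {
        presence: "unavailable",
        status_msg: Some("away".into()),
        last_active_ago: None,
    });
    coalesce(&mut updates, user.clone(), Content {
        presence: "online",
        status_msg: Some("working".into()),
        last_active_ago: Some(30),
    });
    let merged = &updates[&user];
    assert_eq!(merged.presence, "online");                  // newest state wins
    assert_eq!(merged.status_msg.as_deref(), Some("away")); // first Some kept
    assert_eq!(merged.last_active_ago, Some(30));           // gap filled in
}
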
@@ -778,6 +778,10 @@ pub async fn send_transaction_message_route(
     {
         match edu {
             Edu::Presence(presence) => {
+                if !services().globals.allow_incoming_presence() {
+                    continue;
+                }
+
                 for update in presence.push {
                     for room_id in services().rooms.state_cache.rooms_joined(&update.user_id) {
                         services().rooms.edus.presence.set_presence(

@@ -84,7 +84,11 @@ pub struct Config {
     pub emergency_password: Option<String>,

     #[serde(default = "false_fn")]
-    pub allow_presence: bool,
+    pub allow_local_presence: bool,
+    #[serde(default = "false_fn")]
+    pub allow_incoming_presence: bool,
+    #[serde(default = "false_fn")]
+    pub allow_outgoing_presence: bool,
     #[serde(default = "default_presence_idle_timeout_s")]
     pub presence_idle_timeout_s: u64,
     #[serde(default = "default_presence_offline_timeout_s")]

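Aside: each of the three new flags defaults through false_fn, so presence stays disabled unless a server admin opts in per direction. A minimal sketch of how those serde defaults behave, assuming false_fn is the usual helper that returns false (simplified struct using the serde and toml crates; not this codebase's Config):

use serde::Deserialize;

// Assumed helper: serde calls this when the key is absent from the config file.
fn false_fn() -> bool {
    false
}

#[derive(Deserialize)]
struct PresenceConfig {
    #[serde(default = "false_fn")]
    allow_local_presence: bool,
    #[serde(default = "false_fn")]
    allow_incoming_presence: bool,
    #[serde(default = "false_fn")]
    allow_outgoing_presence: bool,
}

fn main() {
    // A config file with no presence keys leaves every flag disabled.
    let config: PresenceConfig = toml::from_str("").unwrap();
    assert!(!config.allow_local_presence);
    assert!(!config.allow_incoming_presence);
    assert!(!config.allow_outgoing_presence);

    // Each path can be enabled independently.
    let config: PresenceConfig = toml::from_str("allow_local_presence = true").unwrap();
    assert!(config.allow_local_presence);
    assert!(!config.allow_outgoing_presence);
}

Splitting the old allow_presence into local, incoming, and outgoing flags lets an admin, for example, show presence to local users without accepting or broadcasting it over federation.
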
@@ -1,4 +1,4 @@
-use std::{iter, time::Duration};
+use std::time::Duration;

 use ruma::{
     events::presence::PresenceEvent, presence::PresenceState, OwnedUserId, RoomId, UInt, UserId,
@@ -14,10 +14,6 @@ use crate::{

 impl service::rooms::edus::presence::Data for KeyValueDatabase {
     fn get_presence(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<PresenceEvent>> {
-        if !services().globals.config.allow_presence {
-            return Ok(None);
-        }
-
         let key = presence_key(room_id, user_id);

         self.roomuserid_presence
@@ -29,10 +25,6 @@ impl service::rooms::edus::presence::Data for KeyValueDatabase {
     }

     fn ping_presence(&self, user_id: &UserId, new_state: PresenceState) -> Result<()> {
-        if !services().globals.config.allow_presence {
-            return Ok(());
-        }
-
         let now = utils::millis_since_unix_epoch();
         let mut state_changed = false;

@@ -103,10 +95,6 @@ impl service::rooms::edus::presence::Data for KeyValueDatabase {
         last_active_ago: Option<UInt>,
         status_msg: Option<String>,
     ) -> Result<()> {
-        if !services().globals.config.allow_presence {
-            return Ok(());
-        }
-
         let now = utils::millis_since_unix_epoch();
         let last_active_ts = match last_active_ago {
             Some(last_active_ago) => now.saturating_sub(last_active_ago.into()),
@@ -153,10 +141,6 @@ impl service::rooms::edus::presence::Data for KeyValueDatabase {
         room_id: &RoomId,
         since: u64,
     ) -> Box<dyn Iterator<Item = Result<(OwnedUserId, u64, PresenceEvent)>> + 'a> {
-        if !services().globals.config.allow_presence {
-            return Box::new(iter::empty());
-        }
-
         let prefix = [room_id.as_bytes(), &[0xff]].concat();

         Box::new(

@@ -988,7 +988,7 @@ impl KeyValueDatabase {
         if services().globals.allow_check_for_updates() {
             Self::start_check_for_updates_task();
         }
-        if services().globals.config.allow_presence {
+        if services().globals.allow_local_presence() {
             Self::start_presence_handler(presence_receiver).await;
         }

@@ -353,8 +353,16 @@ impl Service {
         &self.config.emergency_password
     }

-    pub fn allow_presence(&self) -> bool {
-        self.config.allow_presence
+    pub fn allow_local_presence(&self) -> bool {
+        self.config.allow_local_presence
+    }
+
+    pub fn allow_incoming_presence(&self) -> bool {
+        self.config.allow_incoming_presence
+    }
+
+    pub fn allow_outcoming_presence(&self) -> bool {
+        self.config.allow_outgoing_presence
     }

     pub fn presence_idle_timeout_s(&self) -> u64 {

@@ -286,6 +286,7 @@ impl Service {
                 .filter(|user_id| user_id.server_name() == services().globals.server_name()),
         );

+        if services().globals.allow_outcoming_presence() {
             // Look for presence updates in this room
             let mut presence_updates = Vec::new();

@@ -318,6 +319,7 @@ impl Service {
             events.push(
                 serde_json::to_vec(&presence_content).expect("PresenceEvent can be serialized"),
             );
+        }

         // Look for read receipts in this room
         for r in services()