de-global services

Signed-off-by: Jason Volk <jason@zemos.net>
This commit is contained in:
Jason Volk 2024-07-27 07:17:07 +00:00
parent 7e50db4193
commit 2f85a5c1ac
36 changed files with 327 additions and 336 deletions

View file

@ -2,7 +2,7 @@ use conduit::Result;
use conduit_macros::implement; use conduit_macros::implement;
use ruma::events::room::message::RoomMessageEventContent; use ruma::events::room::message::RoomMessageEventContent;
use crate::{services, Command}; use crate::Command;
/// Uses the iterator in `src/database/key_value/users.rs` to iterate over /// Uses the iterator in `src/database/key_value/users.rs` to iterate over
/// every user in our database (remote and local). Reports total count, any /// every user in our database (remote and local). Reports total count, any
@ -10,7 +10,7 @@ use crate::{services, Command};
#[implement(Command, params = "<'_>")] #[implement(Command, params = "<'_>")]
pub(super) async fn check_all_users(&self) -> Result<RoomMessageEventContent> { pub(super) async fn check_all_users(&self) -> Result<RoomMessageEventContent> {
let timer = tokio::time::Instant::now(); let timer = tokio::time::Instant::now();
let results = services().users.db.iter(); let results = self.services.users.db.iter();
let query_time = timer.elapsed(); let query_time = timer.elapsed();
let users = results.collect::<Vec<_>>(); let users = results.collect::<Vec<_>>();

View file

@ -1,4 +1,4 @@
use std::{panic::AssertUnwindSafe, time::Instant}; use std::{panic::AssertUnwindSafe, sync::Arc, time::Instant};
use clap::{CommandFactory, Parser}; use clap::{CommandFactory, Parser};
use conduit::{error, trace, utils::string::common_prefix, Error, Result}; use conduit::{error, trace, utils::string::common_prefix, Error, Result};
@ -17,37 +17,27 @@ use service::{
use crate::{admin, admin::AdminCommand, Command}; use crate::{admin, admin::AdminCommand, Command};
struct Handler { #[must_use]
services: &'static Services, pub(super) fn complete(line: &str) -> String { complete_command(AdminCommand::command(), line) }
}
#[must_use] #[must_use]
pub(super) fn complete(line: &str) -> String { pub(super) fn handle(services: Arc<Services>, command: CommandInput) -> HandlerResult {
Handler { Box::pin(handle_command(services, command))
services: service::services(),
}
.complete_command(AdminCommand::command(), line)
} }
#[must_use]
pub(super) fn handle(command: CommandInput) -> HandlerResult { Box::pin(handle_command(command)) }
#[tracing::instrument(skip_all, name = "admin")] #[tracing::instrument(skip_all, name = "admin")]
async fn handle_command(command: CommandInput) -> CommandResult { async fn handle_command(services: Arc<Services>, command: CommandInput) -> CommandResult {
AssertUnwindSafe(Box::pin(process_command(&command))) AssertUnwindSafe(Box::pin(process_command(services, &command)))
.catch_unwind() .catch_unwind()
.await .await
.map_err(Error::from_panic) .map_err(Error::from_panic)
.or_else(|error| handle_panic(&error, command)) .or_else(|error| handle_panic(&error, command))
} }
async fn process_command(command: &CommandInput) -> CommandOutput { async fn process_command(services: Arc<Services>, command: &CommandInput) -> CommandOutput {
Handler { process(services, &command.command)
services: service::services(), .await
} .and_then(|content| reply(content, command.reply_id.clone()))
.process(&command.command)
.await
.and_then(|content| reply(content, command.reply_id.clone()))
} }
fn handle_panic(error: &Error, command: CommandInput) -> CommandResult { fn handle_panic(error: &Error, command: CommandInput) -> CommandResult {
@ -68,129 +58,126 @@ fn reply(mut content: RoomMessageEventContent, reply_id: Option<OwnedEventId>) -
Some(content) Some(content)
} }
impl Handler { // Parse and process a message from the admin room
// Parse and process a message from the admin room async fn process(services: Arc<Services>, msg: &str) -> CommandOutput {
async fn process(&self, msg: &str) -> CommandOutput { let mut lines = msg.lines().filter(|l| !l.trim().is_empty());
let mut lines = msg.lines().filter(|l| !l.trim().is_empty()); let command = lines.next().expect("each string has at least one line");
let command = lines.next().expect("each string has at least one line"); let (parsed, body) = match parse_command(command) {
let (parsed, body) = match self.parse_command(command) { Ok(parsed) => parsed,
Ok(parsed) => parsed, Err(error) => {
Err(error) => { let server_name = services.globals.server_name();
let server_name = self.services.globals.server_name(); let message = error.replace("server.name", server_name.as_str());
let message = error.replace("server.name", server_name.as_str()); return Some(RoomMessageEventContent::notice_markdown(message));
return Some(RoomMessageEventContent::notice_markdown(message)); },
}, };
};
let timer = Instant::now(); let timer = Instant::now();
let body: Vec<&str> = body.iter().map(String::as_str).collect(); let body: Vec<&str> = body.iter().map(String::as_str).collect();
let context = Command { let context = Command {
services: self.services, services: &services,
body: &body, body: &body,
}; };
let result = Box::pin(admin::process(parsed, &context)).await; let result = Box::pin(admin::process(parsed, &context)).await;
let elapsed = timer.elapsed(); let elapsed = timer.elapsed();
conduit::debug!(?command, ok = result.is_ok(), "command processed in {elapsed:?}"); conduit::debug!(?command, ok = result.is_ok(), "command processed in {elapsed:?}");
match result { match result {
Ok(reply) => Some(reply), Ok(reply) => Some(reply),
Err(error) => Some(RoomMessageEventContent::notice_markdown(format!( Err(error) => Some(RoomMessageEventContent::notice_markdown(format!(
"Encountered an error while handling the command:\n```\n{error:#?}\n```" "Encountered an error while handling the command:\n```\n{error:#?}\n```"
))), ))),
}
}
// Parse chat messages from the admin room into an AdminCommand object
fn parse_command(&self, command_line: &str) -> Result<(AdminCommand, Vec<String>), String> {
let argv = self.parse_line(command_line);
let com = AdminCommand::try_parse_from(&argv).map_err(|error| error.to_string())?;
Ok((com, argv))
}
fn complete_command(&self, mut cmd: clap::Command, line: &str) -> String {
let argv = self.parse_line(line);
let mut ret = Vec::<String>::with_capacity(argv.len().saturating_add(1));
'token: for token in argv.into_iter().skip(1) {
let cmd_ = cmd.clone();
let mut choice = Vec::new();
for sub in cmd_.get_subcommands() {
let name = sub.get_name();
if *name == token {
// token already complete; recurse to subcommand
ret.push(token);
cmd.clone_from(sub);
continue 'token;
} else if name.starts_with(&token) {
// partial match; add to choices
choice.push(name);
}
}
if choice.len() == 1 {
// One choice. Add extra space because it's complete
let choice = *choice.first().expect("only choice");
ret.push(choice.to_owned());
ret.push(String::new());
} else if choice.is_empty() {
// Nothing found, return original string
ret.push(token);
} else {
// Find the common prefix
ret.push(common_prefix(&choice).into());
}
// Return from completion
return ret.join(" ");
}
// Return from no completion. Needs a space though.
ret.push(String::new());
ret.join(" ")
}
// Parse chat messages from the admin room into an AdminCommand object
fn parse_line(&self, command_line: &str) -> Vec<String> {
let mut argv = command_line
.split_whitespace()
.map(str::to_owned)
.collect::<Vec<String>>();
// Remove any escapes that came with a server-side escape command
if !argv.is_empty() && argv[0].ends_with("admin") {
argv[0] = argv[0].trim_start_matches('\\').into();
}
// First index has to be "admin" but for console convenience we add it here
let server_user = self.services.globals.server_user.as_str();
if !argv.is_empty() && !argv[0].ends_with("admin") && !argv[0].starts_with(server_user) {
argv.insert(0, "admin".to_owned());
}
// Replace `help command` with `command --help`
// Clap has a help subcommand, but it omits the long help description.
if argv.len() > 1 && argv[1] == "help" {
argv.remove(1);
argv.push("--help".to_owned());
}
// Backwards compatibility with `register_appservice`-style commands
if argv.len() > 1 && argv[1].contains('_') {
argv[1] = argv[1].replace('_', "-");
}
// Backwards compatibility with `register_appservice`-style commands
if argv.len() > 2 && argv[2].contains('_') {
argv[2] = argv[2].replace('_', "-");
}
// if the user is using the `query` command (argv[1]), replace underscores in
// the database function/table name (argv[3]) with hyphens to match the codebase
if argv.len() > 3 && argv[1].eq("query") {
argv[3] = argv[3].replace('_', "-");
}
trace!(?command_line, ?argv, "parse");
argv
} }
} }
// Parse chat messages from the admin room into an AdminCommand object
fn parse_command(command_line: &str) -> Result<(AdminCommand, Vec<String>), String> {
let argv = parse_line(command_line);
let com = AdminCommand::try_parse_from(&argv).map_err(|error| error.to_string())?;
Ok((com, argv))
}
fn complete_command(mut cmd: clap::Command, line: &str) -> String {
let argv = parse_line(line);
let mut ret = Vec::<String>::with_capacity(argv.len().saturating_add(1));
'token: for token in argv.into_iter().skip(1) {
let cmd_ = cmd.clone();
let mut choice = Vec::new();
for sub in cmd_.get_subcommands() {
let name = sub.get_name();
if *name == token {
// token already complete; recurse to subcommand
ret.push(token);
cmd.clone_from(sub);
continue 'token;
} else if name.starts_with(&token) {
// partial match; add to choices
choice.push(name);
}
}
if choice.len() == 1 {
// One choice. Add extra space because it's complete
let choice = *choice.first().expect("only choice");
ret.push(choice.to_owned());
ret.push(String::new());
} else if choice.is_empty() {
// Nothing found, return original string
ret.push(token);
} else {
// Find the common prefix
ret.push(common_prefix(&choice).into());
}
// Return from completion
return ret.join(" ");
}
// Return from no completion. Needs a space though.
ret.push(String::new());
ret.join(" ")
}
// Parse chat messages from the admin room into an AdminCommand object
fn parse_line(command_line: &str) -> Vec<String> {
let mut argv = command_line
.split_whitespace()
.map(str::to_owned)
.collect::<Vec<String>>();
// Remove any escapes that came with a server-side escape command
if !argv.is_empty() && argv[0].ends_with("admin") {
argv[0] = argv[0].trim_start_matches('\\').into();
}
// First index has to be "admin" but for console convenience we add it here
if !argv.is_empty() && !argv[0].ends_with("admin") && !argv[0].starts_with('@') {
argv.insert(0, "admin".to_owned());
}
// Replace `help command` with `command --help`
// Clap has a help subcommand, but it omits the long help description.
if argv.len() > 1 && argv[1] == "help" {
argv.remove(1);
argv.push("--help".to_owned());
}
// Backwards compatibility with `register_appservice`-style commands
if argv.len() > 1 && argv[1].contains('_') {
argv[1] = argv[1].replace('_', "-");
}
// Backwards compatibility with `register_appservice`-style commands
if argv.len() > 2 && argv[2].contains('_') {
argv[2] = argv[2].replace('_', "-");
}
// if the user is using the `query` command (argv[1]), replace underscores in
// the database function/table name (argv[3]) with hyphens to match the codebase
if argv.len() > 3 && argv[1].eq("query") {
argv[3] = argv[3].replace('_', "-");
}
trace!(?command_line, ?argv, "parse");
argv
}

View file

@ -1,4 +1,4 @@
#![recursion_limit = "168"] #![recursion_limit = "192"]
#![allow(clippy::wildcard_imports)] #![allow(clippy::wildcard_imports)]
#![allow(clippy::enum_glob_use)] #![allow(clippy::enum_glob_use)]
@ -24,7 +24,6 @@ extern crate conduit_service as service;
pub(crate) use conduit::Result; pub(crate) use conduit::Result;
pub(crate) use conduit_macros::{admin_command, admin_command_dispatch}; pub(crate) use conduit_macros::{admin_command, admin_command_dispatch};
pub(crate) use service::services;
pub(crate) use crate::{ pub(crate) use crate::{
command::Command, command::Command,
@ -38,26 +37,19 @@ conduit::mod_dtor! {}
conduit::rustc_flags_capture! {} conduit::rustc_flags_capture! {}
/// Install the admin command handler /// Install the admin command handler
pub async fn init() { pub async fn init(admin_service: &service::admin::Service) {
_ = services() _ = admin_service
.admin
.complete .complete
.write() .write()
.expect("locked for writing") .expect("locked for writing")
.insert(handler::complete); .insert(handler::complete);
_ = services() _ = admin_service.handle.write().await.insert(handler::handle);
.admin
.handle
.write()
.await
.insert(handler::handle);
} }
/// Uninstall the admin command handler /// Uninstall the admin command handler
pub async fn fini() { pub async fn fini(admin_service: &service::admin::Service) {
_ = services().admin.handle.write().await.take(); _ = admin_service.handle.write().await.take();
_ = services() _ = admin_service
.admin
.complete .complete
.write() .write()
.expect("locked for writing") .expect("locked for writing")

View file

@ -370,7 +370,7 @@ pub(crate) async fn register_route(
if let Some(room_id_server_name) = room.server_name() { if let Some(room_id_server_name) = room.server_name() {
if let Err(e) = join_room_by_id_helper( if let Err(e) = join_room_by_id_helper(
services, &services,
&user_id, &user_id,
room, room,
Some("Automatically joining this room upon registration".to_owned()), Some("Automatically joining this room upon registration".to_owned()),
@ -562,11 +562,11 @@ pub(crate) async fn deactivate_route(
.rooms_joined(sender_user) .rooms_joined(sender_user)
.filter_map(Result::ok) .filter_map(Result::ok)
.collect(); .collect();
super::update_displayname(services, sender_user.clone(), None, all_joined_rooms.clone()).await?; super::update_displayname(&services, sender_user.clone(), None, all_joined_rooms.clone()).await?;
super::update_avatar_url(services, sender_user.clone(), None, None, all_joined_rooms).await?; super::update_avatar_url(&services, sender_user.clone(), None, None, all_joined_rooms).await?;
// Make the user leave all rooms before deactivation // Make the user leave all rooms before deactivation
super::leave_all_rooms(services, sender_user).await; super::leave_all_rooms(&services, sender_user).await;
info!("User {sender_user} deactivated their account."); info!("User {sender_user} deactivated their account.");
services services

View file

@ -107,7 +107,7 @@ pub(crate) async fn get_alias_route(
return Err(Error::BadRequest(ErrorKind::NotFound, "Room with alias not found.")); return Err(Error::BadRequest(ErrorKind::NotFound, "Room with alias not found."));
}; };
let servers = room_available_servers(services, &room_id, &room_alias, &pre_servers); let servers = room_available_servers(&services, &room_id, &room_alias, &pre_servers);
debug!(?room_alias, ?room_id, "available servers: {servers:?}"); debug!(?room_alias, ?room_id, "available servers: {servers:?}");
Ok(get_alias::v3::Response::new(room_id, servers)) Ok(get_alias::v3::Response::new(room_id, servers))

View file

@ -20,7 +20,7 @@ pub(crate) async fn set_global_account_data_route(
State(services): State<crate::State>, body: Ruma<set_global_account_data::v3::Request>, State(services): State<crate::State>, body: Ruma<set_global_account_data::v3::Request>,
) -> Result<set_global_account_data::v3::Response> { ) -> Result<set_global_account_data::v3::Response> {
set_account_data( set_account_data(
services, &services,
None, None,
&body.sender_user, &body.sender_user,
&body.event_type.to_string(), &body.event_type.to_string(),
@ -37,7 +37,7 @@ pub(crate) async fn set_room_account_data_route(
State(services): State<crate::State>, body: Ruma<set_room_account_data::v3::Request>, State(services): State<crate::State>, body: Ruma<set_room_account_data::v3::Request>,
) -> Result<set_room_account_data::v3::Response> { ) -> Result<set_room_account_data::v3::Response> {
set_account_data( set_account_data(
services, &services,
Some(&body.room_id), Some(&body.room_id),
&body.sender_user, &body.sender_user,
&body.event_type.to_string(), &body.event_type.to_string(),

View file

@ -48,7 +48,7 @@ pub(crate) async fn get_public_rooms_filtered_route(
} }
let response = get_public_rooms_filtered_helper( let response = get_public_rooms_filtered_helper(
services, &services,
body.server.as_deref(), body.server.as_deref(),
body.limit, body.limit,
body.since.as_deref(), body.since.as_deref(),
@ -88,7 +88,7 @@ pub(crate) async fn get_public_rooms_route(
} }
let response = get_public_rooms_filtered_helper( let response = get_public_rooms_filtered_helper(
services, &services,
body.server.as_deref(), body.server.as_deref(),
body.limit, body.limit,
body.since.as_deref(), body.since.as_deref(),
@ -124,7 +124,7 @@ pub(crate) async fn set_room_visibility_route(
return Err(Error::BadRequest(ErrorKind::NotFound, "Room not found")); return Err(Error::BadRequest(ErrorKind::NotFound, "Room not found"));
} }
if !user_can_publish_room(services, sender_user, &body.room_id)? { if !user_can_publish_room(&services, sender_user, &body.room_id)? {
return Err(Error::BadRequest( return Err(Error::BadRequest(
ErrorKind::forbidden(), ErrorKind::forbidden(),
"User is not allowed to publish this room", "User is not allowed to publish this room",

View file

@ -77,7 +77,7 @@ pub(crate) async fn get_keys_route(
let sender_user = body.sender_user.as_ref().expect("user is authenticated"); let sender_user = body.sender_user.as_ref().expect("user is authenticated");
get_keys_helper( get_keys_helper(
services, &services,
Some(sender_user), Some(sender_user),
&body.device_keys, &body.device_keys,
|u| u == sender_user, |u| u == sender_user,
@ -92,7 +92,7 @@ pub(crate) async fn get_keys_route(
pub(crate) async fn claim_keys_route( pub(crate) async fn claim_keys_route(
State(services): State<crate::State>, body: Ruma<claim_keys::v3::Request>, State(services): State<crate::State>, body: Ruma<claim_keys::v3::Request>,
) -> Result<claim_keys::v3::Response> { ) -> Result<claim_keys::v3::Response> {
claim_keys_helper(services, &body.one_time_keys).await claim_keys_helper(&services, &body.one_time_keys).await
} }
/// # `POST /_matrix/client/r0/keys/device_signing/upload` /// # `POST /_matrix/client/r0/keys/device_signing/upload`

View file

@ -76,12 +76,12 @@ pub(crate) async fn get_media_preview_route(
let sender_user = body.sender_user.as_ref().expect("user is authenticated"); let sender_user = body.sender_user.as_ref().expect("user is authenticated");
let url = &body.url; let url = &body.url;
if !url_preview_allowed(services, url) { if !url_preview_allowed(&services, url) {
warn!(%sender_user, "URL is not allowed to be previewed: {url}"); warn!(%sender_user, "URL is not allowed to be previewed: {url}");
return Err(Error::BadRequest(ErrorKind::forbidden(), "URL is not allowed to be previewed")); return Err(Error::BadRequest(ErrorKind::forbidden(), "URL is not allowed to be previewed"));
} }
match get_url_preview(services, url).await { match get_url_preview(&services, url).await {
Ok(preview) => { Ok(preview) => {
let res = serde_json::value::to_raw_value(&preview).map_err(|e| { let res = serde_json::value::to_raw_value(&preview).map_err(|e| {
error!(%sender_user, "Failed to convert UrlPreviewData into a serde json value: {e}"); error!(%sender_user, "Failed to convert UrlPreviewData into a serde json value: {e}");
@ -221,7 +221,7 @@ pub(crate) async fn get_content_route(
}) })
} else if !services.globals.server_is_ours(&body.server_name) && body.allow_remote { } else if !services.globals.server_is_ours(&body.server_name) && body.allow_remote {
let response = get_remote_content( let response = get_remote_content(
services, &services,
&mxc, &mxc,
&body.server_name, &body.server_name,
body.media_id.clone(), body.media_id.clone(),
@ -311,7 +311,7 @@ pub(crate) async fn get_content_as_filename_route(
}) })
} else if !services.globals.server_is_ours(&body.server_name) && body.allow_remote { } else if !services.globals.server_is_ours(&body.server_name) && body.allow_remote {
match get_remote_content( match get_remote_content(
services, &services,
&mxc, &mxc,
&body.server_name, &body.server_name,
body.media_id.clone(), body.media_id.clone(),

View file

@ -167,7 +167,7 @@ pub(crate) async fn join_room_by_id_route(
let sender_user = body.sender_user.as_ref().expect("user is authenticated"); let sender_user = body.sender_user.as_ref().expect("user is authenticated");
banned_room_check( banned_room_check(
services, &services,
sender_user, sender_user,
Some(&body.room_id), Some(&body.room_id),
body.room_id.server_name(), body.room_id.server_name(),
@ -202,7 +202,7 @@ pub(crate) async fn join_room_by_id_route(
} }
join_room_by_id_helper( join_room_by_id_helper(
services, &services,
sender_user, sender_user,
&body.room_id, &body.room_id,
body.reason.clone(), body.reason.clone(),
@ -231,7 +231,7 @@ pub(crate) async fn join_room_by_id_or_alias_route(
let (servers, room_id) = match OwnedRoomId::try_from(body.room_id_or_alias) { let (servers, room_id) = match OwnedRoomId::try_from(body.room_id_or_alias) {
Ok(room_id) => { Ok(room_id) => {
banned_room_check(services, sender_user, Some(&room_id), room_id.server_name(), client).await?; banned_room_check(&services, sender_user, Some(&room_id), room_id.server_name(), client).await?;
let mut servers = body.server_name.clone(); let mut servers = body.server_name.clone();
servers.extend( servers.extend(
@ -270,7 +270,7 @@ pub(crate) async fn join_room_by_id_or_alias_route(
.await?; .await?;
let (room_id, mut pre_servers) = response; let (room_id, mut pre_servers) = response;
banned_room_check(services, sender_user, Some(&room_id), Some(room_alias.server_name()), client).await?; banned_room_check(&services, sender_user, Some(&room_id), Some(room_alias.server_name()), client).await?;
let mut servers = body.server_name; let mut servers = body.server_name;
if let Some(pre_servers) = &mut pre_servers { if let Some(pre_servers) = &mut pre_servers {
@ -303,7 +303,7 @@ pub(crate) async fn join_room_by_id_or_alias_route(
}; };
let join_room_response = join_room_by_id_helper( let join_room_response = join_room_by_id_helper(
services, &services,
sender_user, sender_user,
&room_id, &room_id,
body.reason.clone(), body.reason.clone(),
@ -327,7 +327,7 @@ pub(crate) async fn leave_room_route(
) -> Result<leave_room::v3::Response> { ) -> Result<leave_room::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated"); let sender_user = body.sender_user.as_ref().expect("user is authenticated");
leave_room(services, sender_user, &body.room_id, body.reason.clone()).await?; leave_room(&services, sender_user, &body.room_id, body.reason.clone()).await?;
Ok(leave_room::v3::Response::new()) Ok(leave_room::v3::Response::new())
} }
@ -353,13 +353,13 @@ pub(crate) async fn invite_user_route(
)); ));
} }
banned_room_check(services, sender_user, Some(&body.room_id), body.room_id.server_name(), client).await?; banned_room_check(&services, sender_user, Some(&body.room_id), body.room_id.server_name(), client).await?;
if let invite_user::v3::InvitationRecipient::UserId { if let invite_user::v3::InvitationRecipient::UserId {
user_id, user_id,
} = &body.recipient } = &body.recipient
{ {
invite_helper(services, sender_user, user_id, &body.room_id, body.reason.clone(), false).await?; invite_helper(&services, sender_user, user_id, &body.room_id, body.reason.clone(), false).await?;
Ok(invite_user::v3::Response {}) Ok(invite_user::v3::Response {})
} else { } else {
Err(Error::BadRequest(ErrorKind::NotFound, "User not found.")) Err(Error::BadRequest(ErrorKind::NotFound, "User not found."))

View file

@ -146,7 +146,7 @@ pub(crate) async fn get_message_events_route(
.timeline .timeline
.pdus_after(sender_user, &body.room_id, from)? .pdus_after(sender_user, &body.room_id, from)?
.filter_map(Result::ok) // Filter out buggy events .filter_map(Result::ok) // Filter out buggy events
.filter(|(_, pdu)| { contains_url_filter(pdu, &body.filter) && visibility_filter(services, pdu, sender_user, &body.room_id) .filter(|(_, pdu)| { contains_url_filter(pdu, &body.filter) && visibility_filter(&services, pdu, sender_user, &body.room_id)
}) })
.take_while(|&(k, _)| Some(k) != to) // Stop at `to` .take_while(|&(k, _)| Some(k) != to) // Stop at `to`
@ -193,7 +193,7 @@ pub(crate) async fn get_message_events_route(
.timeline .timeline
.pdus_until(sender_user, &body.room_id, from)? .pdus_until(sender_user, &body.room_id, from)?
.filter_map(Result::ok) // Filter out buggy events .filter_map(Result::ok) // Filter out buggy events
.filter(|(_, pdu)| {contains_url_filter(pdu, &body.filter) && visibility_filter(services, pdu, sender_user, &body.room_id)}) .filter(|(_, pdu)| {contains_url_filter(pdu, &body.filter) && visibility_filter(&services, pdu, sender_user, &body.room_id)})
.take_while(|&(k, _)| Some(k) != to) // Stop at `to` .take_while(|&(k, _)| Some(k) != to) // Stop at `to`
.take(limit) .take(limit)
.collect(); .collect();

View file

@ -33,7 +33,7 @@ pub(crate) async fn set_displayname_route(
.filter_map(Result::ok) .filter_map(Result::ok)
.collect(); .collect();
update_displayname(services, sender_user.clone(), body.displayname.clone(), all_joined_rooms).await?; update_displayname(&services, sender_user.clone(), body.displayname.clone(), all_joined_rooms).await?;
if services.globals.allow_local_presence() { if services.globals.allow_local_presence() {
// Presence update // Presence update
@ -118,7 +118,7 @@ pub(crate) async fn set_avatar_url_route(
.collect(); .collect();
update_avatar_url( update_avatar_url(
services, &services,
sender_user.clone(), sender_user.clone(),
body.avatar_url.clone(), body.avatar_url.clone(),
body.blurhash.clone(), body.blurhash.clone(),

View file

@ -40,7 +40,7 @@ pub(crate) async fn report_event_route(
}; };
is_report_valid( is_report_valid(
services, &services,
&pdu.event_id, &pdu.event_id,
&body.room_id, &body.room_id,
sender_user, sender_user,

View file

@ -79,7 +79,7 @@ pub(crate) async fn create_room_route(
} }
let room_id: OwnedRoomId = if let Some(custom_room_id) = &body.room_id { let room_id: OwnedRoomId = if let Some(custom_room_id) = &body.room_id {
custom_room_id_check(services, custom_room_id)? custom_room_id_check(&services, custom_room_id)?
} else { } else {
RoomId::new(&services.globals.config.server_name) RoomId::new(&services.globals.config.server_name)
}; };
@ -96,7 +96,7 @@ pub(crate) async fn create_room_route(
let state_lock = services.rooms.state.mutex.lock(&room_id).await; let state_lock = services.rooms.state.mutex.lock(&room_id).await;
let alias: Option<OwnedRoomAliasId> = if let Some(alias) = &body.room_alias_name { let alias: Option<OwnedRoomAliasId> = if let Some(alias) = &body.room_alias_name {
Some(room_alias_check(services, alias, &body.appservice_info).await?) Some(room_alias_check(&services, alias, &body.appservice_info).await?)
} else { } else {
None None
}; };
@ -438,7 +438,7 @@ pub(crate) async fn create_room_route(
// 8. Events implied by invite (and TODO: invite_3pid) // 8. Events implied by invite (and TODO: invite_3pid)
drop(state_lock); drop(state_lock);
for user_id in &body.invite { for user_id in &body.invite {
if let Err(e) = invite_helper(services, sender_user, user_id, &room_id, None, body.is_direct).await { if let Err(e) = invite_helper(&services, sender_user, user_id, &room_id, None, body.is_direct).await {
warn!(%e, "Failed to send invite"); warn!(%e, "Failed to send invite");
} }
} }

View file

@ -37,7 +37,7 @@ pub(crate) async fn send_state_event_for_key_route(
Ok(send_state_event::v3::Response { Ok(send_state_event::v3::Response {
event_id: send_state_event_for_key_helper( event_id: send_state_event_for_key_helper(
services, &services,
sender_user, sender_user,
&body.room_id, &body.room_id,
&body.event_type, &body.event_type,

View file

@ -137,7 +137,7 @@ pub(crate) async fn sync_events_route(
); );
if services.globals.allow_local_presence() { if services.globals.allow_local_presence() {
process_presence_updates(services, &mut presence_updates, since, &sender_user).await?; process_presence_updates(&services, &mut presence_updates, since, &sender_user).await?;
} }
let all_joined_rooms = services let all_joined_rooms = services
@ -152,7 +152,7 @@ pub(crate) async fn sync_events_route(
for room_id in all_joined_rooms { for room_id in all_joined_rooms {
let room_id = room_id?; let room_id = room_id?;
if let Ok(joined_room) = load_joined_room( if let Ok(joined_room) = load_joined_room(
services, &services,
&sender_user, &sender_user,
&sender_device, &sender_device,
&room_id, &room_id,
@ -182,7 +182,7 @@ pub(crate) async fn sync_events_route(
.collect(); .collect();
for result in all_left_rooms { for result in all_left_rooms {
handle_left_room( handle_left_room(
services, &services,
since, since,
&result?.0, &result?.0,
&sender_user, &sender_user,
@ -1214,7 +1214,7 @@ pub(crate) async fn sync_events_v4_route(
match new_membership { match new_membership {
MembershipState::Join => { MembershipState::Join => {
// A new user joined an encrypted room // A new user joined an encrypted room
if !share_encrypted_room(services, &sender_user, &user_id, room_id)? { if !share_encrypted_room(&services, &sender_user, &user_id, room_id)? {
device_list_changes.insert(user_id); device_list_changes.insert(user_id);
} }
}, },
@ -1243,7 +1243,7 @@ pub(crate) async fn sync_events_v4_route(
.filter(|user_id| { .filter(|user_id| {
// Only send keys if the sender doesn't share an encrypted room with the target // Only send keys if the sender doesn't share an encrypted room with the target
// already // already
!share_encrypted_room(services, &sender_user, user_id, room_id).unwrap_or(false) !share_encrypted_room(&services, &sender_user, user_id, room_id).unwrap_or(false)
}), }),
); );
} }
@ -1407,7 +1407,8 @@ pub(crate) async fn sync_events_v4_route(
for (room_id, (required_state_request, timeline_limit, roomsince)) in &todo_rooms { for (room_id, (required_state_request, timeline_limit, roomsince)) in &todo_rooms {
let roomsincecount = PduCount::Normal(*roomsince); let roomsincecount = PduCount::Normal(*roomsince);
let (timeline_pdus, limited) = load_timeline(services, &sender_user, room_id, roomsincecount, *timeline_limit)?; let (timeline_pdus, limited) =
load_timeline(&services, &sender_user, room_id, roomsincecount, *timeline_limit)?;
if roomsince != &0 && timeline_pdus.is_empty() { if roomsince != &0 && timeline_pdus.is_empty() {
continue; continue;

View file

@ -1,4 +1,4 @@
#![recursion_limit = "160"] #![recursion_limit = "192"]
pub mod client; pub mod client;
pub mod router; pub mod router;

View file

@ -4,6 +4,8 @@ mod handler;
mod request; mod request;
mod response; mod response;
use std::sync::Arc;
use axum::{ use axum::{
response::IntoResponse, response::IntoResponse,
routing::{any, get, post}, routing::{any, get, post},
@ -16,7 +18,7 @@ use self::handler::RouterExt;
pub(super) use self::{args::Args as Ruma, response::RumaResponse}; pub(super) use self::{args::Args as Ruma, response::RumaResponse};
use crate::{client, server}; use crate::{client, server};
pub type State = &'static service::Services; pub type State = Arc<service::Services>;
pub fn build(router: Router<State>, server: &Server) -> Router<State> { pub fn build(router: Router<State>, server: &Server) -> Router<State> {
let config = &server.config; let config = &server.config;

View file

@ -7,7 +7,7 @@ use ruma::{api::IncomingRequest, CanonicalJsonValue, OwnedDeviceId, OwnedServerN
use service::Services; use service::Services;
use super::{auth, auth::Auth, request, request::Request}; use super::{auth, auth::Auth, request, request::Request};
use crate::service::appservice::RegistrationInfo; use crate::{service::appservice::RegistrationInfo, State};
/// Extractor for Ruma request structs /// Extractor for Ruma request structs
pub(crate) struct Args<T> { pub(crate) struct Args<T> {
@ -36,14 +36,13 @@ pub(crate) struct Args<T> {
} }
#[async_trait] #[async_trait]
impl<T, S> FromRequest<S, Body> for Args<T> impl<T> FromRequest<State, Body> for Args<T>
where where
T: IncomingRequest, T: IncomingRequest,
{ {
type Rejection = Error; type Rejection = Error;
async fn from_request(request: hyper::Request<Body>, _: &S) -> Result<Self, Self::Rejection> { async fn from_request(request: hyper::Request<Body>, services: &State) -> Result<Self, Self::Rejection> {
let services = service::services(); // ???
let mut request = request::from(services, request).await?; let mut request = request::from(services, request).await?;
let mut json_body = serde_json::from_slice::<CanonicalJsonValue>(&request.body).ok(); let mut json_body = serde_json::from_slice::<CanonicalJsonValue>(&request.body).ok();
let auth = auth::auth(services, &mut request, &json_body, &T::METADATA).await?; let auth = auth::auth(services, &mut request, &json_body, &T::METADATA).await?;

View file

@ -82,7 +82,7 @@ pub(crate) async fn create_join_event_template_route(
.state_cache .state_cache
.is_left(&body.user_id, &body.room_id) .is_left(&body.user_id, &body.room_id)
.unwrap_or(true)) .unwrap_or(true))
&& user_can_perform_restricted_join(services, &body.user_id, &body.room_id, &room_version_id)? && user_can_perform_restricted_join(&services, &body.user_id, &body.room_id, &room_version_id)?
{ {
let auth_user = services let auth_user = services
.rooms .rooms

View file

@ -26,7 +26,7 @@ pub(crate) async fn get_public_rooms_filtered_route(
} }
let response = crate::client::get_public_rooms_filtered_helper( let response = crate::client::get_public_rooms_filtered_helper(
services, &services,
None, None,
body.limit, body.limit,
body.since.as_deref(), body.since.as_deref(),
@ -60,7 +60,7 @@ pub(crate) async fn get_public_rooms_route(
} }
let response = crate::client::get_public_rooms_filtered_helper( let response = crate::client::get_public_rooms_filtered_helper(
services, &services,
None, None,
body.limit, body.limit,
body.since.as_deref(), body.since.as_deref(),

View file

@ -62,8 +62,8 @@ pub(crate) async fn send_transaction_message_route(
"Starting txn", "Starting txn",
); );
let resolved_map = handle_pdus(services, &client, &body, origin, &txn_start_time).await?; let resolved_map = handle_pdus(&services, &client, &body, origin, &txn_start_time).await?;
handle_edus(services, &client, &body, origin).await?; handle_edus(&services, &client, &body, origin).await?;
debug!( debug!(
pdus = ?body.pdus.len(), pdus = ?body.pdus.len(),

View file

@ -241,7 +241,7 @@ pub(crate) async fn create_join_event_v1_route(
} }
} }
let room_state = create_join_event(services, origin, &body.room_id, &body.pdu).await?; let room_state = create_join_event(&services, origin, &body.room_id, &body.pdu).await?;
Ok(create_join_event::v1::Response { Ok(create_join_event::v1::Response {
room_state, room_state,
@ -286,7 +286,7 @@ pub(crate) async fn create_join_event_v2_route(
auth_chain, auth_chain,
state, state,
event, event,
} = create_join_event(services, origin, &body.room_id, &body.pdu).await?; } = create_join_event(&services, origin, &body.room_id, &body.pdu).await?;
let room_state = create_join_event::v2::RoomState { let room_state = create_join_event::v2::RoomState {
members_omitted: false, members_omitted: false,
auth_chain, auth_chain,

View file

@ -28,7 +28,7 @@ pub(crate) async fn create_leave_event_v1_route(
) -> Result<create_leave_event::v1::Response> { ) -> Result<create_leave_event::v1::Response> {
let origin = body.origin.as_ref().expect("server is authenticated"); let origin = body.origin.as_ref().expect("server is authenticated");
create_leave_event(services, origin, &body.room_id, &body.pdu).await?; create_leave_event(&services, origin, &body.room_id, &body.pdu).await?;
Ok(create_leave_event::v1::Response::new()) Ok(create_leave_event::v1::Response::new())
} }
@ -41,7 +41,7 @@ pub(crate) async fn create_leave_event_v2_route(
) -> Result<create_leave_event::v2::Response> { ) -> Result<create_leave_event::v2::Response> {
let origin = body.origin.as_ref().expect("server is authenticated"); let origin = body.origin.as_ref().expect("server is authenticated");
create_leave_event(services, origin, &body.room_id, &body.pdu).await?; create_leave_event(&services, origin, &body.room_id, &body.pdu).await?;
Ok(create_leave_event::v2::Response::new()) Ok(create_leave_event::v2::Response::new())
} }

View file

@ -84,7 +84,7 @@ pub(crate) async fn get_keys_route(
} }
let result = get_keys_helper( let result = get_keys_helper(
services, &services,
None, None,
&body.device_keys, &body.device_keys,
|u| Some(u.server_name()) == body.origin.as_deref(), |u| Some(u.server_name()) == body.origin.as_deref(),
@ -116,7 +116,7 @@ pub(crate) async fn claim_keys_route(
)); ));
} }
let result = claim_keys_helper(services, &body.one_time_keys).await?; let result = claim_keys_helper(&services, &body.one_time_keys).await?;
Ok(claim_keys::v1::Response { Ok(claim_keys::v1::Response {
one_time_keys: result.one_time_keys, one_time_keys: result.one_time_keys,

View file

@ -11,12 +11,12 @@ pub struct Database {
impl Database { impl Database {
/// Load an existing database or create a new one. /// Load an existing database or create a new one.
pub async fn open(server: &Arc<Server>) -> Result<Self> { pub async fn open(server: &Arc<Server>) -> Result<Arc<Self>> {
let db = Engine::open(server)?; let db = Engine::open(server)?;
Ok(Self { Ok(Arc::new(Self {
db: db.clone(), db: db.clone(),
map: maps::open(&db)?, map: maps::open(&db)?,
}) }))
} }
#[inline] #[inline]

View file

@ -1,3 +1,5 @@
#![recursion_limit = "192"]
pub(crate) mod clap; pub(crate) mod clap;
mod mods; mod mods;
mod restart; mod restart;
@ -57,17 +59,38 @@ fn main() -> Result<(), Error> {
async fn async_main(server: &Arc<Server>) -> Result<(), Error> { async fn async_main(server: &Arc<Server>) -> Result<(), Error> {
extern crate conduit_router as router; extern crate conduit_router as router;
if let Err(error) = router::start(&server.server).await { match router::start(&server.server).await {
error!("Critical error starting server: {error}"); Ok(services) => server.services.lock().await.insert(services),
return Err(error); Err(error) => {
} error!("Critical error starting server: {error}");
return Err(error);
},
};
if let Err(error) = router::run(&server.server).await { if let Err(error) = router::run(
server
.services
.lock()
.await
.as_ref()
.expect("services initialized"),
)
.await
{
error!("Critical error running server: {error}"); error!("Critical error running server: {error}");
return Err(error); return Err(error);
} }
if let Err(error) = router::stop(&server.server).await { if let Err(error) = router::stop(
server
.services
.lock()
.await
.take()
.expect("services initialied"),
)
.await
{
error!("Critical error stopping server: {error}"); error!("Critical error stopping server: {error}");
return Err(error); return Err(error);
} }

View file

@ -1,7 +1,7 @@
use std::sync::Arc; use std::sync::Arc;
use conduit::{config::Config, info, log::Log, utils::sys, Error, Result}; use conduit::{config::Config, info, log::Log, utils::sys, Error, Result};
use tokio::runtime; use tokio::{runtime, sync::Mutex};
use crate::{clap::Args, tracing::TracingFlameGuard}; use crate::{clap::Args, tracing::TracingFlameGuard};
@ -10,6 +10,8 @@ pub(crate) struct Server {
/// Server runtime state; public portion /// Server runtime state; public portion
pub(crate) server: Arc<conduit::Server>, pub(crate) server: Arc<conduit::Server>,
pub(crate) services: Mutex<Option<Arc<conduit_service::Services>>>,
_tracing_flame_guard: TracingFlameGuard, _tracing_flame_guard: TracingFlameGuard,
#[cfg(feature = "sentry_telemetry")] #[cfg(feature = "sentry_telemetry")]
@ -54,6 +56,8 @@ impl Server {
}, },
)), )),
services: None.into(),
_tracing_flame_guard: tracing_flame_guard, _tracing_flame_guard: tracing_flame_guard,
#[cfg(feature = "sentry_telemetry")] #[cfg(feature = "sentry_telemetry")]

View file

@ -6,6 +6,7 @@ use axum::{
}; };
use axum_client_ip::SecureClientIpSource; use axum_client_ip::SecureClientIpSource;
use conduit::{error, Result, Server}; use conduit::{error, Result, Server};
use conduit_service::Services;
use http::{ use http::{
header::{self, HeaderName}, header::{self, HeaderName},
HeaderValue, Method, StatusCode, HeaderValue, Method, StatusCode,
@ -34,7 +35,8 @@ const CONDUWUIT_CSP: &[&str] = &[
const CONDUWUIT_PERMISSIONS_POLICY: &[&str] = &["interest-cohort=()", "browsing-topics=()"]; const CONDUWUIT_PERMISSIONS_POLICY: &[&str] = &["interest-cohort=()", "browsing-topics=()"];
pub(crate) fn build(server: &Arc<Server>) -> Result<Router> { pub(crate) fn build(services: &Arc<Services>) -> Result<Router> {
let server = &services.server;
let layers = ServiceBuilder::new(); let layers = ServiceBuilder::new();
#[cfg(feature = "sentry_telemetry")] #[cfg(feature = "sentry_telemetry")]
@ -83,7 +85,7 @@ pub(crate) fn build(server: &Arc<Server>) -> Result<Router> {
.layer(body_limit_layer(server)) .layer(body_limit_layer(server))
.layer(CatchPanicLayer::custom(catch_panic)); .layer(CatchPanicLayer::custom(catch_panic));
Ok(router::build(server).layer(layers)) Ok(router::build(services).layer(layers))
} }
#[cfg(any(feature = "zstd_compression", feature = "gzip_compression", feature = "brotli_compression"))] #[cfg(any(feature = "zstd_compression", feature = "gzip_compression", feature = "brotli_compression"))]
@ -151,12 +153,14 @@ fn body_limit_layer(server: &Server) -> DefaultBodyLimit { DefaultBodyLimit::max
#[allow(clippy::needless_pass_by_value)] #[allow(clippy::needless_pass_by_value)]
#[tracing::instrument(skip_all, name = "panic")] #[tracing::instrument(skip_all, name = "panic")]
fn catch_panic(err: Box<dyn Any + Send + 'static>) -> http::Response<http_body_util::Full<bytes::Bytes>> { fn catch_panic(err: Box<dyn Any + Send + 'static>) -> http::Response<http_body_util::Full<bytes::Bytes>> {
conduit_service::services() //TODO: XXX
.server /*
.metrics conduit_service::services()
.requests_panic .server
.fetch_add(1, std::sync::atomic::Ordering::Release); .metrics
.requests_panic
.fetch_add(1, std::sync::atomic::Ordering::Release);
*/
let details = if let Some(s) = err.downcast_ref::<String>() { let details = if let Some(s) = err.downcast_ref::<String>() {
s.clone() s.clone()
} else if let Some(s) = err.downcast_ref::<&str>() { } else if let Some(s) = err.downcast_ref::<&str>() {

View file

@ -11,22 +11,23 @@ extern crate conduit_core as conduit;
use std::{future::Future, pin::Pin, sync::Arc}; use std::{future::Future, pin::Pin, sync::Arc};
use conduit::{Result, Server}; use conduit::{Result, Server};
use conduit_service::Services;
conduit::mod_ctor! {} conduit::mod_ctor! {}
conduit::mod_dtor! {} conduit::mod_dtor! {}
conduit::rustc_flags_capture! {} conduit::rustc_flags_capture! {}
#[no_mangle] #[no_mangle]
pub extern "Rust" fn start(server: &Arc<Server>) -> Pin<Box<dyn Future<Output = Result<()>> + Send>> { pub extern "Rust" fn start(server: &Arc<Server>) -> Pin<Box<dyn Future<Output = Result<Arc<Services>>> + Send>> {
Box::pin(run::start(server.clone())) Box::pin(run::start(server.clone()))
} }
#[no_mangle] #[no_mangle]
pub extern "Rust" fn stop(server: &Arc<Server>) -> Pin<Box<dyn Future<Output = Result<()>> + Send>> { pub extern "Rust" fn stop(services: Arc<Services>) -> Pin<Box<dyn Future<Output = Result<()>> + Send>> {
Box::pin(run::stop(server.clone())) Box::pin(run::stop(services))
} }
#[no_mangle] #[no_mangle]
pub extern "Rust" fn run(server: &Arc<Server>) -> Pin<Box<dyn Future<Output = Result<()>> + Send>> { pub extern "Rust" fn run(services: &Arc<Services>) -> Pin<Box<dyn Future<Output = Result<()>> + Send>> {
Box::pin(run::run(server.clone())) Box::pin(run::run(services.clone()))
} }

View file

@ -1,20 +1,20 @@
use std::sync::Arc; use std::sync::Arc;
use axum::{response::IntoResponse, routing::get, Router}; use axum::{response::IntoResponse, routing::get, Router};
use conduit::{Error, Server}; use conduit::Error;
use conduit_api::State;
use conduit_service::Services;
use http::{StatusCode, Uri}; use http::{StatusCode, Uri};
use ruma::api::client::error::ErrorKind; use ruma::api::client::error::ErrorKind;
extern crate conduit_api as api; pub(crate) fn build(services: &Arc<Services>) -> Router {
extern crate conduit_service as service; let router = Router::<State>::new();
let state = services.clone();
pub(crate) fn build(server: &Arc<Server>) -> Router { conduit_api::router::build(router, &services.server)
let router = Router::<api::State>::new();
api::router::build(router, server)
.route("/", get(it_works)) .route("/", get(it_works))
.fallback(not_found) .fallback(not_found)
.with_state(service::services()) .with_state(state)
} }
async fn not_found(_uri: Uri) -> impl IntoResponse { async fn not_found(_uri: Uri) -> impl IntoResponse {

View file

@ -1,28 +1,30 @@
use std::{sync::Arc, time::Duration}; extern crate conduit_admin as admin;
extern crate conduit_core as conduit;
extern crate conduit_service as service;
use std::{
sync::{atomic::Ordering, Arc},
time::Duration,
};
use axum_server::Handle as ServerHandle; use axum_server::Handle as ServerHandle;
use conduit::{debug, debug_error, debug_info, error, info, Error, Result, Server};
use service::Services;
use tokio::{ use tokio::{
sync::broadcast::{self, Sender}, sync::broadcast::{self, Sender},
task::JoinHandle, task::JoinHandle,
}; };
extern crate conduit_admin as admin;
extern crate conduit_core as conduit;
extern crate conduit_service as service;
use std::sync::atomic::Ordering;
use conduit::{debug, debug_info, error, info, Error, Result, Server};
use crate::serve; use crate::serve;
/// Main loop base /// Main loop base
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
pub(crate) async fn run(server: Arc<Server>) -> Result<()> { pub(crate) async fn run(services: Arc<Services>) -> Result<()> {
let server = &services.server;
debug!("Start"); debug!("Start");
// Install the admin room callback here for now // Install the admin room callback here for now
admin::init().await; admin::init(&services.admin).await;
// Setup shutdown/signal handling // Setup shutdown/signal handling
let handle = ServerHandle::new(); let handle = ServerHandle::new();
@ -33,13 +35,13 @@ pub(crate) async fn run(server: Arc<Server>) -> Result<()> {
let mut listener = server let mut listener = server
.runtime() .runtime()
.spawn(serve::serve(server.clone(), handle.clone(), tx.subscribe())); .spawn(serve::serve(services.clone(), handle.clone(), tx.subscribe()));
// Focal point // Focal point
debug!("Running"); debug!("Running");
let res = tokio::select! { let res = tokio::select! {
res = &mut listener => res.map_err(Error::from).unwrap_or_else(Err), res = &mut listener => res.map_err(Error::from).unwrap_or_else(Err),
res = service::services().poll() => handle_services_poll(&server, res, listener).await, res = services.poll() => handle_services_poll(server, res, listener).await,
}; };
// Join the signal handler before we leave. // Join the signal handler before we leave.
@ -47,7 +49,7 @@ pub(crate) async fn run(server: Arc<Server>) -> Result<()> {
_ = sigs.await; _ = sigs.await;
// Remove the admin room callback // Remove the admin room callback
admin::fini().await; admin::fini(&services.admin).await;
debug_info!("Finish"); debug_info!("Finish");
res res
@ -55,26 +57,33 @@ pub(crate) async fn run(server: Arc<Server>) -> Result<()> {
/// Async initializations /// Async initializations
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
pub(crate) async fn start(server: Arc<Server>) -> Result<()> { pub(crate) async fn start(server: Arc<Server>) -> Result<Arc<Services>> {
debug!("Starting..."); debug!("Starting...");
service::start(&server).await?; let services = Services::build(server).await?.start().await?;
#[cfg(feature = "systemd")] #[cfg(feature = "systemd")]
sd_notify::notify(true, &[sd_notify::NotifyState::Ready]).expect("failed to notify systemd of ready state"); sd_notify::notify(true, &[sd_notify::NotifyState::Ready]).expect("failed to notify systemd of ready state");
debug!("Started"); debug!("Started");
Ok(()) Ok(services)
} }
/// Async destructions /// Async destructions
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
pub(crate) async fn stop(_server: Arc<Server>) -> Result<()> { pub(crate) async fn stop(services: Arc<Services>) -> Result<()> {
debug!("Shutting down..."); debug!("Shutting down...");
// Wait for all completions before dropping or we'll lose them to the module // Wait for all completions before dropping or we'll lose them to the module
// unload and explode. // unload and explode.
service::stop().await; services.stop().await;
if let Err(services) = Arc::try_unwrap(services) {
debug_error!(
"{} dangling references to Services after shutdown",
Arc::strong_count(&services)
);
}
debug!("Cleaning up..."); debug!("Cleaning up...");

View file

@ -5,22 +5,26 @@ mod unix;
use std::sync::Arc; use std::sync::Arc;
use axum_server::Handle as ServerHandle; use axum_server::Handle as ServerHandle;
use conduit::{Result, Server}; use conduit::Result;
use conduit_service::Services;
use tokio::sync::broadcast; use tokio::sync::broadcast;
use crate::layers; use super::layers;
/// Serve clients /// Serve clients
pub(super) async fn serve(server: Arc<Server>, handle: ServerHandle, shutdown: broadcast::Receiver<()>) -> Result<()> { pub(super) async fn serve(
services: Arc<Services>, handle: ServerHandle, shutdown: broadcast::Receiver<()>,
) -> Result<()> {
let server = &services.server;
let config = &server.config; let config = &server.config;
let addrs = config.get_bind_addrs(); let addrs = config.get_bind_addrs();
let app = layers::build(&server)?; let app = layers::build(&services)?;
if cfg!(unix) && config.unix_socket_path.is_some() { if cfg!(unix) && config.unix_socket_path.is_some() {
unix::serve(&server, app, shutdown).await unix::serve(server, app, shutdown).await
} else if config.tls.is_some() { } else if config.tls.is_some() {
tls::serve(&server, app, handle, addrs).await tls::serve(server, app, handle, addrs).await
} else { } else {
plain::serve(&server, app, handle, addrs).await plain::serve(server, app, handle, addrs).await
} }
} }

View file

@ -9,7 +9,7 @@ use std::{
}; };
use async_trait::async_trait; use async_trait::async_trait;
use conduit::{debug, error, error::default_log, pdu::PduBuilder, Error, PduEvent, Result, Server}; use conduit::{debug, error, error::default_log, pdu::PduBuilder, Err, Error, PduEvent, Result, Server};
pub use create::create_admin_room; pub use create::create_admin_room;
use loole::{Receiver, Sender}; use loole::{Receiver, Sender};
use ruma::{ use ruma::{
@ -41,6 +41,7 @@ struct Services {
timeline: Dep<rooms::timeline::Service>, timeline: Dep<rooms::timeline::Service>,
state: Dep<rooms::state::Service>, state: Dep<rooms::state::Service>,
state_cache: Dep<rooms::state_cache::Service>, state_cache: Dep<rooms::state_cache::Service>,
services: StdRwLock<Option<Arc<crate::Services>>>,
} }
#[derive(Debug)] #[derive(Debug)]
@ -50,7 +51,7 @@ pub struct CommandInput {
} }
pub type Completer = fn(&str) -> String; pub type Completer = fn(&str) -> String;
pub type Handler = fn(CommandInput) -> HandlerResult; pub type Handler = fn(Arc<crate::Services>, CommandInput) -> HandlerResult;
pub type HandlerResult = Pin<Box<dyn Future<Output = CommandResult> + Send>>; pub type HandlerResult = Pin<Box<dyn Future<Output = CommandResult> + Send>>;
pub type CommandResult = Result<CommandOutput, Error>; pub type CommandResult = Result<CommandOutput, Error>;
pub type CommandOutput = Option<RoomMessageEventContent>; pub type CommandOutput = Option<RoomMessageEventContent>;
@ -69,6 +70,7 @@ impl crate::Service for Service {
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"), timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
state: args.depend::<rooms::state::Service>("rooms::state"), state: args.depend::<rooms::state::Service>("rooms::state"),
state_cache: args.depend::<rooms::state_cache::Service>("rooms::state_cache"), state_cache: args.depend::<rooms::state_cache::Service>("rooms::state_cache"),
services: None.into(),
}, },
sender, sender,
receiver: Mutex::new(receiver), receiver: Mutex::new(receiver),
@ -172,10 +174,14 @@ impl Service {
} }
async fn process_command(&self, command: CommandInput) -> CommandResult { async fn process_command(&self, command: CommandInput) -> CommandResult {
let Some(services) = self.services.services.read().expect("locked").clone() else {
return Err!("Services self-reference not initialized.");
};
if let Some(handle) = self.handle.read().await.as_ref() { if let Some(handle) = self.handle.read().await.as_ref() {
handle(command).await handle(services, command).await
} else { } else {
Err(Error::Err("Admin module is not loaded.".into())) Err!("Admin module is not loaded.")
} }
} }
@ -356,4 +362,10 @@ impl Service {
#[cfg(feature = "console")] #[cfg(feature = "console")]
self.console.close().await; self.console.close().await;
} }
/// Sets the self-reference to crate::Services which will provide context to
/// the admin commands.
pub(super) fn set_services(&self, services: Option<Arc<crate::Services>>) {
*self.services.services.write().expect("locked for writing") = services;
}
} }

View file

@ -1,4 +1,4 @@
#![recursion_limit = "160"] #![recursion_limit = "192"]
#![allow(refining_impl_trait)] #![allow(refining_impl_trait)]
mod manager; mod manager;
@ -26,11 +26,7 @@ pub mod users;
extern crate conduit_core as conduit; extern crate conduit_core as conduit;
extern crate conduit_database as database; extern crate conduit_database as database;
use std::sync::{Arc, RwLock};
pub use conduit::{pdu, PduBuilder, PduCount, PduEvent}; pub use conduit::{pdu, PduBuilder, PduCount, PduEvent};
use conduit::{Result, Server};
use database::Database;
pub(crate) use service::{Args, Dep, Service}; pub(crate) use service::{Args, Dep, Service};
pub use crate::services::Services; pub use crate::services::Services;
@ -38,50 +34,3 @@ pub use crate::services::Services;
conduit::mod_ctor! {} conduit::mod_ctor! {}
conduit::mod_dtor! {} conduit::mod_dtor! {}
conduit::rustc_flags_capture! {} conduit::rustc_flags_capture! {}
static SERVICES: RwLock<Option<&Services>> = RwLock::new(None);
pub async fn start(server: &Arc<Server>) -> Result<()> {
let d = Arc::new(Database::open(server).await?);
let s = Box::new(Services::build(server.clone(), d)?);
_ = SERVICES.write().expect("write locked").insert(Box::leak(s));
services().start().await
}
pub async fn stop() {
services().stop().await;
// Deactivate services(). Any further use will panic the caller.
let s = SERVICES
.write()
.expect("write locked")
.take()
.expect("services initialized");
let s: *mut Services = std::ptr::from_ref(s).cast_mut();
//SAFETY: Services was instantiated in init() and leaked into the SERVICES
// global perusing as 'static for the duration of service. Now we reclaim
// it to drop it before unloading the module. If this is not done there wil
// be multiple instances after module reload.
let s = unsafe { Box::from_raw(s) };
// Drop it so we encounter any trouble before the infolog message
drop(s);
}
#[must_use]
pub fn services() -> &'static Services {
SERVICES
.read()
.expect("SERVICES locked for reading")
.expect("SERVICES initialized with Services instance")
}
#[inline]
pub fn available() -> bool {
SERVICES
.read()
.expect("SERVICES locked for reading")
.is_some()
}

View file

@ -44,7 +44,8 @@ pub struct Services {
impl Services { impl Services {
#[allow(clippy::cognitive_complexity)] #[allow(clippy::cognitive_complexity)]
pub fn build(server: Arc<Server>, db: Arc<Database>) -> Result<Self> { pub async fn build(server: Arc<Server>) -> Result<Arc<Self>> {
let db = Database::open(&server).await?;
let service: Arc<Map> = Arc::new(RwLock::new(BTreeMap::new())); let service: Arc<Map> = Arc::new(RwLock::new(BTreeMap::new()));
macro_rules! build { macro_rules! build {
($tyname:ty) => {{ ($tyname:ty) => {{
@ -58,7 +59,7 @@ impl Services {
}}; }};
} }
Ok(Self { Ok(Arc::new(Self {
account_data: build!(account_data::Service), account_data: build!(account_data::Service),
admin: build!(admin::Service), admin: build!(admin::Service),
appservice: build!(appservice::Service), appservice: build!(appservice::Service),
@ -102,12 +103,13 @@ impl Services {
service, service,
server, server,
db, db,
}) }))
} }
pub(super) async fn start(&self) -> Result<()> { pub async fn start(self: &Arc<Self>) -> Result<Arc<Self>> {
debug_info!("Starting services..."); debug_info!("Starting services...");
self.admin.set_services(Some(Arc::clone(self)));
globals::migrations::migrations(self).await?; globals::migrations::migrations(self).await?;
self.manager self.manager
.lock() .lock()
@ -118,10 +120,10 @@ impl Services {
.await?; .await?;
debug_info!("Services startup complete."); debug_info!("Services startup complete.");
Ok(()) Ok(Arc::clone(self))
} }
pub(super) async fn stop(&self) { pub async fn stop(&self) {
info!("Shutting down services..."); info!("Shutting down services...");
self.interrupt(); self.interrupt();
@ -129,6 +131,8 @@ impl Services {
manager.stop().await; manager.stop().await;
} }
self.admin.set_services(None);
debug_info!("Services shutdown complete."); debug_info!("Services shutdown complete.");
} }