inline analysis and symbol reduction; functions emitting smaller than 64 bytes are marked inline

Signed-off-by: Jason Volk <jason@zemos.net>

Jason Volk, 2024-07-03 20:06:43 +00:00
commit eeda96d94a, parent 1e8b8cce0f
35 changed files with 117 additions and 73 deletions
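
The change applies one policy across the tree: functions whose generated code falls under the 64-byte threshold gain #[inline] so call sites can absorb them and the standalone symbols disappear, functions the analysis found not to benefit lose the attribute, and rarely executed teardown paths (Drop impls, the restart handler) are marked #[cold] instead. A minimal sketch of that policy on a made-up type (illustrative only, not code from this repository):

// Hypothetical stand-in type; only the attribute placement mirrors the commit.
use std::sync::atomic::{AtomicUsize, Ordering};

pub struct Corks(AtomicUsize);

impl Corks {
	// Tiny accessor: emits well under 64 bytes, so mark it #[inline]
	// and let each caller fold it in rather than keeping a symbol.
	#[inline]
	pub fn corked(&self) -> bool { self.0.load(Ordering::Relaxed) > 0 }
}

impl Drop for Corks {
	// Teardown runs once and is never hot; #[cold] steers the compiler
	// away from inlining it and keeps it off the fast path.
	#[cold]
	fn drop(&mut self) { self.0.store(0, Ordering::Relaxed); }
}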


@@ -15,7 +15,7 @@ use ruma::{
events::room::message::RoomMessageEventContent,
CanonicalJsonObject, EventId, OwnedRoomOrAliasId, RoomId, RoomVersionId, ServerName,
};
-use service::{rooms::event_handler::parse_incoming_pdu, sending::resolve::resolve_actual_dest, services, PduEvent};
+use service::{rooms::event_handler::parse_incoming_pdu, sending::resolve_actual_dest, services, PduEvent};
use tokio::sync::RwLock;
use tracing_subscriber::EnvFilter;


@@ -92,6 +92,7 @@ impl Error {
}
/// Returns the Matrix error code / error kind
+#[inline]
pub fn error_code(&self) -> ruma::api::client::error::ErrorKind {
if let Self::Federation(_, error) = self {
return error.error_kind().unwrap_or_else(|| &Unknown).clone();


@@ -8,5 +8,6 @@ pub struct Guard {
}
impl Drop for Guard {
+#[inline]
fn drop(&mut self) { self.capture.stop(); }
}


@@ -17,6 +17,7 @@ struct Visitor {
}
impl Layer {
+#[inline]
pub fn new(state: &Arc<State>) -> Self {
Self {
state: state.clone(),
@@ -25,6 +26,7 @@ impl Layer {
}
impl fmt::Debug for Layer {
+#[inline]
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.debug_struct("capture::Layer").finish()
}


@@ -3,14 +3,11 @@ mod sha256;
use crate::Result;
-#[inline]
pub fn password(password: &str) -> Result<String> { argon::password(password) }
-#[inline]
pub fn verify_password(password: &str, password_hash: &str) -> Result<()> {
argon::verify_password(password, password_hash)
}
-#[inline]
#[must_use]
pub fn calculate_hash(keys: &[&[u8]]) -> Vec<u8> { sha256::hash(keys) }


@@ -33,7 +33,6 @@ pub fn clamp<T: Ord>(val: T, min: T, max: T) -> T { cmp::min(cmp::max(val, min),
///
/// * <https://doc.rust-lang.org/std/convert/enum.Infallible.html>
#[must_use]
-#[inline(always)]
pub fn unwrap_infallible<T>(result: Result<T, std::convert::Infallible>) -> T {
match result {
Ok(val) => val,


@@ -9,7 +9,6 @@ pub fn split_once_infallible<'a>(input: &'a str, delim: &'_ str) -> (&'a str, &'
}
/// Parses the bytes into a string.
-#[inline]
pub fn string_from_bytes(bytes: &[u8]) -> Result<String> {
let str: &str = str_from_bytes(bytes)?;
Ok(str.to_owned())


@@ -9,6 +9,7 @@ pub struct Cork {
}
impl Cork {
+#[inline]
pub(super) fn new(db: &Arc<Engine>, flush: bool, sync: bool) -> Self {
db.cork();
Self {


@@ -19,12 +19,15 @@ impl Database {
})
}
+#[inline]
#[must_use]
pub fn cork(&self) -> Cork { Cork::new(&self.db, false, false) }
+#[inline]
#[must_use]
pub fn cork_and_flush(&self) -> Cork { Cork::new(&self.db, true, false) }
+#[inline]
#[must_use]
pub fn cork_and_sync(&self) -> Cork { Cork::new(&self.db, true, true) }
}


@@ -121,6 +121,7 @@ impl Engine {
pub fn sync(&self) -> Result<()> { result(DBCommon::flush_wal(&self.db, true)) }
+#[inline]
pub fn corked(&self) -> bool { self.corks.load(std::sync::atomic::Ordering::Relaxed) > 0 }
pub(crate) fn cork(&self) {
@@ -242,6 +243,7 @@ impl Engine {
}
impl Drop for Engine {
+#[cold]
fn drop(&mut self) {
const BLOCKING: bool = true;


@@ -17,9 +17,11 @@ impl<'a> From<DBPinnableSlice<'a>> for Handle<'a> {
impl Deref for Handle<'_> {
type Target = [u8];
+#[inline]
fn deref(&self) -> &Self::Target { &self.val }
}
impl AsRef<[u8]> for Handle<'_> {
+#[inline]
fn as_ref(&self) -> &[u8] { &self.val }
}


@@ -199,6 +199,7 @@ impl<'a> IntoIterator for &'a Map {
type IntoIter = Box<dyn Iterator<Item = Self::Item> + Send + 'a>;
type Item = OwnedKeyValPair;
+#[inline]
fn into_iter(self) -> Self::IntoIter { self.iter() }
}


@@ -11,21 +11,19 @@ pub type Key = [Byte];
pub(crate) type Byte = u8;
impl OwnedKeyVal {
-#[inline]
#[must_use]
pub fn as_slice(&self) -> KeyVal<'_> { KeyVal(&self.0, &self.1) }
-#[inline]
#[must_use]
pub fn to_tuple(self) -> OwnedKeyValPair { (self.0, self.1) }
}
impl From<OwnedKeyValPair> for OwnedKeyVal {
-#[inline]
fn from((key, val): OwnedKeyValPair) -> Self { Self(key, val) }
}
impl From<&KeyVal<'_>> for OwnedKeyVal {
-#[inline]
fn from(slice: &KeyVal<'_>) -> Self { slice.to_owned() }
}
@@ -34,7 +32,6 @@ impl From<KeyValPair<'_>> for OwnedKeyVal {
}
impl From<OwnedKeyVal> for OwnedKeyValPair {
-#[inline]
fn from(val: OwnedKeyVal) -> Self { val.to_tuple() }
}
@@ -43,22 +40,18 @@ impl KeyVal<'_> {
#[must_use]
pub fn to_owned(&self) -> OwnedKeyVal { OwnedKeyVal::from(self) }
-#[inline]
#[must_use]
pub fn as_tuple(&self) -> KeyValPair<'_> { (self.0, self.1) }
}
impl<'a> From<&'a OwnedKeyVal> for KeyVal<'a> {
-#[inline]
fn from(owned: &'a OwnedKeyVal) -> Self { owned.as_slice() }
}
impl<'a> From<&'a OwnedKeyValPair> for KeyVal<'a> {
-#[inline]
fn from((key, val): &'a OwnedKeyValPair) -> Self { KeyVal(key.as_slice(), val.as_slice()) }
}
impl<'a> From<KeyValPair<'a>> for KeyVal<'a> {
-#[inline]
fn from((key, val): KeyValPair<'a>) -> Self { KeyVal(key, val) }
}


@@ -4,6 +4,7 @@ use std::{env, os::unix::process::CommandExt, process::Command};
use conduit::{debug, info, utils};
+#[cold]
pub(super) fn restart() -> ! {
// SAFETY: We have allowed an override for the case where the current_exe() has
// been replaced or removed. By default the server will fail to restart if the


@@ -23,6 +23,7 @@ pub struct NamespaceRegex {
impl NamespaceRegex {
/// Checks if this namespace has rights to a namespace
+#[inline]
#[must_use]
pub fn is_match(&self, heystack: &str) -> bool {
if self.is_exclusive_match(heystack) {
@@ -38,6 +39,7 @@ impl NamespaceRegex {
}
/// Checks if this namespace has exlusive rights to a namespace
+#[inline]
#[must_use]
pub fn is_exclusive_match(&self, heystack: &str) -> bool {
if let Some(exclusive) = &self.exclusive {
@@ -55,6 +57,7 @@ impl RegistrationInfo {
self.users.is_match(user_id.as_str()) || self.registration.sender_localpart == user_id.localpart()
}
+#[inline]
#[must_use]
pub fn is_exclusive_user_match(&self, user_id: &UserId) -> bool {
self.users.is_exclusive_match(user_id.as_str()) || self.registration.sender_localpart == user_id.localpart()
@@ -143,6 +146,7 @@ impl crate::Service for Service {
}
impl Service {
+#[inline]
pub fn all(&self) -> Result<Vec<(String, Registration)>> { iter_ids(&self.db) }
/// Registers an appservice and returns the ID to the caller


@@ -99,6 +99,7 @@ impl Data {
})
}
+#[inline]
pub fn update_check_for_updates_id(&self, id: u64) -> Result<()> {
self.global
.insert(LAST_CHECK_FOR_UPDATES_COUNT, &id.to_be_bytes())?;
@@ -207,8 +208,6 @@ impl Data {
Ok(())
}
-pub fn cleanup(&self) -> Result<()> { self.db.db.cleanup() }
pub fn load_keypair(&self) -> Result<Ed25519KeyPair> {
let keypair_bytes = self.global.get(b"keypair")?.map_or_else(
|| {
@@ -241,8 +240,16 @@ impl Data {
})
}
+#[inline]
pub fn remove_keypair(&self) -> Result<()> { self.global.remove(b"keypair") }
+/// TODO: the key valid until timestamp (`valid_until_ts`) is only honored
+/// in room version > 4
+///
+/// Remove the outdated keys and insert the new ones.
+///
+/// This doesn't actually check that the keys provided are newer than the
+/// old set.
pub fn add_signing_key(
&self, origin: &ServerName, new_keys: ServerSigningKeys,
) -> Result<BTreeMap<OwnedServerSigningKeyId, VerifyKey>> {
@@ -306,14 +313,18 @@ impl Data {
})
}
+#[inline]
pub fn bump_database_version(&self, new_version: u64) -> Result<()> {
self.global.insert(b"version", &new_version.to_be_bytes())?;
Ok(())
}
+#[inline]
pub fn backup(&self) -> Result<(), Box<dyn std::error::Error>> { self.db.db.backup() }
+#[inline]
pub fn backup_list(&self) -> Result<String> { self.db.db.backup_list() }
+#[inline]
pub fn file_list(&self) -> Result<String> { self.db.db.file_list() }
}


@@ -50,7 +50,10 @@ pub(crate) async fn migrations(db: &Arc<Database>, config: &Config) -> Result<()
}
async fn fresh(db: &Arc<Database>, config: &Config) -> Result<()> {
-services().globals.bump_database_version(DATABASE_VERSION)?;
+services()
+.globals
+.db
+.bump_database_version(DATABASE_VERSION)?;
db["global"].insert(b"fix_bad_double_separator_in_state_cache", &[])?;
db["global"].insert(b"retroactively_fix_bad_data_from_roomuserid_joined", &[])?;
@@ -68,57 +71,57 @@ async fn fresh(db: &Arc<Database>, config: &Config) -> Result<()> {
/// Apply any migrations
async fn migrate(db: &Arc<Database>, config: &Config) -> Result<()> {
-if services().globals.database_version()? < 1 {
+if services().globals.db.database_version()? < 1 {
db_lt_1(db, config).await?;
}
-if services().globals.database_version()? < 2 {
+if services().globals.db.database_version()? < 2 {
db_lt_2(db, config).await?;
}
-if services().globals.database_version()? < 3 {
+if services().globals.db.database_version()? < 3 {
db_lt_3(db, config).await?;
}
-if services().globals.database_version()? < 4 {
+if services().globals.db.database_version()? < 4 {
db_lt_4(db, config).await?;
}
-if services().globals.database_version()? < 5 {
+if services().globals.db.database_version()? < 5 {
db_lt_5(db, config).await?;
}
-if services().globals.database_version()? < 6 {
+if services().globals.db.database_version()? < 6 {
db_lt_6(db, config).await?;
}
-if services().globals.database_version()? < 7 {
+if services().globals.db.database_version()? < 7 {
db_lt_7(db, config).await?;
}
-if services().globals.database_version()? < 8 {
+if services().globals.db.database_version()? < 8 {
db_lt_8(db, config).await?;
}
-if services().globals.database_version()? < 9 {
+if services().globals.db.database_version()? < 9 {
db_lt_9(db, config).await?;
}
-if services().globals.database_version()? < 10 {
+if services().globals.db.database_version()? < 10 {
db_lt_10(db, config).await?;
}
-if services().globals.database_version()? < 11 {
+if services().globals.db.database_version()? < 11 {
db_lt_11(db, config).await?;
}
-if services().globals.database_version()? < 12 {
+if services().globals.db.database_version()? < 12 {
db_lt_12(db, config).await?;
}
// This migration can be reused as-is anytime the server-default rules are
// updated.
-if services().globals.database_version()? < 13 {
+if services().globals.db.database_version()? < 13 {
db_lt_13(db, config).await?;
}
@@ -143,10 +146,10 @@ async fn migrate(db: &Arc<Database>, config: &Config) -> Result<()> {
}
assert_eq!(
-services().globals.database_version().unwrap(),
+services().globals.db.database_version().unwrap(),
DATABASE_VERSION,
"Failed asserting local database version {} is equal to known latest conduwuit database version {}",
-services().globals.database_version().unwrap(),
+services().globals.db.database_version().unwrap(),
DATABASE_VERSION,
);
@@ -225,7 +228,7 @@ async fn db_lt_1(db: &Arc<Database>, _config: &Config) -> Result<()> {
serverroomids.insert(&serverroomid, &[])?;
}
-services().globals.bump_database_version(1)?;
+services().globals.db.bump_database_version(1)?;
info!("Migration: 0 -> 1 finished");
Ok(())
}
@@ -242,7 +245,7 @@ async fn db_lt_2(db: &Arc<Database>, _config: &Config) -> Result<()> {
}
}
-services().globals.bump_database_version(2)?;
+services().globals.db.bump_database_version(2)?;
info!("Migration: 1 -> 2 finished");
Ok(())
}
@@ -262,7 +265,7 @@ async fn db_lt_3(db: &Arc<Database>, _config: &Config) -> Result<()> {
mediaid_file.insert(&key, &[])?;
}
-services().globals.bump_database_version(3)?;
+services().globals.db.bump_database_version(3)?;
info!("Migration: 2 -> 3 finished");
Ok(())
}
@@ -285,7 +288,7 @@ async fn db_lt_4(_db: &Arc<Database>, config: &Config) -> Result<()> {
}
}
-services().globals.bump_database_version(4)?;
+services().globals.db.bump_database_version(4)?;
info!("Migration: 3 -> 4 finished");
Ok(())
}
@@ -309,7 +312,7 @@ async fn db_lt_5(db: &Arc<Database>, _config: &Config) -> Result<()> {
roomusertype_roomuserdataid.insert(&key, &roomuserdataid)?;
}
-services().globals.bump_database_version(5)?;
+services().globals.db.bump_database_version(5)?;
info!("Migration: 4 -> 5 finished");
Ok(())
}
@@ -323,7 +326,7 @@ async fn db_lt_6(db: &Arc<Database>, _config: &Config) -> Result<()> {
services().rooms.state_cache.update_joined_count(room_id)?;
}
-services().globals.bump_database_version(6)?;
+services().globals.db.bump_database_version(6)?;
info!("Migration: 5 -> 6 finished");
Ok(())
}
@@ -448,7 +451,7 @@ async fn db_lt_7(db: &Arc<Database>, _config: &Config) -> Result<()> {
)?;
}
-services().globals.bump_database_version(7)?;
+services().globals.db.bump_database_version(7)?;
info!("Migration: 6 -> 7 finished");
Ok(())
}
@@ -514,7 +517,7 @@ async fn db_lt_8(db: &Arc<Database>, _config: &Config) -> Result<()> {
eventid_pduid.insert_batch(batch2.iter().map(database::KeyVal::from))?;
-services().globals.bump_database_version(8)?;
+services().globals.db.bump_database_version(8)?;
info!("Migration: 7 -> 8 finished");
Ok(())
}
@@ -571,7 +574,7 @@ async fn db_lt_9(db: &Arc<Database>, _config: &Config) -> Result<()> {
tokenids.remove(&key)?;
}
-services().globals.bump_database_version(9)?;
+services().globals.db.bump_database_version(9)?;
info!("Migration: 8 -> 9 finished");
Ok(())
}
@@ -590,7 +593,7 @@ async fn db_lt_10(db: &Arc<Database>, _config: &Config) -> Result<()> {
services().users.mark_device_key_update(&user_id)?;
}
-services().globals.bump_database_version(10)?;
+services().globals.db.bump_database_version(10)?;
info!("Migration: 9 -> 10 finished");
Ok(())
}
@@ -601,7 +604,7 @@ async fn db_lt_11(_db: &Arc<Database>, _config: &Config) -> Result<()> {
//let userdevicesessionid_uiaarequest = &db["userdevicesessionid_uiaarequest"];
//userdevicesessionid_uiaarequest.clear()?;
-services().globals.bump_database_version(11)?;
+services().globals.db.bump_database_version(11)?;
info!("Migration: 10 -> 11 finished");
Ok(())
}
@@ -669,7 +672,7 @@ async fn db_lt_12(_db: &Arc<Database>, config: &Config) -> Result<()> {
)?;
}
-services().globals.bump_database_version(12)?;
+services().globals.db.bump_database_version(12)?;
info!("Migration: 11 -> 12 finished");
Ok(())
}
@@ -706,7 +709,7 @@ async fn db_lt_13(_db: &Arc<Database>, config: &Config) -> Result<()> {
)?;
}
-services().globals.bump_database_version(13)?;
+services().globals.db.bump_database_version(13)?;
info!("Migration: 12 -> 13 finished");
Ok(())
}
@@ -736,8 +739,8 @@ async fn migrate_sha256_media(db: &Arc<Database>, _config: &Config) -> Result<()
// Apply fix from when sha256_media was backward-incompat and bumped the schema
// version from 13 to 14. For users satisfying these conditions we can go back.
-if services().globals.database_version()? == 14 && DATABASE_VERSION == 13 {
-services().globals.bump_database_version(13)?;
+if services().globals.db.database_version()? == 14 && DATABASE_VERSION == 13 {
+services().globals.db.bump_database_version(13)?;
}
db["global"].insert(b"feat_sha256_media", &[])?;


@@ -17,10 +17,7 @@ use data::Data;
use ipaddress::IPAddress;
use regex::RegexSet;
use ruma::{
-api::{
-client::discovery::discover_support::ContactRole,
-federation::discovery::{ServerSigningKeys, VerifyKey},
-},
+api::{client::discovery::discover_support::ContactRole, federation::discovery::VerifyKey},
serde::Base64,
DeviceId, OwnedEventId, OwnedRoomAliasId, OwnedRoomId, OwnedServerName, OwnedServerSigningKeyId, OwnedUserId,
RoomAliasId, RoomVersionId, ServerName, UserId,
@@ -230,6 +227,7 @@ impl Service {
pub fn allow_unstable_room_versions(&self) -> bool { self.config.allow_unstable_room_versions }
+#[inline]
pub fn default_room_version(&self) -> RoomVersionId { self.config.default_room_version.clone() }
pub fn new_user_displayname_suffix(&self) -> &String { &self.config.new_user_displayname_suffix }
@@ -317,19 +315,6 @@ impl Service {
room_versions
}
-/// TODO: the key valid until timestamp (`valid_until_ts`) is only honored
-/// in room version > 4
-///
-/// Remove the outdated keys and insert the new ones.
-///
-/// This doesn't actually check that the keys provided are newer than the
-/// old set.
-pub fn add_signing_key(
-&self, origin: &ServerName, new_keys: ServerSigningKeys,
-) -> Result<BTreeMap<OwnedServerSigningKeyId, VerifyKey>> {
-self.db.add_signing_key(origin, new_keys)
-}
/// This returns an empty `Ok(BTreeMap<..>)` when there are no keys found
/// for the server.
pub fn signing_keys_for(&self, origin: &ServerName) -> Result<BTreeMap<OwnedServerSigningKeyId, VerifyKey>> {
@@ -348,14 +333,11 @@ impl Service {
Ok(keys)
}
-pub fn database_version(&self) -> Result<u64> { self.db.database_version() }
-pub fn bump_database_version(&self, new_version: u64) -> Result<()> { self.db.bump_database_version(new_version) }
pub fn well_known_client(&self) -> &Option<Url> { &self.config.well_known.client }
pub fn well_known_server(&self) -> &Option<OwnedServerName> { &self.config.well_known.server }
+#[inline]
pub fn valid_cidr_range(&self, ip: &IPAddress) -> bool {
for cidr in &self.cidr_range_denylist {
if cidr.includes(ip) {
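
The removals above follow one delegation cleanup: thin pass-through wrappers on the globals Service (add_signing_key, database_version, bump_database_version) are deleted, and call sites in the migration and signing-key hunks reach the data layer directly through the service's db field. A rough before/after sketch with simplified stand-in types (not the repository's real definitions):

// Before, the service mirrored every Data method, e.g.
//   pub fn database_version(&self) -> u64 { self.db.database_version() }
// After, the wrapper is gone and callers go through the `db` field.
pub struct Data;
impl Data {
	pub fn database_version(&self) -> u64 { 13 }
}

pub struct Service {
	pub db: Data, // exposed so callers can skip the extra indirection
}

fn caller(globals: &Service) -> u64 {
	// was: globals.database_version()
	globals.db.database_version()
}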


@@ -148,6 +148,7 @@ impl Data {
/// associated with it such as width, height, content-type, etc)
pub(crate) fn get_all_media_keys(&self) -> Vec<Vec<u8>> { self.mediaid_file.iter().map(|(key, _)| key).collect() }
+#[inline]
pub(super) fn remove_url_preview(&self, url: &str) -> Result<()> { self.url_previews.remove(url.as_bytes()) }
pub(super) fn set_url_preview(


@@ -131,6 +131,7 @@ impl crate::Service for Service {
impl Service {
/// Returns the latest presence event for the given user.
+#[inline]
pub fn get_presence(&self, user_id: &UserId) -> Result<Option<PresenceEvent>> {
if let Some((_, presence)) = self.db.get_presence(user_id)? {
Ok(Some(presence))
@@ -207,6 +208,7 @@ impl Service {
/// Returns the most recent presence updates that happened after the event
/// with id `since`.
+#[inline]
pub fn presence_since(&self, since: u64) -> Box<dyn Iterator<Item = (OwnedUserId, u64, Vec<u8>)> + '_> {
self.db.presence_since(since)
}


@@ -1381,6 +1381,7 @@ impl Service {
Ok(create_event_content.room_version)
}
+#[inline]
fn to_room_version(room_version_id: &RoomVersionId) -> RoomVersion {
RoomVersion::new(room_version_id).expect("room version is supported")
}


@@ -201,6 +201,7 @@ impl super::Service {
let result = services()
.globals
+.db
.add_signing_key(&k.server_name, k.clone())?
.into_iter()
.map(|(k, v)| (k.to_string(), v.key))
@@ -249,6 +250,7 @@ impl super::Service {
if let Ok(key) = get_keys_response.server_key.deserialize() {
let result: BTreeMap<_, _> = services()
.globals
+.db
.add_signing_key(&origin, key)?
.into_iter()
.map(|(k, v)| (k.to_string(), v.key))
@@ -392,7 +394,7 @@ impl super::Service {
}) {
debug!("Got signing keys: {:?}", server_keys);
for k in server_keys {
-services().globals.add_signing_key(origin, k.clone())?;
+services().globals.db.add_signing_key(origin, k.clone())?;
result.extend(
k.verify_keys
.into_iter()
@@ -421,6 +423,7 @@ impl super::Service {
{
services()
.globals
+.db
.add_signing_key(origin, server_key.clone())?;
result.extend(
@@ -453,6 +456,7 @@ impl super::Service {
{
services()
.globals
+.db
.add_signing_key(origin, server_key.clone())?;
result.extend(
@@ -499,7 +503,7 @@ impl super::Service {
}) {
debug!("Got signing keys: {:?}", server_keys);
for k in server_keys {
-services().globals.add_signing_key(origin, k.clone())?;
+services().globals.db.add_signing_key(origin, k.clone())?;
result.extend(
k.verify_keys
.into_iter()


@@ -48,10 +48,12 @@ impl Data {
}))
}
+#[inline]
pub(super) fn is_disabled(&self, room_id: &RoomId) -> Result<bool> {
Ok(self.disabledroomids.get(room_id.as_bytes())?.is_some())
}
+#[inline]
pub(super) fn disable_room(&self, room_id: &RoomId, disabled: bool) -> Result<()> {
if disabled {
self.disabledroomids.insert(room_id.as_bytes(), &[])?;
@@ -62,10 +64,12 @@ impl Data {
Ok(())
}
+#[inline]
pub(super) fn is_banned(&self, room_id: &RoomId) -> Result<bool> {
Ok(self.bannedroomids.get(room_id.as_bytes())?.is_some())
}
+#[inline]
pub(super) fn ban_room(&self, room_id: &RoomId, banned: bool) -> Result<()> {
if banned {
self.bannedroomids.insert(room_id.as_bytes(), &[])?;


@@ -22,22 +22,27 @@ impl crate::Service for Service {
impl Service {
/// Checks if a room exists.
-#[tracing::instrument(skip(self))]
+#[inline]
pub fn exists(&self, room_id: &RoomId) -> Result<bool> { self.db.exists(room_id) }
#[must_use]
pub fn iter_ids<'a>(&'a self) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a> { self.db.iter_ids() }
+#[inline]
pub fn is_disabled(&self, room_id: &RoomId) -> Result<bool> { self.db.is_disabled(room_id) }
+#[inline]
pub fn disable_room(&self, room_id: &RoomId, disabled: bool) -> Result<()> {
self.db.disable_room(room_id, disabled)
}
+#[inline]
pub fn is_banned(&self, room_id: &RoomId) -> Result<bool> { self.db.is_banned(room_id) }
+#[inline]
pub fn ban_room(&self, room_id: &RoomId, banned: bool) -> Result<()> { self.db.ban_room(room_id, banned) }
+#[inline]
#[must_use]
pub fn list_banned_rooms<'a>(&'a self) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a> {
self.db.list_banned_rooms()


@@ -30,6 +30,7 @@ impl Data {
})
}
+#[inline]
pub(super) fn set_room_state(
&self,
room_id: &RoomId,


@@ -200,6 +200,7 @@ impl Service {
if let Some(state_key) = &new_pdu.state_key {
let states_parents = previous_shortstatehash.map_or_else(
|| Ok(Vec::new()),
+#[inline]
|p| {
services()
.rooms
@@ -344,6 +345,7 @@ impl Service {
Ok(create_event_content.room_version)
}
+#[inline]
pub fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>> {
self.db.get_room_shortstatehash(room_id)
}


@@ -96,6 +96,7 @@ impl Service {
/// Returns a single PDU from `room_id` with key (`event_type`,
/// `state_key`).
+#[inline]
pub fn state_get(
&self, shortstatehash: u64, event_type: &StateEventType, state_key: &str,
) -> Result<Option<Arc<PduEvent>>> {
@@ -113,6 +114,7 @@ impl Service {
}
/// The user was a joined member at this state (potentially in the past)
+#[inline]
fn user_was_joined(&self, shortstatehash: u64, user_id: &UserId) -> bool {
self.user_membership(shortstatehash, user_id)
.is_ok_and(|s| s == MembershipState::Join)
@@ -122,6 +124,7 @@ impl Service {
/// The user was an invited or joined room member at this state (potentially
/// in the past)
+#[inline]
fn user_was_invited(&self, shortstatehash: u64, user_id: &UserId) -> bool {
self.user_membership(shortstatehash, user_id)
.is_ok_and(|s| s == MembershipState::Join || s == MembershipState::Invite)


@@ -135,6 +135,7 @@ impl Service {
}
/// Returns shortstatekey, event id
+#[inline]
pub fn parse_compressed_state_event(&self, compressed_event: &CompressedStateEvent) -> Result<(u64, Arc<EventId>)> {
Ok((
utils::u64_from_bytes(&compressed_event[0..size_of::<u64>()]).expect("bytes have right length"),


@@ -97,6 +97,7 @@ impl Data {
}
/// Returns the pdu's id.
+#[inline]
pub(super) fn get_pdu_id(&self, event_id: &EventId) -> Result<Option<database::Handle<'_>>> {
self.eventid_pduid.get(event_id.as_bytes())
}


@@ -175,11 +175,13 @@ impl Service {
}
/// Returns the json of a pdu.
+#[inline]
pub fn get_non_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
self.db.get_non_outlier_pdu_json(event_id)
}
/// Returns the pdu's id.
+#[inline]
pub fn get_pdu_id(&self, event_id: &EventId) -> Result<Option<database::Handle<'_>>> {
self.db.get_pdu_id(event_id)
}
@@ -190,6 +192,7 @@ impl Service {
///
/// TODO: use this?
#[allow(dead_code)]
+#[inline]
pub fn get_non_outlier_pdu(&self, event_id: &EventId) -> Result<Option<PduEvent>> {
self.db.get_non_outlier_pdu(event_id)
}
@@ -1017,6 +1020,7 @@ impl Service {
}
/// Returns an iterator over all PDUs in a room.
+#[inline]
pub fn all_pdus<'a>(
&'a self, user_id: &UserId, room_id: &RoomId,
) -> Result<impl Iterator<Item = Result<(PduCount, PduEvent)>> + 'a> {


@@ -27,6 +27,7 @@ impl Data {
}
}
+#[inline]
pub fn active_requests(&self) -> OutgoingSendingIter<'_> {
Box::new(
self.servercurrentevent_data
@@ -35,6 +36,7 @@ impl Data {
)
}
+#[inline]
pub fn active_requests_for<'a>(&'a self, destination: &Destination) -> SendingEventIter<'a> {
let prefix = destination.get_prefix();
Box::new(


@@ -1,6 +1,6 @@
mod appservice;
mod data;
-pub mod resolve;
+mod resolve;
mod send;
mod sender;
@@ -9,7 +9,7 @@ use std::{fmt::Debug, sync::Arc};
use async_trait::async_trait;
use conduit::{Error, Result};
use data::Data;
-pub use resolve::FedDest;
+pub use resolve::{resolve_actual_dest, FedDest};
use ruma::{
api::{appservice::Registration, OutgoingRequest},
OwnedServerName, OwnedUserId, RoomId, ServerName, UserId,
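
With resolve_actual_dest now re-exported from the sending module, the resolve submodule itself no longer needs to be public. A small sketch of that visibility pattern with stand-in bodies and simplified signatures (the real resolver is async and has a different signature):

mod resolve {
	pub struct FedDest;
	pub fn resolve_actual_dest() -> FedDest { FedDest }
}

// The module stays private; only the items callers need are re-exported,
// so external code writes `sending::resolve_actual_dest` / `sending::FedDest`.
pub use resolve::{resolve_actual_dest, FedDest};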


@@ -414,6 +414,7 @@ impl FedDest {
}
}
+#[inline]
fn port(&self) -> Option<u16> {
match &self {
Self::Literal(addr) => Some(addr.port()),


@@ -61,6 +61,7 @@ impl Data {
}
/// Check if a user has an account on this homeserver.
+#[inline]
pub(super) fn exists(&self, user_id: &UserId) -> Result<bool> {
Ok(self.userid_password.get(user_id.as_bytes())?.is_some())
}
@@ -75,6 +76,7 @@ impl Data {
}
/// Returns the number of users registered on this server.
+#[inline]
pub(super) fn count(&self) -> Result<usize> { Ok(self.userid_password.iter().count()) }
/// Find out which user an access token belongs to.


@@ -53,6 +53,7 @@ impl crate::Service for Service {
impl Service {
/// Check if a user has an account on this homeserver.
+#[inline]
pub fn exists(&self, user_id: &UserId) -> Result<bool> { self.db.exists(user_id) }
pub fn forget_sync_request_connection(&self, user_id: OwnedUserId, device_id: OwnedDeviceId, conn_id: String) {
@@ -257,12 +258,14 @@ impl Service {
}
/// Create a new user account on this homeserver.
+#[inline]
pub fn create(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
self.db.set_password(user_id, password)?;
Ok(())
}
/// Returns the number of users registered on this server.
+#[inline]
pub fn count(&self) -> Result<usize> { self.db.count() }
/// Find out which user an access token belongs to.
@@ -283,6 +286,7 @@ impl Service {
pub fn password_hash(&self, user_id: &UserId) -> Result<Option<String>> { self.db.password_hash(user_id) }
/// Hash and set the user's password to the Argon2 hash
+#[inline]
pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
self.db.set_password(user_id, password)
}
@@ -331,6 +335,7 @@ impl Service {
}
/// Replaces the access token of one device.
+#[inline]
pub fn set_token(&self, user_id: &UserId, device_id: &DeviceId, token: &str) -> Result<()> {
self.db.set_token(user_id, device_id, token)
}
@@ -385,18 +390,21 @@ impl Service {
self.db.keys_changed(user_or_room_id, from, to)
}
+#[inline]
pub fn mark_device_key_update(&self, user_id: &UserId) -> Result<()> { self.db.mark_device_key_update(user_id) }
pub fn get_device_keys(&self, user_id: &UserId, device_id: &DeviceId) -> Result<Option<Raw<DeviceKeys>>> {
self.db.get_device_keys(user_id, device_id)
}
+#[inline]
pub fn parse_master_key(
&self, user_id: &UserId, master_key: &Raw<CrossSigningKey>,
) -> Result<(Vec<u8>, CrossSigningKey)> {
Data::parse_master_key(user_id, master_key)
}
+#[inline]
pub fn get_key(
&self, key: &[u8], sender_user: Option<&UserId>, user_id: &UserId, allowed_signatures: &dyn Fn(&UserId) -> bool,
) -> Result<Option<Raw<CrossSigningKey>>> {