From e2c7afe69c24c1d067ca958cd61b7f770c5b05f5 Mon Sep 17 00:00:00 2001 From: strawberry Date: Sat, 2 Mar 2024 20:55:02 -0500 Subject: [PATCH] go through a ton of pedantic clippy lints Signed-off-by: strawberry --- Cargo.toml | 9 ++++- src/api/appservice_server.rs | 2 +- src/api/client_server/account.rs | 12 +++--- src/api/client_server/alias.rs | 6 +-- src/api/client_server/backup.rs | 6 +-- src/api/client_server/context.rs | 6 +-- src/api/client_server/device.rs | 8 ++-- src/api/client_server/keys.rs | 8 ++-- src/api/client_server/media.rs | 2 +- src/api/client_server/membership.rs | 14 +++---- src/api/client_server/message.rs | 4 +- src/api/client_server/profile.rs | 8 ++-- src/api/client_server/push.rs | 2 +- src/api/client_server/read_marker.rs | 2 +- src/api/client_server/report.rs | 10 ++--- src/api/client_server/room.rs | 7 ++-- src/api/client_server/search.rs | 4 +- src/api/client_server/session.rs | 9 +++-- src/api/client_server/state.rs | 4 +- src/api/client_server/sync.rs | 40 ++++++++++---------- src/api/client_server/threads.rs | 2 +- src/api/client_server/to_device.rs | 2 +- src/api/client_server/user_directory.rs | 2 +- src/api/ruma_wrapper/axum.rs | 2 +- src/api/server_server.rs | 18 ++++----- src/config/proxy.rs | 2 +- src/database/abstraction/rocksdb.rs | 5 +-- src/database/key_value/account_data.rs | 6 +-- src/database/key_value/appservice.rs | 4 +- src/database/key_value/globals.rs | 8 ++-- src/database/key_value/key_backups.rs | 2 +- src/database/key_value/media.rs | 6 +-- src/database/key_value/rooms/alias.rs | 2 +- src/database/key_value/rooms/edus/typing.rs | 2 +- src/database/key_value/rooms/pdu_metadata.rs | 9 ++--- src/database/key_value/rooms/short.rs | 20 +++++----- src/database/key_value/rooms/state.rs | 2 +- src/database/key_value/rooms/state_cache.rs | 15 ++++++-- src/database/key_value/rooms/threads.rs | 2 +- src/database/key_value/rooms/timeline.rs | 15 ++++---- src/database/key_value/rooms/user.rs | 2 +- src/database/key_value/sending.rs | 4 +- src/database/key_value/transaction_ids.rs | 4 +- src/database/key_value/uiaa.rs | 2 +- src/database/key_value/users.rs | 4 +- src/database/mod.rs | 20 +++++----- src/service/admin/mod.rs | 22 +++++------ src/service/media/mod.rs | 11 ++---- src/service/pusher/mod.rs | 6 +-- src/service/rooms/event_handler/mod.rs | 25 +++++++----- src/service/rooms/pdu_metadata/data.rs | 8 ++-- src/service/rooms/pdu_metadata/mod.rs | 4 +- src/service/rooms/spaces/mod.rs | 8 ++-- src/service/rooms/state_accessor/mod.rs | 14 +++---- src/service/rooms/threads/mod.rs | 4 +- src/service/rooms/timeline/data.rs | 19 +++++++--- src/service/rooms/timeline/mod.rs | 20 +++++----- src/service/sending/mod.rs | 30 +++++++++------ src/service/uiaa/mod.rs | 21 +++++----- src/utils/error.rs | 6 ++- src/utils/mod.rs | 6 ++- 61 files changed, 282 insertions(+), 247 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 0380c93f..946080e4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -282,6 +282,7 @@ filetype_is_file = "warn" float_cmp_const = "warn" format_push_string = "warn" impl_trait_in_params = "warn" +ref_to_mut = "warn" # let_underscore_untyped = "warn" lossy_float_literal = "warn" mem_forget = "warn" @@ -290,7 +291,9 @@ missing_assert_message = "warn" # multiple_inherent_impl = "warn" mutex_atomic = "warn" # same_name_method = "warn" -# semicolon_outside_block = "warn" +semicolon_outside_block = "warn" +fn_to_numeric_cast = "warn" +fn_to_numeric_cast_with_truncation = "warn" string_lit_chars_any = "warn" suspicious_xor_used_as_pow = "warn" 
try_err = "warn" @@ -298,6 +301,10 @@ unnecessary_safety_comment = "warn" unnecessary_safety_doc = "warn" unnecessary_self_imports = "warn" verbose_file_reads = "warn" +# cast_precision_loss = "warn" +cast_possible_wrap = "warn" +# cast_possible_truncation = "warn" +redundant_closure_for_method_calls = "warn" # not in rust 1.75.0 (breaks CI) # infinite_loop = "warn" diff --git a/src/api/appservice_server.rs b/src/api/appservice_server.rs index e4255e5d..cc50be19 100644 --- a/src/api/appservice_server.rs +++ b/src/api/appservice_server.rs @@ -30,7 +30,7 @@ where Error::BadServerResponse("Invalid destination") }) .unwrap() - .map(|body| body.freeze()); + .map(bytes::BytesMut::freeze); let mut parts = http_request.uri().clone().into_parts(); let old_path_and_query = parts.path_and_query.unwrap().as_str().to_owned(); diff --git a/src/api/client_server/account.rs b/src/api/client_server/account.rs index 6d0f053e..f77ce428 100644 --- a/src/api/client_server/account.rs +++ b/src/api/client_server/account.rs @@ -174,7 +174,7 @@ pub async fn register_route(body: Ruma) -> Result) -> Result) -> Result { let sender_user = body.sender_user.as_ref().expect("user is authenticated"); - let device_id = body.sender_device.as_ref().cloned(); + let device_id = body.sender_device.clone(); Ok(whoami::v3::Response { user_id: sender_user.clone(), @@ -439,7 +439,7 @@ pub async fn deactivate_route( stages: vec![AuthType::Password], }], completed: Vec::new(), - params: Default::default(), + params: Box::default(), session: None, auth_error: None, }; diff --git a/src/api/client_server/alias.rs b/src/api/client_server/alias.rs index 1a236449..230a23da 100644 --- a/src/api/client_server/alias.rs +++ b/src/api/client_server/alias.rs @@ -124,7 +124,7 @@ pub(crate) async fn get_alias_helper( .send_federation_request( room_alias.server_name(), federation::query::get_room_information::v1::Request { - room_alias: room_alias.to_owned(), + room_alias: room_alias.clone(), }, ) .await?; @@ -138,7 +138,7 @@ pub(crate) async fn get_alias_helper( .rooms .state_cache .room_servers(&room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) { servers.push(extra_servers); } @@ -224,7 +224,7 @@ pub(crate) async fn get_alias_helper( .rooms .state_cache .room_servers(&room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) { servers.push(extra_servers); } diff --git a/src/api/client_server/backup.rs b/src/api/client_server/backup.rs index 115cba7c..8bbe3ef1 100644 --- a/src/api/client_server/backup.rs +++ b/src/api/client_server/backup.rs @@ -86,7 +86,7 @@ pub async fn get_backup_info_route( etag: services() .key_backups .get_etag(sender_user, &body.version)?, - version: body.version.to_owned(), + version: body.version.clone(), }) } @@ -139,7 +139,7 @@ pub async fn add_backup_keys_route( room_id, session_id, key_data, - )? + )?; } } @@ -185,7 +185,7 @@ pub async fn add_backup_keys_for_room_route( &body.room_id, session_id, key_data, - )? 
+ )?; } Ok(add_backup_keys_for_room::v3::Response { diff --git a/src/api/client_server/context.rs b/src/api/client_server/context.rs index 8e193e6b..e828795f 100644 --- a/src/api/client_server/context.rs +++ b/src/api/client_server/context.rs @@ -22,7 +22,7 @@ pub async fn get_context_route( LazyLoadOptions::Enabled { include_redundant_members, } => (true, *include_redundant_members), - _ => (false, false), + LazyLoadOptions::Disabled => (false, false), }; let mut lazy_loaded = HashSet::new(); @@ -79,7 +79,7 @@ pub async fn get_context_route( .timeline .pdus_until(sender_user, &room_id, base_token)? .take(limit / 2) - .filter_map(|r| r.ok()) // Remove buggy events + .filter_map(std::result::Result::ok) // Remove buggy events .filter(|(_, pdu)| { services() .rooms @@ -116,7 +116,7 @@ pub async fn get_context_route( .timeline .pdus_after(sender_user, &room_id, base_token)? .take(limit / 2) - .filter_map(|r| r.ok()) // Remove buggy events + .filter_map(std::result::Result::ok) // Remove buggy events .filter(|(_, pdu)| { services() .rooms diff --git a/src/api/client_server/device.rs b/src/api/client_server/device.rs index aba061b2..df9b0c3f 100644 --- a/src/api/client_server/device.rs +++ b/src/api/client_server/device.rs @@ -18,7 +18,7 @@ pub async fn get_devices_route( let devices: Vec = services() .users .all_devices_metadata(sender_user) - .filter_map(|r| r.ok()) // Filter out buggy devices + .filter_map(std::result::Result::ok) // Filter out buggy devices .collect(); Ok(get_devices::v3::Response { devices }) @@ -83,7 +83,7 @@ pub async fn delete_device_route( stages: vec![AuthType::Password], }], completed: Vec::new(), - params: Default::default(), + params: Box::default(), session: None, auth_error: None, }; @@ -137,7 +137,7 @@ pub async fn delete_devices_route( stages: vec![AuthType::Password], }], completed: Vec::new(), - params: Default::default(), + params: Box::default(), session: None, auth_error: None, }; @@ -162,7 +162,7 @@ pub async fn delete_devices_route( } for device_id in &body.devices { - services().users.remove_device(sender_user, device_id)? 
+ services().users.remove_device(sender_user, device_id)?; } Ok(delete_devices::v3::Response {}) diff --git a/src/api/client_server/keys.rs b/src/api/client_server/keys.rs index bc82bd98..b57e28a8 100644 --- a/src/api/client_server/keys.rs +++ b/src/api/client_server/keys.rs @@ -111,7 +111,7 @@ pub async fn upload_signing_keys_route( stages: vec![AuthType::Password], }], completed: Vec::new(), - params: Default::default(), + params: Box::default(), session: None, auth_error: None, }; @@ -230,14 +230,14 @@ pub async fn get_key_changes_route( .map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid `to`."))?, ), ) - .filter_map(|r| r.ok()), + .filter_map(std::result::Result::ok), ); for room_id in services() .rooms .state_cache .rooms_joined(sender_user) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) { device_list_updates.extend( services() @@ -251,7 +251,7 @@ pub async fn get_key_changes_route( Error::BadRequest(ErrorKind::InvalidParam, "Invalid `to`.") })?), ) - .filter_map(|r| r.ok()), + .filter_map(std::result::Result::ok), ); } Ok(get_key_changes::v3::Response { diff --git a/src/api/client_server/media.rs b/src/api/client_server/media.rs index d8e5e7fb..20fabbcf 100644 --- a/src/api/client_server/media.rs +++ b/src/api/client_server/media.rs @@ -270,7 +270,7 @@ pub async fn get_content_thumbnail_route( if services() .globals .prevent_media_downloads_from() - .contains(&body.server_name.to_owned()) + .contains(&body.server_name.clone()) { info!("Received request for remote media `{}` but server is in our media server blocklist. Returning 404.", mxc); return Err(Error::BadRequest(ErrorKind::NotFound, "Media not found.")); diff --git a/src/api/client_server/membership.rs b/src/api/client_server/membership.rs index 3d2ef5e7..92f6878b 100644 --- a/src/api/client_server/membership.rs +++ b/src/api/client_server/membership.rs @@ -68,7 +68,7 @@ pub async fn join_room_by_id_route( .iter() .filter_map(|event| serde_json::from_str(event.json().get()).ok()) .filter_map(|event: serde_json::Value| event.get("sender").cloned()) - .filter_map(|sender| sender.as_str().map(|s| s.to_owned())) + .filter_map(|sender| sender.as_str().map(std::borrow::ToOwned::to_owned)) .filter_map(|sender| UserId::parse(sender).ok()) .map(|user| user.server_name().to_owned()), ); @@ -123,7 +123,7 @@ pub async fn join_room_by_id_or_alias_route( .iter() .filter_map(|event| serde_json::from_str(event.json().get()).ok()) .filter_map(|event: serde_json::Value| event.get("sender").cloned()) - .filter_map(|sender| sender.as_str().map(|s| s.to_owned())) + .filter_map(|sender| sender.as_str().map(std::borrow::ToOwned::to_owned)) .filter_map(|sender| UserId::parse(sender).ok()) .map(|user| user.server_name().to_owned()), ); @@ -441,7 +441,7 @@ pub async fn joined_rooms_route( .rooms .state_cache .rooms_joined(sender_user) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .collect(), }) } @@ -507,7 +507,7 @@ pub async fn joined_members_route( .rooms .state_cache .room_members(&body.room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) { let display_name = services().users.displayname(&user_id)?; let avatar_url = services().users.avatar_url(&user_id)?; @@ -963,7 +963,7 @@ async fn join_room_by_id_helper( .rooms .state_cache .room_members(restriction_room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .find(|uid| uid.server_name() == services().globals.server_name()) }); Some(authorized_user) @@ -1384,7 +1384,7 @@ pub(crate) async fn invite_helper( .rooms 
.state_cache .room_servers(room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter(|server| &**server != services().globals.server_name()); services().sending.send_pdu(servers, &pdu_id)?; @@ -1593,7 +1593,7 @@ async fn remote_leave_room(user_id: &UserId, room_id: &RoomId) -> Result<()> { .iter() .filter_map(|event| serde_json::from_str(event.json().get()).ok()) .filter_map(|event: serde_json::Value| event.get("sender").cloned()) - .filter_map(|sender| sender.as_str().map(|s| s.to_owned())) + .filter_map(|sender| sender.as_str().map(std::borrow::ToOwned::to_owned)) .filter_map(|sender| UserId::parse(sender).ok()) .map(|user| user.server_name().to_owned()) .collect(); diff --git a/src/api/client_server/message.rs b/src/api/client_server/message.rs index 32a21003..156e1f5c 100644 --- a/src/api/client_server/message.rs +++ b/src/api/client_server/message.rs @@ -199,7 +199,7 @@ pub async fn get_message_events_route( .timeline .pdus_after(sender_user, &body.room_id, from)? .take(limit) - .filter_map(|r| r.ok()) // Filter out buggy events + .filter_map(std::result::Result::ok) // Filter out buggy events .filter(|(_, pdu)| { services() .rooms @@ -248,7 +248,7 @@ pub async fn get_message_events_route( .timeline .pdus_until(sender_user, &body.room_id, from)? .take(limit) - .filter_map(|r| r.ok()) // Filter out buggy events + .filter_map(std::result::Result::ok) // Filter out buggy events .filter(|(_, pdu)| { services() .rooms diff --git a/src/api/client_server/profile.rs b/src/api/client_server/profile.rs index e80459f1..fc4eff66 100644 --- a/src/api/client_server/profile.rs +++ b/src/api/client_server/profile.rs @@ -35,7 +35,7 @@ pub async fn set_displayname_route( .rooms .state_cache .rooms_joined(sender_user) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .map(|room_id| { Ok::<_, Error>(( PduBuilder { @@ -70,7 +70,7 @@ pub async fn set_displayname_route( room_id, )) }) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .collect(); for (pdu_builder, room_id) in all_rooms_joined { @@ -182,7 +182,7 @@ pub async fn set_avatar_url_route( .rooms .state_cache .rooms_joined(sender_user) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .map(|room_id| { Ok::<_, Error>(( PduBuilder { @@ -217,7 +217,7 @@ pub async fn set_avatar_url_route( room_id, )) }) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .collect(); for (pdu_builder, room_id) in all_joined_rooms { diff --git a/src/api/client_server/push.rs b/src/api/client_server/push.rs index 72768662..d2c35668 100644 --- a/src/api/client_server/push.rs +++ b/src/api/client_server/push.rs @@ -281,7 +281,7 @@ pub async fn get_pushrule_enabled_route( let global = account_data.content.global; let enabled = global .get(body.kind.clone(), &body.rule_id) - .map(|r| r.enabled()) + .map(ruma::push::AnyPushRuleRef::enabled) .ok_or(Error::BadRequest( ErrorKind::NotFound, "Push rule not found.", diff --git a/src/api/client_server/read_marker.rs b/src/api/client_server/read_marker.rs index a5553d25..d01a3a7f 100644 --- a/src/api/client_server/read_marker.rs +++ b/src/api/client_server/read_marker.rs @@ -140,7 +140,7 @@ pub async fn create_receipt_route( receipts.insert(ReceiptType::Read, user_receipts); let mut receipt_content = BTreeMap::new(); - receipt_content.insert(body.event_id.to_owned(), receipts); + receipt_content.insert(body.event_id.clone(), receipts); services().rooms.edus.read_receipt.readreceipt_update( sender_user, diff --git a/src/api/client_server/report.rs 
b/src/api/client_server/report.rs index 4d0921e7..e424449b 100644 --- a/src/api/client_server/report.rs +++ b/src/api/client_server/report.rs @@ -46,7 +46,7 @@ pub async fn report_event_route( .rooms .state_cache .room_members(&pdu.room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .any(|user_id| user_id == *sender_user) { return Err(Error::BadRequest( @@ -85,7 +85,7 @@ pub async fn report_event_route( sender_user.to_owned(), pdu.event_id, pdu.room_id, - pdu.sender.to_owned(), + pdu.sender.clone(), body.score.unwrap_or_else(|| ruma::Int::from(0)), body.reason.as_deref().unwrap_or("") ), @@ -97,9 +97,9 @@ pub async fn report_event_route( Report Info
  • Report Score: {4}
  • Report Reason: {5}
\ ", sender_user.to_owned(), - pdu.event_id.to_owned(), - pdu.room_id.to_owned(), - pdu.sender.to_owned(), + pdu.event_id.clone(), + pdu.room_id.clone(), + pdu.sender.clone(), body.score.unwrap_or_else(|| ruma::Int::from(0)), HtmlEscape(body.reason.as_deref().unwrap_or("")) ), diff --git a/src/api/client_server/room.rs b/src/api/client_server/room.rs index 4d7a0c0c..c888a90e 100644 --- a/src/api/client_server/room.rs +++ b/src/api/client_server/room.rs @@ -122,7 +122,7 @@ pub async fn create_room_route( None => room_id = RoomId::new(services().globals.server_name()), } } else { - room_id = RoomId::new(services().globals.server_name()) + room_id = RoomId::new(services().globals.server_name()); } // check if room ID doesn't already exist instead of erroring on auth check @@ -380,7 +380,6 @@ pub async fn create_room_route( // Figure out preset. We need it for preset specific events let preset = body.preset.clone().unwrap_or(match &body.visibility { - room::Visibility::Private => RoomPreset::PrivateChat, room::Visibility::Public => RoomPreset::PublicChat, _ => RoomPreset::PrivateChat, // Room visibility should not be custom }); @@ -672,7 +671,7 @@ pub async fn get_room_aliases_route( .rooms .alias .local_aliases_for_room(&body.room_id) - .filter_map(|a| a.ok()) + .filter_map(std::result::Result::ok) .collect(), }) } @@ -932,7 +931,7 @@ pub async fn upgrade_room_route( .rooms .alias .local_aliases_for_room(&body.room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) { services() .rooms diff --git a/src/api/client_server/search.rs b/src/api/client_server/search.rs index e9fac365..43e4c9f4 100644 --- a/src/api/client_server/search.rs +++ b/src/api/client_server/search.rs @@ -27,7 +27,7 @@ pub async fn search_events_route( .rooms .state_cache .rooms_joined(sender_user) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .collect() }); @@ -110,7 +110,7 @@ pub async fn search_events_route( result: Some(result), }) }) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .skip(skip) .take(limit) .collect(); diff --git a/src/api/client_server/session.rs b/src/api/client_server/session.rs index a5181d59..56250a0c 100644 --- a/src/api/client_server/session.rs +++ b/src/api/client_server/session.rs @@ -5,7 +5,10 @@ use ruma::{ api::client::{ error::ErrorKind, session::{ - get_login_types, + get_login_types::{ + self, + v3::{ApplicationServiceLoginType, PasswordLoginType}, + }, login::{ self, v3::{DiscoveryInfo, HomeserverInfo}, @@ -33,8 +36,8 @@ pub async fn get_login_types_route( _body: Ruma, ) -> Result { Ok(get_login_types::v3::Response::new(vec![ - get_login_types::v3::LoginType::Password(Default::default()), - get_login_types::v3::LoginType::ApplicationService(Default::default()), + get_login_types::v3::LoginType::Password(PasswordLoginType::default()), + get_login_types::v3::LoginType::ApplicationService(ApplicationServiceLoginType::default()), ])) } diff --git a/src/api/client_server/state.rs b/src/api/client_server/state.rs index ebc41a0f..1d2f5e9b 100644 --- a/src/api/client_server/state.rs +++ b/src/api/client_server/state.rs @@ -31,7 +31,7 @@ pub async fn send_state_event_for_key_route( &body.room_id, &body.event_type, &body.body.body, // Yes, I hate it too - body.state_key.to_owned(), + body.state_key.clone(), ) .await?; @@ -64,7 +64,7 @@ pub async fn send_state_event_for_empty_key_route( &body.room_id, &body.event_type.to_string().into(), &body.body.body, - body.state_key.to_owned(), + body.state_key.clone(), ) .await?; diff --git 
a/src/api/client_server/sync.rs b/src/api/client_server/sync.rs index 9216fb84..5ec6c26c 100644 --- a/src/api/client_server/sync.rs +++ b/src/api/client_server/sync.rs @@ -83,7 +83,7 @@ pub async fn sync_events_route( Entry::Vacant(v) => { let (tx, rx) = tokio::sync::watch::channel(None); - v.insert((body.since.to_owned(), rx.clone())); + v.insert((body.since.clone(), rx.clone())); tokio::spawn(sync_helper_wrapper( sender_user.clone(), @@ -202,7 +202,7 @@ async fn sync_helper( LazyLoadOptions::Enabled { include_redundant_members: redundant, } => (true, redundant), - _ => (false, false), + LazyLoadOptions::Disabled => (false, false), }; let full_state = body.full_state; @@ -225,7 +225,7 @@ async fn sync_helper( services() .users .keys_changed(sender_user.as_ref(), since, None) - .filter_map(|r| r.ok()), + .filter_map(std::result::Result::ok), ); let all_joined_rooms = services() @@ -285,7 +285,7 @@ async fn sync_helper( ); let insert_lock = mutex_insert.lock().await; drop(insert_lock); - } + }; let left_count = services() .rooms @@ -417,7 +417,7 @@ async fn sync_helper( ); let insert_lock = mutex_insert.lock().await; drop(insert_lock); - } + }; let invite_count = services() .rooms @@ -444,7 +444,7 @@ async fn sync_helper( .rooms .user .get_shared_rooms(vec![sender_user.clone(), user_id.clone()])? - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter_map(|other_room_id| { Some( services() @@ -604,7 +604,7 @@ async fn load_joined_room( ); let insert_lock = mutex_insert.lock().await; drop(insert_lock); - } + }; let (timeline_pdus, limited) = load_timeline(sender_user, room_id, sincecount, 10)?; @@ -671,7 +671,7 @@ async fn load_joined_room( .rooms .timeline .all_pdus(sender_user, room_id)? - .filter_map(|pdu| pdu.ok()) // Ignore all broken pdus + .filter_map(std::result::Result::ok) // Ignore all broken pdus .filter(|(_, pdu)| pdu.kind == TimelineEventType::RoomMember) .map(|(_, pdu)| { let content: RoomMemberEventContent = @@ -706,7 +706,7 @@ async fn load_joined_room( } }) // Filter out buggy users - .filter_map(|u| u.ok()) + .filter_map(std::result::Result::ok) // Filter for possible heroes .flatten() { @@ -1011,7 +1011,7 @@ async fn load_joined_room( services() .users .keys_changed(room_id.as_ref(), since, None) - .filter_map(|r| r.ok()), + .filter_map(std::result::Result::ok), ); let notification_count = if send_notification_counts { @@ -1062,7 +1062,7 @@ async fn load_joined_room( .edus .read_receipt .readreceipts_since(room_id, since) - .filter_map(|r| r.ok()) // Filter out buggy events + .filter_map(std::result::Result::ok) // Filter out buggy events .map(|(_, _, v)| v) .collect(); @@ -1176,7 +1176,7 @@ fn share_encrypted_room( .rooms .user .get_shared_rooms(vec![sender_user.to_owned(), user_id.to_owned()])? 
- .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter(|room_id| room_id != ignore_room) .filter_map(|other_room_id| { Some( @@ -1214,7 +1214,7 @@ pub async fn sync_events_v4_route( sender_user.clone(), sender_device.clone(), conn_id.clone(), - ) + ); } } @@ -1229,7 +1229,7 @@ pub async fn sync_events_v4_route( .rooms .state_cache .rooms_joined(&sender_user) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .collect::>(); if body.extensions.to_device.enabled.unwrap_or(false) { @@ -1248,7 +1248,7 @@ pub async fn sync_events_v4_route( services() .users .keys_changed(sender_user.as_ref(), globalsince, None) - .filter_map(|r| r.ok()), + .filter_map(std::result::Result::ok), ); for room_id in &all_joined_rooms { @@ -1393,7 +1393,7 @@ pub async fn sync_events_v4_route( services() .users .keys_changed(room_id.as_ref(), globalsince, None) - .filter_map(|r| r.ok()), + .filter_map(std::result::Result::ok), ); } for user_id in left_encrypted_users { @@ -1401,7 +1401,7 @@ pub async fn sync_events_v4_route( .rooms .user .get_shared_rooms(vec![sender_user.clone(), user_id.clone()])? - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter_map(|other_room_id| { Some( services() @@ -1585,7 +1585,7 @@ pub async fn sync_events_v4_route( .state_accessor .room_state_get(room_id, &state.0, &state.1) }) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .flatten() .map(|state| state.to_sync_state_event()) .collect(); @@ -1595,7 +1595,7 @@ pub async fn sync_events_v4_route( .rooms .state_cache .room_members(room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter(|member| member != &sender_user) .map(|member| { Ok::<_, Error>( @@ -1613,7 +1613,7 @@ pub async fn sync_events_v4_route( }), ) }) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .flatten() .take(5) .collect::>(); diff --git a/src/api/client_server/threads.rs b/src/api/client_server/threads.rs index a095b420..e1731060 100644 --- a/src/api/client_server/threads.rs +++ b/src/api/client_server/threads.rs @@ -27,7 +27,7 @@ pub async fn get_threads_route( .threads .threads_until(sender_user, &body.room_id, from, &body.include)? .take(limit) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter(|(_, pdu)| { services() .rooms diff --git a/src/api/client_server/to_device.rs b/src/api/client_server/to_device.rs index 31590fc7..bce893ed 100644 --- a/src/api/client_server/to_device.rs +++ b/src/api/client_server/to_device.rs @@ -63,7 +63,7 @@ pub async fn send_event_to_device_route( event.deserialize_as().map_err(|_| { Error::BadRequest(ErrorKind::InvalidParam, "Event is invalid") })?, - )? 
+ )?; } DeviceIdOrAllDevices::AllDevices => { diff --git a/src/api/client_server/user_directory.rs b/src/api/client_server/user_directory.rs index c30bac51..f648cb7b 100644 --- a/src/api/client_server/user_directory.rs +++ b/src/api/client_server/user_directory.rs @@ -52,7 +52,7 @@ pub async fn search_users_route( .rooms .state_cache .rooms_joined(&user_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .any(|room| { services() .rooms diff --git a/src/api/ruma_wrapper/axum.rs b/src/api/ruma_wrapper/axum.rs index 57348a73..1117874c 100644 --- a/src/api/ruma_wrapper/axum.rs +++ b/src/api/ruma_wrapper/axum.rs @@ -215,7 +215,7 @@ where .event_handler .fetch_signing_keys_for_server( &x_matrix.origin, - vec![x_matrix.key.to_owned()], + vec![x_matrix.key.clone()], ) .await; diff --git a/src/api/server_server.rs b/src/api/server_server.rs index 73246574..e4f8cbb1 100644 --- a/src/api/server_server.rs +++ b/src/api/server_server.rs @@ -866,7 +866,7 @@ pub async fn send_transaction_message_route( warn!( "Could not fetch all signatures for PDUs from {}: {:?}", sender_servername, e - ) + ); }); for (event_id, value, room_id) in parsed_pdus { @@ -876,7 +876,7 @@ pub async fn send_transaction_message_route( .roomid_mutex_federation .write() .unwrap() - .entry(room_id.to_owned()) + .entry(room_id.clone()) .or_default(), ); let mutex_lock = mutex.lock().await; @@ -1037,7 +1037,7 @@ pub async fn send_transaction_message_route( "Event is invalid", ) })?, - )? + )?; } DeviceIdOrAllDevices::AllDevices => { @@ -1214,7 +1214,7 @@ pub async fn get_backfill_route( .take(limit.try_into().unwrap()); let events = all_events - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter(|(_, e)| { matches!( services().rooms.state_accessor.server_can_see_event( @@ -1562,7 +1562,7 @@ pub async fn create_join_event_template_route( .roomid_mutex_state .write() .unwrap() - .entry(body.room_id.to_owned()) + .entry(body.room_id.clone()) .or_default(), ); let state_lock = mutex_state.lock().await; @@ -1768,7 +1768,7 @@ async fn create_join_event( .rooms .state_cache .room_servers(room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter(|server| &**server != services().globals.server_name()); services().sending.send_pdu(servers, &pdu_id)?; @@ -1986,7 +1986,7 @@ pub async fn get_devices_route( devices: services() .users .all_devices_metadata(&body.user_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter_map(|metadata| { let device_id_string = metadata.device_id.as_str().to_owned(); let device_display_name = match services().globals.allow_device_name_federation() { @@ -2062,11 +2062,11 @@ pub async fn get_profile_information_route( match &body.field { Some(ProfileField::DisplayName) => { - displayname = services().users.displayname(&body.user_id)? + displayname = services().users.displayname(&body.user_id)?; } Some(ProfileField::AvatarUrl) => { avatar_url = services().users.avatar_url(&body.user_id)?; - blurhash = services().users.blurhash(&body.user_id)? 
+ blurhash = services().users.blurhash(&body.user_id)?; } // TODO: what to do with custom Some(_) => {} diff --git a/src/config/proxy.rs b/src/config/proxy.rs index 95640ab4..80eee6af 100644 --- a/src/config/proxy.rs +++ b/src/config/proxy.rs @@ -66,7 +66,7 @@ impl PartialProxyConfig { let mut excluded_because = None; // most specific reason it was excluded if self.include.is_empty() { // treat empty include list as `*` - included_because = Some(&WildCardedDomain::WildCard) + included_because = Some(&WildCardedDomain::WildCard); } for wc_domain in &self.include { if wc_domain.matches(domain) { diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index 443d16d0..d51ccefd 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -40,7 +40,6 @@ fn db_options(rocksdb_cache: &rocksdb::Cache, config: &Config) -> rocksdb::Optio let rocksdb_log_level = match config.rocksdb_log_level.as_ref() { "debug" => Debug, "info" => Info, - "warn" => Warn, "error" => Error, "fatal" => Fatal, _ => Warn, @@ -71,7 +70,7 @@ fn db_options(rocksdb_cache: &rocksdb::Cache, config: &Config) -> rocksdb::Optio db_opts.set_block_based_table_factory(&block_based_options); db_opts.set_level_compaction_dynamic_level_bytes(true); db_opts.create_if_missing(true); - db_opts.increase_parallelism(num_cpus::get() as i32); + db_opts.increase_parallelism(num_cpus::get().try_into().unwrap_or_default()); //db_opts.set_max_open_files(config.rocksdb_max_open_files); db_opts.set_compression_type(rocksdb::DBCompressionType::Zstd); db_opts.set_compaction_style(rocksdb::DBCompactionStyle::Level); @@ -79,7 +78,7 @@ fn db_options(rocksdb_cache: &rocksdb::Cache, config: &Config) -> rocksdb::Optio // https://github.com/facebook/rocksdb/wiki/Setup-Options-and-Basic-Tuning db_opts.set_max_background_jobs(6); - db_opts.set_bytes_per_sync(1048576); + db_opts.set_bytes_per_sync(1_048_576); // https://github.com/facebook/rocksdb/wiki/WAL-Recovery-Modes#ktoleratecorruptedtailrecords // diff --git a/src/database/key_value/account_data.rs b/src/database/key_value/account_data.rs index daa63901..4e9da595 100644 --- a/src/database/key_value/account_data.rs +++ b/src/database/key_value/account_data.rs @@ -21,7 +21,7 @@ impl service::account_data::Data for KeyValueDatabase { data: &serde_json::Value, ) -> Result<()> { let mut prefix = room_id - .map(|r| r.to_string()) + .map(std::string::ToString::to_string) .unwrap_or_default() .as_bytes() .to_vec(); @@ -71,7 +71,7 @@ impl service::account_data::Data for KeyValueDatabase { kind: RoomAccountDataEventType, ) -> Result>> { let mut key = room_id - .map(|r| r.to_string()) + .map(std::string::ToString::to_string) .unwrap_or_default() .as_bytes() .to_vec(); @@ -106,7 +106,7 @@ impl service::account_data::Data for KeyValueDatabase { let mut userdata = HashMap::new(); let mut prefix = room_id - .map(|r| r.to_string()) + .map(std::string::ToString::to_string) .unwrap_or_default() .as_bytes() .to_vec(); diff --git a/src/database/key_value/appservice.rs b/src/database/key_value/appservice.rs index 3243183d..55206243 100644 --- a/src/database/key_value/appservice.rs +++ b/src/database/key_value/appservice.rs @@ -13,7 +13,7 @@ impl service::appservice::Data for KeyValueDatabase { self.cached_registrations .write() .unwrap() - .insert(id.to_owned(), yaml.to_owned()); + .insert(id.to_owned(), yaml.clone()); Ok(id.to_owned()) } @@ -67,7 +67,7 @@ impl service::appservice::Data for KeyValueDatabase { fn all(&self) -> Result> { self.iter_ids()? 
- .filter_map(|id| id.ok()) + .filter_map(std::result::Result::ok) .map(move |id| { Ok(( id.clone(), diff --git a/src/database/key_value/globals.rs b/src/database/key_value/globals.rs index 3cc1663b..b921d822 100644 --- a/src/database/key_value/globals.rs +++ b/src/database/key_value/globals.rs @@ -74,7 +74,7 @@ impl service::globals::Data for KeyValueDatabase { .rooms .state_cache .rooms_joined(user_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) { let short_roomid = services() .rooms @@ -134,7 +134,7 @@ impl service::globals::Data for KeyValueDatabase { } fn cleanup(&self) -> Result<()> { - self._db.cleanup() + self.db.cleanup() } fn memory_usage(&self) -> String { @@ -158,7 +158,7 @@ our_real_users_cache: {our_real_users_cache} appservice_in_room_cache: {appservice_in_room_cache} lasttimelinecount_cache: {lasttimelinecount_cache}\n" ); - if let Ok(db_stats) = self._db.memory_usage() { + if let Ok(db_stats) = self.db.memory_usage() { response += &db_stats; } @@ -207,7 +207,7 @@ lasttimelinecount_cache: {lasttimelinecount_cache}\n" self.global.insert(b"keypair", &keypair)?; Ok::<_, Error>(keypair) }, - |s| Ok(s.to_vec()), + Ok, )?; let mut parts = keypair_bytes.splitn(2, |&b| b == 0xff); diff --git a/src/database/key_value/key_backups.rs b/src/database/key_value/key_backups.rs index 900b700b..a8018938 100644 --- a/src/database/key_value/key_backups.rs +++ b/src/database/key_value/key_backups.rs @@ -283,7 +283,7 @@ impl service::key_backups::Data for KeyValueDatabase { Ok::<_, Error>((session_id, key_data)) }) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .collect()) } diff --git a/src/database/key_value/media.rs b/src/database/key_value/media.rs index f901bd55..1d879e72 100644 --- a/src/database/key_value/media.rs +++ b/src/database/key_value/media.rs @@ -152,21 +152,21 @@ impl service::media::Data for KeyValueDatabase { value.extend_from_slice( data.title .as_ref() - .map(|t| t.as_bytes()) + .map(std::string::String::as_bytes) .unwrap_or_default(), ); value.push(0xff); value.extend_from_slice( data.description .as_ref() - .map(|d| d.as_bytes()) + .map(std::string::String::as_bytes) .unwrap_or_default(), ); value.push(0xff); value.extend_from_slice( data.image .as_ref() - .map(|i| i.as_bytes()) + .map(std::string::String::as_bytes) .unwrap_or_default(), ); value.push(0xff); diff --git a/src/database/key_value/rooms/alias.rs b/src/database/key_value/rooms/alias.rs index 0d3e2b10..1b16d9a2 100644 --- a/src/database/key_value/rooms/alias.rs +++ b/src/database/key_value/rooms/alias.rs @@ -15,7 +15,7 @@ impl service::rooms::alias::Data for KeyValueDatabase { fn remove_alias(&self, alias: &RoomAliasId) -> Result<()> { if let Some(room_id) = self.alias_roomid.get(alias.alias().as_bytes())? 
{ - let mut prefix = room_id.to_vec(); + let mut prefix = room_id; prefix.push(0xff); for (key, _) in self.aliasid_alias.scan_prefix(prefix) { diff --git a/src/database/key_value/rooms/edus/typing.rs b/src/database/key_value/rooms/edus/typing.rs index 5709192c..28c06652 100644 --- a/src/database/key_value/rooms/edus/typing.rs +++ b/src/database/key_value/rooms/edus/typing.rs @@ -76,7 +76,7 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase { .map_err(|_| Error::bad_database("RoomTyping has invalid timestamp bytes."))?, )) }) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .take_while(|&(_, timestamp)| timestamp < current_timestamp) { // This is an outdated edu (time > timestamp) diff --git a/src/database/key_value/rooms/pdu_metadata.rs b/src/database/key_value/rooms/pdu_metadata.rs index ebfba814..0641f9d8 100644 --- a/src/database/key_value/rooms/pdu_metadata.rs +++ b/src/database/key_value/rooms/pdu_metadata.rs @@ -4,11 +4,8 @@ use ruma::{EventId, RoomId, UserId}; use crate::{ database::KeyValueDatabase, - service::{ - self, - rooms::timeline::{data::PduData, PduCount}, - }, - services, utils, Error, Result, + service::{self, rooms::timeline::PduCount}, + services, utils, Error, PduEvent, Result, }; impl service::rooms::pdu_metadata::Data for KeyValueDatabase { @@ -25,7 +22,7 @@ impl service::rooms::pdu_metadata::Data for KeyValueDatabase { shortroomid: u64, target: u64, until: PduCount, - ) -> PduData<'a> { + ) -> Result> + 'a>> { let prefix = target.to_be_bytes().to_vec(); let mut current = prefix.clone(); diff --git a/src/database/key_value/rooms/short.rs b/src/database/key_value/rooms/short.rs index 502557a0..40893986 100644 --- a/src/database/key_value/rooms/short.rs +++ b/src/database/key_value/rooms/short.rs @@ -46,13 +46,13 @@ impl service::rooms::short::Data for KeyValueDatabase { return Ok(Some(*short)); } - let mut statekey = event_type.to_string().as_bytes().to_vec(); - statekey.push(0xff); - statekey.extend_from_slice(state_key.as_bytes()); + let mut statekey_vec = event_type.to_string().as_bytes().to_vec(); + statekey_vec.push(0xff); + statekey_vec.extend_from_slice(state_key.as_bytes()); let short = self .statekey_shortstatekey - .get(&statekey)? + .get(&statekey_vec)? .map(|shortstatekey| { utils::u64_from_bytes(&shortstatekey) .map_err(|_| Error::bad_database("Invalid shortstatekey in db.")) @@ -83,19 +83,19 @@ impl service::rooms::short::Data for KeyValueDatabase { return Ok(*short); } - let mut statekey = event_type.to_string().as_bytes().to_vec(); - statekey.push(0xff); - statekey.extend_from_slice(state_key.as_bytes()); + let mut statekey_vec = event_type.to_string().as_bytes().to_vec(); + statekey_vec.push(0xff); + statekey_vec.extend_from_slice(state_key.as_bytes()); - let short = match self.statekey_shortstatekey.get(&statekey)? { + let short = match self.statekey_shortstatekey.get(&statekey_vec)? 
{ Some(shortstatekey) => utils::u64_from_bytes(&shortstatekey) .map_err(|_| Error::bad_database("Invalid shortstatekey in db."))?, None => { let shortstatekey = services().globals.next_count()?; self.statekey_shortstatekey - .insert(&statekey, &shortstatekey.to_be_bytes())?; + .insert(&statekey_vec, &shortstatekey.to_be_bytes())?; self.shortstatekey_statekey - .insert(&shortstatekey.to_be_bytes(), &statekey)?; + .insert(&shortstatekey.to_be_bytes(), &statekey_vec)?; shortstatekey } }; diff --git a/src/database/key_value/rooms/state.rs b/src/database/key_value/rooms/state.rs index 6865d7b9..fd0c81e6 100644 --- a/src/database/key_value/rooms/state.rs +++ b/src/database/key_value/rooms/state.rs @@ -63,7 +63,7 @@ impl service::rooms::state::Data for KeyValueDatabase { } for event_id in event_ids { - let mut key = prefix.to_owned(); + let mut key = prefix.clone(); key.extend_from_slice(event_id.as_bytes()); self.roomid_pduleaves.insert(&key, event_id.as_bytes())?; } diff --git a/src/database/key_value/rooms/state_cache.rs b/src/database/key_value/rooms/state_cache.rs index c71d481d..76d397f9 100644 --- a/src/database/key_value/rooms/state_cache.rs +++ b/src/database/key_value/rooms/state_cache.rs @@ -105,7 +105,10 @@ impl service::rooms::state_cache::Data for KeyValueDatabase { let mut joined_servers = HashSet::new(); let mut real_users = HashSet::new(); - for joined in self.room_members(room_id).filter_map(|r| r.ok()) { + for joined in self + .room_members(room_id) + .filter_map(std::result::Result::ok) + { joined_servers.insert(joined.server_name().to_owned()); if joined.server_name() == services().globals.server_name() && !services().users.is_deactivated(&joined).unwrap_or(true) @@ -115,7 +118,10 @@ impl service::rooms::state_cache::Data for KeyValueDatabase { joinedcount += 1; } - for _invited in self.room_members_invited(room_id).filter_map(|r| r.ok()) { + for _invited in self + .room_members_invited(room_id) + .filter_map(std::result::Result::ok) + { invitedcount += 1; } @@ -130,7 +136,10 @@ impl service::rooms::state_cache::Data for KeyValueDatabase { .unwrap() .insert(room_id.to_owned(), Arc::new(real_users)); - for old_joined_server in self.room_servers(room_id).filter_map(|r| r.ok()) { + for old_joined_server in self + .room_servers(room_id) + .filter_map(std::result::Result::ok) + { if !joined_servers.remove(&old_joined_server) { // Server not in room anymore let mut roomserver_id = room_id.as_bytes().to_vec(); diff --git a/src/database/key_value/rooms/threads.rs b/src/database/key_value/rooms/threads.rs index 02176fee..794b5fd7 100644 --- a/src/database/key_value/rooms/threads.rs +++ b/src/database/key_value/rooms/threads.rs @@ -70,7 +70,7 @@ impl service::rooms::threads::Data for KeyValueDatabase { })?) 
.map_err(|_| Error::bad_database("Invalid UserId in threadid_userids.")) }) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .collect(), )) } else { diff --git a/src/database/key_value/rooms/timeline.rs b/src/database/key_value/rooms/timeline.rs index d76e211d..0331a624 100644 --- a/src/database/key_value/rooms/timeline.rs +++ b/src/database/key_value/rooms/timeline.rs @@ -5,11 +5,7 @@ use ruma::{ }; use tracing::error; -use crate::{ - database::KeyValueDatabase, - service::{self, rooms::timeline::data::PduData}, - services, utils, Error, PduEvent, Result, -}; +use crate::{database::KeyValueDatabase, service, services, utils, Error, PduEvent, Result}; use service::rooms::timeline::PduCount; @@ -232,7 +228,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase { user_id: &UserId, room_id: &RoomId, until: PduCount, - ) -> PduData<'a> { + ) -> Result> + 'a>> { let (prefix, current) = count_to_id(room_id, until, 1, true)?; let user_id = user_id.to_owned(); @@ -254,7 +250,12 @@ impl service::rooms::timeline::Data for KeyValueDatabase { )) } - fn pdus_after<'a>(&'a self, user_id: &UserId, room_id: &RoomId, from: PduCount) -> PduData<'a> { + fn pdus_after<'a>( + &'a self, + user_id: &UserId, + room_id: &RoomId, + from: PduCount, + ) -> Result> + 'a>> { let (prefix, current) = count_to_id(room_id, from, 1, false)?; let user_id = user_id.to_owned(); diff --git a/src/database/key_value/rooms/user.rs b/src/database/key_value/rooms/user.rs index 4c435720..f439f481 100644 --- a/src/database/key_value/rooms/user.rs +++ b/src/database/key_value/rooms/user.rs @@ -131,7 +131,7 @@ impl service::rooms::user::Data for KeyValueDatabase { Ok::<_, Error>(room_id) }) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) }); // We use the default compare function because keys are sorted correctly (not reversed) diff --git a/src/database/key_value/sending.rs b/src/database/key_value/sending.rs index 3fc3e042..6c8e939b 100644 --- a/src/database/key_value/sending.rs +++ b/src/database/key_value/sending.rs @@ -67,9 +67,9 @@ impl service::sending::Data for KeyValueDatabase { for (outgoing_kind, event) in requests { let mut key = outgoing_kind.get_prefix(); if let SendingEventType::Pdu(value) = &event { - key.extend_from_slice(value) + key.extend_from_slice(value); } else { - key.extend_from_slice(&services().globals.next_count()?.to_be_bytes()) + key.extend_from_slice(&services().globals.next_count()?.to_be_bytes()); } let value = if let SendingEventType::Edu(value) = &event { &**value diff --git a/src/database/key_value/transaction_ids.rs b/src/database/key_value/transaction_ids.rs index 2ea6ad4a..b3bd05f4 100644 --- a/src/database/key_value/transaction_ids.rs +++ b/src/database/key_value/transaction_ids.rs @@ -12,7 +12,7 @@ impl service::transaction_ids::Data for KeyValueDatabase { ) -> Result<()> { let mut key = user_id.as_bytes().to_vec(); key.push(0xff); - key.extend_from_slice(device_id.map(|d| d.as_bytes()).unwrap_or_default()); + key.extend_from_slice(device_id.map(DeviceId::as_bytes).unwrap_or_default()); key.push(0xff); key.extend_from_slice(txn_id.as_bytes()); @@ -29,7 +29,7 @@ impl service::transaction_ids::Data for KeyValueDatabase { ) -> Result>> { let mut key = user_id.as_bytes().to_vec(); key.push(0xff); - key.extend_from_slice(device_id.map(|d| d.as_bytes()).unwrap_or_default()); + key.extend_from_slice(device_id.map(DeviceId::as_bytes).unwrap_or_default()); key.push(0xff); key.extend_from_slice(txn_id.as_bytes()); diff --git a/src/database/key_value/uiaa.rs 
b/src/database/key_value/uiaa.rs index 5fd91b07..652c12d0 100644 --- a/src/database/key_value/uiaa.rs +++ b/src/database/key_value/uiaa.rs @@ -34,7 +34,7 @@ impl service::uiaa::Data for KeyValueDatabase { .read() .unwrap() .get(&(user_id.to_owned(), device_id.to_owned(), session.to_owned())) - .map(|j| j.to_owned()) + .map(std::borrow::ToOwned::to_owned) } fn update_uiaa_session( diff --git a/src/database/key_value/users.rs b/src/database/key_value/users.rs index 9dfbd7a0..5601009a 100644 --- a/src/database/key_value/users.rs +++ b/src/database/key_value/users.rs @@ -650,7 +650,7 @@ impl service::users::Data for KeyValueDatabase { .rooms .state_cache .rooms_joined(user_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) { // Don't send key updates to unencrypted rooms if services() @@ -855,7 +855,7 @@ impl service::users::Data for KeyValueDatabase { .map_err(|_| Error::bad_database("ToDeviceId has invalid count bytes."))?, )) }) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .take_while(|&(_, count)| count <= until) { self.todeviceid_events.remove(&key)?; diff --git a/src/database/mod.rs b/src/database/mod.rs index 1d88b041..e9926695 100644 --- a/src/database/mod.rs +++ b/src/database/mod.rs @@ -36,7 +36,7 @@ use tokio::{sync::mpsc, time::interval}; use tracing::{debug, error, info, warn}; pub struct KeyValueDatabase { - _db: Arc, + db: Arc, //pub globals: globals::Globals, pub(super) global: Arc, @@ -252,7 +252,7 @@ impl KeyValueDatabase { let (presence_sender, presence_receiver) = mpsc::unbounded_channel(); let db_raw = Box::new(Self { - _db: builder.clone(), + db: builder.clone(), userid_password: builder.open_tree("userid_password")?, userid_displayname: builder.open_tree("userid_displayname")?, userid_avatarurl: builder.open_tree("userid_avatarurl")?, @@ -616,7 +616,7 @@ impl KeyValueDatabase { Ok::<_, Error>(()) }; - for (k, seventid) in db._db.open_tree("stateid_shorteventid")?.iter() { + for (k, seventid) in db.db.open_tree("stateid_shorteventid")?.iter() { let sstatehash = utils::u64_from_bytes(&k[0..size_of::()]) .expect("number of bytes is correct"); let sstatekey = k[size_of::()..].to_vec(); @@ -789,7 +789,7 @@ impl KeyValueDatabase { } // Force E2EE device list updates so we can send them over federation - for user_id in services().users.iter().filter_map(|r| r.ok()) { + for user_id in services().users.iter().filter_map(std::result::Result::ok) { services().users.mark_device_key_update(&user_id)?; } @@ -799,7 +799,7 @@ impl KeyValueDatabase { } if services().globals.database_version()? < 11 { - db._db + db.db .open_tree("userdevicesessionid_uiaarequest")? .clear()?; services().globals.bump_database_version(11)?; @@ -974,7 +974,7 @@ impl KeyValueDatabase { .into_iter() .map(|x| &patterns.patterns()[x]) .join(", ") - ) + ); } } } @@ -998,7 +998,7 @@ impl KeyValueDatabase { .into_iter() .map(|x| &patterns.patterns()[x]) .join(", ") - ) + ); } } } @@ -1039,7 +1039,7 @@ impl KeyValueDatabase { error!( "Could not set the configured emergency password for the conduit user: {}", e - ) + ); } }; @@ -1059,7 +1059,7 @@ impl KeyValueDatabase { pub fn flush(&self) -> Result<()> { let start = std::time::Instant::now(); - let res = self._db.flush(); + let res = self.db.flush(); debug!("flush: took {:?}", start.elapsed()); @@ -1113,7 +1113,7 @@ impl KeyValueDatabase { .send_message(RoomMessageEventContent::text_plain(format!( "@room: the following is a message from the conduwuit puppy. 
it was sent on '{}':\n\n{}", update.date, update.message - ))) + ))); } } services() diff --git a/src/service/admin/mod.rs b/src/service/admin/mod.rs index f2267719..d69603c7 100644 --- a/src/service/admin/mod.rs +++ b/src/service/admin/mod.rs @@ -442,14 +442,14 @@ impl Service { .roomid_mutex_state .write() .unwrap() - .entry(conduit_room.to_owned()) + .entry(conduit_room.clone()) .or_default(), ); let state_lock = mutex_state.lock().await; if let Some(reply) = reply { - message_content.relates_to = Some(Reply { in_reply_to: InReplyTo { event_id: reply.into() } }) + message_content.relates_to = Some(Reply { in_reply_to: InReplyTo { event_id: reply.into() } }); } services().rooms.timeline.build_and_append_pdu( @@ -618,7 +618,7 @@ impl Service { if let Ok(appservices) = services() .appservice .iter_ids() - .map(|ids| ids.collect::>()) + .map(std::iter::Iterator::collect::>) { let count = appservices.len(); let output = format!( @@ -626,7 +626,7 @@ impl Service { count, appservices .into_iter() - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .collect::>() .join(", ") ); @@ -992,7 +992,7 @@ impl Service { false => true, }, Err(_) => false, - }) + }); } for &user_id in &user_ids { @@ -1002,7 +1002,7 @@ impl Service { } if services().users.deactivate_account(user_id).is_ok() { - deactivation_count += 1 + deactivation_count += 1; } } @@ -1243,7 +1243,7 @@ impl Service { continue; } - room_ids.push(owned_room_id) + room_ids.push(owned_room_id); } Err(e) => { if force { @@ -1441,7 +1441,7 @@ impl Service { .rooms .metadata .iter_ids() - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .map(Self::get_room_info) .collect::>(); rooms.sort_by_key(|r| r.1); @@ -1672,7 +1672,7 @@ impl Service { .rooms .directory .public_rooms() - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .map(Self::get_room_info) .collect::>(); rooms.sort_by_key(|r| r.1); @@ -1933,7 +1933,7 @@ impl Service { } DebugCommand::ForceDeviceListUpdates => { // Force E2EE device list updates for all users - for user_id in services().users.iter().filter_map(|r| r.ok()) { + for user_id in services().users.iter().filter_map(std::result::Result::ok) { services().users.mark_device_key_update(&user_id)?; } RoomMessageEventContent::text_plain( @@ -2390,7 +2390,7 @@ impl Service { // Set power level let mut users = BTreeMap::new(); - users.insert(conduit_user.to_owned(), 100.into()); + users.insert(conduit_user.clone(), 100.into()); users.insert(user_id.to_owned(), 100.into()); services() diff --git a/src/service/media/mod.rs b/src/service/media/mod.rs index dc1a15d6..5b5a3344 100644 --- a/src/service/media/mod.rs +++ b/src/service/media/mod.rs @@ -249,9 +249,6 @@ impl Service { if file_created_at >= user_duration { debug!("File is within user duration, pushing to list of file paths and keys to delete."); remote_mxcs.push(mxc.to_string()); - } else { - // don't need to log this even in debug as it would be noisy - continue; } } @@ -331,7 +328,7 @@ impl Service { Ok(Some(FileMeta { content_disposition, content_type, - file: file.to_vec(), + file: file.clone(), })) } else if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc.clone(), 0, 0) @@ -354,7 +351,7 @@ impl Service { return Ok(Some(FileMeta { content_disposition, content_type, - file: file.to_vec(), + file: file.clone(), })); } @@ -426,14 +423,14 @@ impl Service { Ok(Some(FileMeta { content_disposition, content_type, - file: thumbnail_bytes.to_vec(), + file: thumbnail_bytes.clone(), })) } else { // Couldn't parse 
file to generate thumbnail, send original Ok(Some(FileMeta { content_disposition, content_type, - file: file.to_vec(), + file: file.clone(), })) } } else { diff --git a/src/service/pusher/mod.rs b/src/service/pusher/mod.rs index 1f700e51..2131e918 100644 --- a/src/service/pusher/mod.rs +++ b/src/service/pusher/mod.rs @@ -64,7 +64,7 @@ impl Service { warn!("Failed to find destination {}: {}", destination, e); Error::BadServerResponse("Invalid destination") })? - .map(|body| body.freeze()); + .map(bytes::BytesMut::freeze); let reqwest_request = reqwest::Request::try_from(http_request)?; @@ -258,7 +258,7 @@ impl Service { .iter() .any(|t| matches!(t, Tweak::Highlight(true) | Tweak::Sound(_))) { - notifi.prio = NotificationPriority::High + notifi.prio = NotificationPriority::High; } if event_id_only { @@ -285,7 +285,7 @@ impl Service { Ok(()) } // TODO: Handle email - PusherKind::Email(_) => Ok(()), + //PusherKind::Email(_) => Ok(()), _ => Ok(()), } } diff --git a/src/service/rooms/event_handler/mod.rs b/src/service/rooms/event_handler/mod.rs index 87787ee5..dd302d22 100644 --- a/src/service/rooms/event_handler/mod.rs +++ b/src/service/rooms/event_handler/mod.rs @@ -36,7 +36,10 @@ use ruma::{ use serde_json::value::RawValue as RawJsonValue; use tracing::{debug, error, info, trace, warn}; -use crate::{service::*, services, Error, PduEvent}; +use crate::{ + service::{pdu, Arc, BTreeMap, HashMap, Result, RwLock}, + services, Error, PduEvent, +}; use super::state_compressor::CompressedStateEvent; @@ -63,7 +66,7 @@ impl Service { /// 8. If not timeline event: stop /// 9. Fetch any missing prev events doing all checks listed here starting at 1. These are timeline /// events - /// 10. Fetch missing state and auth chain events by calling /state_ids at backwards extremities + /// 10. Fetch missing state and auth chain events by calling `/state_ids` at backwards extremities /// doing all the checks in this list starting at 1. These are not timeline events /// 11. Check the auth of the event passes based on the state of the event /// 12. Ensure that the state is derived from the previous current state (i.e. we calculated by @@ -101,7 +104,7 @@ impl Service { // 1. Skip the PDU if we already have it as a timeline event if let Some(pdu_id) = services().rooms.timeline.get_pdu_id(event_id)? { - return Ok(Some(pdu_id.to_vec())); + return Ok(Some(pdu_id)); } let create_event = services() @@ -202,7 +205,7 @@ impl Service { e.insert((Instant::now(), 1)); } hash_map::Entry::Occupied(mut e) => { - *e.get_mut() = (Instant::now(), e.get().1 + 1) + *e.get_mut() = (Instant::now(), e.get().1 + 1); } } continue; @@ -246,7 +249,7 @@ impl Service { e.insert((Instant::now(), 1)); } hash_map::Entry::Occupied(mut e) => { - *e.get_mut() = (Instant::now(), e.get().1 + 1) + *e.get_mut() = (Instant::now(), e.get().1 + 1); } } } @@ -440,7 +443,7 @@ impl Service { if !matches!( auth_events .get(&(StateEventType::RoomCreate, "".to_owned())) - .map(|a| a.as_ref()), + .map(std::convert::AsRef::as_ref), Some(_) | None ) { return Err(Error::BadRequest( @@ -733,7 +736,9 @@ impl Service { .get_shortstatekey(&StateEventType::RoomCreate, "")? 
.expect("Room exists"); - if state.get(&create_shortstatekey).map(|id| id.as_ref()) + if state + .get(&create_shortstatekey) + .map(std::convert::AsRef::as_ref) != Some(&create_event.event_id) { return Err(Error::bad_database( @@ -1167,7 +1172,7 @@ impl Service { } } } - events_with_auth_events.push((id, None, events_in_reverse_order)) + events_with_auth_events.push((id, None, events_in_reverse_order)); } // We go through all the signatures we see on the PDUs and their unresolved @@ -1185,7 +1190,7 @@ impl Service { warn!( "Could not fetch all signatures for PDUs from {}: {:?}", origin, e - ) + ); }); let mut pdus = vec![]; @@ -1565,7 +1570,7 @@ impl Service { } drop(pkm); - } + }; if servers.is_empty() { info!("server is empty, we had all keys locally, not fetching any keys"); diff --git a/src/service/rooms/pdu_metadata/data.rs b/src/service/rooms/pdu_metadata/data.rs index 121d80dc..a4df34cc 100644 --- a/src/service/rooms/pdu_metadata/data.rs +++ b/src/service/rooms/pdu_metadata/data.rs @@ -1,20 +1,18 @@ use std::sync::Arc; -use crate::{ - service::rooms::timeline::{data::PduData, PduCount}, - Result, -}; +use crate::{service::rooms::timeline::PduCount, PduEvent, Result}; use ruma::{EventId, RoomId, UserId}; pub trait Data: Send + Sync { fn add_relation(&self, from: u64, to: u64) -> Result<()>; + #[allow(clippy::type_complexity)] fn relations_until<'a>( &'a self, user_id: &'a UserId, room_id: u64, target: u64, until: PduCount, - ) -> PduData<'a>; + ) -> Result> + 'a>>; fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc]) -> Result<()>; fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> Result; fn mark_event_soft_failed(&self, event_id: &EventId) -> Result<()>; diff --git a/src/service/rooms/pdu_metadata/mod.rs b/src/service/rooms/pdu_metadata/mod.rs index 411f4f54..fcc8335e 100644 --- a/src/service/rooms/pdu_metadata/mod.rs +++ b/src/service/rooms/pdu_metadata/mod.rs @@ -78,7 +78,7 @@ impl Service { }) }) .take(limit) - .filter_map(|r| r.ok()) // Filter out buggy events + .filter_map(std::result::Result::ok) // Filter out buggy events .filter(|(_, pdu)| { services() .rooms @@ -125,7 +125,7 @@ impl Service { }) }) .take(limit) - .filter_map(|r| r.ok()) // Filter out buggy events + .filter_map(std::result::Result::ok) // Filter out buggy events .filter(|(_, pdu)| { services() .rooms diff --git a/src/service/rooms/spaces/mod.rs b/src/service/rooms/spaces/mod.rs index 0174b48c..caeacc52 100644 --- a/src/service/rooms/spaces/mod.rs +++ b/src/service/rooms/spaces/mod.rs @@ -62,11 +62,11 @@ impl Service { let mut results = Vec::new(); while let Some(current_room) = { - while stack.last().map_or(false, |s| s.is_empty()) { + while stack.last().map_or(false, std::vec::Vec::is_empty) { stack.pop(); } if !stack.is_empty() { - stack.last_mut().and_then(|s| s.pop()) + stack.last_mut().and_then(std::vec::Vec::pop) } else { None } @@ -80,7 +80,7 @@ impl Service { .roomid_spacechunk_cache .lock() .unwrap() - .get_mut(¤t_room.to_owned()) + .get_mut(¤t_room.clone()) .as_ref() { if let Some(cached) = cached { @@ -202,7 +202,7 @@ impl Service { .send_federation_request( server, federation::space::get_hierarchy::v1::Request { - room_id: current_room.to_owned(), + room_id: current_room.clone(), suggested_only, }, ) diff --git a/src/service/rooms/state_accessor/mod.rs b/src/service/rooms/state_accessor/mod.rs index 8d9dae59..f6e3376c 100644 --- a/src/service/rooms/state_accessor/mod.rs +++ b/src/service/rooms/state_accessor/mod.rs @@ -81,16 +81,15 @@ impl Service { /// The user 
was a joined member at this state (potentially in the past) fn user_was_joined(&self, shortstatehash: u64, user_id: &UserId) -> bool { self.user_membership(shortstatehash, user_id) - .map(|s| s == MembershipState::Join) - .unwrap_or_default() // Return sensible default, i.e. false + .is_ok_and(|s| s == MembershipState::Join) // Return sensible default, i.e. false } /// The user was an invited or joined room member at this state (potentially /// in the past) fn user_was_invited(&self, shortstatehash: u64, user_id: &UserId) -> bool { self.user_membership(shortstatehash, user_id) - .map(|s| s == MembershipState::Join || s == MembershipState::Invite) - .unwrap_or_default() // Return sensible default, i.e. false + .is_ok_and(|s| s == MembershipState::Join || s == MembershipState::Invite) + // Return sensible default, i.e. false } /// Whether a server is allowed to see an event through federation, based on @@ -102,9 +101,8 @@ impl Service { room_id: &RoomId, event_id: &EventId, ) -> Result<bool> { - let shortstatehash = match self.pdu_shortstatehash(event_id)? { - Some(shortstatehash) => shortstatehash, - None => return Ok(true), + let Some(shortstatehash) = self.pdu_shortstatehash(event_id)? else { + return Ok(true); }; if let Some(visibility) = self @@ -130,7 +128,7 @@ impl Service { .rooms .state_cache .room_members(room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter(|member| member.server_name() == origin); let visibility = match history_visibility { diff --git a/src/service/rooms/threads/mod.rs b/src/service/rooms/threads/mod.rs index c6193bc8..ad70f2e0 100644 --- a/src/service/rooms/threads/mod.rs +++ b/src/service/rooms/threads/mod.rs @@ -1,5 +1,7 @@ mod data; +use std::collections::BTreeMap; + pub use data::Data; use ruma::{ api::client::{error::ErrorKind, threads::get_threads::v1::IncludeThreads}, @@ -56,7 +58,7 @@ impl Service { if let CanonicalJsonValue::Object(unsigned) = root_pdu_json .entry("unsigned".to_owned()) - .or_insert_with(|| CanonicalJsonValue::Object(Default::default())) + .or_insert_with(|| CanonicalJsonValue::Object(BTreeMap::default())) { if let Some(mut relations) = unsigned .get("m.relations") diff --git a/src/service/rooms/timeline/data.rs b/src/service/rooms/timeline/data.rs index df329fec..6290b8cc 100644 --- a/src/service/rooms/timeline/data.rs +++ b/src/service/rooms/timeline/data.rs @@ -6,8 +6,6 @@ use crate::{PduEvent, Result}; use super::PduCount; -pub type PduData<'a> = Result<Box<dyn Iterator<Item = Result<(PduCount, PduEvent)>> + 'a>>; - pub trait Data: Send + Sync { fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<PduCount>; @@ -68,12 +66,23 @@ pub trait Data: Send + Sync { /// Returns an iterator over all events and their tokens in a room that happened before the /// event with id `until` in reverse-chronological order. - fn pdus_until<'a>(&'a self, user_id: &UserId, room_id: &RoomId, until: PduCount) - -> PduData<'a>; + #[allow(clippy::type_complexity)] + fn pdus_until<'a>( + &'a self, + user_id: &UserId, + room_id: &RoomId, + until: PduCount, + ) -> Result<Box<dyn Iterator<Item = Result<(PduCount, PduEvent)>> + 'a>>; /// Returns an iterator over all events in a room that happened after the event with id `from` /// in chronological order.
- fn pdus_after<'a>(&'a self, user_id: &UserId, room_id: &RoomId, from: PduCount) -> PduData<'a>; + #[allow(clippy::type_complexity)] + fn pdus_after<'a>( + &'a self, + user_id: &UserId, + room_id: &RoomId, + from: PduCount, + ) -> Result<Box<dyn Iterator<Item = Result<(PduCount, PduEvent)>> + 'a>>; fn increment_notification_counts( &self, diff --git a/src/service/rooms/timeline/mod.rs b/src/service/rooms/timeline/mod.rs index a96d8748..eb29d643 100644 --- a/src/service/rooms/timeline/mod.rs +++ b/src/service/rooms/timeline/mod.rs @@ -243,7 +243,7 @@ impl Service { if let Some(state_key) = &pdu.state_key { if let CanonicalJsonValue::Object(unsigned) = pdu_json .entry("unsigned".to_owned()) - .or_insert_with(|| CanonicalJsonValue::Object(Default::default())) + .or_insert_with(|| CanonicalJsonValue::Object(BTreeMap::default())) { if let Some(shortstatehash) = services() .rooms @@ -400,8 +400,10 @@ impl Service { }) }) .transpose()? - .map(|ev: PushRulesEvent| ev.content.global) - .unwrap_or_else(|| Ruleset::server_default(user)); + .map_or_else( + || Ruleset::server_default(user), + |ev: PushRulesEvent| ev.content.global, + ); let mut highlight = false; let mut notify = false; @@ -686,7 +688,7 @@ impl Service { .rooms .alias .local_aliases_for_room(&pdu.room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .any(|room_alias| aliases.is_match(room_alias.as_str())) }; @@ -930,7 +932,7 @@ impl Service { .filter(|v| v.starts_with('@')) .unwrap_or(sender.as_str()); let server_name = services().globals.server_name(); - let server_user = format!("@conduit:{}", server_name); + let server_user = format!("@conduit:{server_name}"); let content = serde_json::from_str::(pdu.content.get()) .map_err(|_| Error::bad_database("Invalid content in pdu."))?; @@ -947,7 +949,7 @@ impl Service { .rooms .state_cache .room_members(room_id) - .filter_map(|m| m.ok()) + .filter_map(std::result::Result::ok) .filter(|m| m.server_name() == server_name) .filter(|m| m != target) .count(); @@ -973,7 +975,7 @@ impl Service { .rooms .state_cache .room_members(room_id) - .filter_map(|m| m.ok()) + .filter_map(std::result::Result::ok) .filter(|m| m.server_name() == server_name) .filter(|m| m != target) .count(); @@ -1016,7 +1018,7 @@ impl Service { .rooms .state_cache .room_servers(room_id) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .collect(); // In case we are kicking or banning a user, we need to inform their server of the change @@ -1217,7 +1219,7 @@ impl Service { .roomid_mutex_federation .write() .unwrap() - .entry(room_id.to_owned()) + .entry(room_id.clone()) .or_default(), ); let mutex_lock = mutex.lock().await; diff --git a/src/service/sending/mod.rs b/src/service/sending/mod.rs index f634c903..7a8ded14 100644 --- a/src/service/sending/mod.rs +++ b/src/service/sending/mod.rs @@ -82,6 +82,7 @@ impl OutgoingKind { } #[derive(Clone, Debug, PartialEq, Eq, Hash)] +#[allow(clippy::module_name_repetitions)] pub enum SendingEventType { Pdu(Vec<u8>), // pduid Edu(Vec<u8>), // pdu json @@ -130,7 +131,11 @@ impl Service { // Retry requests we could not finish yet let mut initial_transactions = HashMap::<OutgoingKind, Vec<SendingEventType>>::new(); - for (key, outgoing_kind, event) in self.db.active_requests().filter_map(|r| r.ok()) { + for (key, outgoing_kind, event) in self + .db + .active_requests() + .filter_map(std::result::Result::ok) + { let entry = initial_transactions .entry(outgoing_kind.clone()) .or_default(); @@ -160,7 +165,7 @@ impl Service { self.db.delete_all_active_requests_for(&outgoing_kind)?; // Find events that have been added since starting the last request - let
new_events = self.db.queued_requests(&outgoing_kind).filter_map(|r| r.ok()).take(30).collect::<Vec<_>>(); + let new_events = self.db.queued_requests(&outgoing_kind).filter_map(std::result::Result::ok).take(30).collect::<Vec<_>>(); if !new_events.is_empty() { // Insert pdus we found @@ -247,7 +252,7 @@ impl Service { for (_, e) in self .db .active_requests_for(outgoing_kind) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) { events.push(e); } @@ -284,7 +289,7 @@ impl Service { services() .users .keys_changed(room_id.as_ref(), since, None) - .filter_map(|r| r.ok()) + .filter_map(std::result::Result::ok) .filter(|user_id| user_id.server_name() == services().globals.server_name()), ); @@ -443,7 +448,7 @@ impl Service { )?; for ((outgoing_kind, event), key) in requests.into_iter().zip(keys) { self.sender - .send((outgoing_kind.to_owned(), event, key)) + .send((outgoing_kind.clone(), event, key)) .unwrap(); } @@ -513,7 +518,7 @@ impl Service { ), ) })? - .to_room_event()) + .to_room_event()); } SendingEventType::Edu(_) => { // Appservices don't need EDUs (?) @@ -600,13 +605,12 @@ impl Service { } } - let pusher = match services() + let Some(pusher) = services() .pusher .get_pusher(userid, pushkey) .map_err(|e| (OutgoingKind::Push(userid.clone(), pushkey.clone()), e))? - { - Some(pusher) => pusher, - None => continue, + else { + continue; }; let rules_for_user = services() @@ -618,8 +622,10 @@ impl Service { ) .unwrap_or_default() .and_then(|event| serde_json::from_str::<PushRulesEvent>(event.get()).ok()) - .map(|ev: PushRulesEvent| ev.content.global) - .unwrap_or_else(|| push::Ruleset::server_default(userid)); + .map_or_else( + || push::Ruleset::server_default(userid), + |ev: PushRulesEvent| ev.content.global, + ); let unread: UInt = services() .rooms diff --git a/src/service/uiaa/mod.rs b/src/service/uiaa/mod.rs index 2c51833a..10ac3664 100644 --- a/src/service/uiaa/mod.rs +++ b/src/service/uiaa/mod.rs @@ -48,10 +48,10 @@ impl Service { auth: &AuthData, uiaainfo: &UiaaInfo, ) -> Result<(bool, UiaaInfo)> { - let mut uiaainfo = auth - .session() - .map(|session| self.db.get_uiaa_session(user_id, device_id, session)) - .unwrap_or_else(|| Ok(uiaainfo.clone()))?; + let mut uiaainfo = auth.session().map_or_else( + || Ok(uiaainfo.clone()), + |session| self.db.get_uiaa_session(user_id, device_id, session), + )?; if uiaainfo.session.is_none() { uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH)); @@ -64,14 +64,11 @@ impl Service { password, ..
}) => { - let username = match identifier { - UserIdentifier::UserIdOrLocalpart(username) => username, - _ => { - return Err(Error::BadRequest( - ErrorKind::Unrecognized, - "Identifier type not recognized.", - )) - } + let UserIdentifier::UserIdOrLocalpart(username) = identifier else { + return Err(Error::BadRequest( + ErrorKind::Unrecognized, + "Identifier type not recognized.", + )); }; let user_id = UserId::parse_with_server_name( diff --git a/src/utils/error.rs b/src/utils/error.rs index 8549a713..ba6caf5e 100644 --- a/src/utils/error.rs +++ b/src/utils/error.rs @@ -100,7 +100,11 @@ impl Error { let message = format!("{self}"); - use ErrorKind::*; + use ErrorKind::{ + Forbidden, GuestAccessForbidden, LimitExceeded, MissingToken, NotFound, + ThreepidAuthFailed, ThreepidDenied, TooLarge, Unauthorized, Unknown, UnknownToken, + Unrecognized, UserDeactivated, WrongRoomKeysVersion, + }; let (kind, status_code) = match self { Self::BadRequest(kind, _) => ( kind.clone(), diff --git a/src/utils/mod.rs b/src/utils/mod.rs index 59c522dd..37ebc4f8 100644 --- a/src/utils/mod.rs +++ b/src/utils/mod.rs @@ -22,7 +22,7 @@ pub(crate) fn millis_since_unix_epoch() -> u64 { } pub(crate) fn increment(old: Option<&[u8]>) -> Option<Vec<u8>> { - let number = match old.map(|bytes| bytes.try_into()) { + let number = match old.map(std::convert::TryInto::try_into) { Some(Ok(bytes)) => { let number = u64::from_be_bytes(bytes); number + 1 @@ -94,7 +94,9 @@ pub(crate) fn common_elements( check_order: impl Fn(&[u8], &[u8]) -> Ordering, ) -> Option<impl Iterator<Item = Vec<u8>>> { let first_iterator = iterators.next()?; - let mut other_iterators = iterators.map(|i| i.peekable()).collect::<Vec<_>>(); + let mut other_iterators = iterators + .map(std::iter::Iterator::peekable) + .collect::<Vec<_>>(); Some(first_iterator.filter(move |target| { other_iterators.iter_mut().all(|it| {