actually fix all let_underscore_must_use lints

CI caught some more

Signed-off-by: strawberry <strawberry@puppygock.gay>
strawberry 2024-05-24 19:20:19 -04:00 committed by June 🍓🦴
parent 0877ee6191
commit 6269822613
8 changed files with 54 additions and 21 deletions
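Note: clippy's let_underscore_must_use lint fires when a #[must_use] value, most often a Result, is discarded through an underscore binding. A minimal hypothetical sketch (not from this repo) of the trigger and of the two fixes this commit applies, handling the error or scoping an explicit allow:

// Result is #[must_use], so discarding it with `_` trips the lint.
fn fallible() -> Result<(), std::io::Error> {
	Ok(())
}

fn main() {
	_ = fallible(); // lint: clippy::let_underscore_must_use

	// Fix 1: actually handle (or at least log) the error.
	if let Err(e) = fallible() {
		eprintln!("fallible failed: {e}");
	}

	// Fix 2: when discarding really is intended, say so explicitly.
	#[allow(clippy::let_underscore_must_use)]
	{
		_ = fallible();
	}
}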

View file

@@ -124,7 +124,9 @@ pub(crate) async fn get_remote_pdu_list(
 	for pdu in list {
 		if force {
-			_ = get_remote_pdu(Vec::new(), Box::from(pdu), server.clone()).await;
+			if let Err(e) = get_remote_pdu(Vec::new(), Box::from(pdu), server.clone()).await {
+				warn!(%e, "Failed to get remote PDU, ignoring error");
+			}
 		} else {
 			get_remote_pdu(Vec::new(), Box::from(pdu), server.clone()).await?;
 		}
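A standalone sketch of that log-and-continue pattern (names here are hypothetical stand-ins, and the tracing crate is assumed): with force set, each failure is logged at warn level via tracing's `%` Display capture and the loop proceeds; without it, `?` aborts on the first error.

use tracing::warn;

// Hypothetical stand-in for get_remote_pdu.
async fn fetch_pdu(id: &str) -> Result<(), String> {
	Err(format!("backfill failed for {id}"))
}

async fn fetch_all(ids: &[&str], force: bool) -> Result<(), String> {
	for id in ids {
		if force {
			// Log and keep going instead of silently discarding the Result.
			if let Err(e) = fetch_pdu(id).await {
				warn!(%e, "Failed to get remote PDU, ignoring error");
			}
		} else {
			fetch_pdu(id).await?;
		}
	}
	Ok(())
}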

View file

@@ -46,7 +46,7 @@ pub(crate) enum RoomAliasCommand {
 		room_alias_localpart: String,
 	},
-	/// - Remove an alias
+	/// - Remove a local alias
 	Remove {
 		/// The alias localpart to remove (`alias`, not `#alias:servername.tld`)
 		room_alias_localpart: String,

View file

@@ -317,7 +317,11 @@ pub(crate) async fn sync_events_route(
 		if duration.as_secs() > 30 {
 			duration = Duration::from_secs(30);
 		}
-		_ = tokio::time::timeout(duration, watcher).await;
+		#[allow(clippy::let_underscore_must_use)]
+		{
+			_ = tokio::time::timeout(duration, watcher).await;
+		}
 	}

 	Ok(response)
@@ -1594,7 +1598,10 @@ pub(crate) async fn sync_events_v4_route(
 		if duration.as_secs() > 30 {
 			duration = Duration::from_secs(30);
 		}
-		_ = tokio::time::timeout(duration, watcher).await;
+		#[allow(clippy::let_underscore_must_use)]
+		{
+			_ = tokio::time::timeout(duration, watcher).await;
+		}
 	}

 	Ok(sync_events::v4::Response {
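Here the discard is intentional: the long-poll waits until either the watcher future fires or the 30-second cap elapses, and both outcomes continue the same way, so the commit wraps the statement in a block carrying a scoped allow. A sketch, assuming a tokio runtime with the time feature:

use std::{future::Future, time::Duration};

async fn long_poll(watcher: impl Future<Output = ()>) {
	// timeout() returns Result<(), Elapsed>; a wakeup and a timeout are
	// handled identically, so the must_use Result is dropped on purpose.
	#[allow(clippy::let_underscore_must_use)]
	{
		_ = tokio::time::timeout(Duration::from_secs(30), watcher).await;
	}
}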

View file

@@ -22,6 +22,7 @@ use crate::{layers, serve};
 /// Main loop base
 #[tracing::instrument(skip_all)]
+#[allow(clippy::let_underscore_must_use)] // various of these are intended
 pub(crate) async fn run(server: Arc<Server>) -> Result<(), Error> {
 	let config = &server.config;
 	let app = layers::build(&server)?;
@@ -70,6 +71,7 @@ pub(crate) async fn run(server: Arc<Server>) -> Result<(), Error> {
 /// Async initializations
 #[tracing::instrument(skip_all)]
+#[allow(clippy::let_underscore_must_use)]
 pub(crate) async fn start(server: Arc<Server>) -> Result<(), Error> {
 	debug!("Starting...");
 	let d = Arc::new(KeyValueDatabase::load_or_create(&server).await?);
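run() and start() instead carry the allow at function scope, since several intentional discards occur across their bodies. A sketch of the combination with #[tracing::instrument(skip_all)] (names hypothetical), which opens a span per call without trying to record the non-Debug Arc argument:

use std::sync::Arc;

struct Server;

fn setup(_server: &Server) -> Result<(), std::io::Error> {
	Ok(())
}

// skip_all keeps the span from recording `server`, and the function-level
// allow covers every intentional `_ =` discard inside the body.
#[tracing::instrument(skip_all)]
#[allow(clippy::let_underscore_must_use)]
async fn run(server: Arc<Server>) {
	// An intentional discard that the function-level allow covers.
	_ = setup(&server);
	tracing::debug!("main loop running");
}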

View file

@@ -79,11 +79,13 @@ impl RotationHandler {
 	pub fn watch(&self) -> impl Future<Output = ()> {
 		let mut r = self.0.subscribe();

+		#[allow(clippy::let_underscore_must_use)]
 		async move {
 			_ = r.recv().await;
 		}
 	}

+	#[allow(clippy::let_underscore_must_use)]
 	pub fn fire(&self) { _ = self.0.send(()); }
 }
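RotationHandler is essentially a wakeup bell over a tokio broadcast channel: watch() hands out a future that resolves on the next fire(), and neither side cares about the channel's Result values, hence the two allows. A trimmed, self-contained sketch of the same shape:

use std::future::Future;
use tokio::sync::broadcast;

pub struct RotationHandler(broadcast::Sender<()>);

impl RotationHandler {
	pub fn new() -> Self {
		let (s, _r) = broadcast::channel(1);
		Self(s)
	}

	// Resolves once fire() is called; the value (or lag error) is irrelevant.
	pub fn watch(&self) -> impl Future<Output = ()> {
		let mut r = self.0.subscribe();
		#[allow(clippy::let_underscore_must_use)]
		async move {
			_ = r.recv().await;
		}
	}

	// send() only errors when no one is subscribed, which is fine here.
	#[allow(clippy::let_underscore_must_use)]
	pub fn fire(&self) { _ = self.0.send(()); }
}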

View file

@@ -271,17 +271,20 @@ impl super::Service {
 	{
 		let mut pkm = pub_key_map.write().await;

-		// Try to fetch keys, failure is okay
-		// Servers we couldn't find in the cache will be added to `servers`
-		for pdu in &event.room_state.state {
-			_ = self
-				.get_server_keys_from_cache(pdu, &mut servers, room_version, &mut pkm)
-				.await;
-		}
-		for pdu in &event.room_state.auth_chain {
-			_ = self
-				.get_server_keys_from_cache(pdu, &mut servers, room_version, &mut pkm)
-				.await;
-		}
+		// Try to fetch keys, failure is okay. Servers we couldn't find in the cache
+		// will be added to `servers`
+		for pdu in event
+			.room_state
+			.state
+			.iter()
+			.chain(&event.room_state.auth_chain)
+		{
+			if let Err(error) = self
+				.get_server_keys_from_cache(pdu, &mut servers, room_version, &mut pkm)
+				.await
+			{
+				debug!(%error, "failed to get server keys from cache");
+			};
+		}

 		drop(pkm);
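Besides logging the error, the rewrite merges the two loops over room_state.state and room_state.auth_chain into one pass with Iterator::chain, so the error handling lives in a single place. The shape of that, on plain vectors (values here are illustrative):

fn main() {
	let state = vec!["m.room.create", "m.room.member"];
	let auth_chain = vec!["m.room.power_levels"];

	// chain() yields every item of `state`, then every item of `auth_chain`,
	// so one loop body replaces two identical copies of it.
	for pdu in state.iter().chain(&auth_chain) {
		println!("checking keys for {pdu}");
	}
}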

View file

@@ -6,6 +6,7 @@ use ruma::{
 	OwnedRoomId, OwnedUserId, RoomId, UserId,
 };
 use tokio::sync::{broadcast, RwLock};
+use tracing::trace;

 use crate::{
 	debug_info, services, user_is_local,
@@ -37,7 +38,9 @@ impl Service {
 			.write()
 			.await
 			.insert(room_id.to_owned(), services().globals.next_count()?);
-		_ = self.typing_update_sender.send(room_id.to_owned());
+		if self.typing_update_sender.send(room_id.to_owned()).is_err() {
+			trace!("receiver found what it was looking for and is no longer interested");
+		}

 		// update federation
 		if user_is_local(user_id) {
@@ -61,7 +64,9 @@
 			.write()
 			.await
 			.insert(room_id.to_owned(), services().globals.next_count()?);
-		_ = self.typing_update_sender.send(room_id.to_owned());
+		if self.typing_update_sender.send(room_id.to_owned()).is_err() {
+			trace!("receiver found what it was looking for and is no longer interested");
+		}

 		// update federation
 		if user_is_local(user_id) {
@@ -114,7 +119,9 @@
 			.write()
 			.await
 			.insert(room_id.to_owned(), services().globals.next_count()?);
-		_ = self.typing_update_sender.send(room_id.to_owned());
+		if self.typing_update_sender.send(room_id.to_owned()).is_err() {
+			trace!("receiver found what it was looking for and is no longer interested");
+		}

 		// update federation
 		for user in removable {
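All three typing hunks replace the same discarded send with an explicit is_err() branch. On a tokio broadcast channel, send() fails only when no subscriber currently exists, which for an edge-triggered update signal is expected rather than an error, so a trace-level log satisfies the lint without noise. A sketch:

use tokio::sync::broadcast;
use tracing::trace;

fn notify_typing(sender: &broadcast::Sender<String>, room_id: &str) {
	// Err(SendError) here just means nobody is listening right now.
	if sender.send(room_id.to_owned()).is_err() {
		trace!("receiver found what it was looking for and is no longer interested");
	}
}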

View file

@@ -10,7 +10,7 @@ use tokio::{
 	fs,
 	sync::{broadcast, Mutex, RwLock},
 };
-use tracing::{debug, info, trace};
+use tracing::{debug, info, trace, warn};

 use crate::{
 	account_data, admin, appservice, globals, key_backups, media, presence, pusher, rooms, sending, transaction_ids,
@@ -293,7 +293,11 @@ bad_signature_ratelimiter: {bad_signature_ratelimiter}
 		if self.globals.allow_check_for_updates() {
 			let handle = globals::updates::start_check_for_updates_task().await?;
-			_ = self.globals.updates_handle.lock().await.insert(handle);
+			#[allow(clippy::let_underscore_must_use)] // needed for shutdown
+			{
+				_ = self.globals.updates_handle.lock().await.insert(handle);
+			}
 		}

 		debug_info!("Services startup complete.");
@@ -319,13 +323,19 @@ bad_signature_ratelimiter: {bad_signature_ratelimiter}
 		debug!("Removing unix socket file.");
 		if let Some(path) = self.globals.unix_socket_path().as_ref() {
-			_ = fs::remove_file(path).await;
+			if let Err(e) = fs::remove_file(path).await {
+				warn!("Failed to remove UNIX socket file: {e}");
+			}
 		}

 		debug!("Waiting for update worker...");
 		if let Some(updates_handle) = self.globals.updates_handle.lock().await.take() {
 			updates_handle.abort();
-			_ = updates_handle.await;
+			#[allow(clippy::let_underscore_must_use)]
+			{
+				_ = updates_handle.await;
+			}
 		}

 		debug!("Waiting for admin worker...");