Fix use-self
Signed-off-by: Jason Volk <jason@zemos.net>
parent c3c91e9d80
commit eae41fc411
18 changed files with 47 additions and 48 deletions
@@ -721,7 +721,6 @@ option_if_let_else = { level = "allow", priority = 1 } # TODO
 redundant_pub_crate = { level = "allow", priority = 1 } # TODO
 significant_drop_in_scrutinee = { level = "allow", priority = 1 } # TODO
 significant_drop_tightening = { level = "allow", priority = 1 } # TODO
-use_self = { level = "allow", priority = 1 } # TODO
 useless_let_if_seq = { level = "allow", priority = 1 } # TODO

 ###################
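For context, clippy's use_self lint flags places where a type's own name is spelled out inside its impl block even though Self would do; dropping the allow entry above turns those warnings back on. A minimal sketch of what the lint wants, using a hypothetical Counter type that is not part of this commit:

struct Counter {
	n: u64,
}

impl Counter {
	// Flagged by clippy::use_self: the type name is repeated inside its own impl.
	fn new_verbose() -> Counter { Counter { n: 0 } }

	// Preferred form: `Self` stays correct even if the type is later renamed.
	fn new() -> Self { Self { n: 0 } }
}

The hunks below apply exactly that substitution: enum variants in match arms, return types, and struct literals switch from the concrete type name to Self.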
@@ -55,7 +55,7 @@ where
 		let mut request = request::from(request).await?;
 		let mut json_body = serde_json::from_slice::<CanonicalJsonValue>(&request.body).ok();
 		let auth = auth::auth(&mut request, &json_body, &T::METADATA).await?;
-		Ok(Ruma {
+		Ok(Self {
 			body: make_body::<T>(&mut request, &mut json_body, &auth)?,
 			origin: auth.origin,
 			sender_user: auth.sender_user,
@@ -409,7 +409,7 @@ impl Config {
 			.merge(Env::prefixed("CONDUWUIT_").global().split("__"))
 		};

-		let config = match raw_config.extract::<Config>() {
+		let config = match raw_config.extract::<Self>() {
 			Err(e) => return Err(Error::BadConfig(format!("{e}"))),
 			Ok(config) => config,
 		};
@@ -42,11 +42,11 @@ pub enum ProxyConfig {
 impl ProxyConfig {
 	pub fn to_proxy(&self) -> Result<Option<Proxy>> {
 		Ok(match self.clone() {
-			ProxyConfig::None => None,
-			ProxyConfig::Global {
+			Self::None => None,
+			Self::Global {
 				url,
 			} => Some(Proxy::all(url)?),
-			ProxyConfig::ByDomain(proxies) => Some(Proxy::custom(move |url| {
+			Self::ByDomain(proxies) => Some(Proxy::custom(move |url| {
 				proxies.iter().find_map(|proxy| proxy.for_url(url)).cloned() // first matching
 				// proxy
 			})),
@@ -108,18 +108,18 @@ enum WildCardedDomain {
 impl WildCardedDomain {
 	fn matches(&self, domain: &str) -> bool {
 		match self {
-			WildCardedDomain::WildCard => true,
-			WildCardedDomain::WildCarded(d) => domain.ends_with(d),
-			WildCardedDomain::Exact(d) => domain == d,
+			Self::WildCard => true,
+			Self::WildCarded(d) => domain.ends_with(d),
+			Self::Exact(d) => domain == d,
 		}
 	}

 	fn more_specific_than(&self, other: &Self) -> bool {
 		match (self, other) {
-			(WildCardedDomain::WildCard, WildCardedDomain::WildCard) => false,
-			(_, WildCardedDomain::WildCard) => true,
-			(WildCardedDomain::Exact(a), WildCardedDomain::WildCarded(_)) => other.matches(a),
-			(WildCardedDomain::WildCarded(a), WildCardedDomain::WildCarded(b)) => a != b && a.ends_with(b),
+			(Self::WildCard, Self::WildCard) => false,
+			(_, Self::WildCard) => true,
+			(Self::Exact(a), Self::WildCarded(_)) => other.matches(a),
+			(Self::WildCarded(a), Self::WildCarded(b)) => a != b && a.ends_with(b),
 			_ => false,
 		}
 	}
@@ -130,11 +130,11 @@ impl std::str::FromStr for WildCardedDomain {
 	fn from_str(s: &str) -> Result<Self, Self::Err> {
 		// maybe do some domain validation?
 		Ok(if s.starts_with("*.") {
-			WildCardedDomain::WildCarded(s[1..].to_owned())
+			Self::WildCarded(s[1..].to_owned())
 		} else if s == "*" {
-			WildCardedDomain::WildCarded(String::new())
+			Self::WildCarded(String::new())
 		} else {
-			WildCardedDomain::Exact(s.to_owned())
+			Self::Exact(s.to_owned())
 		})
 	}
 }
@@ -21,7 +21,7 @@ pub trait ReloadHandle<L> {
 }

 impl<L, S> ReloadHandle<L> for reload::Handle<L, S> {
-	fn reload(&self, new_value: L) -> Result<(), reload::Error> { reload::Handle::reload(self, new_value) }
+	fn reload(&self, new_value: L) -> Result<(), reload::Error> { Self::reload(self, new_value) }
 }

 struct LogLevelReloadHandlesInner {
@@ -37,8 +37,8 @@ pub struct LogLevelReloadHandles {

 impl LogLevelReloadHandles {
 	#[must_use]
-	pub fn new(handles: Vec<Box<dyn ReloadHandle<EnvFilter> + Send + Sync>>) -> LogLevelReloadHandles {
-		LogLevelReloadHandles {
+	pub fn new(handles: Vec<Box<dyn ReloadHandle<EnvFilter> + Send + Sync>>) -> Self {
+		Self {
 			inner: Arc::new(LogLevelReloadHandlesInner {
 				handles,
 			}),
@@ -29,8 +29,8 @@ impl PduCount {
 	#[must_use]
 	pub fn stringify(&self) -> String {
 		match self {
-			PduCount::Backfilled(x) => format!("-{x}"),
-			PduCount::Normal(x) => x.to_string(),
+			Self::Backfilled(x) => format!("-{x}"),
+			Self::Normal(x) => x.to_string(),
 		}
 	}
 }
@@ -42,10 +42,10 @@ impl PartialOrd for PduCount {
 impl Ord for PduCount {
 	fn cmp(&self, other: &Self) -> Ordering {
 		match (self, other) {
-			(PduCount::Normal(s), PduCount::Normal(o)) => s.cmp(o),
-			(PduCount::Backfilled(s), PduCount::Backfilled(o)) => o.cmp(s),
-			(PduCount::Normal(_), PduCount::Backfilled(_)) => Ordering::Greater,
-			(PduCount::Backfilled(_), PduCount::Normal(_)) => Ordering::Less,
+			(Self::Normal(s), Self::Normal(o)) => s.cmp(o),
+			(Self::Backfilled(s), Self::Backfilled(o)) => o.cmp(s),
+			(Self::Normal(_), Self::Backfilled(_)) => Ordering::Greater,
+			(Self::Backfilled(_), Self::Normal(_)) => Ordering::Less,
 		}
 	}
 }
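Aside from the rename, this hunk is the clearest view of how PduCount orders: Normal values compare normally, the Backfilled arm compares reversed (o.cmp(s)), and any Normal count sorts above any Backfilled one. A self-contained sketch of that ordering, assuming the variants wrap plain u64 counts (an assumption; the real definition lives elsewhere in the crate):

use std::cmp::Ordering;

#[derive(Debug, PartialEq, Eq)]
enum PduCount {
	Backfilled(u64),
	Normal(u64),
}

impl Ord for PduCount {
	fn cmp(&self, other: &Self) -> Ordering {
		match (self, other) {
			(Self::Normal(s), Self::Normal(o)) => s.cmp(o),
			// Reversed on purpose: a larger backfill offset sorts earlier.
			(Self::Backfilled(s), Self::Backfilled(o)) => o.cmp(s),
			(Self::Normal(_), Self::Backfilled(_)) => Ordering::Greater,
			(Self::Backfilled(_), Self::Normal(_)) => Ordering::Less,
		}
	}
}

impl PartialOrd for PduCount {
	fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
}

fn main() {
	assert!(PduCount::Backfilled(10) < PduCount::Backfilled(2));
	assert!(PduCount::Normal(1) > PduCount::Backfilled(u64::MAX));
}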
@@ -11,7 +11,7 @@ pub struct Cork {
 impl Cork {
 	pub fn new(db: &Arc<dyn KeyValueDatabaseEngine>, flush: bool, sync: bool) -> Self {
 		db.cork().unwrap();
-		Cork {
+		Self {
 			db: db.clone(),
 			flush,
 			sync,
@@ -157,7 +157,7 @@ pub struct KeyValueDatabase {
 impl KeyValueDatabase {
 	/// Load an existing database or create a new one.
 	#[allow(clippy::too_many_lines)]
-	pub async fn load_or_create(server: &Arc<Server>) -> Result<KeyValueDatabase> {
+	pub async fn load_or_create(server: &Arc<Server>) -> Result<Self> {
 		let config = &server.config;
 		check_db_setup(config)?;
 		let builder = build(config)?;
@@ -84,7 +84,7 @@ impl KeyValueDatabaseEngine for Arc<Engine> {
 			db.latest_sequence_number(),
 			load_time.elapsed()
 		);
-		Ok(Arc::new(Engine {
+		Ok(Self::new(Engine {
 			config: config.clone(),
 			row_cache,
 			col_cache,
@@ -110,7 +110,7 @@ impl KeyValueDatabaseEngine for Arc<Engine> {

 		Ok(Arc::new(RocksDbEngineTree {
 			name,
-			db: Arc::clone(self),
+			db: Self::clone(self),
 			watchers: Watchers::default(),
 		}))
 	}
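A wrinkle worth noting in the two rocksdb hunks above: the impl block is written for Arc<Engine>, so Self there is the whole Arc<Engine> type, not Engine. That is why Arc::new(Engine { .. }) can become Self::new(Engine { .. }) and Arc::clone(self) can become Self::clone(self). A small sketch of the same idea with a hypothetical Open trait (not the crate's actual KeyValueDatabaseEngine trait):

use std::sync::Arc;

struct Engine;

trait Open {
	fn open(&self) -> Self;
}

impl Open for Arc<Engine> {
	// Inside this impl, `Self` is `Arc<Engine>`, so Arc's associated
	// functions are reachable through `Self::`.
	fn open(&self) -> Self {
		let _same_allocation: Self = Self::clone(self); // equivalent to Arc::clone(self)
		Self::new(Engine) // equivalent to Arc::new(Engine)
	}
}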
@@ -113,7 +113,7 @@ impl KeyValueDatabaseEngine for Arc<Engine> {

 		let writer = Mutex::new(Engine::prepare_conn(&path, cache_size_per_thread)?);

-		let arc = Arc::new(Engine {
+		let arc = Self::new(Engine {
 			writer,
 			read_conn_tls: ThreadLocal::new(),
 			read_iterator_conn_tls: ThreadLocal::new(),
@@ -131,7 +131,7 @@ impl KeyValueDatabaseEngine for Arc<Engine> {
 		)?;

 		Ok(Arc::new(SqliteTable {
-			engine: Arc::clone(self),
+			engine: Self::clone(self),
 			name: name.to_owned(),
 			watchers: Watchers::default(),
 		}))
@@ -29,7 +29,7 @@ pub(crate) struct Server {
 }

 impl Server {
-	pub(crate) fn build(args: Args, runtime: Option<&runtime::Handle>) -> Result<Arc<Server>, Error> {
+	pub(crate) fn build(args: Args, runtime: Option<&runtime::Handle>) -> Result<Arc<Self>, Error> {
 		let config = Config::new(args.config)?;

 		#[cfg(feature = "sentry_telemetry")]
@@ -49,7 +49,7 @@ impl Server {
 			conduit::version::conduwuit(),
 		);

-		Ok(Arc::new(Server {
+		Ok(Arc::new(Self {
 			server: Arc::new(conduit::Server::new(config, runtime.cloned(), tracing_reload_handle)),

 			_tracing_flame_guard: tracing_flame_guard,
@@ -75,7 +75,7 @@ impl TryFrom<Vec<Namespace>> for NamespaceRegex {
 			}
 		}

-		Ok(NamespaceRegex {
+		Ok(Self {
 			exclusive: if exclusive.is_empty() {
 				None
 			} else {
@@ -102,8 +102,8 @@ pub struct RegistrationInfo {
 impl TryFrom<Registration> for RegistrationInfo {
 	type Error = regex::Error;

-	fn try_from(value: Registration) -> Result<RegistrationInfo, regex::Error> {
-		Ok(RegistrationInfo {
+	fn try_from(value: Registration) -> Result<Self, regex::Error> {
+		Ok(Self {
 			users: value.namespaces.users.clone().try_into()?,
 			aliases: value.namespaces.aliases.clone().try_into()?,
 			rooms: value.namespaces.rooms.clone().try_into()?,
@@ -15,8 +15,8 @@ pub struct Client {
 }

 impl Client {
-	pub fn new(config: &Config, resolver: &Arc<resolver::Resolver>) -> Client {
-		Client {
+	pub fn new(config: &Config, resolver: &Arc<resolver::Resolver>) -> Self {
+		Self {
 			default: Self::base(config)
 				.unwrap()
 				.dns_resolver(resolver.clone())
@@ -83,7 +83,7 @@ impl Resolver {

 		let resolver = Arc::new(TokioAsyncResolver::tokio(conf, opts));
 		let overrides = Arc::new(StdRwLock::new(TlsNameMap::new()));
-		Resolver {
+		Self {
 			destinations: Arc::new(RwLock::new(WellKnownMap::new())),
 			overrides: overrides.clone(),
 			resolver: resolver.clone(),
@@ -56,7 +56,7 @@ pub struct PduEvent {

 impl PduEvent {
 	#[tracing::instrument(skip(self))]
-	pub fn redact(&mut self, room_version_id: RoomVersionId, reason: &PduEvent) -> crate::Result<()> {
+	pub fn redact(&mut self, room_version_id: RoomVersionId, reason: &Self) -> crate::Result<()> {
 		self.unsigned = None;

 		let mut content = serde_json::from_str(self.content.get())
@@ -211,7 +211,7 @@ impl Arena {
 	fn new(root: OwnedRoomId, max_depth: usize) -> Self {
 		let zero_depth = max_depth == 0;

-		Arena {
+		Self {
 			nodes: vec![Node {
 				parent: None,
 				next_sibling: None,
@@ -248,7 +248,7 @@ impl FromStr for PagnationToken {
 		let mut values = value.split('_');

 		let mut pag_tok = || {
 			Some(PagnationToken {
-			Some(PagnationToken {
+			Some(Self {
 				skip: UInt::from_str(values.next()?).ok()?,
 				limit: UInt::from_str(values.next()?).ok()?,
 				max_depth: UInt::from_str(values.next()?).ok()?,
@@ -316,7 +316,7 @@ impl From<CachedSpaceHierarchySummary> for SpaceHierarchyRoomsChunk {
 			..
 		} = value.summary;

-		SpaceHierarchyRoomsChunk {
+		Self {
 			canonical_alias,
 			name,
 			num_joined_members,
@@ -259,19 +259,19 @@ impl Destination {
 	#[tracing::instrument(skip(self))]
 	pub fn get_prefix(&self) -> Vec<u8> {
 		let mut prefix = match self {
-			Destination::Appservice(server) => {
+			Self::Appservice(server) => {
 				let mut p = b"+".to_vec();
 				p.extend_from_slice(server.as_bytes());
 				p
 			},
-			Destination::Push(user, pushkey) => {
+			Self::Push(user, pushkey) => {
 				let mut p = b"$".to_vec();
 				p.extend_from_slice(user.as_bytes());
 				p.push(0xFF);
 				p.extend_from_slice(pushkey.as_bytes());
 				p
 			},
-			Destination::Normal(server) => {
+			Self::Normal(server) => {
 				let mut p = Vec::new();
 				p.extend_from_slice(server.as_bytes());
 				p
@@ -421,8 +421,8 @@ impl FedDest {
 impl fmt::Display for FedDest {
 	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 		match self {
-			FedDest::Named(host, port) => write!(f, "{host}{port}"),
-			FedDest::Literal(addr) => write!(f, "{addr}"),
+			Self::Named(host, port) => write!(f, "{host}{port}"),
+			Self::Literal(addr) => write!(f, "{addr}"),
 		}
 	}
 }