Merge remote-tracking branch 'refs/remotes/origin/next' into command-refactor

Author: Andrei Vasiliu
Date:   2022-01-21 10:19:17 +02:00
Commit: f244c0e2ce
26 changed files with 278 additions and 198 deletions


@@ -21,7 +21,7 @@ variables:
     - if: '$CI_COMMIT_BRANCH == "next"'
     - if: "$CI_COMMIT_TAG"
   interruptible: true
-  image: "rust:latest"
+  image: "rust:1.56"
   tags: ["docker"]
   variables:
     CARGO_PROFILE_RELEASE_LTO: "true"
@@ -218,7 +218,7 @@ test:cargo:
   before_script:
     # - mkdir -p $CARGO_HOME
     - apt-get update -yqq
-    - apt-get install -yqq --no-install-recommends build-essential libssl-dev pkg-config
+    - apt-get install -yqq --no-install-recommends build-essential libssl-dev pkg-config libclang-dev
    - rustup component add clippy rustfmt
    - curl "https://faulty-storage.de/gitlab-report" --output ./gitlab-report && chmod +x ./gitlab-report
    # If provided, bring in caching through sccache, which uses an external S3 endpoint to store compilation results:

Cargo.lock (generated)

@@ -2086,7 +2086,7 @@ dependencies = [
 [[package]]
 name = "ruma"
 version = "0.4.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "assign",
  "js_int",
@@ -2107,7 +2107,7 @@ dependencies = [
 [[package]]
 name = "ruma-api"
 version = "0.18.5"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "bytes",
  "http",
@@ -2123,7 +2123,7 @@ dependencies = [
 [[package]]
 name = "ruma-api-macros"
 version = "0.18.5"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "proc-macro-crate",
  "proc-macro2",
@@ -2134,7 +2134,7 @@ dependencies = [
 [[package]]
 name = "ruma-appservice-api"
 version = "0.4.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "ruma-api",
  "ruma-common",
@@ -2148,7 +2148,7 @@ dependencies = [
 [[package]]
 name = "ruma-client-api"
 version = "0.12.3"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "assign",
  "bytes",
@@ -2168,7 +2168,7 @@ dependencies = [
 [[package]]
 name = "ruma-common"
 version = "0.6.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "indexmap",
  "js_int",
@@ -2183,7 +2183,7 @@ dependencies = [
 [[package]]
 name = "ruma-events"
 version = "0.24.6"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "indoc",
  "js_int",
@@ -2194,12 +2194,13 @@ dependencies = [
  "serde",
  "serde_json",
  "thiserror",
+ "wildmatch",
 ]

 [[package]]
 name = "ruma-events-macros"
 version = "0.24.6"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "proc-macro-crate",
  "proc-macro2",
@@ -2210,7 +2211,7 @@ dependencies = [
 [[package]]
 name = "ruma-federation-api"
 version = "0.3.1"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "js_int",
  "ruma-api",
@@ -2225,7 +2226,7 @@ dependencies = [
 [[package]]
 name = "ruma-identifiers"
 version = "0.20.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "percent-encoding",
  "rand 0.8.4",
@@ -2234,12 +2235,13 @@ dependencies = [
  "ruma-serde",
  "ruma-serde-macros",
  "serde",
+ "uuid",
 ]

 [[package]]
 name = "ruma-identifiers-macros"
 version = "0.20.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "quote",
  "ruma-identifiers-validation",
@@ -2249,7 +2251,7 @@ dependencies = [
 [[package]]
 name = "ruma-identifiers-validation"
 version = "0.5.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "thiserror",
 ]
@@ -2257,7 +2259,7 @@ dependencies = [
 [[package]]
 name = "ruma-identity-service-api"
 version = "0.3.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "js_int",
  "ruma-api",
@@ -2270,7 +2272,7 @@ dependencies = [
 [[package]]
 name = "ruma-push-gateway-api"
 version = "0.3.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "js_int",
  "ruma-api",
@@ -2285,8 +2287,9 @@ dependencies = [
 [[package]]
 name = "ruma-serde"
 version = "0.5.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
+ "base64 0.13.0",
  "bytes",
  "form_urlencoded",
  "itoa 0.4.8",
@@ -2299,7 +2302,7 @@ dependencies = [
 [[package]]
 name = "ruma-serde-macros"
 version = "0.5.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "proc-macro-crate",
  "proc-macro2",
@@ -2310,7 +2313,7 @@ dependencies = [
 [[package]]
 name = "ruma-signatures"
 version = "0.9.0"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "base64 0.13.0",
  "ed25519-dalek",
@@ -2327,7 +2330,7 @@ dependencies = [
 [[package]]
 name = "ruma-state-res"
 version = "0.4.1"
-source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3"
+source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a"
 dependencies = [
  "itertools",
  "js_int",
@@ -3308,6 +3311,15 @@ dependencies = [
  "percent-encoding",
 ]

+[[package]]
+name = "uuid"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
+dependencies = [
+ "getrandom 0.2.3",
+]
+
 [[package]]
 name = "vcpkg"
 version = "0.2.15"


@@ -7,7 +7,8 @@ homepage = "https://conduit.rs"
 repository = "https://gitlab.com/famedly/conduit"
 readme = "README.md"
 version = "0.2.0"
-edition = "2018"
+rust-version = "1.56"
+edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -19,7 +20,7 @@ rocket = { version = "0.5.0-rc.1", features = ["tls"] } # Used to handle request

 # Used for matrix spec type definitions and helpers
 #ruma = { version = "0.4.0", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
-ruma = { git = "https://github.com/ruma/ruma", rev = "f8ba7f795765bf4aeb4db06849f9fdde9c162ac3", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
+ruma = { git = "https://github.com/ruma/ruma", rev = "08d60b3d376b63462f769d4b9bd3bbfb560d501a", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
 #ruma = { git = "https://github.com/timokoesters/ruma", rev = "50c1db7e0a3a21fc794b0cce3b64285a4c750c71", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
 #ruma = { path = "../ruma/crates/ruma", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
@@ -78,7 +79,8 @@ crossbeam = { version = "0.8.1", optional = true }
 num_cpus = "1.13.0"
 threadpool = "1.8.1"
 heed = { git = "https://github.com/timokoesters/heed.git", rev = "f6f825da7fb2c758867e05ad973ef800a6fe1d5d", optional = true }
-rocksdb = { version = "0.17.0", features = ["multi-threaded-cf"], optional = true }
+rocksdb = { version = "0.17.0", default-features = false, features = ["multi-threaded-cf", "zstd"], optional = true }
 thread_local = "1.1.3"
 # used for TURN server authentication
 hmac = "0.11.0"


@@ -1,5 +1,5 @@
 # syntax=docker/dockerfile:1
-FROM docker.io/rust:1.53-alpine AS builder
+FROM docker.io/rust:1.58-alpine AS builder
 WORKDIR /usr/src/conduit

 # Install required packages to build Conduit and it's dependencies
@@ -38,7 +38,7 @@ FROM docker.io/alpine:3.15.0 AS runner
 # You still need to map the port when using the docker command or docker-compose.
 EXPOSE 6167

 # Note from @jfowl: I would like to remove this in the future and just have the Docker version be configured with envs.
 ENV CONDUIT_CONFIG="/srv/conduit/conduit.toml"

 # Conduit needs:
@@ -78,4 +78,4 @@ WORKDIR /srv/conduit
 # Run Conduit and print backtraces on panics
 ENV RUST_BACKTRACE=1

 ENTRYPOINT [ "/srv/conduit/conduit" ]


@@ -1 +0,0 @@
-1.53


@@ -3,8 +3,7 @@ use ruma::{
     api::client::{error::ErrorKind, r0::context::get_context},
     events::EventType,
 };
-use std::collections::HashSet;
-use std::convert::TryFrom;
+use std::{collections::HashSet, convert::TryFrom};

 #[cfg(feature = "conduit_bin")]
 use rocket::get;
@@ -55,8 +54,8 @@
         ))?;

     if !db.rooms.lazy_load_was_sent_before(
-        &sender_user,
-        &sender_device,
+        sender_user,
+        sender_device,
         &body.room_id,
         &base_event.sender,
     )? {
@@ -79,8 +78,8 @@
     for (_, event) in &events_before {
         if !db.rooms.lazy_load_was_sent_before(
-            &sender_user,
-            &sender_device,
+            sender_user,
+            sender_device,
             &body.room_id,
             &event.sender,
         )? {
@@ -112,8 +111,8 @@
     for (_, event) in &events_after {
         if !db.rooms.lazy_load_was_sent_before(
-            &sender_user,
-            &sender_device,
+            sender_user,
+            sender_device,
             &body.room_id,
             &event.sender,
         )? {


@@ -272,7 +272,7 @@ pub async fn get_key_changes_route(
     device_list_updates.extend(
         db.users
             .keys_changed(
-                &sender_user.to_string(),
+                sender_user.as_str(),
                 body.from
                     .parse()
                     .map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid `from`."))?,


@@ -23,7 +23,7 @@ use ruma::{
         },
         EventType,
     },
-    serde::{to_canonical_value, CanonicalJsonObject, CanonicalJsonValue},
+    serde::{to_canonical_value, Base64, CanonicalJsonObject, CanonicalJsonValue},
     state_res::{self, RoomVersion},
     uint, EventId, RoomId, RoomVersionId, ServerName, UserId,
 };
@@ -787,7 +787,7 @@ async fn join_room_by_id_helper(
 fn validate_and_add_event_id(
     pdu: &RawJsonValue,
     room_version: &RoomVersionId,
-    pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, String>>>,
+    pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
     db: &Database,
 ) -> Result<(Box<EventId>, CanonicalJsonObject)> {
     let mut value: CanonicalJsonObject = serde_json::from_str(pdu.get()).map_err(|e| {


@@ -74,11 +74,11 @@ pub async fn send_message_event_route(
     }

     let mut unsigned = BTreeMap::new();
-    unsigned.insert("transaction_id".to_owned(), body.txn_id.clone().into());
+    unsigned.insert("transaction_id".to_owned(), body.txn_id.to_string().into());

     let event_id = db.rooms.build_and_append_pdu(
         PduBuilder {
-            event_type: EventType::from(&body.event_type),
+            event_type: EventType::from(&*body.event_type),
             content: serde_json::from_str(body.body.body.json().get())
                 .map_err(|_| Error::BadRequest(ErrorKind::BadJson, "Invalid JSON body."))?,
             unsigned: Some(unsigned),
@@ -139,7 +139,7 @@ pub async fn get_message_events_route(
     let to = body.to.as_ref().map(|t| t.parse());

     db.rooms
-        .lazy_load_confirm_delivery(&sender_user, &sender_device, &body.room_id, from)?;
+        .lazy_load_confirm_delivery(sender_user, sender_device, &body.room_id, from)?;

     // Use limit or else 10
     let limit = body.limit.try_into().map_or(10_usize, |l: u32| l as usize);
@@ -168,8 +168,8 @@ pub async fn get_message_events_route(
     for (_, event) in &events_after {
         if !db.rooms.lazy_load_was_sent_before(
-            &sender_user,
-            &sender_device,
+            sender_user,
+            sender_device,
             &body.room_id,
             &event.sender,
         )? {
@@ -205,8 +205,8 @@ pub async fn get_message_events_route(
     for (_, event) in &events_before {
         if !db.rooms.lazy_load_was_sent_before(
-            &sender_user,
-            &sender_device,
+            sender_user,
+            sender_device,
             &body.room_id,
             &event.sender,
         )? {
@@ -239,8 +239,8 @@ pub async fn get_message_events_route(
     if let Some(next_token) = next_token {
         db.rooms.lazy_load_mark_sent(
-            &sender_user,
-            &sender_device,
+            sender_user,
+            sender_device,
             &body.room_id,
             lazy_loaded,
             next_token,


@@ -52,7 +52,7 @@ pub async fn set_displayname_route(
                 .room_state_get(
                     &room_id,
                     &EventType::RoomMember,
-                    &sender_user.to_string(),
+                    sender_user.as_str(),
                 )?
                 .ok_or_else(|| {
                     Error::bad_database(
@@ -195,7 +195,7 @@ pub async fn set_avatar_url_route(
                 .room_state_get(
                     &room_id,
                     &EventType::RoomMember,
-                    &sender_user.to_string(),
+                    sender_user.as_str(),
                 )?
                 .ok_or_else(|| {
                     Error::bad_database(


@@ -44,7 +44,7 @@ pub async fn send_state_event_for_key_route(
         &db,
         sender_user,
         &body.room_id,
-        EventType::from(&body.event_type),
+        EventType::from(&*body.event_type),
         &body.body.body, // Yes, I hate it too
         body.state_key.to_owned(),
     )
@@ -86,7 +86,7 @@ pub async fn send_state_event_for_empty_key_route(
         &db,
         sender_user,
         &body.room_id,
-        EventType::from(&body.event_type),
+        EventType::from(&*body.event_type),
         &body.body.body,
         body.state_key.to_owned(),
     )


@@ -453,38 +453,39 @@ async fn sync_helper(
         let joined_since_last_sync = since_sender_member
             .map_or(true, |member| member.membership != MembershipState::Join);

-        let current_state_ids = db.rooms.state_full_ids(current_shortstatehash)?;
-        let since_state_ids = db.rooms.state_full_ids(since_shortstatehash)?;
-
         let mut state_events = Vec::new();
         let mut lazy_loaded = HashSet::new();

-        for (key, id) in current_state_ids {
-            if body.full_state || since_state_ids.get(&key) != Some(&id) {
-                let pdu = match db.rooms.get_pdu(&id)? {
-                    Some(pdu) => pdu,
-                    None => {
-                        error!("Pdu in state not found: {}", id);
-                        continue;
-                    }
-                };
-
-                if pdu.kind == EventType::RoomMember {
-                    match UserId::parse(
-                        pdu.state_key
-                            .as_ref()
-                            .expect("State event has state key")
-                            .clone(),
-                    ) {
-                        Ok(state_key_userid) => {
-                            lazy_loaded.insert(state_key_userid);
-                        }
-                        Err(e) => error!("Invalid state key for member event: {}", e),
-                    }
-                }
-
-                state_events.push(pdu);
+        if since_shortstatehash != current_shortstatehash {
+            let current_state_ids = db.rooms.state_full_ids(current_shortstatehash)?;
+            let since_state_ids = db.rooms.state_full_ids(since_shortstatehash)?;
+
+            for (key, id) in current_state_ids {
+                if body.full_state || since_state_ids.get(&key) != Some(&id) {
+                    let pdu = match db.rooms.get_pdu(&id)? {
+                        Some(pdu) => pdu,
+                        None => {
+                            error!("Pdu in state not found: {}", id);
+                            continue;
+                        }
+                    };
+
+                    if pdu.kind == EventType::RoomMember {
+                        match UserId::parse(
+                            pdu.state_key
+                                .as_ref()
+                                .expect("State event has state key")
+                                .clone(),
+                        ) {
+                            Ok(state_key_userid) => {
+                                lazy_loaded.insert(state_key_userid);
+                            }
+                            Err(e) => error!("Invalid state key for member event: {}", e),
+                        }
+                    }
+
+                    state_events.push(pdu);
+                }
             }
         }


@@ -53,8 +53,8 @@ pub async fn send_event_to_device_route(
             serde_json::to_vec(&federation::transactions::edu::Edu::DirectToDevice(
                 DirectDeviceContent {
                     sender: sender_user.clone(),
-                    ev_type: EventType::from(&body.event_type),
-                    message_id: body.txn_id.clone(),
+                    ev_type: EventType::from(&*body.event_type),
+                    message_id: body.txn_id.to_string(),
                     messages,
                 },
             ))


@@ -49,6 +49,8 @@ pub struct Config {
     database_path: String,
     #[serde(default = "default_db_cache_capacity_mb")]
     db_cache_capacity_mb: f64,
+    #[serde(default = "default_rocksdb_max_open_files")]
+    rocksdb_max_open_files: i32,
     #[serde(default = "default_pdu_cache_capacity")]
     pdu_cache_capacity: u32,
     #[serde(default = "default_cleanup_second_interval")]
@@ -127,8 +129,12 @@ fn default_db_cache_capacity_mb() -> f64 {
     10.0
 }

+fn default_rocksdb_max_open_files() -> i32 {
+    512
+}
+
 fn default_pdu_cache_capacity() -> u32 {
-    1_000_000
+    150_000
 }

 fn default_cleanup_second_interval() -> u32 {
@@ -206,28 +212,22 @@ impl Database {
             return Ok(());
         }

-        if sled_exists {
-            if config.database_backend != "sled" {
-                return Err(Error::bad_config(
-                    "Found sled at database_path, but is not specified in config.",
-                ));
-            }
+        if sled_exists && config.database_backend != "sled" {
+            return Err(Error::bad_config(
+                "Found sled at database_path, but is not specified in config.",
+            ));
         }

-        if sqlite_exists {
-            if config.database_backend != "sqlite" {
-                return Err(Error::bad_config(
-                    "Found sqlite at database_path, but is not specified in config.",
-                ));
-            }
+        if sqlite_exists && config.database_backend != "sqlite" {
+            return Err(Error::bad_config(
+                "Found sqlite at database_path, but is not specified in config.",
+            ));
         }

-        if rocksdb_exists {
-            if config.database_backend != "rocksdb" {
-                return Err(Error::bad_config(
-                    "Found rocksdb at database_path, but is not specified in config.",
-                ));
-            }
+        if rocksdb_exists && config.database_backend != "rocksdb" {
+            return Err(Error::bad_config(
+                "Found rocksdb at database_path, but is not specified in config.",
+            ));
         }

         Ok(())


@@ -23,12 +23,12 @@ pub trait DatabaseEngine: Send + Sync {
     where
         Self: Sized;

     fn open_tree(&self, name: &'static str) -> Result<Arc<dyn Tree>>;
-    fn flush(self: &Self) -> Result<()>;
-    fn cleanup(self: &Self) -> Result<()> {
+    fn flush(&self) -> Result<()>;
+    fn cleanup(&self) -> Result<()> {
         Ok(())
     }
-    fn memory_usage(self: &Self) -> Result<String> {
-        Ok("Current database engine does not support memory usage reporting.".to_string())
+    fn memory_usage(&self) -> Result<String> {
+        Ok("Current database engine does not support memory usage reporting.".to_owned())
     }
 }


@@ -4,7 +4,7 @@ use std::{future::Future, pin::Pin, sync::Arc, sync::RwLock};
 pub struct Engine {
     rocks: rocksdb::DBWithThreadMode<rocksdb::MultiThreaded>,
-    cache_capacity_bytes: usize,
+    max_open_files: i32,
     cache: rocksdb::Cache,
     old_cfs: Vec<String>,
 }
@@ -16,7 +16,7 @@ pub struct RocksDbEngineTree<'a> {
     write_lock: RwLock<()>,
 }

-fn db_options(cache_capacity_bytes: usize, rocksdb_cache: &rocksdb::Cache) -> rocksdb::Options {
+fn db_options(max_open_files: i32, rocksdb_cache: &rocksdb::Cache) -> rocksdb::Options {
     let mut block_based_options = rocksdb::BlockBasedOptions::default();
     block_based_options.set_block_cache(rocksdb_cache);
@@ -36,10 +36,10 @@ fn db_options(cache_capacity_bytes: usize, rocksdb_cache: &rocksdb::Cache) -> rocksdb::Options {
     //db_opts.set_use_direct_io_for_flush_and_compaction(true);
     db_opts.create_if_missing(true);
     db_opts.increase_parallelism(num_cpus::get() as i32);
-    db_opts.set_max_open_files(512);
+    db_opts.set_max_open_files(max_open_files);
     db_opts.set_compression_type(rocksdb::DBCompressionType::Zstd);
     db_opts.set_compaction_style(rocksdb::DBCompactionStyle::Level);
-    db_opts.optimize_level_style_compaction(cache_capacity_bytes);
+    db_opts.optimize_level_style_compaction(10 * 1024 * 1024);

     let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1);
     db_opts.set_prefix_extractor(prefix_extractor);
@@ -52,7 +52,7 @@ impl DatabaseEngine for Arc<Engine> {
         let cache_capacity_bytes = (config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize;
         let rocksdb_cache = rocksdb::Cache::new_lru_cache(cache_capacity_bytes).unwrap();

-        let db_opts = db_options(cache_capacity_bytes, &rocksdb_cache);
+        let db_opts = db_options(config.rocksdb_max_open_files, &rocksdb_cache);

         let cfs = rocksdb::DBWithThreadMode::<rocksdb::MultiThreaded>::list_cf(
             &db_opts,
@@ -66,14 +66,14 @@ impl DatabaseEngine for Arc<Engine> {
             cfs.iter().map(|name| {
                 rocksdb::ColumnFamilyDescriptor::new(
                     name,
-                    db_options(cache_capacity_bytes, &rocksdb_cache),
+                    db_options(config.rocksdb_max_open_files, &rocksdb_cache),
                 )
             }),
         )?;

         Ok(Arc::new(Engine {
             rocks: db,
-            cache_capacity_bytes,
+            max_open_files: config.rocksdb_max_open_files,
             cache: rocksdb_cache,
             old_cfs: cfs,
         }))
@@ -84,7 +84,7 @@ impl DatabaseEngine for Arc<Engine> {
             // Create if it didn't exist
             let _ = self
                 .rocks
-                .create_cf(name, &db_options(self.cache_capacity_bytes, &self.cache));
+                .create_cf(name, &db_options(self.max_open_files, &self.cache));
         }

         Ok(Arc::new(RocksDbEngineTree {


@@ -127,7 +127,7 @@ impl Admin {
                 if let Ok(response) = guard._db.memory_usage() {
                     send_message(RoomMessageEventContent::text_plain(response), guard, &state_lock);
                 } else {
-                    send_message(RoomMessageEventContent::text_plain("Failed to get database memory usage.".to_string()), guard, &state_lock);
+                    send_message(RoomMessageEventContent::text_plain("Failed to get database memory usage.".to_owned()), guard, &state_lock);
                 }
             }
             AdminCommand::SendMessage(message) => {


@@ -2517,7 +2517,7 @@ impl Rooms {
         let state_lock = mutex_state.lock().await;

         let mut event: RoomMemberEventContent = serde_json::from_str(
-            self.room_state_get(room_id, &EventType::RoomMember, &user_id.to_string())?
+            self.room_state_get(room_id, &EventType::RoomMember, user_id.as_str())?
                 .ok_or(Error::BadRequest(
                     ErrorKind::BadState,
                     "Cannot leave a room you are not a member of.",
@@ -3252,8 +3252,7 @@ impl Rooms {
                 &key[0].to_be_bytes(),
                 &chain
                     .iter()
-                    .map(|s| s.to_be_bytes().to_vec())
-                    .flatten()
+                    .flat_map(|s| s.to_be_bytes().to_vec())
                     .collect::<Vec<u8>>(),
             )?;
         }
@@ -3274,11 +3273,11 @@ impl Rooms {
     ) -> Result<bool> {
         let mut key = user_id.as_bytes().to_vec();
         key.push(0xff);
-        key.extend_from_slice(&device_id.as_bytes());
+        key.extend_from_slice(device_id.as_bytes());
         key.push(0xff);
-        key.extend_from_slice(&room_id.as_bytes());
+        key.extend_from_slice(room_id.as_bytes());
         key.push(0xff);
-        key.extend_from_slice(&ll_user.as_bytes());
+        key.extend_from_slice(ll_user.as_bytes());

         Ok(self.lazyloadedids.get(&key)?.is_some())
     }
@@ -3318,14 +3317,14 @@ impl Rooms {
         )) {
             let mut prefix = user_id.as_bytes().to_vec();
             prefix.push(0xff);
-            prefix.extend_from_slice(&device_id.as_bytes());
+            prefix.extend_from_slice(device_id.as_bytes());
             prefix.push(0xff);
-            prefix.extend_from_slice(&room_id.as_bytes());
+            prefix.extend_from_slice(room_id.as_bytes());
             prefix.push(0xff);

             for ll_id in user_ids {
                 let mut key = prefix.clone();
-                key.extend_from_slice(&ll_id.as_bytes());
+                key.extend_from_slice(ll_id.as_bytes());
                 self.lazyloadedids.insert(&key, &[])?;
             }
         }
@@ -3336,15 +3335,15 @@ impl Rooms {
     #[tracing::instrument(skip(self))]
     pub fn lazy_load_reset(
         &self,
-        user_id: &Box<UserId>,
-        device_id: &Box<DeviceId>,
-        room_id: &Box<RoomId>,
+        user_id: &UserId,
+        device_id: &DeviceId,
+        room_id: &RoomId,
     ) -> Result<()> {
         let mut prefix = user_id.as_bytes().to_vec();
         prefix.push(0xff);
-        prefix.extend_from_slice(&device_id.as_bytes());
+        prefix.extend_from_slice(device_id.as_bytes());
         prefix.push(0xff);
-        prefix.extend_from_slice(&room_id.as_bytes());
+        prefix.extend_from_slice(room_id.as_bytes());
         prefix.push(0xff);

         for (key, _) in self.lazyloadedids.scan_prefix(prefix) {


@@ -524,7 +524,7 @@ impl Sending {
                         .unwrap(), // TODO: handle error
                     appservice::event::push_events::v1::Request {
                         events: &pdu_jsons,
-                        txn_id: &base64::encode_config(
+                        txn_id: (&*base64::encode_config(
                             Self::calculate_hash(
                                 &events
                                     .iter()
@@ -534,7 +534,8 @@ impl Sending {
                                     .collect::<Vec<_>>(),
                             ),
                             base64::URL_SAFE_NO_PAD,
-                        ),
+                        ))
+                        .into(),
                     },
                 )
                 .await
@@ -682,7 +683,7 @@ impl Sending {
                         pdus: &pdu_jsons,
                         edus: &edu_jsons,
                         origin_server_ts: MilliSecondsSinceUnixEpoch::now(),
-                        transaction_id: &base64::encode_config(
+                        transaction_id: (&*base64::encode_config(
                             Self::calculate_hash(
                                 &events
                                     .iter()
@@ -692,7 +693,8 @@ impl Sending {
                                     .collect::<Vec<_>>(),
                             ),
                             base64::URL_SAFE_NO_PAD,
-                        ),
+                        ))
+                        .into(),
                     },
                 )
                 .await


@@ -1,7 +1,7 @@
 use std::sync::Arc;

 use crate::Result;
-use ruma::{DeviceId, UserId};
+use ruma::{identifiers::TransactionId, DeviceId, UserId};

 use super::abstraction::Tree;
@@ -14,7 +14,7 @@ impl TransactionIds {
         &self,
         user_id: &UserId,
         device_id: Option<&DeviceId>,
-        txn_id: &str,
+        txn_id: &TransactionId,
         data: &[u8],
     ) -> Result<()> {
         let mut key = user_id.as_bytes().to_vec();
@@ -32,7 +32,7 @@ impl TransactionIds {
         &self,
         user_id: &UserId,
         device_id: Option<&DeviceId>,
-        txn_id: &str,
+        txn_id: &TransactionId,
     ) -> Result<Option<Vec<u8>>> {
         let mut key = user_id.as_bytes().to_vec();
         key.push(0xff);


@@ -166,13 +166,12 @@ impl Uiaa {
         user_id: &UserId,
         device_id: &DeviceId,
         session: &str,
-    ) -> Result<Option<CanonicalJsonValue>> {
-        Ok(self
-            .userdevicesessionid_uiaarequest
+    ) -> Option<CanonicalJsonValue> {
+        self.userdevicesessionid_uiaarequest
             .read()
             .unwrap()
             .get(&(user_id.to_owned(), device_id.to_owned(), session.to_owned()))
-            .map(|j| j.to_owned()))
+            .map(|j| j.to_owned())
     }

     fn update_uiaa_session(


@@ -531,11 +531,11 @@ impl Users {
         prefix.push(0xff);

         // Master key
-        let master_key_map = master_key
+        let mut master_key_ids = master_key
             .deserialize()
             .map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid master key"))?
-            .keys;
-        let mut master_key_ids = master_key_map.values();
+            .keys
+            .into_values();

         let master_key_id = master_key_ids.next().ok_or(Error::BadRequest(
             ErrorKind::InvalidParam,
@@ -560,13 +560,14 @@ impl Users {
         // Self-signing key
         if let Some(self_signing_key) = self_signing_key {
-            let self_signing_key_map = self_signing_key
+            let mut self_signing_key_ids = self_signing_key
                 .deserialize()
                 .map_err(|_| {
                     Error::BadRequest(ErrorKind::InvalidParam, "Invalid self signing key")
                 })?
-                .keys;
-            let mut self_signing_key_ids = self_signing_key_map.values();
+                .keys
+                .into_values();

             let self_signing_key_id = self_signing_key_ids.next().ok_or(Error::BadRequest(
                 ErrorKind::InvalidParam,
                 "Self signing key contained no key.",
@@ -593,13 +594,14 @@ impl Users {
         // User-signing key
         if let Some(user_signing_key) = user_signing_key {
-            let user_signing_key_map = user_signing_key
+            let mut user_signing_key_ids = user_signing_key
                 .deserialize()
                 .map_err(|_| {
                     Error::BadRequest(ErrorKind::InvalidParam, "Invalid user signing key")
                 })?
-                .keys;
-            let mut user_signing_key_ids = user_signing_key_map.values();
+                .keys
+                .into_values();

             let user_signing_key_id = user_signing_key_ids.next().ok_or(Error::BadRequest(
                 ErrorKind::InvalidParam,
                 "User signing key contained no key.",


@@ -7,21 +7,23 @@
 #![allow(clippy::suspicious_else_formatting)]
 #![deny(clippy::dbg_macro)]

-pub mod appservice_server;
-pub mod client_server;
+use std::ops::Deref;
+
 mod database;
 mod error;
 mod pdu;
 mod ruma_wrapper;
-pub mod server_server;
 mod utils;

+pub mod appservice_server;
+pub mod client_server;
+pub mod server_server;
+
 pub use database::{Config, Database};
 pub use error::{Error, Result};
 pub use pdu::PduEvent;
 pub use rocket::Config as RocketConfig;
 pub use ruma_wrapper::{ConduitResult, Ruma, RumaResponse};

-use std::ops::Deref;
-
 pub struct State<'r, T: Send + Sync + 'static>(pub &'r T);


@@ -7,27 +7,9 @@
 #![allow(clippy::suspicious_else_formatting)]
 #![deny(clippy::dbg_macro)]

-pub mod appservice_server;
-pub mod client_server;
-pub mod server_server;
-
-mod database;
-mod error;
-mod pdu;
-mod ruma_wrapper;
-mod utils;
-
 use std::sync::Arc;

-use database::Config;
-pub use database::Database;
-pub use error::{Error, Result};
 use opentelemetry::trace::{FutureExt, Tracer};
-pub use pdu::PduEvent;
-pub use rocket::State;
-use ruma::api::client::error::ErrorKind;
-pub use ruma_wrapper::{ConduitResult, Ruma, RumaResponse};
-
 use rocket::{
     catch, catchers,
     figment::{
@@ -36,9 +18,13 @@ use rocket::{
     },
     routes, Request,
 };
+use ruma::api::client::error::ErrorKind;
 use tokio::sync::RwLock;
 use tracing_subscriber::{prelude::*, EnvFilter};

+pub use conduit::*; // Re-export everything from the library crate
+pub use rocket::State;
+
 fn setup_rocket(config: Figment, data: Arc<RwLock<Database>>) -> rocket::Rocket<rocket::Build> {
     rocket::custom(config)
         .manage(data)


@@ -296,14 +296,11 @@
                         .and_then(|auth| auth.get("session"))
                         .and_then(|session| session.as_str())
                         .and_then(|session| {
-                            db.uiaa
-                                .get_uiaa_request(
-                                    &user_id,
-                                    &sender_device.clone().unwrap_or_else(|| "".into()),
-                                    session,
-                                )
-                                .ok()
-                                .flatten()
+                            db.uiaa.get_uiaa_request(
+                                &user_id,
+                                &sender_device.clone().unwrap_or_else(|| "".into()),
+                                session,
+                            )
                         })
                     {
                         for (key, value) in initial_request {


@@ -44,12 +44,13 @@ use ruma::{
         room::{
             create::RoomCreateEventContent,
             member::{MembershipState, RoomMemberEventContent},
+            server_acl::RoomServerAclEventContent,
         },
         AnyEphemeralRoomEvent, EventType,
     },
     int,
     receipt::ReceiptType,
-    serde::JsonObject,
+    serde::{Base64, JsonObject},
     signatures::{CanonicalJsonObject, CanonicalJsonValue},
     state_res::{self, RoomVersion, StateMap},
     to_device::DeviceIdOrAllDevices,
@@ -551,7 +552,7 @@ pub fn get_server_keys_route(db: DatabaseGuard) -> Json<String> {
             .try_into()
            .expect("found invalid server signing keys in DB"),
        VerifyKey {
-            key: base64::encode_config(db.globals.keypair().public_key(), base64::STANDARD_NO_PAD),
+            key: Base64::new(db.globals.keypair().public_key().to_vec()),
        },
    );

    let mut response = serde_json::from_slice(
@@ -740,6 +741,8 @@ pub async fn send_transaction_message_route(
            }
        };

+        acl_check(&body.origin, &room_id, &db)?;
+
        let mutex = Arc::clone(
            db.globals
                .roomid_mutex_federation
@@ -854,7 +857,7 @@
                // Check if this is a new transaction id
                if db
                    .transaction_ids
-                    .existing_txnid(&sender, None, &message_id)?
+                    .existing_txnid(&sender, None, (&*message_id).into())?
                    .is_some()
                {
                    continue;
@@ -902,7 +905,7 @@
                // Save transaction id with empty data
                db.transaction_ids
-                    .add_txnid(&sender, None, &message_id, &[])?;
+                    .add_txnid(&sender, None, (&*message_id).into(), &[])?;
            }
            Edu::_Custom(_) => {}
        }
@@ -948,7 +951,7 @@ pub(crate) async fn handle_incoming_pdu<'a>(
     value: BTreeMap<String, CanonicalJsonValue>,
     is_timeline_event: bool,
     db: &'a Database,
-    pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, String>>>,
+    pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
 ) -> Result<Option<Vec<u8>>, String> {
     match db.rooms.exists(room_id) {
         Ok(true) => {}
@@ -1123,7 +1126,7 @@ fn handle_outlier_pdu<'a>(
     room_id: &'a RoomId,
     value: BTreeMap<String, CanonicalJsonValue>,
     db: &'a Database,
-    pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, String>>>,
+    pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
 ) -> AsyncRecursiveType<'a, Result<(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>), String>> {
     Box::pin(async move {
         // TODO: For RoomVersion6 we must check that Raw<..> is canonical do we anywhere?: https://matrix.org/docs/spec/rooms/v6#canonical-json
@@ -1285,7 +1288,7 @@ async fn upgrade_outlier_to_timeline_pdu(
     origin: &ServerName,
     db: &Database,
     room_id: &RoomId,
-    pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, String>>>,
+    pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
 ) -> Result<Option<Vec<u8>>, String> {
     if let Ok(Some(pduid)) = db.rooms.get_pdu_id(&incoming_pdu.event_id) {
         return Ok(Some(pduid));
@@ -1827,7 +1830,7 @@ pub(crate) fn fetch_and_handle_outliers<'a>(
     events: &'a [Arc<EventId>],
     create_event: &'a PduEvent,
     room_id: &'a RoomId,
-    pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, String>>>,
+    pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
 ) -> AsyncRecursiveType<'a, Vec<(Arc<PduEvent>, Option<BTreeMap<String, CanonicalJsonValue>>)>> {
     Box::pin(async move {
         let back_off = |id| match db.globals.bad_event_ratelimiter.write().unwrap().entry(id) {
@@ -1935,7 +1938,7 @@
                         match handle_outlier_pdu(
                             origin,
                             create_event,
-                            &next_id,
+                            next_id,
                             room_id,
                             value.clone(),
                             db,
@@ -1966,9 +1969,9 @@ pub(crate) async fn fetch_signing_keys(
     db: &Database,
     origin: &ServerName,
     signature_ids: Vec<String>,
-) -> Result<BTreeMap<String, String>> {
+) -> Result<BTreeMap<String, Base64>> {
     let contains_all_ids =
-        |keys: &BTreeMap<String, String>| signature_ids.iter().all(|id| keys.contains_key(id));
+        |keys: &BTreeMap<String, Base64>| signature_ids.iter().all(|id| keys.contains_key(id));

     let permit = db
         .globals
@@ -2356,7 +2359,10 @@ pub fn get_event_route(
         .map_err(|_| Error::bad_database("Invalid room id field in event in database"))?;

     if !db.rooms.server_in_room(sender_servername, room_id)? {
-        return Err(Error::BadRequest(ErrorKind::NotFound, "Event not found."));
+        return Err(Error::BadRequest(
+            ErrorKind::Forbidden,
+            "Server is not in room",
+        ));
     }

     Ok(get_event::v1::Response {
@@ -2395,6 +2401,8 @@ pub fn get_missing_events_route(
         ));
     }

+    acl_check(sender_servername, &body.room_id, &db)?;
+
     let mut queued_events = body.latest_events.clone();
     let mut events = Vec::new();
@@ -2464,6 +2472,15 @@ pub fn get_event_authorization_route(
         .as_ref()
         .expect("server is authenticated");

+    if !db.rooms.server_in_room(sender_servername, &body.room_id)? {
+        return Err(Error::BadRequest(
+            ErrorKind::Forbidden,
+            "Server is not in room.",
+        ));
+    }
+
+    acl_check(sender_servername, &body.room_id, &db)?;
+
     let event = db
         .rooms
         .get_pdu_json(&body.event_id)?
@@ -2477,10 +2494,6 @@
     let room_id = <&RoomId>::try_from(room_id_str)
         .map_err(|_| Error::bad_database("Invalid room id field in event in database"))?;

-    if !db.rooms.server_in_room(sender_servername, room_id)? {
-        return Err(Error::BadRequest(ErrorKind::NotFound, "Event not found."));
-    }
-
     let auth_chain_ids = get_auth_chain(room_id, vec![Arc::from(&*body.event_id)], &db)?;

     Ok(get_event_authorization::v1::Response {
@@ -2520,6 +2533,8 @@ pub fn get_room_state_route(
         ));
     }

+    acl_check(sender_servername, &body.room_id, &db)?;
+
     let shortstatehash = db
         .rooms
         .pdu_shortstatehash(&body.event_id)?
@@ -2583,6 +2598,8 @@ pub fn get_room_state_ids_route(
         ));
     }

+    acl_check(sender_servername, &body.room_id, &db)?;
+
     let shortstatehash = db
         .rooms
         .pdu_shortstatehash(&body.event_id)?
@@ -2626,10 +2643,17 @@ pub fn create_join_event_template_route(
     if !db.rooms.exists(&body.room_id)? {
         return Err(Error::BadRequest(
             ErrorKind::NotFound,
-            "Server is not in room.",
+            "Room is unknown to this server.",
         ));
     }

+    let sender_servername = body
+        .sender_servername
+        .as_ref()
+        .expect("server is authenticated");
+
+    acl_check(sender_servername, &body.room_id, &db)?;
+
     let prev_events: Vec<_> = db
         .rooms
         .get_pdu_leaves(&body.room_id)?
@@ -2782,6 +2806,7 @@ pub fn create_join_event_template_route(
 async fn create_join_event(
     db: &DatabaseGuard,
+    sender_servername: &ServerName,
     room_id: &RoomId,
     pdu: &RawJsonValue,
 ) -> Result<RoomState> {
@@ -2789,6 +2814,15 @@ async fn create_join_event(
         return Err(Error::bad_config("Federation is disabled."));
     }

+    if !db.rooms.exists(room_id)? {
+        return Err(Error::BadRequest(
+            ErrorKind::NotFound,
+            "Room is unknown to this server.",
+        ));
+    }
+
+    acl_check(sender_servername, room_id, db)?;
+
     // We need to return the state prior to joining, let's keep a reference to that here
     let shortstatehash = db
         .rooms
@@ -2888,7 +2922,12 @@ pub async fn create_join_event_v1_route(
     db: DatabaseGuard,
     body: Ruma<create_join_event::v1::Request<'_>>,
 ) -> ConduitResult<create_join_event::v1::Response> {
-    let room_state = create_join_event(&db, &body.room_id, &body.pdu).await?;
+    let sender_servername = body
+        .sender_servername
+        .as_ref()
+        .expect("server is authenticated");
+
+    let room_state = create_join_event(&db, sender_servername, &body.room_id, &body.pdu).await?;

     Ok(create_join_event::v1::Response { room_state }.into())
 }
@@ -2905,7 +2944,12 @@ pub async fn create_join_event_v2_route(
     db: DatabaseGuard,
     body: Ruma<create_join_event::v2::Request<'_>>,
 ) -> ConduitResult<create_join_event::v2::Response> {
-    let room_state = create_join_event(&db, &body.room_id, &body.pdu).await?;
+    let sender_servername = body
+        .sender_servername
+        .as_ref()
+        .expect("server is authenticated");
+
+    let room_state = create_join_event(&db, sender_servername, &body.room_id, &body.pdu).await?;

     Ok(create_join_event::v2::Response { room_state }.into())
 }
@@ -2926,6 +2970,13 @@ pub async fn create_invite_route(
         return Err(Error::bad_config("Federation is disabled."));
     }

+    let sender_servername = body
+        .sender_servername
+        .as_ref()
+        .expect("server is authenticated");
+
+    acl_check(sender_servername, &body.room_id, &db)?;
+
     if body.room_version != RoomVersionId::V5 && body.room_version != RoomVersionId::V6 {
         return Err(Error::BadRequest(
             ErrorKind::IncompatibleRoomVersion {
@@ -3199,7 +3250,7 @@ pub async fn claim_keys_route(
 #[tracing::instrument(skip(event, pub_key_map, db))]
 pub(crate) async fn fetch_required_signing_keys(
     event: &BTreeMap<String, CanonicalJsonValue>,
-    pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, String>>>,
+    pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
     db: &Database,
 ) -> Result<()> {
     let signatures = event
@@ -3253,7 +3304,7 @@ fn get_server_keys_from_cache(
     pdu: &RawJsonValue,
     servers: &mut BTreeMap<Box<ServerName>, BTreeMap<Box<ServerSigningKeyId>, QueryCriteria>>,
     room_version: &RoomVersionId,
-    pub_key_map: &mut RwLockWriteGuard<'_, BTreeMap<String, BTreeMap<String, String>>>,
+    pub_key_map: &mut RwLockWriteGuard<'_, BTreeMap<String, BTreeMap<String, Base64>>>,
     db: &Database,
 ) -> Result<()> {
     let value: CanonicalJsonObject = serde_json::from_str(pdu.get()).map_err(|e| {
@@ -3306,7 +3357,7 @@ fn get_server_keys_from_cache(
     let signature_ids = signature_object.keys().cloned().collect::<Vec<_>>();

     let contains_all_ids =
-        |keys: &BTreeMap<String, String>| signature_ids.iter().all(|id| keys.contains_key(id));
+        |keys: &BTreeMap<String, Base64>| signature_ids.iter().all(|id| keys.contains_key(id));

     let origin = <&ServerName>::try_from(signature_server.as_str()).map_err(|_| {
         Error::BadServerResponse("Invalid servername in signatures of server response pdu.")
@@ -3339,7 +3390,7 @@ fn get_server_keys_from_cache(
 pub(crate) async fn fetch_join_signing_keys(
     event: &create_join_event::v2::Response,
     room_version: &RoomVersionId,
-    pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, String>>>,
+    pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
     db: &Database,
 ) -> Result<()> {
     let mut servers: BTreeMap<Box<ServerName>, BTreeMap<Box<ServerSigningKeyId>, QueryCriteria>> =
@@ -3439,6 +3490,35 @@ pub(crate) async fn fetch_join_signing_keys(
     Ok(())
 }

+/// Returns Ok if the acl allows the server
+fn acl_check(server_name: &ServerName, room_id: &RoomId, db: &Database) -> Result<()> {
+    let acl_event = match db
+        .rooms
+        .room_state_get(room_id, &EventType::RoomServerAcl, "")?
+    {
+        Some(acl) => acl,
+        None => return Ok(()),
+    };
+
+    let acl_event_content: RoomServerAclEventContent =
+        match serde_json::from_str(acl_event.content.get()) {
+            Ok(content) => content,
+            Err(_) => {
+                warn!("Invalid ACL event");
+                return Ok(());
+            }
+        };
+
+    if acl_event_content.is_allowed(server_name) {
+        Ok(())
+    } else {
+        Err(Error::BadRequest(
+            ErrorKind::Forbidden,
+            "Server was denied by ACL",
+        ))
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::{add_port_to_hostname, get_ip_with_port, FedDest};