Compare commits

..

1 Commits

Author SHA1 Message Date
Renovate Bot fd0168da62 chore(deps): update https://github.com/cloudflare/wrangler-action action to v4 2026-05-13 05:03:12 +00:00
148 changed files with 2123 additions and 6694 deletions
+2 -2
View File
@@ -56,7 +56,7 @@ jobs:
- name: Deploy to Cloudflare Pages (Production)
if: github.ref == 'refs/heads/main' && vars.CLOUDFLARE_PROJECT_NAME != ''
uses: https://github.com/cloudflare/wrangler-action@9acf94ace14e7dc412b076f2c5c20b8ce93c79cd # v3
uses: https://github.com/cloudflare/wrangler-action@ebbaa1584979971c8614a24965b4405ff95890e0 # v4
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
@@ -64,7 +64,7 @@ jobs:
- name: Deploy to Cloudflare Pages (Preview)
if: github.ref != 'refs/heads/main' && vars.CLOUDFLARE_PROJECT_NAME != ''
uses: https://github.com/cloudflare/wrangler-action@9acf94ace14e7dc412b076f2c5c20b8ce93c79cd # v3
uses: https://github.com/cloudflare/wrangler-action@ebbaa1584979971c8614a24965b4405ff95890e0 # v4
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+1 -1
View File
@@ -121,7 +121,7 @@ jobs:
- name: 🚀 Deploy to Cloudflare Pages
if: vars.CLOUDFLARE_PROJECT_NAME != ''
id: deploy
uses: https://github.com/cloudflare/wrangler-action@9acf94ace14e7dc412b076f2c5c20b8ce93c79cd # v3
uses: https://github.com/cloudflare/wrangler-action@ebbaa1584979971c8614a24965b4405ff95890e0 # v4
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
Generated
+413 -67
View File
@@ -39,6 +39,24 @@ dependencies = [
"memchr",
]
[[package]]
name = "aligned"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee4508988c62edf04abd8d92897fca0c2995d907ce1dfeaf369dac3716a40685"
dependencies = [
"as-slice",
]
[[package]]
name = "aligned-vec"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc890384c8602f339876ded803c97ad529f3842aba97f6392b3dba0dd171769b"
dependencies = [
"equator",
]
[[package]]
name = "alloc-no-stdlib"
version = "2.0.4"
@@ -101,6 +119,12 @@ version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
[[package]]
name = "arbitrary"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1"
[[package]]
name = "arc-swap"
version = "1.9.1"
@@ -110,6 +134,17 @@ dependencies = [
"rustversion",
]
[[package]]
name = "arg_enum_proc_macro"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "argon2"
version = "0.5.3"
@@ -137,6 +172,15 @@ dependencies = [
"serde",
]
[[package]]
name = "as-slice"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "516b6b4f0e40d50dcda9365d53964ec74560ad4284da2e7fc97122cd83174516"
dependencies = [
"stable_deref_trait",
]
[[package]]
name = "as_variant"
version = "1.3.0"
@@ -257,6 +301,49 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]]
name = "av-scenechange"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f321d77c20e19b92c39e7471cf986812cbb46659d2af674adc4331ef3f18394"
dependencies = [
"aligned",
"anyhow",
"arg_enum_proc_macro",
"arrayvec",
"log",
"num-rational",
"num-traits",
"pastey",
"rayon",
"thiserror",
"v_frame",
"y4m",
]
[[package]]
name = "av1-grain"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8cfddb07216410377231960af4fcab838eaa12e013417781b78bd95ee22077f8"
dependencies = [
"anyhow",
"arrayvec",
"log",
"nom 8.0.0",
"num-rational",
"v_frame",
]
[[package]]
name = "avif-serialize"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "375082f007bd67184fb9c0374614b29f9aaa604ec301635f72338bb65386a53d"
dependencies = [
"arrayvec",
]
[[package]]
name = "aws-lc-rs"
version = "1.16.3"
@@ -465,12 +552,27 @@ dependencies = [
"syn",
]
[[package]]
name = "bit_field"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e4b40c7323adcfc0a41c4b88143ed58346ff65a288fc144329c5c45e05d70c6"
[[package]]
name = "bitflags"
version = "2.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4512299f36f043ab09a583e57bceb5a5aab7a73db1805848e8fef3c9e8c78b3"
[[package]]
name = "bitstream-io"
version = "4.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eff00be299a18769011411c9def0d827e8f2d7bf0c3dbf53633147a8867fd1f"
dependencies = [
"no_std_io2",
]
[[package]]
name = "blake2"
version = "0.10.6"
@@ -507,6 +609,15 @@ dependencies = [
"objc2",
]
[[package]]
name = "blurhash"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e79769241dcd44edf79a732545e8b5cec84c247ac060f5252cd51885d093a8fc"
dependencies = [
"image",
]
[[package]]
name = "brotli"
version = "8.0.2"
@@ -1056,6 +1167,7 @@ dependencies = [
"assign",
"async-trait",
"base64 0.22.1",
"blurhash",
"bytes",
"conduwuit_core",
"conduwuit_database",
@@ -1088,7 +1200,6 @@ dependencies = [
"serde",
"serde-saphyr",
"serde_json",
"serde_urlencoded",
"sha2 0.11.0",
"termimad",
"tokio",
@@ -1108,29 +1219,18 @@ dependencies = [
"axum",
"axum-extra",
"base64 0.22.1",
"conduwuit_api",
"conduwuit_build_metadata",
"conduwuit_core",
"conduwuit_database",
"conduwuit_service",
"form_urlencoded",
"futures",
"lettre",
"memory-serve",
"rand 0.10.1",
"recaptcha-verify",
"reqwest 0.12.28",
"ruma",
"serde",
"serde_json",
"serde_urlencoded",
"thiserror",
"tower-http",
"tower-sec-fetch",
"tower-sessions",
"tower-sessions-core",
"tracing",
"url",
"validator",
]
@@ -1393,6 +1493,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "crunchy"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "crypto-common"
version = "0.1.7"
@@ -1538,7 +1644,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cd812cc2bc1d69d4764bd80df88b4317eaef9e773c75226407d9bc0876b211c"
dependencies = [
"powerfmt",
"serde_core",
]
[[package]]
@@ -1709,6 +1814,26 @@ dependencies = [
"syn",
]
[[package]]
name = "equator"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4711b213838dfee0117e3be6ac926007d7f433d7bbe33595975d4190cb07e6fc"
dependencies = [
"equator-macro",
]
[[package]]
name = "equator-macro"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "equivalent"
version = "1.0.2"
@@ -1744,12 +1869,33 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "exr"
version = "1.74.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4300e043a56aa2cb633c01af81ca8f699a321879a7854d3896a0ba89056363be"
dependencies = [
"bit_field",
"half",
"lebe",
"miniz_oxide",
"rayon-core",
"smallvec",
"zune-inflate",
]
[[package]]
name = "fastrand"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f1f227452a390804cdb637b74a86990f2a7d7ba4b7d5693aac9b4dd6defd8d6"
[[package]]
name = "fax"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "caf1079563223d5d59d83c85886a56e586cfd5c1a26292e971a0fa266531ac5a"
[[package]]
name = "fdeflate"
version = "0.3.7"
@@ -2099,6 +2245,17 @@ dependencies = [
"tokio-util",
]
[[package]]
name = "half"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b"
dependencies = [
"cfg-if",
"crunchy",
"zerocopy",
]
[[package]]
name = "hardened_malloc-rs"
version = "0.1.2+12"
@@ -2534,11 +2691,17 @@ dependencies = [
"bytemuck",
"byteorder-lite",
"color_quant",
"exr",
"gif",
"image-webp",
"moxcms",
"num-traits",
"png",
"qoi",
"ravif",
"rayon",
"rgb",
"tiff",
"zune-core",
"zune-jpeg",
]
@@ -2553,6 +2716,12 @@ dependencies = [
"quick-error",
]
[[package]]
name = "imgref"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40fac9d56ed6437b198fddba683305e8e2d651aa42647f00f5ae542e7f5c94a2"
[[package]]
name = "indexmap"
version = "2.14.0"
@@ -2571,6 +2740,17 @@ version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb"
[[package]]
name = "interpolate_name"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "ipaddress"
version = "0.1.3"
@@ -2772,6 +2952,12 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
[[package]]
name = "lebe"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a79a3332a6609480d7d0c9eab957bca6b455b91bb84e66d19f5ff66294b85b8"
[[package]]
name = "lettre"
version = "0.11.21"
@@ -2808,6 +2994,16 @@ version = "0.2.186"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68ab91017fe16c622486840e4c83c9a37afeff978bd239b5293d61ece587de66"
[[package]]
name = "libfuzzer-sys"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f12a681b7dd8ce12bff52488013ba614b869148d54dd79836ab85aafdd53f08d"
dependencies = [
"arbitrary",
"cc",
]
[[package]]
name = "libloading"
version = "0.8.9"
@@ -2900,6 +3096,15 @@ dependencies = [
"futures-sink",
]
[[package]]
name = "loop9"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fae87c125b03c1d2c0150c90365d7d6bcc53fb73a9acaef207d2d065860f062"
dependencies = [
"imgref",
]
[[package]]
name = "lru-cache"
version = "0.1.2"
@@ -2978,6 +3183,16 @@ version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
[[package]]
name = "maybe-rayon"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ea1f30cedd69f0a2954655f7188c6a834246d2bcf1e315e2ac40c4b24dc9519"
dependencies = [
"cfg-if",
"rayon",
]
[[package]]
name = "memchr"
version = "2.8.0"
@@ -3126,6 +3341,15 @@ dependencies = [
"libc",
]
[[package]]
name = "no_std_io2"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b51ed7824b6e07d354605f4abb3d9d300350701299da96642ee084f5ce631550"
dependencies = [
"memchr",
]
[[package]]
name = "nohash-hasher"
version = "0.2.0"
@@ -3157,6 +3381,12 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21"
[[package]]
name = "noop_proc_macro"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0676bb32a98c1a483ce53e500a81ad9c3d5b3f7c920c28c24e9cb0980d0b5bc8"
[[package]]
name = "nu-ansi-term"
version = "0.50.3"
@@ -3205,6 +3435,17 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6673768db2d862beb9b39a78fdcb1a69439615d5794a1be50caa9bc92c81967"
[[package]]
name = "num-derive"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "num-integer"
version = "0.1.46"
@@ -3579,6 +3820,12 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "pastey"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec"
[[package]]
name = "pear"
version = "0.2.9"
@@ -3810,6 +4057,25 @@ dependencies = [
"yansi",
]
[[package]]
name = "profiling"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3eb8486b569e12e2c32ad3e204dbaba5e4b5b216e9367044f25f1dba42341773"
dependencies = [
"profiling-procmacros",
]
[[package]]
name = "profiling-procmacros"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "prost"
version = "0.14.3"
@@ -3866,6 +4132,15 @@ version = "0.1.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0c5ccf5294c6ccd63a74f1565028353830a9c2f5eb0c682c355c471726a6e3f"
[[package]]
name = "qoi"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f6d64c71eb498fe9eae14ce4ec935c555749aef511cca85b5568910d6e48001"
dependencies = [
"bytemuck",
]
[[package]]
name = "quick-error"
version = "2.0.1"
@@ -4020,6 +4295,76 @@ version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63b8176103e19a2643978565ca18b50549f6101881c443590420e4dc998a3c69"
[[package]]
name = "rav1e"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43b6dd56e85d9483277cde964fd1bdb0428de4fec5ebba7540995639a21cb32b"
dependencies = [
"aligned-vec",
"arbitrary",
"arg_enum_proc_macro",
"arrayvec",
"av-scenechange",
"av1-grain",
"bitstream-io",
"built",
"cfg-if",
"interpolate_name",
"itertools 0.14.0",
"libc",
"libfuzzer-sys",
"log",
"maybe-rayon",
"new_debug_unreachable",
"noop_proc_macro",
"num-derive",
"num-traits",
"paste",
"profiling",
"rand 0.9.4",
"rand_chacha",
"simd_helpers",
"thiserror",
"v_frame",
"wasm-bindgen",
]
[[package]]
name = "ravif"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e52310197d971b0f5be7fe6b57530dcd27beb35c1b013f29d66c1ad73fbbcc45"
dependencies = [
"avif-serialize",
"imgref",
"loop9",
"quick-error",
"rav1e",
"rayon",
"rgb",
]
[[package]]
name = "rayon"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb39b166781f92d482534ef4b4b1b2568f42613b53e5b6c160e24cfbfa30926d"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]]
name = "recaptcha-verify"
version = "0.2.0"
@@ -4160,6 +4505,12 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7"
[[package]]
name = "rgb"
version = "0.8.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47b34b781b31e5d73e9fbc8689c70551fd1ade9a19e3e28cfec8580a79290cc4"
[[package]]
name = "ring"
version = "0.17.14"
@@ -5020,6 +5371,15 @@ dependencies = [
"simdutf8",
]
[[package]]
name = "simd_helpers"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95890f873bec569a0362c235787f3aca6e1e887302ba4840839bcc6459c42da6"
dependencies = [
"quote",
]
[[package]]
name = "simdutf8"
version = "0.1.5"
@@ -5238,6 +5598,20 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "tiff"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b63feaf3343d35b6ca4d50483f94843803b0f51634937cc2ec519fc32232bc52"
dependencies = [
"fax",
"flate2",
"half",
"quick-error",
"weezl",
"zune-jpeg",
]
[[package]]
name = "tikv-jemalloc-ctl"
version = "0.6.1"
@@ -5569,22 +5943,6 @@ dependencies = [
"tracing",
]
[[package]]
name = "tower-cookies"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "151b5a3e3c45df17466454bb74e9ecedecc955269bdedbf4d150dfa393b55a36"
dependencies = [
"axum-core",
"cookie",
"futures-util",
"http",
"parking_lot",
"pin-project-lite",
"tower-layer",
"tower-service",
]
[[package]]
name = "tower-http"
version = "0.6.8"
@@ -5633,44 +5991,6 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
[[package]]
name = "tower-sessions"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "518dca34b74a17cadfcee06e616a09d2bd0c3984eff1769e1e76d58df978fc78"
dependencies = [
"async-trait",
"http",
"time",
"tokio",
"tower-cookies",
"tower-layer",
"tower-service",
"tower-sessions-core",
"tracing",
]
[[package]]
name = "tower-sessions-core"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "568531ec3dfcf3ffe493de1958ae5662a0284ac5d767476ecdb6a34ff8c6b06c"
dependencies = [
"async-trait",
"axum-core",
"base64 0.22.1",
"futures",
"http",
"parking_lot",
"rand 0.9.4",
"serde",
"serde_json",
"thiserror",
"time",
"tokio",
"tracing",
]
[[package]]
name = "tracing"
version = "0.1.44"
@@ -5892,6 +6212,17 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "v_frame"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "666b7727c8875d6ab5db9533418d7c764233ac9c0cff1d469aec8fa127597be2"
dependencies = [
"aligned-vec",
"num-traits",
"wasm-bindgen",
]
[[package]]
name = "validator"
version = "0.20.0"
@@ -6506,6 +6837,12 @@ dependencies = [
"conduwuit_admin",
]
[[package]]
name = "y4m"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5a4b21e1a62b67a2970e6831bc091d7b87e119e7f9791aef9702e3bef04448"
[[package]]
name = "yansi"
version = "1.0.1"
@@ -6655,6 +6992,15 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb8a0807f7c01457d0379ba880ba6322660448ddebc890ce29bb64da71fb40f9"
[[package]]
name = "zune-inflate"
version = "0.2.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73ab332fe2f6680068f3582b16a24f90ad7096d5d39b974d1c0aff0125116f02"
dependencies = [
"simd-adler32",
]
[[package]]
name = "zune-jpeg"
version = "0.5.15"
+10 -4
View File
@@ -180,7 +180,7 @@ version = "0.5.3"
features = ["alloc", "rand"]
default-features = false
# Used to generate thumbnails for images
# Used to generate thumbnails for images & blurhashes
[workspace.dependencies.image]
version = "0.25.5"
default-features = false
@@ -191,6 +191,14 @@ features = [
"webp",
]
[workspace.dependencies.blurhash]
version = "0.2.3"
default-features = false
features = [
"fast-linear-to-srgb",
"image",
]
# logging
[workspace.dependencies.log]
version = "0.4.27"
@@ -356,6 +364,7 @@ features = [
"ring-compat",
"compat-upload-signatures",
"compat-optional-txn-pdus",
"unstable-msc2448",
"unstable-msc2666",
"unstable-msc2867",
"unstable-msc2870",
@@ -559,9 +568,6 @@ features = ["std"]
[workspace.dependencies.nonzero_ext]
version = "0.3.0"
[workspace.dependencies.serde_urlencoded]
version = "0.7.1"
#
# Patches
#
-1
View File
@@ -1 +0,0 @@
Users may now be forbidden from deactivating their own accounts with the new `allow_deactivation` config option.
-1
View File
@@ -1 +0,0 @@
Added support for authenticating clients using the new OAuth 2.0 login API. Contributed by @ginger.
-1
View File
@@ -1 +0,0 @@
Support for server-side blurhashing (part of MSC2448) has been removed.
-9
View File
@@ -1,9 +0,0 @@
Implemented event rejection, which should resolve and prevent future netsplits of the kinds observed
within some Continuwuity rooms.
Also resolved several bugs related to both soft-failing events, and event backfilling, which should
improve state resolution stability.
The `!admin debug get-pdu` command was updated to disambiguate event acceptance status, and
`!admin debug show-auth-chain` was added to visually display event auth chains, which may assist
developers in debugging strangely complex events.
Contributed by @nex.
+31 -39
View File
@@ -524,15 +524,17 @@
#
#recaptcha_private_site_key =
# Controls whether users are allowed to deactivate their own accounts
# through the account management panel or their Matrix clients. Server
# admins can always deactivate users using the relevant admin commands.
# Policy documents, such as terms and conditions or a privacy policy,
# which users must agree to when registering an account.
#
# Note that, in some jurisdictions, you may be legally required to honor
# users who request to deactivate their accounts if you set this option
# to `false`.
# Example:
# ```ignore
# [global.registration_terms.privacy_policy]
# en = { name = "Privacy Policy", url = "https://homeserver.example/en/privacy_policy.html" }
# es = { name = "Política de Privacidad", url = "https://homeserver.example/es/privacy_policy.html" }
# ```
#
#allow_deactivation = true
#registration_terms = {}
# Controls whether encrypted rooms and events are allowed.
#
@@ -1785,9 +1787,11 @@
#stream_amplification = 1024
# Number of sender task workers; determines sender parallelism. Default is
# core count. Override by setting a different value.
# '0' which means the value is determined internally, likely matching the
# number of tokio worker-threads or number of cores, etc. Override by
# setting a non-zero value.
#
#sender_workers = core count
#sender_workers = 0
# Enables listener sockets; can be set to false to disable listening. This
# option is intended for developer/diagnostic purposes only.
@@ -1870,6 +1874,24 @@
#
#support_pgp_key =
[global.blurhashing]
# Blurhashing X component; 4 is recommended by https://blurha.sh/
#
#components_x = 4
# Blurhashing Y component; 3 is recommended by https://blurha.sh/
#
#components_y = 3
# Max raw size that the server will blurhash. This is the size of the
# image after converting it to raw data; it should be higher than the
# upload limit, but not too high. The higher it is, the higher the
# potential load will be for clients requesting blurhashes. The default
# is 33.55 MB. Setting it to 0 disables blurhashing.
#
#blurhash_max_raw_size = 33554432
[global.matrix_rtc]
# A list of MatrixRTC foci (transports) which will be served via the
@@ -1967,33 +1989,3 @@
# provide an email address.
#
#require_email_for_token_registration = false
#[global.registration-terms]
# The language code to provide to clients along with the policy documents.
#
#language = "en"
# Policy documents, such as terms and conditions or a privacy policy,
# which users must agree to when registering an account.
#
# Example:
# ```ignore
# [global.registration_terms.documents]
# privacy_policy = { name = "Privacy Policy", url = "https://homeserver.example/en/privacy_policy.html" }
# ```
#
#documents = {}
#[global.oauth]
# The compatibility mode to use for OAuth.
#
# - "disabled": OAuth will be unavailable. Users will only be able to log
# in using legacy authentication.
# - "hybrid": OAuth and legacy authentication will both be available. Some
# clients may only use one or the other.
# - "exclusive": Only OAuth will be available. Clients which require
# legacy authentication will be unable to log in.
#
#compatibility_mode = "hybrid"
+1 -1
View File
@@ -146,7 +146,7 @@ cargo clippy \
--locked \
--profile test \
--no-default-features \
--features=console,systemd,element_hacks,direct_tls,perf_measurements,brotli_compression \
--features=console,systemd,element_hacks,direct_tls,perf_measurements,brotli_compression,blurhashing \
--color=always \
-- \
-D warnings
+1 -1
View File
@@ -16,7 +16,7 @@
};
#[derive(Debug, Parser)]
#[command(name = conduwuit_core::BRANDING, version = conduwuit_core::version())]
#[command(name = conduwuit_core::name(), version = conduwuit_core::version())]
pub enum AdminCommand {
#[command(subcommand)]
/// Commands for managing appservices
+9 -213
View File
@@ -1,5 +1,5 @@
use std::{
collections::{HashMap, HashSet},
collections::HashMap,
fmt::Write,
iter::once,
time::{Instant, SystemTime},
@@ -22,7 +22,7 @@
use lettre::message::Mailbox;
use ruma::{
CanonicalJsonObject, CanonicalJsonValue, EventId, OwnedEventId, OwnedRoomId,
OwnedRoomOrAliasId, OwnedServerName, RoomId, RoomVersionId, UInt,
OwnedRoomOrAliasId, OwnedServerName, RoomId, RoomVersionId,
api::federation::event::get_room_state, events::AnyStateEvent, serde::Raw,
};
use service::rooms::{
@@ -69,189 +69,6 @@ pub(super) async fn get_auth_chain(&self, event_id: OwnedEventId) -> Result {
self.write_str(&out).await
}
#[derive(Clone, Copy, Eq, PartialEq)]
enum NodeStatus {
Normal(bool),
SoftFailed(bool),
Rejected(bool),
}
struct AuthChild {
node_id: String,
event_id: OwnedEventId,
depth: UInt,
ts: UInt,
first_seen: bool,
pdu: Option<PduEvent>,
}
fn render_node(
graph: &mut String,
node_id: &str,
event_id: &EventId,
status: NodeStatus,
) -> Result {
let evt_str = event_id.to_string();
let status_label = match status {
| NodeStatus::Normal(false) => evt_str,
| NodeStatus::Normal(true) => format!("{evt_str} (missing locally)"),
| NodeStatus::SoftFailed(false) => format!("{evt_str} (soft-failed)"),
| NodeStatus::SoftFailed(true) => format!("{evt_str} (soft-failed & missing locally)"),
| NodeStatus::Rejected(false) => format!("{evt_str} (rejected)"),
| NodeStatus::Rejected(true) => format!("{evt_str} (rejected & missing locally)"),
};
writeln!(graph, "{node_id}[\"{}\"]", status_label.as_str())?;
match status {
| NodeStatus::Rejected(_) => writeln!(graph, "class {node_id} rejected;")?,
| NodeStatus::SoftFailed(_) => writeln!(graph, "class {node_id} soft_failed;")?,
| NodeStatus::Normal(_) => {},
}
Ok(())
}
#[admin_command]
pub(super) async fn show_auth_chain(&self, event_id: OwnedEventId) -> Result {
let node_status = async |event_id: &EventId, missing: bool| -> NodeStatus {
if self
.services
.rooms
.pdu_metadata
.is_event_rejected(event_id)
.await
{
NodeStatus::Rejected(missing)
} else if self
.services
.rooms
.pdu_metadata
.is_event_soft_failed(event_id)
.await
{
NodeStatus::SoftFailed(missing)
} else {
NodeStatus::Normal(missing)
}
};
let Ok(root) = self.services.rooms.timeline.get_pdu(&event_id).await else {
return Err!("Event not found.");
};
let mut graph = String::from(
"```mermaid\n%% This is a mermaid graph. You can plug this output into\n\
%% https://mermaid.live/edit to visualise it on-the-fly.\nflowchart TD\n\
classDef rejected fill:#ffe5e5,stroke:#cc0000,stroke-width:2px,color:#000;\n\
classDef soft_failed fill:#fff6cc,stroke:#c9a400,stroke-width:2px,color:#000;\n"
);
let mut node_ids: HashMap<OwnedEventId, String> = HashMap::new();
let mut cached_events: HashMap<OwnedEventId, PduEvent> =
HashMap::from([(event_id.clone(), root.clone())]);
let mut scheduled: HashSet<OwnedEventId> = HashSet::from([event_id.clone()]);
let mut visited: HashSet<OwnedEventId> = HashSet::new();
let mut stack = vec![root];
let mut next_node_id = 0_usize;
let node_id_for = |event_id: &OwnedEventId,
node_ids: &mut HashMap<OwnedEventId, String>,
next_node_id: &mut usize| {
node_ids
.entry(event_id.clone())
.or_insert_with(|| {
let id = format!("n{}", *next_node_id);
*next_node_id = next_node_id.saturating_add(1);
id
})
.clone()
};
while let Some(event) = stack.pop() {
let current_event_id = event.event_id().to_owned();
if !visited.insert(current_event_id.clone()) {
continue;
}
let current_node_id = node_id_for(&current_event_id, &mut node_ids, &mut next_node_id);
let current_status = node_status(&current_event_id, false).await;
render_node(&mut graph, &current_node_id, &current_event_id, current_status)?;
let mut children = Vec::with_capacity(event.auth_events.len());
for auth_event_id in event.auth_events().rev() {
let auth_event_id = auth_event_id.to_owned();
let auth_node_id = node_id_for(&auth_event_id, &mut node_ids, &mut next_node_id);
writeln!(graph, "{current_node_id} --> {auth_node_id}")?;
let first_seen = scheduled.insert(auth_event_id.clone());
let auth_pdu = if let Some(auth_pdu) = cached_events.get(&auth_event_id) {
// NOTE: events might be referenced multiple times (like the create event)
// so this saves some cheeky db lookup time
Some(auth_pdu.clone())
} else if first_seen {
match self.services.rooms.timeline.get_pdu(&auth_event_id).await {
| Ok(auth_event) => {
cached_events.insert(auth_event_id.clone(), auth_event.clone());
Some(auth_event)
},
| Err(_) => None,
}
} else {
None
};
// NOTE: Depth is used as the primary sorting key here, even though it has no
// bearing on state resolution or anything. Timestamp is used as a
// tiebreaker, failing back to lexicographical comparison.
let (depth, ts) = auth_pdu
.as_ref()
.map_or((UInt::MAX, UInt::MAX), |pdu| (pdu.depth, pdu.origin_server_ts));
children.push(AuthChild {
node_id: auth_node_id,
event_id: auth_event_id,
depth,
ts,
first_seen,
pdu: auth_pdu,
});
}
children.sort_by(|a, b| {
a.depth
.cmp(&b.depth)
.then(a.ts.cmp(&b.ts))
.then(a.event_id.as_str().cmp(b.event_id.as_str()))
});
for child in children.into_iter().rev() {
if !child.first_seen {
continue;
}
if let Some(child_pdu) = child.pdu {
// We have this PDU so will want to traverse it.
stack.push(child_pdu);
} else {
// We don't have this PDU locally so we can't traverse its auth events,
// but we can still render it as a node.
render_node(
&mut graph,
&child.node_id,
&child.event_id,
node_status(&child.event_id, true).await,
)?;
}
}
}
graph.push_str("```\n");
self.write_str(&graph).await
}
#[admin_command]
pub(super) async fn parse_pdu(&self) -> Result {
if self.body.len() < 2
@@ -294,31 +111,15 @@ pub(super) async fn get_pdu(&self, event_id: OwnedEventId) -> Result {
outlier = true;
pdu_json = self.services.rooms.timeline.get_pdu_json(&event_id).await;
}
let rejected = self
.services
.rooms
.pdu_metadata
.is_event_rejected(&event_id)
.await;
let soft_failed = self
.services
.rooms
.pdu_metadata
.is_event_soft_failed(&event_id)
.await;
match pdu_json {
| Err(_) => return Err!("PDU not found locally."),
| Ok(json) => {
let text = serde_json::to_string_pretty(&json)?;
let msg = if rejected {
"Rejected PDU:"
} else if soft_failed {
"Soft-failed PDU:"
} else if outlier {
"Outlier PDU:"
let msg = if outlier {
"Outlier (Rejected / Soft Failed) PDU found in our database"
} else {
"PDU:"
"PDU found in our database"
};
write!(self, "{msg}\n```json\n{text}\n```")
},
@@ -813,10 +614,6 @@ pub(super) async fn force_set_room_state_from_server(
.await;
state.insert(shortstatekey, pdu.event_id.clone());
self.services
.rooms
.pdu_metadata
.clear_pdu_markers(pdu.event_id());
}
}
@@ -834,10 +631,6 @@ pub(super) async fn force_set_room_state_from_server(
.rooms
.outlier
.add_pdu_outlier(&event_id, &value);
self.services
.rooms
.pdu_metadata
.clear_pdu_markers(&event_id);
}
info!("Resolving new room state");
@@ -869,7 +662,10 @@ pub(super) async fn force_set_room_state_from_server(
.force_state(room_id.clone().as_ref(), short_state_hash, added, removed, &state_lock)
.await?;
info!("Updating joined counts for room");
info!(
"Updating joined counts for room just in case (e.g. we may have found a difference in \
the room's m.room.member state"
);
self.services
.rooms
.state_cache
+1 -10
View File
@@ -17,21 +17,12 @@ pub enum DebugCommand {
message: Vec<String>,
},
/// Loads the auth_chain of a PDU, reporting how long it took.
/// Get the auth_chain of a PDU
GetAuthChain {
/// An event ID (the $ character followed by the base64 reference hash)
event_id: OwnedEventId,
},
/// Walks & displays the auth_chain of a PDU in a mermaid graph format.
///
/// This is useless to basically anyone but developers, and is also probably
/// slow and memory hungry.
ShowAuthChain {
/// The root event ID to start walking back from.
event_id: OwnedEventId,
},
/// Parse and print a PDU from a JSON
///
/// The PDU event is only checked for validity and is not added to the
+1 -183
View File
@@ -1,6 +1,6 @@
use conduwuit::{Err, Result};
use futures::StreamExt;
use ruma::{OwnedRoomId, OwnedRoomOrAliasId};
use ruma::OwnedRoomId;
use crate::{PAGE_SIZE, admin_command, get_room_info};
@@ -82,185 +82,3 @@ pub(super) async fn exists(&self, room_id: OwnedRoomId) -> Result {
self.write_str(&format!("{result}")).await
}
#[admin_command]
pub(super) async fn purge_sync_tokens(&self, room: OwnedRoomOrAliasId) -> Result {
// Resolve the room ID from the room or alias ID
let room_id = self.services.rooms.alias.resolve(&room).await?;
// Delete all tokens for this room using the service method
let Ok(deleted_count) = self.services.rooms.user.delete_room_tokens(&room_id).await else {
return Err!("Failed to delete sync tokens for room {}", room_id.as_str());
};
self.write_str(&format!(
"Successfully deleted {deleted_count} sync tokens for room {}",
room_id.as_str()
))
.await
}
/// Target options for room purging
#[derive(Default, Debug, clap::ValueEnum, Clone)]
pub enum RoomTargetOption {
#[default]
/// Target all rooms
All,
/// Target only disabled rooms
DisabledOnly,
/// Target only banned rooms
BannedOnly,
}
#[admin_command]
pub(super) async fn purge_all_sync_tokens(
&self,
target_option: Option<RoomTargetOption>,
execute: bool,
) -> Result {
use conduwuit::{debug, info};
let mode = if !execute { "Simulating" } else { "Starting" };
// strictly, we should check if these reach the max value after the loop and
// warn the user that the count is too large
let mut total_rooms_checked: usize = 0;
let mut total_tokens_deleted: usize = 0;
let mut error_count: u32 = 0;
let mut skipped_rooms: usize = 0;
info!("{} purge of sync tokens", mode);
// Get all rooms in the server
let all_rooms = self
.services
.rooms
.metadata
.iter_ids()
.collect::<Vec<_>>()
.await;
info!("Found {} rooms total on the server", all_rooms.len());
// Filter rooms based on options
let mut rooms = Vec::new();
for room_id in all_rooms {
if let Some(target) = &target_option {
match target {
| RoomTargetOption::DisabledOnly => {
if !self.services.rooms.metadata.is_disabled(&room_id).await {
debug!("Skipping room {} as it's not disabled", room_id.as_str());
skipped_rooms = skipped_rooms.saturating_add(1);
continue;
}
},
| RoomTargetOption::BannedOnly => {
if !self.services.rooms.metadata.is_banned(&room_id).await {
debug!("Skipping room {} as it's not banned", room_id.as_str());
skipped_rooms = skipped_rooms.saturating_add(1);
continue;
}
},
| RoomTargetOption::All => {},
}
}
rooms.push(room_id);
}
// Total number of rooms we'll be checking
let total_rooms = rooms.len();
info!(
"Processing {} rooms after filtering (skipped {} rooms)",
total_rooms, skipped_rooms
);
// Process each room
for room_id in rooms {
total_rooms_checked = total_rooms_checked.saturating_add(1);
// Log progress periodically
if total_rooms_checked.is_multiple_of(100) || total_rooms_checked == total_rooms {
info!(
"Progress: {}/{} rooms checked, {} tokens {}",
total_rooms_checked,
total_rooms,
total_tokens_deleted,
if !execute { "would be deleted" } else { "deleted" }
);
}
// In dry run mode, just count what would be deleted, don't actually delete
debug!(
"Room {}: {}",
room_id.as_str(),
if !execute {
"would purge sync tokens"
} else {
"purging sync tokens"
}
);
if !execute {
// For dry run mode, count tokens without deleting
match self.services.rooms.user.count_room_tokens(&room_id).await {
| Ok(count) =>
if count > 0 {
debug!(
"Would delete {} sync tokens for room {}",
count,
room_id.as_str()
);
total_tokens_deleted = total_tokens_deleted.saturating_add(count);
} else {
debug!("No sync tokens found for room {}", room_id.as_str());
},
| Err(e) => {
debug!("Error counting sync tokens for room {}: {:?}", room_id.as_str(), e);
error_count = error_count.saturating_add(1);
},
}
} else {
// Real deletion mode
match self.services.rooms.user.delete_room_tokens(&room_id).await {
| Ok(count) =>
if count > 0 {
debug!("Deleted {} sync tokens for room {}", count, room_id.as_str());
total_tokens_deleted = total_tokens_deleted.saturating_add(count);
} else {
debug!("No sync tokens found for room {}", room_id.as_str());
},
| Err(e) => {
debug!("Error purging sync tokens for room {}: {:?}", room_id.as_str(), e);
error_count = error_count.saturating_add(1);
},
}
}
}
let action = if !execute { "would be deleted" } else { "deleted" };
info!(
"Finished {}: checked {} rooms out of {} total, {} tokens {}, errors: {}",
if !execute {
"purge simulation"
} else {
"purging sync tokens"
},
total_rooms_checked,
total_rooms,
total_tokens_deleted,
action,
error_count
);
self.write_str(&format!(
"Finished {}: checked {} rooms out of {} total, {} tokens {}, errors: {}",
if !execute { "simulation" } else { "purging sync tokens" },
total_rooms_checked,
total_rooms,
total_tokens_deleted,
action,
error_count
))
.await
}
+1 -23
View File
@@ -5,9 +5,8 @@
mod moderation;
use clap::Subcommand;
use commands::RoomTargetOption;
use conduwuit::Result;
use ruma::{OwnedRoomId, OwnedRoomOrAliasId};
use ruma::OwnedRoomId;
use self::{
alias::RoomAliasCommand, directory::RoomDirectoryCommand, info::RoomInfoCommand,
@@ -61,25 +60,4 @@ pub enum RoomCommand {
Exists {
room_id: OwnedRoomId,
},
/// - Delete all sync tokens for a room
PurgeSyncTokens {
/// Room ID or alias to purge sync tokens for
#[arg(value_parser)]
room: OwnedRoomOrAliasId,
},
/// - Delete sync tokens for all rooms that have no local users
///
/// By default, processes all empty rooms.
PurgeAllSyncTokens {
/// Target specific room types
#[arg(long, value_enum)]
target_option: Option<RoomTargetOption>,
/// Execute token deletions. Otherwise,
/// Performs a dry run without actually deleting any tokens
#[arg(long)]
execute: bool,
},
}
+2 -25
View File
@@ -30,37 +30,14 @@ pub(super) async fn issue_token(&self, expires: super::TokenExpires) -> Result {
.issue_token(self.sender_or_service_user().into(), expires);
self.write_str(&format!(
"New registration token issued: `{token}` . {}.",
"New registration token issued: `{token}`. {}.",
if let Some(expires) = info.expires {
format!("{expires}")
} else {
"Never expires".to_owned()
}
))
.await?;
if self
.services
.config
.oauth
.compatibility_mode
.oauth_available()
{
self.write_str(&format!(
"\nInvite link using this token: {}",
self.services
.config
.get_client_domain()
.join(&format!(
"{}/account/register/?flow=trusted&token={token}",
conduwuit::ROUTE_PREFIX
))
.unwrap()
))
.await?;
}
Ok(())
.await
}
#[admin_command]
+147 -46
View File
@@ -1,10 +1,13 @@
use std::collections::{BTreeMap, HashSet};
use std::{
collections::{BTreeMap, HashSet},
fmt::Write as _,
};
use api::client::{
full_user_deactivate, leave_room, recreate_push_rules_and_return, remote_leave_room,
};
use conduwuit::{
Err, Result, debug_warn, info,
Err, Result, debug_warn, error, info,
matrix::{Event, pdu::PartialPdu},
utils::{self, ReadyExt},
warn,
@@ -50,22 +53,130 @@ pub(super) async fn list_users(&self) -> Result {
#[admin_command]
pub(super) async fn create_user(&self, username: String, password: Option<String>) -> Result {
// Validate user id
let user_id = self
.services
let user_id = parse_local_user_id(self.services, &username)?;
if let Err(e) = user_id.validate_strict() {
if self.services.config.emergency_password.is_none() {
return Err!("Username {user_id} contains disallowed characters or spaces: {e}");
}
}
if self.services.users.exists(&user_id).await {
return Err!("User {user_id} already exists");
}
let password = password.unwrap_or_else(|| utils::random_string(AUTO_GEN_PASSWORD_LENGTH));
// Create user
self.services
.users
.determine_registration_user_id(Some(username), None, None)
.create(&user_id, Some(HashedPassword::new(&password)?))
.await?;
let password = HashedPassword::new(
&password.unwrap_or_else(|| utils::random_string(AUTO_GEN_PASSWORD_LENGTH)),
)?;
// Default to pretty displayname
let mut displayname = user_id.localpart().to_owned();
// If `new_user_displayname_suffix` is set, registration will push whatever
// content is set to the user's display name with a space before it
if !self
.services
.server
.config
.new_user_displayname_suffix
.is_empty()
{
write!(displayname, " {}", self.services.server.config.new_user_displayname_suffix)?;
}
self.services
.users
.create_local_account(&user_id, password, None)
.await;
.set_displayname(&user_id, Some(displayname));
self.write_str(&format!("Created user {user_id}")).await
// Initial account data
self.services
.account_data
.update(
None,
&user_id,
ruma::events::GlobalAccountDataEventType::PushRules
.to_string()
.into(),
&serde_json::to_value(ruma::events::push_rules::PushRulesEvent::new(
ruma::events::push_rules::PushRulesEventContent::new(
ruma::push::Ruleset::server_default(&user_id),
),
))
.unwrap(),
)
.await?;
if !self.services.server.config.auto_join_rooms.is_empty() {
for room in &self.services.server.config.auto_join_rooms {
let Ok(room_id) = self.services.rooms.alias.resolve(room).await else {
error!(
%user_id,
"Failed to resolve room alias to room ID when attempting to auto join {room}, skipping"
);
continue;
};
if !self
.services
.rooms
.state_cache
.server_in_room(self.services.globals.server_name(), &room_id)
.await
{
warn!(
"Skipping room {room} to automatically join as we have never joined before."
);
continue;
}
if let Some(room_server_name) = room.server_name() {
match self
.services
.rooms
.membership
.join_room(
&user_id,
&room_id,
Some("Automatically joining this room upon registration".to_owned()),
&[
self.services.globals.server_name().to_owned(),
room_server_name.to_owned(),
],
)
.await
{
| Ok(_response) => {
info!("Automatically joined room {room} for user {user_id}");
},
| Err(e) => {
// don't return this error so we don't fail registrations
error!(
"Failed to automatically join room {room} for user {user_id}: {e}"
);
self.services
.admin
.send_text(&format!(
"Failed to automatically join room {room} for user {user_id}: \
{e}"
))
.await;
},
}
}
}
}
// we dont add a device since we're not the user, just the creator
// Make the first user to register an administrator and disable first-run mode.
self.services.firstrun.empower_first_user(&user_id).await?;
self.write_str(&format!("Created user with user_id: {user_id} and password: `{password}`"))
.await
}
#[admin_command]
@@ -191,6 +302,31 @@ pub(super) async fn reset_password(
Ok(())
}
#[admin_command]
pub(super) async fn issue_password_reset_link(&self, username: String) -> Result {
use conduwuit_service::password_reset::{PASSWORD_RESET_PATH, RESET_TOKEN_QUERY_PARAM};
self.bail_restricted()?;
let mut reset_url = self
.services
.config
.get_client_domain()
.join(PASSWORD_RESET_PATH)
.unwrap();
let user_id = parse_local_user_id(self.services, &username)?;
let token = self.services.password_reset.issue_token(user_id).await?;
reset_url
.query_pairs_mut()
.append_pair(RESET_TOKEN_QUERY_PARAM, &token.token);
self.write_str(&format!("Password reset link issued for {username}: {reset_url}"))
.await?;
Ok(())
}
#[admin_command]
pub(super) async fn deactivate_all(&self, no_leave_rooms: bool, force: bool) -> Result {
if self.body.len() < 2
@@ -627,41 +763,6 @@ pub(super) async fn force_join_room(
.await
}
#[admin_command]
pub(super) async fn force_join_room_remotely(
&self,
user_id: String,
room_id: OwnedRoomOrAliasId,
via: String,
) -> Result {
let via = ServerName::parse(&via)?;
let user_id = parse_local_user_id(self.services, &user_id)?;
let (room_id, mut servers) = self
.services
.rooms
.alias
.resolve_with_servers(&room_id, None)
.await?;
if servers.contains(&via) {
servers.retain(|n| *n != via);
}
servers.insert(0, via);
assert!(
self.services.globals.user_is_local(&user_id),
"Parsed user_id must be a local user"
);
let state_lock = self.services.rooms.state.mutex.lock(&*room_id).await;
self.services
.rooms
.membership
.join_remote_room(&user_id, &room_id, None, &servers, state_lock)
.await?;
self.write_str(&format!("{user_id} has been joined to {room_id}."))
.await
}
#[admin_command]
pub(super) async fn force_leave_room(
&self,
+6 -14
View File
@@ -29,6 +29,12 @@ pub enum UserCommand {
password: Option<String>,
},
/// Issue a self-service password reset link for a user.
IssuePasswordResetLink {
/// Username of the user who may use the link
username: String,
},
/// Get a user's associated email address.
GetEmail {
user_id: String,
@@ -177,20 +183,6 @@ pub enum UserCommand {
room_id: OwnedRoomOrAliasId,
},
/// Manually join a local user to a room via a remote server, regardless of
/// our current residency.
ForceJoinRoomRemotely {
/// The user to join
user_id: String,
/// The room to join
room_id: OwnedRoomOrAliasId,
/// The server name to join via.
///
/// This server will always be tried first, however if more are
/// available, they may be tried after.
via: String,
},
/// Manually leave a local user from a room.
ForceLeaveRoom {
user_id: String,
+6 -11
View File
@@ -24,7 +24,7 @@
power_levels::RoomPowerLevelsEventContent,
},
};
use service::{mailer::messages, uiaa::UiaaInitiator, users::HashedPassword};
use service::{mailer::messages, uiaa::Identity, users::HashedPassword};
use super::{DEVICE_ID_LENGTH, TOKEN_LENGTH};
use crate::Ruma;
@@ -121,7 +121,7 @@ pub(crate) async fn change_password_route(
&body.auth,
vec![AuthFlow::new(vec![AuthType::Password])],
Box::default(),
Some(UiaaInitiator::new(user_id, body.sender_device())),
Some(Identity::from_user_id(user_id)),
)
.await?
} else {
@@ -187,7 +187,7 @@ pub(crate) async fn change_password_route(
if services.server.config.admin_room_notices {
services
.admin
.notice(&format!("User {sender_user} changed their password."))
.notice(&format!("User {} changed their password.", &sender_user))
.await;
}
@@ -270,17 +270,10 @@ pub(crate) async fn deactivate_route(
.as_ref()
.ok_or_else(|| err!(Request(MissingToken("Missing access token."))))?;
if !services.config.allow_deactivation {
return Err!(Request(Unauthorized(
"You may not deactivate your own account. Contact your server's administrator for \
assistance."
)));
}
// Prompt the user to confirm with their password using UIAA
let _ = services
.uiaa
.authenticate_password(&body.auth, sender_user, body.sender_device(), None)
.authenticate_password(&body.auth, Some(Identity::from_user_id(sender_user)))
.await?;
// Remove profile pictures and display name
@@ -329,6 +322,8 @@ pub(crate) async fn check_registration_token_validity(
/// Runs through all the deactivation steps:
///
/// - Mark as deactivated
/// - Removing display name
/// - Removing avatar URL and blurhash
/// - Removing all profile data
/// - Leaving all rooms (and forgets all of them)
pub async fn full_user_deactivate(
+298 -52
View File
@@ -1,15 +1,17 @@
use std::collections::HashMap;
use std::{collections::HashMap, fmt::Write};
use axum::extract::State;
use axum_client_ip::ClientIp;
use conduwuit::{
Err, Result, debug_info, info,
Err, Result, debug_info, error, info,
utils::{self},
warn,
};
use conduwuit_service::Services;
use futures::StreamExt;
use futures::{FutureExt, StreamExt};
use lettre::{Address, message::Mailbox};
use ruma::{
OwnedUserId, UserId,
api::client::{
account::{
register::{self, LoginType, RegistrationKind},
@@ -18,6 +20,11 @@
uiaa::{AuthFlow, AuthType},
},
assign,
events::{
GlobalAccountDataEventType, push_rules::PushRulesEvent,
room::message::RoomMessageEventContent,
},
push,
};
use serde_json::value::RawValue;
use service::{mailer::messages, users::HashedPassword};
@@ -25,6 +32,8 @@
use super::{DEVICE_ID_LENGTH, TOKEN_LENGTH};
use crate::Ruma;
const RANDOM_USER_ID_LENGTH: usize = 10;
/// # `POST /_matrix/client/v3/register`
///
/// Register an account on this homeserver.
@@ -43,6 +52,8 @@ pub(crate) async fn register_route(
return Err!(Request(GuestAccessForbidden("Guests may not register on this server.")));
}
let emergency_mode_enabled = services.config.emergency_password.is_some();
// Allow registration if it's enabled in the config file or if this is the first
// run (so the first user account can be created)
let allow_registration =
@@ -60,51 +71,101 @@ pub(crate) async fn register_route(
)));
}
let user_id = if body.body.login_type == Some(LoginType::ApplicationService) {
let Some(appservice_info) = &body.appservice_info else {
return Err!(Request(Forbidden(
"Only appservices can use the appservice login type."
)));
};
let user_id = services
.users
.determine_registration_user_id(body.username.clone(), None, Some(appservice_info))
.await?;
services.users.create(&user_id, None).await?;
user_id
let identity = if body.appservice_info.is_some() {
// Appservices can skip auth
None
} else {
// Perform UIAA to determine the user's identity
let (flows, params) = create_registration_uiaa_session(&services).await?;
let identity = services
.uiaa
.authenticate(&body.auth, flows, params, None)
.await?;
let password = if let Some(password) = &body.password {
HashedPassword::new(password)?
} else {
return Err!(Request(InvalidParam("A password must be provided.")));
};
let user_id = services
.users
.determine_registration_user_id(body.username.clone(), identity.email.as_ref(), None)
.await?;
services
.users
.create_local_account(&user_id, password, identity.email)
.await;
user_id
Some(
services
.uiaa
.authenticate(&body.auth, flows, params, None)
.await?,
)
};
// If the user didn't supply a username but did supply an email, use
// the email's user as their initial localpart to avoid falling back to
// a randomly generated localpart
let supplied_username = body.username.clone().or_else(|| {
if let Some(identity) = &identity
&& let Some(email) = &identity.email
{
Some(email.user().to_owned())
} else {
None
}
});
let user_id =
determine_registration_user_id(&services, supplied_username, emergency_mode_enabled)
.await?;
if body.body.login_type == Some(LoginType::ApplicationService) {
// For appservice logins, make sure that the user ID is in the appservice's
// namespace
match body.appservice_info {
| Some(ref info) =>
if !info.is_user_match(&user_id) && !emergency_mode_enabled {
return Err!(Request(Exclusive(
"Username is not in an appservice namespace."
)));
},
| _ => {
return Err!(Request(MissingToken("Missing appservice token.")));
},
}
} else if services.appservice.is_exclusive_user_id(&user_id).await && !emergency_mode_enabled
{
// For non-appservice logins, ban user IDs which are in an appservice's
// namespace (unless emergency mode is enabled)
return Err!(Request(Exclusive("Username is reserved by an appservice.")));
}
let password = if body.appservice_info.is_some() {
None
} else if let Some(password) = body.password.as_deref() {
Some(HashedPassword::new(password)?)
} else {
return Err!(Request(InvalidParam("A password must be provided")));
};
// Create user
services.users.create(&user_id, password).await?;
// Set an initial display name
let mut displayname = user_id.localpart().to_owned();
// Apply the new user displayname suffix, if it's set
if !services.globals.new_user_displayname_suffix().is_empty()
&& body.appservice_info.is_none()
{
write!(displayname, " {}", services.server.config.new_user_displayname_suffix)?;
}
services
.users
.set_displayname(&user_id, Some(displayname.clone()));
// Initial account data
services
.account_data
.update(
None,
&user_id,
GlobalAccountDataEventType::PushRules.to_string().into(),
&serde_json::to_value(PushRulesEvent::new(
push::Ruleset::server_default(&user_id).into(),
))
.expect("should be able to serialize push rules"),
)
.await?;
// Generate new device id if the user didn't specify one
let (token, device) = if !body.inhibit_login {
// Generate new device id if the user didn't specify one
let device_id = body
.device_id
.clone()
@@ -120,7 +181,6 @@ pub(crate) async fn register_route(
&user_id,
&device_id,
&new_token,
None,
body.initial_device_display_name.clone(),
Some(client.to_string()),
)
@@ -131,7 +191,118 @@ pub(crate) async fn register_route(
(None, None)
};
debug_info!(%user_id, ?device, "New account created via legacy registration");
debug_info!(%user_id, ?device, "User account was created");
// If the user registered with an email, associate it with their account.
if let Some(identity) = identity
&& let Some(email) = identity.email
{
// This may fail if the email is already in use, but we already check for that
// in `/requestToken`, so ignoring the error is acceptable here in the rare case
// that an email is sniped by another user between the `/requestToken` request
// and the `/register` request.
let _ = services
.threepid
.associate_localpart_email(user_id.localpart(), &email)
.await;
}
let device_display_name = body.initial_device_display_name.as_deref().unwrap_or("");
if body.appservice_info.is_none() {
if !device_display_name.is_empty() {
let notice = format!(
"New user \"{user_id}\" registered on this server from IP {client} and device \
display name \"{device_display_name}\""
);
info!("{notice}");
if services.server.config.admin_room_notices {
services.admin.notice(&notice).await;
}
} else {
let notice = format!("New user \"{user_id}\" registered on this server.");
info!("{notice}");
if services.server.config.admin_room_notices {
services.admin.notice(&notice).await;
}
}
}
// Make the first user to register an administrator and disable first-run mode.
let was_first_user = services.firstrun.empower_first_user(&user_id).await?;
// If the registering user was not the first and we're suspending users on
// register, suspend them.
if !was_first_user && services.config.suspend_on_register {
// Note that we can still do auto joins for suspended users
services
.users
.suspend_account(&user_id, &services.globals.server_user)
.await;
// And send an @room notice to the admin room, to prompt admins to review the
// new user and ideally unsuspend them if deemed appropriate.
if services.server.config.admin_room_notices {
services
.admin
.send_loud_message(RoomMessageEventContent::text_plain(format!(
"User {user_id} has been suspended as they are not the first user on this \
server. Please review and unsuspend them if appropriate."
)))
.await
.ok();
}
}
if body.appservice_info.is_none() && !services.server.config.auto_join_rooms.is_empty() {
for room in &services.server.config.auto_join_rooms {
let Ok(room_id) = services.rooms.alias.resolve(room).await else {
error!(
"Failed to resolve room alias to room ID when attempting to auto join \
{room}, skipping"
);
continue;
};
if !services
.rooms
.state_cache
.server_in_room(services.globals.server_name(), &room_id)
.await
{
warn!(
"Skipping room {room} to automatically join as we have never joined before."
);
continue;
}
if let Some(room_server_name) = room.server_name() {
match services
.rooms
.membership
.join_room(
&user_id,
&room_id,
Some("Automatically joining this room upon registration".to_owned()),
&[services.globals.server_name().to_owned(), room_server_name.to_owned()],
)
.boxed()
.await
{
| Err(e) => {
// don't return this error so we don't fail registrations
error!(
"Failed to automatically join room {room} for user {user_id}: {e}"
);
},
| _ => {
info!("Automatically joined room {room} for user {user_id}");
},
}
}
}
}
Ok(assign!(register::v3::Response::new(user_id), {
access_token: token,
@@ -203,21 +374,21 @@ async fn create_registration_uiaa_session(
// Require all users to agree to the terms and conditions, if configured
let terms = &services.config.registration_terms;
if !terms.documents.is_empty() {
let mut terms_map = HashMap::new();
if !terms.is_empty() {
let mut terms =
serde_json::to_value(terms.clone()).expect("failed to serialize terms");
for (id, document) in &terms.documents {
terms_map.insert(id.to_owned(), serde_json::json!({
terms.language.clone(): serde_json::to_value(document).expect("should be able to serialize document")
}));
// Insert a dummy `version` field
for (_, documents) in terms.as_object_mut().unwrap() {
let documents = documents.as_object_mut().unwrap();
documents.insert("version".to_owned(), "latest".into());
}
terms_map.insert("version".to_owned(), "latest".into());
params.insert(
AuthType::Terms.as_str().to_owned(),
serde_json::json!({
"policies": terms_map,
"policies": terms,
}),
);
@@ -250,6 +421,81 @@ async fn create_registration_uiaa_session(
Ok((flows, params))
}
async fn determine_registration_user_id(
services: &Services,
supplied_username: Option<String>,
emergency_mode_enabled: bool,
) -> Result<OwnedUserId> {
if let Some(supplied_username) = supplied_username {
// The user gets to pick their username. Do some validation to make sure it's
// acceptable.
// Don't allow registration with forbidden usernames.
if services
.globals
.forbidden_usernames()
.is_match(&supplied_username)
&& !emergency_mode_enabled
{
return Err!(Request(Forbidden("Username is forbidden")));
}
// Create and validate the user ID
let user_id = match UserId::parse_with_server_name(
&supplied_username,
services.globals.server_name(),
) {
| Ok(user_id) => {
if let Err(e) = user_id.validate_strict() {
// Unless we are in emergency mode, we should follow synapse's behaviour on
// not allowing things like spaces and UTF-8 characters in usernames
if !emergency_mode_enabled {
return Err!(Request(InvalidUsername(debug_warn!(
"Username {supplied_username} contains disallowed characters or \
spaces: {e}"
))));
}
}
// Don't allow registration with user IDs that aren't local
if !services.globals.user_is_local(&user_id) {
return Err!(Request(InvalidUsername(
"Username {supplied_username} is not local to this server"
)));
}
user_id
},
| Err(e) => {
return Err!(Request(InvalidUsername(debug_warn!(
"Username {supplied_username} is not valid: {e}"
))));
},
};
if services.users.exists(&user_id).await {
return Err!(Request(UserInUse("User ID is not available.")));
}
Ok(user_id)
} else {
// The user didn't specify a username. Generate a username for
// them.
loop {
let user_id = UserId::parse_with_server_name(
utils::random_string(RANDOM_USER_ID_LENGTH).to_lowercase(),
services.globals.server_name(),
)
.unwrap();
if !services.users.exists(&user_id).await {
break Ok(user_id);
}
}
}
}
/// # `POST /_matrix/client/v3/register/email/requestToken`
///
/// Requests a validation email for the purpose of registering a new account.
+4 -5
View File
@@ -11,7 +11,7 @@
},
thirdparty::{Medium, ThirdPartyIdentifierInit},
};
use service::mailer::messages;
use service::{mailer::messages, uiaa::Identity};
use crate::Ruma;
@@ -116,15 +116,14 @@ pub(crate) async fn add_3pid_route(
// Require password auth to add an email
let _ = services
.uiaa
.authenticate_password(&body.auth, sender_user, body.sender_device(), None)
.authenticate_password(&body.auth, Some(Identity::from_user_id(sender_user)))
.await?;
let email = services
.threepid
.get_valid_session(&body.sid, &body.client_secret)
.consume_valid_session(&body.sid, &body.client_secret)
.await
.map_err(|message| err!(Request(ThreepidAuthFailed("{message}"))))?
.consume();
.map_err(|message| err!(Request(ThreepidAuthFailed("{message}"))))?;
services
.threepid
+3 -3
View File
@@ -8,6 +8,7 @@
self, delete_device, delete_devices, get_device, get_devices, update_device,
},
};
use service::uiaa::Identity;
use crate::{Ruma, client::DEVICE_ID_LENGTH};
@@ -94,7 +95,6 @@ pub(crate) async fn update_device_route(
&device_id,
&appservice.registration.as_token,
None,
None,
Some(client.to_string()),
)
.await?;
@@ -126,7 +126,7 @@ pub(crate) async fn delete_device_route(
// Prompt the user to confirm with their password using UIAA
let _ = services
.uiaa
.authenticate_password(&body.auth, sender_user, body.sender_device(), None)
.authenticate_password(&body.auth, Some(Identity::from_user_id(sender_user)))
.await?;
}
@@ -162,7 +162,7 @@ pub(crate) async fn delete_devices_route(
// Prompt the user to confirm with their password using UIAA
let _ = services
.uiaa
.authenticate_password(&body.auth, sender_user, body.sender_device(), None)
.authenticate_password(&body.auth, Some(Identity::from_user_id(sender_user)))
.await?;
}
+2 -7
View File
@@ -26,7 +26,7 @@
serde::Raw,
};
use serde_json::json;
use service::oauth::OAuthTicket;
use service::uiaa::Identity;
use crate::Ruma;
@@ -204,12 +204,7 @@ pub(crate) async fn upload_signing_keys_route(
{
let _ = services
.uiaa
.authenticate_password(
&body.auth,
sender_user,
body.sender_device(),
Some(OAuthTicket::CrossSigningReset),
)
.authenticate_password(&body.auth, Some(Identity::from_user_id(sender_user)))
.await?;
}
+12 -1
View File
@@ -21,6 +21,7 @@
},
media::create_content,
},
assign,
};
use service::media::mxc::Mxc;
@@ -75,7 +76,17 @@ pub(crate) async fn create_content_route(
return Err!(Request(Unknown("Failed to save uploaded media")));
}
Ok(create_content::v3::Response::new(mxc.to_string().into()))
let blurhash = body.generate_blurhash.then(|| {
services
.media
.create_blurhash(&body.file, content_type, filename)
.ok()
.flatten()
});
Ok(assign!(create_content::v3::Response::new(mxc.to_string().into()), {
blurhash: blurhash.flatten(),
}))
}
/// # `GET /_matrix/client/v1/media/thumbnail/{serverName}/{mediaId}`
+1
View File
@@ -247,6 +247,7 @@ pub(crate) async fn invite_helper(
let mut content = RoomMemberEventContent::new(MembershipState::Invite);
content.displayname = services.users.displayname(recipient_user).await.ok();
content.avatar_url = services.users.avatar_url(recipient_user).await.ok();
content.blurhash = services.users.blurhash(recipient_user).await.ok();
content.is_direct = Some(is_direct);
content.reason = reason;
+2
View File
@@ -343,6 +343,7 @@ async fn knock_room_helper_local(
let mut content = RoomMemberEventContent::new(MembershipState::Knock);
content.displayname = services.users.displayname(sender_user).await.ok();
content.avatar_url = services.users.avatar_url(sender_user).await.ok();
content.blurhash = services.users.blurhash(sender_user).await.ok();
content.reason.clone_from(&reason.clone());
// Try normal knock first
@@ -526,6 +527,7 @@ async fn knock_room_helper_remote(
let mut knock_content = RoomMemberEventContent::new(MembershipState::Knock);
knock_content.displayname = services.users.displayname(sender_user).await.ok();
knock_content.avatar_url = services.users.avatar_url(sender_user).await.ok();
knock_content.blurhash = services.users.blurhash(sender_user).await.ok();
knock_content.reason = reason;
knock_event_stub.insert(
-3
View File
@@ -16,7 +16,6 @@
pub(super) mod membership;
pub(super) mod message;
pub(super) mod mutual_rooms;
pub(super) mod oauth;
pub(super) mod openid;
pub(super) mod presence;
pub(super) mod profile;
@@ -62,7 +61,6 @@
pub use membership::{leave_all_rooms, leave_room, remote_leave_room};
pub(super) use message::*;
pub(super) use mutual_rooms::*;
pub(super) use oauth::*;
pub(super) use openid::*;
pub(super) use presence::*;
pub(super) use profile::*;
@@ -75,7 +73,6 @@
pub(super) use room::*;
pub(super) use search::*;
pub(super) use send::*;
pub use session::handle_login;
pub(super) use session::*;
pub(super) use space::*;
pub(super) use state::*;
-56
View File
@@ -1,56 +0,0 @@
use axum::{
Json, Router,
extract::{Request, State},
middleware::{self, Next},
response::{IntoResponse, Response},
routing::method_routing::{get, post},
};
use const_str::concat;
use http::StatusCode;
use serde_json::json;
pub(crate) use server_metadata::*;
mod register_client;
mod server_metadata;
mod token;
const BASE_PATH: &str = concat!(conduwuit_core::ROUTE_PREFIX, "/oauth2/");
const AUTH_CODE_PATH: &str = "grant/authorization_code";
const JWKS_URI_PATH: &str = "client/keys.json";
const CLIENT_REGISTER_PATH: &str = "client/register";
const TOKEN_REVOKE_PATH: &str = "client/revoke";
const TOKEN_PATH: &str = "grant/token";
const ACCOUNT_MANAGEMENT_PATH: &str = concat!(conduwuit_core::ROUTE_PREFIX, "/account/deeplink");
pub(crate) fn router(state: crate::State) -> Router<crate::State> {
Router::new()
.nest(BASE_PATH, oauth_router())
.route(
"/.well-known/openid-configuration",
get(
// TODO(unspecced): used by old versions of the matrix-js-sdk
async |State(services): State<crate::State>| {
Json(authorization_server_metadata(&services).await)
},
),
)
.layer(middleware::from_fn_with_state(
state,
async |State(state): State<crate::State>, request: Request, next: Next| -> Response {
if state.config.oauth.compatibility_mode.oauth_available() {
next.run(request).await
} else {
(StatusCode::NOT_FOUND, "OAuth is unavailable on this server").into_response()
}
},
))
}
fn oauth_router() -> Router<crate::State> {
Router::new()
.route(concat!("/", CLIENT_REGISTER_PATH), post(register_client::register_client_route))
// TODO(unspecced): used by old versions of the matrix-js-sdk
.route(concat!("/", JWKS_URI_PATH), get(async || Json(json!({"keys": []}))))
.route(concat!("/", TOKEN_PATH), post(token::token_route))
.route(concat!("/", TOKEN_REVOKE_PATH), post(token::revoke_token_route))
}
-28
View File
@@ -1,28 +0,0 @@
use axum::{
Json,
extract::State,
response::{IntoResponse, Response},
};
use http::StatusCode;
use serde::Serialize;
use service::oauth::client_metadata::ClientMetadata;
#[derive(Serialize)]
struct RegisteredClient {
client_id: String,
#[serde(flatten)]
metadata: ClientMetadata,
}
pub(crate) async fn register_client_route(
State(services): State<crate::State>,
Json(metadata): Json<ClientMetadata>,
) -> Result<Response, Response> {
let client_id = services
.oauth
.register_client(&metadata)
.await
.map_err(|err| (StatusCode::BAD_REQUEST, err.to_owned()).into_response())?;
Ok(Json(RegisteredClient { client_id, metadata }).into_response())
}
-62
View File
@@ -1,62 +0,0 @@
use axum::extract::State;
use conduwuit::{Err, Result};
use ruma::{
api::client::discovery::get_authorization_server_metadata::{
self, v1::AccountManagementAction,
},
serde::Raw,
};
use serde_json::{Value, json};
use service::Services;
use crate::{
Ruma,
client::oauth::{
ACCOUNT_MANAGEMENT_PATH, AUTH_CODE_PATH, CLIENT_REGISTER_PATH, JWKS_URI_PATH, TOKEN_PATH,
TOKEN_REVOKE_PATH,
},
};
pub(crate) async fn get_authorization_server_metadata_route(
State(services): State<crate::State>,
_body: Ruma<get_authorization_server_metadata::v1::Request>,
) -> Result<get_authorization_server_metadata::v1::Response> {
if !services.config.oauth.compatibility_mode.oauth_available() {
return Err!(Request(Unrecognized("OAuth is unavailable on this server")));
}
let metadata = Raw::new(&authorization_server_metadata(&services).await).unwrap();
Ok(get_authorization_server_metadata::v1::Response::new(metadata.cast_unchecked()))
}
pub(crate) async fn authorization_server_metadata(services: &Services) -> Value {
let endpoint_base = services
.config
.get_client_domain()
.join(super::BASE_PATH)
.unwrap();
json!({
"account_management_uri": endpoint_base.join(ACCOUNT_MANAGEMENT_PATH).unwrap(),
"account_management_actions_supported": [
AccountManagementAction::AccountDeactivate,
AccountManagementAction::CrossSigningReset,
AccountManagementAction::DeviceDelete,
AccountManagementAction::DeviceView,
AccountManagementAction::DevicesList,
AccountManagementAction::Profile,
],
"authorization_endpoint": endpoint_base.join(AUTH_CODE_PATH).unwrap(),
"code_challenge_methods_supported": ["S256"],
"grant_types_supported": ["authorization_code", "refresh_token"],
"issuer": services.config.get_client_domain(),
"jwks_uri": endpoint_base.join(JWKS_URI_PATH).unwrap(),
"prompt_values_supported": ["create"],
"registration_endpoint": endpoint_base.join(CLIENT_REGISTER_PATH).unwrap(),
"response_modes_supported": ["query", "fragment"],
"response_types_supported": ["code"],
"revocation_endpoint": endpoint_base.join(TOKEN_REVOKE_PATH).unwrap(),
"token_endpoint": endpoint_base.join(TOKEN_PATH).unwrap(),
})
}
-23
View File
@@ -1,23 +0,0 @@
use axum::{Form, Json, extract::State, response::IntoResponse};
use http::StatusCode;
use service::oauth::grant::{RevokeTokenRequest, TokenRequest};
pub(crate) async fn token_route(
State(services): State<crate::State>,
Form(request): Form<TokenRequest>,
) -> impl IntoResponse {
match services.oauth.issue_token(request).await {
| Ok(response) => Ok(Json(response)),
| Err(err) => Err((StatusCode::BAD_REQUEST, err.message())),
}
}
pub(crate) async fn revoke_token_route(
State(services): State<crate::State>,
Form(request): Form<RevokeTokenRequest>,
) -> impl IntoResponse {
match services.oauth.revoke_token(request.token).await {
| Ok(()) => Ok(StatusCode::OK),
| Err(err) => Err((StatusCode::BAD_REQUEST, err.message())),
}
}
+15 -4
View File
@@ -23,7 +23,8 @@
/// # `GET /_matrix/client/v3/profile/{userId}`
///
/// Returns the user's profile information.
/// Returns the displayname, avatar_url, blurhash, and custom profile fields of
/// the user.
///
/// - If user is on another server and we do not have a local copy already,
/// fetch profile over federation.
@@ -321,9 +322,19 @@ async fn set_profile_field(
services.users.set_avatar_url(user_id, None);
},
| other =>
services
.users
.set_profile_key(user_id, other.field_name().as_str(), other.value()),
if other.field_name().as_str() == "blurhash" {
if let Some(Value::String(blurhash)) = other.value() {
services.users.set_blurhash(user_id, Some(blurhash));
} else {
services.users.set_blurhash(user_id, None);
}
} else {
services.users.set_profile_key(
user_id,
other.field_name().as_str(),
other.value(),
);
},
}
// If the user is local and changed their displayname or avatar_url, update it
+5 -1
View File
@@ -288,6 +288,7 @@ pub(crate) async fn create_room_route(
let mut join_event = RoomMemberEventContent::new(MembershipState::Join);
join_event.displayname = services.users.displayname(sender_user).await.ok();
join_event.avatar_url = services.users.avatar_url(sender_user).await.ok();
join_event.blurhash = services.users.blurhash(sender_user).await.ok();
join_event.is_direct = Some(body.is_direct);
debug_info!("Joining {sender_user} to room {room_id}");
@@ -536,7 +537,10 @@ pub(crate) async fn create_room_route(
if services.server.config.admin_room_notices {
services
.admin
.send_text(&format!("{sender_user} made {room_id} public to the room directory"))
.send_text(&format!(
"{sender_user} made {} public to the room directory",
&room_id
))
.await;
}
info!("{sender_user} made {0} public to the room directory", &room_id);
+1
View File
@@ -271,6 +271,7 @@ pub(crate) async fn upgrade_room_route(
&assign!(RoomMemberEventContent::new(MembershipState::Join), {
displayname: services.users.displayname(sender_user).await.ok(),
avatar_url: services.users.avatar_url(sender_user).await.ok(),
blurhash: services.users.blurhash(sender_user).await.ok(),
}),
),
sender_user,
+5 -16
View File
@@ -29,6 +29,7 @@
},
assign,
};
use service::uiaa::Identity;
use super::{DEVICE_ID_LENGTH, TOKEN_LENGTH};
use crate::Ruma;
@@ -43,12 +44,6 @@ pub(crate) async fn get_login_types_route(
ClientIp(client): ClientIp,
_body: Ruma<get_login_types::v3::Request>,
) -> Result<get_login_types::v3::Response> {
if !services.config.oauth.compatibility_mode.uiaa_available() {
return Err!(Request(Unrecognized(
"User-interactive authentication is not available on this server."
)));
}
Ok(get_login_types::v3::Response::new(vec![
get_login_types::v3::LoginType::Password(PasswordLoginType::default()),
get_login_types::v3::LoginType::ApplicationService(ApplicationServiceLoginType::default()),
@@ -58,7 +53,7 @@ pub(crate) async fn get_login_types_route(
]))
}
pub async fn handle_login(
pub(crate) async fn handle_login(
services: &Services,
identifier: Option<&UserIdentifier>,
password: &str,
@@ -124,15 +119,10 @@ pub(crate) async fn login_route(
ClientIp(client): ClientIp,
body: Ruma<login::v3::Request>,
) -> Result<login::v3::Response> {
if !services.config.oauth.compatibility_mode.uiaa_available() {
return Err!(Request(Unrecognized(
"User-interactive authentication is not available on this server."
)));
}
let emergency_mode_enabled = services.config.emergency_password.is_some();
// Validate login method
// TODO: Other login methods
let user_id = match &body.login_info {
#[allow(deprecated)]
| login::v3::LoginInfo::Password(login::v3::Password {
@@ -213,7 +203,7 @@ pub(crate) async fn login_route(
if device_exists {
services
.users
.set_token(&user_id, &device_id, &token, None)
.set_token(&user_id, &device_id, &token)
.await?;
} else {
services
@@ -222,7 +212,6 @@ pub(crate) async fn login_route(
&user_id,
&device_id,
&token,
None,
body.initial_device_display_name.clone(),
Some(client.to_string()),
)
@@ -270,7 +259,7 @@ pub(crate) async fn login_token_route(
// Prompt the user to confirm with their password using UIAA
let _ = services
.uiaa
.authenticate_password(&body.auth, sender_user, body.sender_device(), None)
.authenticate_password(&body.auth, Some(Identity::from_user_id(sender_user)))
.await?;
let login_token = utils::random_string(TOKEN_LENGTH);
-7
View File
@@ -69,12 +69,6 @@ pub(super) async fn load_joined_room(
and `join*` functions are used to perform steps in parallel which do not depend on each other.
*/
let insert_lock = services
.rooms
.timeline
.mutex_insert
.lock(room_id.as_str())
.await;
let (
account_data,
ephemeral,
@@ -92,7 +86,6 @@ pub(super) async fn load_joined_room(
)
.boxed()
.await?;
drop(insert_lock);
if !timeline.is_empty() || !state_events.is_empty() {
trace!(
+1
View File
@@ -69,6 +69,7 @@ pub(crate) async fn sync_events_v5_route(
ClientIp(client_ip): ClientIp,
body: Ruma<sync_events::v5::Request>,
) -> Result<sync_events::v5::Response> {
debug_assert!(DEFAULT_BUMP_TYPES.is_sorted(), "DEFAULT_BUMP_TYPES is not sorted");
let ref sender_user = body.sender_user().to_owned();
let ref sender_device = body.sender_device().to_owned();
+2 -2
View File
@@ -35,8 +35,8 @@ pub(crate) async fn get_supported_versions_route(
/// `/_matrix/federation/v1/version`
pub(crate) async fn conduwuit_server_version() -> Result<impl IntoResponse> {
Ok(Json(serde_json::json!({
"name": conduwuit::BRANDING,
"version": conduwuit::version(),
"name": conduwuit::version::name(),
"version": conduwuit::version::version(),
})))
}
-1
View File
@@ -1,6 +1,5 @@
#![type_length_limit = "16384"] //TODO: reduce me
#![allow(clippy::toplevel_ref_arg)]
#![recursion_limit = "256"]
extern crate conduwuit_core as conduwuit;
extern crate conduwuit_service as service;
+3 -5
View File
@@ -10,7 +10,7 @@
response::{IntoResponse, Redirect},
routing::{any, get, post},
};
use conduwuit::err;
use conduwuit::{Server, err};
pub(super) use conduwuit_service::state::State;
use http::{Uri, uri};
@@ -18,8 +18,8 @@
pub(super) use self::{args::Args as Ruma, response::RumaResponse};
use crate::{admin, client, server};
pub fn build(router: Router<State>, state: State) -> Router<State> {
let config = &state.server.config;
pub fn build(router: Router<State>, server: &Server) -> Router<State> {
let config = &server.config;
let mut router = router
.ruma_route(&client::appservice_ping)
.ruma_route(&client::get_supported_versions_route)
@@ -185,8 +185,6 @@ pub fn build(router: Router<State>, state: State) -> Router<State> {
.ruma_route(&client::well_known_client)
.ruma_route(&client::get_rtc_transports)
.ruma_route(&client::room_initial_sync_route)
.ruma_route(&client::get_authorization_server_metadata_route)
.merge(client::oauth::router(state))
.route("/_conduwuit/server_version", get(client::conduwuit_server_version))
.route("/_continuwuity/server_version", get(client::conduwuit_server_version))
.ruma_route(&admin::rooms::ban::ban_room)
+6 -23
View File
@@ -1,7 +1,6 @@
use std::any::{Any, TypeId};
use conduwuit::{Err, Error, Result, err};
use http::StatusCode;
use conduwuit::{Err, Result, err};
use ruma::{
OwnedDeviceId, OwnedServerName, OwnedUserId, UserId,
api::{
@@ -10,15 +9,12 @@
AccessToken, AccessTokenOptional, AppserviceToken, AppserviceTokenOptional,
AuthScheme, NoAccessToken, NoAuthentication,
},
error::{ErrorKind, UnknownTokenErrorData},
federation::authentication::ServerSignatures,
},
assign,
};
use service::{
Services,
server_keys::{PubKeyMap, PubKeys},
users::AccessTokenStatus,
};
use crate::{router::args::AuthQueryParams, service::appservice::RegistrationInfo};
@@ -107,21 +103,12 @@ async fn verify<B: AsRef<[u8]> + Sync>(
query: AuthQueryParams,
route: TypeId,
) -> Result<Auth> {
// Check for appservice tokens first
let (sender_user, sender_device, appservice_info) = {
if let Some((sender_user, sender_device, status)) =
if let Ok((sender_user, sender_device)) =
services.users.find_from_token(&output).await
{
// If the token is expired we return a soft logout
if matches!(status, AccessTokenStatus::Expired) {
return Err(Error::Request(
ErrorKind::UnknownToken(
assign!(UnknownTokenErrorData::new(), { soft_logout: true }),
),
"This token has expired".into(),
StatusCode::UNAUTHORIZED,
));
}
// Locked users can only use /logout and /logout/all
if services
.users
@@ -133,7 +120,7 @@ async fn verify<B: AsRef<[u8]> + Sync>(
|| route
== TypeId::of::<ruma::api::client::session::logout_all::v3::Request>(
)) {
return Err!(Request(UserLocked("Your account is locked.")));
return Err!(Request(Unauthorized("Your account is locked.")));
}
}
@@ -181,11 +168,7 @@ async fn verify<B: AsRef<[u8]> + Sync>(
(Some(sender_user), sender_device, Some(appservice_info))
} else {
return Err(Error::Request(
ErrorKind::UnknownToken(UnknownTokenErrorData::new()),
"Invalid token".into(),
StatusCode::UNAUTHORIZED,
));
return Err!(Request(Unauthorized("Invalid access token.")));
}
};
+2 -2
View File
@@ -11,8 +11,8 @@ pub(crate) async fn get_server_version_route(
) -> Result<get_server_version::v1::Response> {
Ok(assign!(get_server_version::v1::Response::new(), {
server: Some(assign!(get_server_version::v1::Server::new(), {
name: Some(conduwuit::BRANDING.into()),
version: Some(conduwuit::version().into()),
name: Some(conduwuit::version::name().into()),
version: Some(conduwuit::version::version().into()),
})),
}))
}
+73 -114
View File
@@ -4,7 +4,7 @@
pub mod proxy;
use std::{
collections::{BTreeMap, BTreeSet},
collections::{BTreeMap, BTreeSet, HashMap},
net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr},
path::PathBuf,
};
@@ -656,25 +656,19 @@ pub struct Config {
/// even if `recaptcha_site_key` is set.
pub recaptcha_private_site_key: Option<String>,
/// display: nested
#[serde(default)]
pub registration_terms: RegistrationTerms,
/// display: nested
#[serde(default)]
pub oauth: OauthConfig,
/// Controls whether users are allowed to deactivate their own accounts
/// through the account management panel or their Matrix clients. Server
/// admins can always deactivate users using the relevant admin commands.
/// Policy documents, such as terms and conditions or a privacy policy,
/// which users must agree to when registering an account.
///
/// Note that, in some jurisdictions, you may be legally required to honor
/// users who request to deactivate their accounts if you set this option
/// to `false`.
/// Example:
/// ```ignore
/// [global.registration_terms.privacy_policy]
/// en = { name = "Privacy Policy", url = "https://homeserver.example/en/privacy_policy.html" }
/// es = { name = "Política de Privacidad", url = "https://homeserver.example/es/privacy_policy.html" }
/// ```
///
/// default: true
#[serde(default = "true_fn")]
pub allow_deactivation: bool,
/// default: {}
#[serde(default)]
pub registration_terms: HashMap<String, HashMap<String, TermsDocument>>,
/// Controls whether encrypted rooms and events are allowed.
#[serde(default = "true_fn")]
@@ -2069,10 +2063,12 @@ pub struct Config {
pub stream_amplification: usize,
/// Number of sender task workers; determines sender parallelism. Default is
/// core count. Override by setting a different value.
/// '0' which means the value is determined internally, likely matching the
/// number of tokio worker-threads or number of cores, etc. Override by
/// setting a non-zero value.
///
/// default: core count
#[serde(default = "default_sender_workers")]
/// default: 0
#[serde(default)]
pub sender_workers: usize,
/// Enables listener sockets; can be set to false to disable listening. This
@@ -2121,6 +2117,10 @@ pub struct Config {
#[serde(default)]
pub antispam: Option<Antispam>,
/// display: nested
#[serde(default)]
pub blurhashing: BlurhashConfig,
/// Configuration for MatrixRTC (MSC4143) transport discovery.
/// display: nested
#[serde(default)]
@@ -2196,6 +2196,31 @@ pub struct WellKnownConfig {
pub support_pgp_key: Option<String>,
}
#[derive(Clone, Copy, Debug, Deserialize, Default)]
#[allow(rustdoc::broken_intra_doc_links, rustdoc::bare_urls)]
#[config_example_generator(filename = "conduwuit-example.toml", section = "global.blurhashing")]
pub struct BlurhashConfig {
/// blurhashing x component, 4 is recommended by https://blurha.sh/
///
/// default: 4
#[serde(default = "default_blurhash_x_component")]
pub components_x: u32,
/// blurhashing y component, 3 is recommended by https://blurha.sh/
///
/// default: 3
#[serde(default = "default_blurhash_y_component")]
pub components_y: u32,
/// Max raw size that the server will blurhash, this is the size of the
/// image after converting it to raw data, it should be higher than the
/// upload limit but not too high. The higher it is the higher the
/// potential load will be for clients requesting blurhashes. The default
/// is 33.55MB. Setting it to 0 disables blurhashing.
///
/// default: 33554432
#[serde(default = "default_blurhash_max_raw_size")]
pub blurhash_max_raw_size: u64,
}
#[derive(Clone, Debug, Deserialize, Default)]
#[config_example_generator(filename = "conduwuit-example.toml", section = "global.matrix_rtc")]
pub struct MatrixRtcConfig {
@@ -2340,30 +2365,6 @@ pub struct SmtpConfig {
pub require_email_for_token_registration: bool,
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[config_example_generator(
filename = "conduwuit-example.toml",
section = "global.registration-terms",
optional = "true"
)]
pub struct RegistrationTerms {
/// The language code to provide to clients along with the policy documents.
///
/// default: "en"
pub language: String,
/// Policy documents, such as terms and conditions or a privacy policy,
/// which users must agree to when registering an account.
///
/// Example:
/// ```ignore
/// [global.registration_terms.documents]
/// privacy_policy = { name = "Privacy Policy", url = "https://homeserver.example/en/privacy_policy.html" }
/// ```
///
/// default: {}
pub documents: BTreeMap<String, TermsDocument>,
}
/// A policy document for use with a m.login.terms stage.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct TermsDocument {
@@ -2371,43 +2372,6 @@ pub struct TermsDocument {
pub url: String,
}
#[derive(Clone, Debug, Default, Deserialize)]
#[config_example_generator(
filename = "conduwuit-example.toml",
section = "global.oauth",
optional = "true"
)]
pub struct OauthConfig {
/// The compatibility mode to use for OAuth.
///
/// - "disabled": OAuth will be unavailable. Users will only be able to log
/// in using legacy authentication.
/// - "hybrid": OAuth and legacy authentication will both be available. Some
/// clients may only use one or the other.
/// - "exclusive": Only OAuth will be available. Clients which require
/// legacy authentication will be unable to log in.
///
/// default: "hybrid"
pub compatibility_mode: OAuthMode,
}
#[derive(Clone, Debug, Default, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum OAuthMode {
Disabled,
#[default]
Hybrid,
Exclusive,
}
impl OAuthMode {
#[must_use]
pub fn uiaa_available(&self) -> bool { matches!(self, Self::Disabled | Self::Hybrid) }
#[must_use]
pub fn oauth_available(&self) -> bool { matches!(self, Self::Hybrid | Self::Exclusive) }
}
const DEPRECATED_KEYS: &[&str] = &[
"cache_capacity",
"conduit_cache_capacity_modifier",
@@ -2505,47 +2469,45 @@ fn default_database_backups_to_keep() -> i16 { 1 }
fn default_db_write_buffer_capacity_mb() -> f64 { 48.0 + parallelism_scaled_f64(4.0) }
fn default_db_cache_capacity_mb() -> f64 { 512.0 + parallelism_scaled_f64(512.0) }
fn default_db_cache_capacity_mb() -> f64 { 128.0 + parallelism_scaled_f64(64.0) }
fn default_pdu_cache_capacity() -> u32 { parallelism_scaled_u32(50_000).saturating_add(100_000) }
fn default_pdu_cache_capacity() -> u32 { parallelism_scaled_u32(10_000).saturating_add(100_000) }
fn default_cache_capacity_modifier() -> f64 { 1.0 }
fn default_auth_chain_cache_capacity() -> u32 {
parallelism_scaled_u32(50_000).saturating_add(100_000)
parallelism_scaled_u32(10_000).saturating_add(100_000)
}
fn default_shorteventid_cache_capacity() -> u32 {
parallelism_scaled_u32(100_000).saturating_add(100_000)
parallelism_scaled_u32(50_000).saturating_add(100_000)
}
fn default_eventidshort_cache_capacity() -> u32 {
parallelism_scaled_u32(50_000).saturating_add(100_000)
parallelism_scaled_u32(25_000).saturating_add(100_000)
}
fn default_eventid_pdu_cache_capacity() -> u32 {
parallelism_scaled_u32(50_000).saturating_add(100_000)
parallelism_scaled_u32(25_000).saturating_add(100_000)
}
fn default_shortstatekey_cache_capacity() -> u32 {
parallelism_scaled_u32(50_000).saturating_add(100_000)
parallelism_scaled_u32(10_000).saturating_add(100_000)
}
fn default_statekeyshort_cache_capacity() -> u32 {
parallelism_scaled_u32(50_000).saturating_add(100_000)
parallelism_scaled_u32(10_000).saturating_add(100_000)
}
fn default_servernameevent_data_cache_capacity() -> u32 {
parallelism_scaled_u32(100_000).saturating_add(100_000)
parallelism_scaled_u32(100_000).saturating_add(500_000)
}
fn default_stateinfo_cache_capacity() -> u32 { parallelism_scaled_u32(500).clamp(100, 12000) }
fn default_stateinfo_cache_capacity() -> u32 { parallelism_scaled_u32(100) }
fn default_roomid_spacehierarchy_cache_capacity() -> u32 {
parallelism_scaled_u32(500).clamp(100, 12000)
}
fn default_roomid_spacehierarchy_cache_capacity() -> u32 { parallelism_scaled_u32(1000) }
fn default_dns_cache_entries() -> u32 { 327_680 }
fn default_dns_cache_entries() -> u32 { 32768 }
fn default_dns_min_ttl() -> u64 { 60 * 180 }
@@ -2753,26 +2715,15 @@ fn default_admin_log_capture() -> String {
fn default_admin_room_tag() -> String { "m.server_notice".to_owned() }
#[must_use]
#[allow(clippy::as_conversions, clippy::cast_precision_loss)]
pub fn parallelism_scaled_f64(val: f64) -> f64 { val * (sys::available_parallelism() as f64) }
fn parallelism_scaled_f64(val: f64) -> f64 { val * (sys::available_parallelism() as f64) }
#[must_use]
#[allow(clippy::as_conversions, clippy::cast_possible_truncation)]
pub fn parallelism_scaled_u32(val: u32) -> u32 {
val.saturating_mul(sys::available_parallelism() as u32)
fn parallelism_scaled_u32(val: u32) -> u32 {
let val = val.try_into().expect("failed to cast u32 to usize");
parallelism_scaled(val).try_into().unwrap_or(u32::MAX)
}
#[must_use]
#[allow(clippy::as_conversions, clippy::cast_possible_truncation, clippy::cast_possible_wrap)]
pub fn parallelism_scaled_i32(val: i32) -> i32 {
val.saturating_mul(sys::available_parallelism() as i32)
}
#[must_use]
pub fn parallelism_scaled(val: usize) -> usize {
val.saturating_mul(sys::available_parallelism())
}
fn parallelism_scaled(val: usize) -> usize { val.saturating_mul(sys::available_parallelism()) }
fn default_trusted_server_batch_size() -> usize { 256 }
@@ -2792,8 +2743,6 @@ fn default_stream_width_scale() -> f32 { 1.0 }
fn default_stream_amplification() -> usize { 1024 }
fn default_sender_workers() -> usize { parallelism_scaled(1) }
fn default_client_receive_timeout() -> u64 { 75 }
fn default_client_request_timeout() -> u64 { 180 }
@@ -2803,3 +2752,13 @@ fn default_client_response_timeout() -> u64 { 120 }
fn default_client_shutdown_timeout() -> u64 { 15 }
fn default_sender_shutdown_timeout() -> u64 { 5 }
// blurhashing defaults recommended by https://blurha.sh/
// 2^25
pub(super) fn default_blurhash_max_raw_size() -> u64 { 33_554_432 }
pub(super) fn default_blurhash_x_component() -> u32 { 4 }
pub(super) fn default_blurhash_y_component() -> u32 { 3 }
// end recommended & blurhashing defaults
+1 -2
View File
@@ -158,7 +158,6 @@ pub fn message(&self) -> String {
match self {
| Self::Federation(origin, error) => format!("Answer from {origin}: {error}"),
| Self::Ruma(error) => response::ruma_error_message(error),
| Self::Request(_, message, _) => message.clone().into_owned(),
| _ => format!("{self}"),
}
}
@@ -261,7 +260,7 @@ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{real_error}")
}
} else {
write!(f, "Request error: {}", self.0)
write!(f, "Request error: {}", &self.0)
}
}
}
+4 -1
View File
@@ -73,8 +73,11 @@ pub(super) fn bad_request_code(kind: &ErrorKind) -> StatusCode {
// 413
| TooLarge => StatusCode::PAYLOAD_TOO_LARGE,
// 405
| Unrecognized => StatusCode::METHOD_NOT_ALLOWED,
// 404
| Unrecognized | NotFound => StatusCode::NOT_FOUND,
| NotFound => StatusCode::NOT_FOUND,
// 403
| GuestAccessForbidden
+9 -6
View File
@@ -7,16 +7,19 @@
use std::sync::OnceLock;
pub const BRANDING: &str = "continuwuity";
pub const ROUTE_PREFIX: &str = "/_continuwuity";
pub const WEBSITE: &str = "https://continuwuity.org";
pub const SEMANTIC: &str = env!("CARGO_PKG_VERSION");
static BRANDING: &str = "continuwuity";
static WEBSITE: &str = "https://continuwuity.org";
static SEMANTIC: &str = env!("CARGO_PKG_VERSION");
static VERSION: OnceLock<String> = OnceLock::new();
static VERSION_UA: OnceLock<String> = OnceLock::new();
static USER_AGENT: OnceLock<String> = OnceLock::new();
static USER_AGENT_MEDIA: OnceLock<String> = OnceLock::new();
#[inline]
#[must_use]
pub fn name() -> &'static str { BRANDING }
#[inline]
pub fn version() -> &'static str { VERSION.get_or_init(init_version) }
@@ -29,10 +32,10 @@ pub fn user_agent() -> &'static str { USER_AGENT.get_or_init(init_user_agent) }
#[inline]
pub fn user_agent_media() -> &'static str { USER_AGENT_MEDIA.get_or_init(init_user_agent_media) }
fn init_user_agent() -> String { format!("{BRANDING}/{} (bot; +{WEBSITE})", version_ua()) }
fn init_user_agent() -> String { format!("{}/{} (bot; +{WEBSITE})", name(), version_ua()) }
fn init_user_agent_media() -> String {
format!("{BRANDING}/{} (embedbot; facebookexternalhit/1.1; +{WEBSITE})", version_ua())
format!("{}/{} (embedbot; facebookexternalhit/1.1; +{WEBSITE})", name(), version_ua())
}
fn init_version_ua() -> String {
+4 -1
View File
@@ -34,7 +34,10 @@ macro_rules! mod_dtor {
pub use conduwuit_build_metadata as build_metadata;
pub use config::Config;
pub use error::Error;
pub use info::version::*;
pub use info::{
version,
version::{name, version},
};
pub use matrix::{Event, EventTypeExt, Pdu, PduCount, PduEvent, PduId, pdu, state_res};
pub use parking_lot::{Mutex as SyncMutex, RwLock as SyncRwLock};
pub use server::Server;
-3
View File
@@ -5,7 +5,6 @@
/// Sha256 hash (input gather joined by 0xFF bytes)
#[must_use]
#[tracing::instrument(skip(inputs), level = "trace")]
#[allow(clippy::unnecessary_fallible_conversions)]
pub fn delimited<'a, T, I>(mut inputs: I) -> DigestOut
where
I: Iterator<Item = T> + 'a,
@@ -26,7 +25,6 @@ pub fn delimited<'a, T, I>(mut inputs: I) -> DigestOut
/// Sha256 hash (input gather)
#[must_use]
#[tracing::instrument(skip(inputs), level = "trace")]
#[allow(clippy::unnecessary_fallible_conversions)]
pub fn concat<'a, T, I>(inputs: I) -> DigestOut
where
I: Iterator<Item = T> + 'a,
@@ -45,7 +43,6 @@ pub fn concat<'a, T, I>(inputs: I) -> DigestOut
#[inline]
#[must_use]
#[tracing::instrument(skip(input), level = "trace")]
#[allow(clippy::unnecessary_fallible_conversions)]
pub fn hash<T>(input: T) -> DigestOut
where
T: AsRef<[u8]>,
+10 -16
View File
@@ -61,23 +61,17 @@ pub fn format(ts: SystemTime, str: &str) -> String {
pub fn pretty(d: Duration) -> String {
use Unit::*;
let fmt = |w, u| {
if w == 1 {
format!("{w} {u}")
} else {
format!("{w} {u}s")
}
};
let gen64 = |w, u| fmt(w, u);
let gen128 = |w, u| gen64(u64::try_from(w).expect("u128 to u64"), u);
let fmt = |w, f, u| format!("{w}.{f} {u}");
let gen64 = |w, f, u| fmt(w, (f * 100.0) as u32, u);
let gen128 = |w, f, u| gen64(u64::try_from(w).expect("u128 to u64"), f, u);
match whole_and_frac(d) {
| (Days(whole), _) => gen64(whole, "day"),
| (Hours(whole), _) => gen64(whole, "hour"),
| (Mins(whole), _) => gen64(whole, "minute"),
| (Secs(whole), _) => gen64(whole, "second"),
| (Millis(whole), _) => gen128(whole, "millisecond"),
| (Micros(whole), _) => gen128(whole, "microsecond"),
| (Nanos(whole), _) => gen128(whole, "nanosecond"),
| (Days(whole), frac) => gen64(whole, frac, "days"),
| (Hours(whole), frac) => gen64(whole, frac, "hours"),
| (Mins(whole), frac) => gen64(whole, frac, "minutes"),
| (Secs(whole), frac) => gen64(whole, frac, "seconds"),
| (Millis(whole), frac) => gen128(whole, frac, "milliseconds"),
| (Micros(whole), frac) => gen128(whole, frac, "microseconds"),
| (Nanos(whole), frac) => gen128(whole, frac, "nanoseconds"),
}
}
+1 -1
View File
@@ -29,7 +29,7 @@ fn descriptor_cf_options(
set_table_options(&mut opts, &desc, cache)?;
opts.set_min_write_buffer_number(1);
opts.set_max_write_buffer_number(3);
opts.set_max_write_buffer_number(2);
opts.set_write_buffer_size(desc.write_size);
opts.set_target_file_size_base(desc.file_size);
+1 -26
View File
@@ -49,10 +49,6 @@ pub(super) fn open_list(db: &Arc<Engine>, maps: &[Descriptor]) -> Result<Maps> {
name: "bannedroomids",
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "clientid_clientmetadata",
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "disabledroomids",
..descriptor::RANDOM_SMALL
@@ -161,10 +157,6 @@ pub(super) fn open_list(db: &Arc<Engine>, maps: &[Descriptor]) -> Result<Maps> {
name: "referencedevents",
..descriptor::RANDOM
},
Descriptor {
name: "refreshtoken_refreshtokeninfo",
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "registrationtoken_info",
..descriptor::RANDOM_SMALL
@@ -319,11 +311,6 @@ pub(super) fn open_list(db: &Arc<Engine>, maps: &[Descriptor]) -> Result<Maps> {
key_size_hint: Some(48),
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "rejectedeventids",
key_size_hint: Some(48),
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "statehash_shortstatehash",
val_size_hint: Some(8),
@@ -379,14 +366,6 @@ pub(super) fn open_list(db: &Arc<Engine>, maps: &[Descriptor]) -> Result<Maps> {
name: "userdevicetxnid_response",
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "userdeviceid_oauthsessioninfo",
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "userdeviceid_tokenexpires",
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "userfilterid_filter",
..descriptor::RANDOM_SMALL
@@ -397,7 +376,7 @@ pub(super) fn open_list(db: &Arc<Engine>, maps: &[Descriptor]) -> Result<Maps> {
},
Descriptor {
name: "userid_blurhash",
..descriptor::DROPPED
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "userid_dehydrateddevice",
@@ -491,8 +470,4 @@ pub(super) fn open_list(db: &Arc<Engine>, maps: &[Descriptor]) -> Result<Maps> {
name: "userroomid_invitesender",
..descriptor::RANDOM_SMALL
},
Descriptor {
name: "websessionid_session",
..descriptor::RANDOM_SMALL
},
];
+4
View File
@@ -47,6 +47,7 @@ default = [
"bindgen-runtime", # replace with bindgen-static on alpine
]
standard = [
"blurhashing",
"brotli_compression",
"element_hacks",
"gzip_compression",
@@ -70,6 +71,9 @@ full = [
"tokio_console",
]
blurhashing = [
"conduwuit-service/blurhashing",
]
brotli_compression = [
"conduwuit-api/brotli_compression",
"conduwuit-core/brotli_compression",
+1 -1
View File
@@ -15,7 +15,7 @@
#[clap(
about,
long_about = None,
name = conduwuit_core::BRANDING,
name = conduwuit_core::name(),
version = conduwuit_core::version(),
)]
pub struct Args {
+1 -1
View File
@@ -110,7 +110,7 @@ pub(crate) fn init(
.with_batch_exporter(exporter)
.build();
let tracer = provider.tracer(conduwuit_core::BRANDING);
let tracer = provider.tracer(conduwuit_core::name());
let telemetry = tracing_opentelemetry::layer().with_tracer(tracer);
+1 -1
View File
@@ -47,7 +47,7 @@ fn options(config: &Config) -> ClientOptions {
traces_sample_rate: config.sentry_traces_sample_rate,
debug: cfg!(debug_assertions),
release: release_name(),
user_agent: conduwuit_core::user_agent().into(),
user_agent: conduwuit_core::version::user_agent().into(),
attach_stacktrace: config.sentry_attach_stacktrace,
before_send: Some(Arc::new(before_send)),
before_breadcrumb: Some(Arc::new(before_breadcrumb)),
+5 -7
View File
@@ -8,7 +8,7 @@
extract::State,
response::{IntoResponse, Response},
};
use conduwuit::{Result, debug_warn, err, error, info, trace};
use conduwuit::{Result, debug, debug_error, debug_warn, err, error, trace};
use conduwuit_service::Services;
use futures::FutureExt;
use http::{Method, StatusCode, Uri};
@@ -102,19 +102,17 @@ fn handle_result(method: &Method, uri: &Uri, result: Response) -> Result<Respons
let reason = status.canonical_reason().unwrap_or("Unknown Reason");
if status.is_server_error() {
info!(%method, %uri, "{code} {reason}");
error!(%method, %uri, "{code} {reason}");
} else if status.is_client_error() {
info!(%method, %uri, "{code} {reason}");
debug_error!(%method, %uri, "{code} {reason}");
} else if status.is_redirection() {
trace!(%method, %uri, "{code} {reason}");
debug!(%method, %uri, "{code} {reason}");
} else {
trace!(%method, %uri, "{code} {reason}");
}
if status == StatusCode::METHOD_NOT_ALLOWED {
return Ok(
err!(Request(Unrecognized("Method not allowed"), METHOD_NOT_ALLOWED)).into_response()
);
return Ok(err!(Request(Unrecognized("Method Not Allowed"))).into_response());
}
Ok(result)
+2 -2
View File
@@ -9,8 +9,8 @@
pub(crate) fn build(services: &Arc<Services>) -> (Router, Guard) {
let router = Router::<state::State>::new();
let (state, guard) = state::create(services.clone());
let router = conduwuit_api::router::build(router, state)
.merge(conduwuit_web::build(services))
let router = conduwuit_api::router::build(router, &services.server)
.merge(conduwuit_web::build())
.fallback(not_found)
.with_state(state);
+6 -1
View File
@@ -16,6 +16,10 @@ crate-type = [
]
[features]
blurhashing = [
"dep:image",
"dep:blurhash",
]
brotli_compression = [
"conduwuit-core/brotli_compression",
"reqwest/brotli",
@@ -115,11 +119,12 @@ tracing.workspace = true
url.workspace = true
webpage.workspace = true
webpage.optional = true
blurhash.workspace = true
blurhash.optional = true
recaptcha-verify = { version = "0.2.0", default-features = false }
reqwest_recaptcha = { package = "reqwest", version = "0.12.28", default-features = false, features = ["rustls-tls-native-roots-no-provider"] } # As long as recaptcha-verify's reqwest is outdated
yansi.workspace = true
lettre.workspace = true
serde_urlencoded.workspace = true
[target.'cfg(all(unix, target_os = "linux"))'.dependencies]
sd-notify.workspace = true
+2 -2
View File
@@ -67,7 +67,7 @@ async fn worker(self: Arc<Self>) -> Result {
for (id, registration) in appservices {
// During startup, resolve any token collisions in favour of appservices
// by logging out conflicting user devices
if let Some((user_id, device_id, _)) = self
if let Ok((user_id, device_id)) = self
.services
.users
.find_from_token(&registration.as_token)
@@ -158,7 +158,7 @@ pub async fn register_appservice(
.users
.find_from_token(&registration.as_token)
.await
.is_some()
.is_ok()
{
return Err(err!(Request(InvalidParam(
"Cannot register appservice: The provided token is already in use by a user \
+2 -2
View File
@@ -39,7 +39,7 @@ fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
let url_preview_user_agent = config
.url_preview_user_agent
.clone()
.unwrap_or_else(|| conduwuit::user_agent_media().to_owned());
.unwrap_or_else(|| conduwuit::version::user_agent_media().to_owned());
Ok(Arc::new(Self {
default: base(config)?
@@ -149,7 +149,7 @@ fn base(config: &Config) -> Result<reqwest::ClientBuilder> {
.timeout(Duration::from_secs(config.request_total_timeout))
.pool_idle_timeout(Duration::from_secs(config.request_idle_timeout))
.pool_max_idle_per_host(config.request_idle_per_host.into())
.user_agent(conduwuit::user_agent())
.user_agent(conduwuit::version::user_agent())
.redirect(redirect::Policy::limited(6))
.danger_accept_invalid_certs(config.allow_invalid_tls_certificates_yes_i_know_what_the_fuck_i_am_doing_with_this_and_i_know_this_is_insecure)
.connection_verbose(cfg!(debug_assertions));
+7 -12
View File
@@ -6,7 +6,7 @@
use askama::Template;
use async_trait::async_trait;
use conduwuit::{Result, info, utils::ReadyExt};
use futures::StreamExt;
use futures::{FutureExt, StreamExt};
use ruma::{UserId, events::room::message::RoomMessageEventContent};
use crate::{
@@ -120,7 +120,7 @@ fn disable_first_run(&self) -> bool {
///
/// Returns Ok(true) if the specified user was the first user, and Ok(false)
/// if they were not.
pub async fn empower_first_user(&self, user: &UserId) -> bool {
pub async fn empower_first_user(&self, user: &UserId) -> Result<bool> {
#[derive(Template)]
#[template(path = "welcome.md")]
struct WelcomeMessage<'a> {
@@ -130,14 +130,10 @@ struct WelcomeMessage<'a> {
// If first run mode isn't active, do nothing.
if !self.disable_first_run() {
return false;
return Ok(false);
}
self.services
.admin
.make_user_admin(user)
.await
.expect("should have been able to empower the first user");
self.services.admin.make_user_admin(user).boxed().await?;
// Send the welcome message
let welcome_message = WelcomeMessage {
@@ -150,12 +146,11 @@ struct WelcomeMessage<'a> {
self.services
.admin
.send_loud_message(RoomMessageEventContent::text_markdown(welcome_message))
.await
.expect("should have been able to send welcome message");
.await?;
info!("{user} has been invited to the admin room as the first user.");
true
Ok(true)
}
/// Get the single-use registration token which may be used to create the
@@ -186,7 +181,7 @@ pub fn print_first_run_banner(&self) {
eprintln!(
"Welcome to {} {}!",
"Continuwuity".bold().bright_magenta(),
conduwuit::version().bold()
conduwuit::version::version().bold()
);
eprintln!();
eprintln!(
+1 -1
View File
@@ -44,7 +44,7 @@ fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
db,
server: args.server.clone(),
bad_event_ratelimiter: Arc::new(SyncRwLock::new(HashMap::new())),
admin_alias: OwnedRoomAliasId::try_from(format!("#admins:{}", args.server.name))
admin_alias: OwnedRoomAliasId::try_from(format!("#admins:{}", &args.server.name))
.expect("#admins:server_name is valid alias name"),
server_user: UserId::parse_with_server_name(
String::from("conduit"),
+1 -1
View File
@@ -37,7 +37,7 @@ pub struct PasswordReset<'a> {
}
impl MessageTemplate for PasswordReset<'_> {
fn subject(&self) -> String { format!("Password reset request for {}", self.user_id) }
fn subject(&self) -> String { format!("Password reset request for {}", &self.user_id) }
}
#[derive(Template)]
+2 -4
View File
@@ -92,8 +92,8 @@ pub async fn send<Template: MessageTemplate>(
let message = MessageBuilder::new()
.from(self.sender.clone())
.to(recipient.clone())
.subject(subject.clone())
.to(recipient)
.subject(subject)
.date_now()
.header(ContentType::TEXT_PLAIN)
.body(body)
@@ -104,8 +104,6 @@ pub async fn send<Template: MessageTemplate>(
.await
.map_err(|err: TransportError| err!("Failed to send message: {err}"))?;
info!(recipient = recipient.to_string(), ?subject, "Email sent");
Ok(())
}
}
+179
View File
@@ -0,0 +1,179 @@
#[cfg(feature = "blurhashing")]
use conduwuit::config::BlurhashConfig as CoreBlurhashConfig;
use conduwuit::{Result, implement};
use super::Service;
/// Stub used when the `blurhashing` feature is not compiled in: logs a
/// debug warning and always returns `Ok(None)` (no blurhash stored).
#[implement(Service)]
#[cfg(not(feature = "blurhashing"))]
pub fn create_blurhash(
    &self,
    _file: &[u8],
    _content_type: Option<&str>,
    _file_name: Option<&str>,
) -> Result<Option<String>> {
    conduwuit::debug_warn!("blurhashing on upload support was not compiled");
    Ok(None)
}
/// Compute a blurhash for an uploaded file.
///
/// Returns `Ok(None)` when blurhashing is disabled via config
/// (`size_limit == 0`); otherwise returns the encoded blurhash string.
/// Blurhashing failures are surfaced as debug-level errors.
#[implement(Service)]
#[cfg(feature = "blurhashing")]
pub fn create_blurhash(
    &self,
    file: &[u8],
    content_type: Option<&str>,
    file_name: Option<&str>,
) -> Result<Option<String>> {
    let config = BlurhashConfig::from(self.services.server.config.blurhashing);
    // since 0 means disabled blurhashing, skipped blurhashing
    if config.size_limit == 0 {
        return Ok(None);
    }
    get_blurhash_from_request(file, content_type, file_name, config)
        .map_err(|e| conduwuit::err!(debug_error!("blurhashing error: {e}")))
        .map(Some)
}
/// Returns the blurhash or a blurhash error which implements Display.
///
/// Pipeline: detect the image format, build a decoder for it, reject the
/// image if its decoded size exceeds the configured limit (checked *before*
/// unpacking the pixel data), then decode and blurhash-encode it.
#[tracing::instrument(
    name = "blurhash",
    level = "debug",
    skip(data),
    fields(
        bytes = data.len(),
    ),
)]
#[cfg(feature = "blurhashing")]
fn get_blurhash_from_request(
    data: &[u8],
    mime: Option<&str>,
    filename: Option<&str>,
    config: BlurhashConfig,
) -> Result<String, BlurhashingError> {
    // Get format image is supposed to be in
    let format = get_format_from_data_mime_and_filename(data, mime, filename)?;
    // Get the image reader for said image format
    let decoder = get_image_decoder_with_format_and_data(format, data)?;
    // Check image size makes sense before unpacking whole image
    if is_image_above_size_limit(&decoder, config) {
        return Err(BlurhashingError::ImageTooLarge);
    }
    let image = image::DynamicImage::from_decoder(decoder)?;
    blurhash_an_image(&image, config)
}
/// Resolves the `image::ImageFormat` for an upload.
///
/// Resolution order: the MIME type (if it names a known image format), then
/// the filename extension, then content sniffing via `image::guess_format`.
/// Assumes the mime and filename extension won't be for a different file
/// format than the actual file contents.
#[cfg(feature = "blurhashing")]
fn get_format_from_data_mime_and_filename(
    data: &[u8],
    mime: Option<&str>,
    filename: Option<&str>,
) -> Result<image::ImageFormat, BlurhashingError> {
    // Bare extension of the filename, e.g. "png" from "cat.png".
    let extension = filename
        .map(std::path::Path::new)
        .and_then(std::path::Path::extension)
        .map(std::ffi::OsStr::to_string_lossy);

    // The previous code fed the bare extension into `from_mime_type`, which
    // expects a full MIME string ("image/png") and therefore never matched;
    // the extension fallback must go through `from_extension` instead.
    mime.and_then(image::ImageFormat::from_mime_type)
        .or_else(|| {
            extension
                .as_deref()
                .and_then(image::ImageFormat::from_extension)
        })
        .map_or_else(|| image::guess_format(data).map_err(Into::into), Ok)
}
/// Builds an image decoder over `data`, forcing the already-detected
/// `image_format` rather than letting the reader re-guess it.
#[cfg(feature = "blurhashing")]
fn get_image_decoder_with_format_and_data(
    image_format: image::ImageFormat,
    data: &[u8],
) -> Result<Box<dyn image::ImageDecoder + '_>, BlurhashingError> {
    let mut image_reader = image::ImageReader::new(std::io::Cursor::new(data));
    image_reader.set_format(image_format);
    Ok(Box::new(image_reader.into_decoder()?))
}
/// True when the decoder's total (decoded) byte count meets or exceeds the
/// configured limit; consulted before decoding to avoid unpacking huge
/// images into memory.
#[cfg(feature = "blurhashing")]
fn is_image_above_size_limit<T: image::ImageDecoder>(
    decoder: &T,
    blurhash_config: BlurhashConfig,
) -> bool {
    decoder.total_bytes() >= blurhash_config.size_limit
}
/// Encode `image` into a blurhash string using the configured x/y component
/// counts; operates on an RGBA8 copy of the image.
#[cfg(feature = "blurhashing")]
#[tracing::instrument(name = "encode", level = "debug", skip_all)]
#[inline]
fn blurhash_an_image(
    image: &image::DynamicImage,
    blurhash_config: BlurhashConfig,
) -> Result<String, BlurhashingError> {
    Ok(blurhash::encode_image(
        blurhash_config.components_x,
        blurhash_config.components_y,
        &image.to_rgba8(),
    )?)
}
/// Runtime blurhashing parameters, decoupled from the core config type.
#[derive(Clone, Copy, Debug)]
pub struct BlurhashConfig {
    /// Number of horizontal blurhash components.
    pub components_x: u32,
    /// Number of vertical blurhash components.
    pub components_y: u32,
    /// size limit in bytes
    pub size_limit: u64,
}

#[cfg(feature = "blurhashing")]
impl From<CoreBlurhashConfig> for BlurhashConfig {
    fn from(value: CoreBlurhashConfig) -> Self {
        Self {
            components_x: value.components_x,
            components_y: value.components_y,
            size_limit: value.blurhash_max_raw_size,
        }
    }
}
/// Errors which can occur while blurhashing an upload.
///
/// The whole type is only compiled with the `blurhashing` feature, so the
/// per-variant `#[cfg(feature = "blurhashing")]` the original carried on
/// `ImageError` was redundant and has been dropped.
#[derive(Debug)]
#[cfg(feature = "blurhashing")]
pub enum BlurhashingError {
    /// The blurhash encoder itself failed.
    HashingLibError(Box<dyn std::error::Error + Send>),
    /// Decoding the uploaded bytes into an image failed.
    ImageError(Box<image::ImageError>),
    /// The image exceeds the configured raw-size limit.
    ImageTooLarge,
}

#[cfg(feature = "blurhashing")]
impl From<image::ImageError> for BlurhashingError {
    fn from(value: image::ImageError) -> Self { Self::ImageError(Box::new(value)) }
}

#[cfg(feature = "blurhashing")]
impl From<blurhash::Error> for BlurhashingError {
    fn from(value: blurhash::Error) -> Self { Self::HashingLibError(Box::new(value)) }
}
#[cfg(feature = "blurhashing")]
impl std::fmt::Display for BlurhashingError {
    /// Human-readable rendering of the error.
    ///
    /// Fixes the missing space after the prefix: the original emitted e.g.
    /// "Blurhash Error:Image was too large to blurhash". Also drops the
    /// redundant per-arm cfg (the whole impl is already feature-gated).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Blurhash Error: ")?;
        match &self {
            | Self::ImageTooLarge => write!(f, "Image was too large to blurhash")?,
            | Self::HashingLibError(e) =>
                write!(f, "There was an error with the blurhashing library => {e}")?,
            | Self::ImageError(e) =>
                write!(f, "There was an error with the image loading library => {e}")?,
        }
        Ok(())
    }
}
+1
View File
@@ -1,3 +1,4 @@
pub mod blurhash;
mod data;
pub(super) mod migrations;
pub mod mxc;
+1 -1
View File
@@ -27,7 +27,7 @@
pub mod mailer;
pub mod media;
pub mod moderation;
pub mod oauth;
pub mod password_reset;
pub mod presence;
pub mod pusher;
pub mod registration_tokens;
-196
View File
@@ -1,196 +0,0 @@
use std::{collections::BTreeSet, hash::Hash};
use itertools::Itertools;
use serde::{Deserialize, Deserializer, Serialize};
use url::Url;
/// OAuth client metadata submitted at dynamic client registration.
///
/// Unknown variants in `grant_types` / `response_types` are silently
/// dropped by `btreeset_skip_err` rather than failing deserialization.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[non_exhaustive]
pub struct ClientMetadata {
    #[serde(default)]
    pub application_type: ApplicationType,
    /// Human-readable client name.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub client_name: Option<String>,
    /// The client's homepage; anchors validation of all other URIs.
    pub client_uri: Url,
    #[serde(default, deserialize_with = "btreeset_skip_err")]
    pub grant_types: BTreeSet<GrantType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub logo_uri: Option<Url>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub policy_uri: Option<Url>,
    /// URIs the client may be redirected back to after authorization.
    #[serde(default)]
    pub redirect_uris: Vec<Url>,
    #[serde(default, deserialize_with = "btreeset_skip_err")]
    pub response_types: BTreeSet<ResponseType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub token_endpoint_auth_method: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tos_uri: Option<Url>,
}
impl ClientMetadata {
    /// Loopback hosts permitted in `http` redirect URIs for native apps.
    pub(super) const ACCEPTABLE_LOCALHOSTS: [&str; 3] = ["localhost", "127.0.0.1", "[::1]"];

    /// Validate registered client metadata.
    ///
    /// Checks, in order:
    /// - the client URI is HTTPS, has a domain, and carries no credentials;
    /// - logo/policy/tos URIs are HTTPS, credential-free, and subdomains of
    ///   the client URI's domain;
    /// - every redirect URI is acceptable for the declared application type
    ///   (HTTPS for anyone; localhost HTTP or reverse-DNS private schemes
    ///   for native apps only).
    pub(super) fn validate(&self) -> Result<(), &'static str> {
        let Some(client_domain) = self.client_uri.domain() else {
            return Err("Client URI must have a domain.");
        };
        if self.client_uri.scheme() != "https" {
            return Err("Client URI must be HTTPS.");
        }
        if !self.client_uri.username().is_empty() || self.client_uri.password().is_some() {
            return Err("Client URI must not include credentials.");
        }
        // Optional informational URIs must be anchored to the client's domain.
        for uri in [&self.logo_uri, &self.policy_uri, &self.tos_uri]
            .iter()
            .filter_map(|uri| uri.as_ref())
        {
            if uri.scheme() != "https" {
                return Err("All metadata URIs must be HTTPS.");
            }
            if !uri.username().is_empty() || uri.password().is_some() {
                return Err("All metadata URIs must not include credentials.");
            }
            if !uri
                .domain()
                .is_some_and(|domain| is_subdomain(domain, client_domain))
            {
                return Err("All metadata URIs must be subdomains of the client URI.");
            }
        }
        for uri in &self.redirect_uris {
            match uri.scheme() {
                | "https" => {
                    // HTTPS URIs are okay for native and web clients
                    if !uri.username().is_empty() || uri.password().is_some() {
                        return Err("HTTPS redirect URIs must not contain credentials.");
                    }
                },
                | "http" if self.application_type == ApplicationType::Native => {
                    if uri
                        .host_str()
                        .is_none_or(|host| !Self::ACCEPTABLE_LOCALHOSTS.contains(&host))
                    {
                        return Err("HTTP redirect URIs for native applications must only \
                            refer to localhost.");
                    }
                    // Any port is accepted at authorization time, so a pinned
                    // port here would be misleading.
                    if uri.port().is_some() {
                        return Err("HTTP redirect URIs for native applications do not need to \
                            specify a port. All ports will be accepted during \
                            authorization.");
                    }
                },
                | private_scheme if self.application_type == ApplicationType::Native => {
                    // e.g. client domain "example.com" -> expected scheme
                    // prefix "com.example"
                    let rdns_client_uri = client_domain.split('.').rev().join(".");
                    if !private_scheme.starts_with(&rdns_client_uri) {
                        return Err("Private-use scheme URIs for native applications must \
                            begin with the application's client URI domain in \
                            reverse-DNS notation.");
                    }
                    if uri.has_authority() {
                        return Err("Private-use scheme URIs for native applications must not \
                            have an authority.");
                    }
                },
                | _ =>
                    return Err("A redirect URI's scheme is not valid for this application type."),
            }
        }
        Ok(())
    }
}
/// How the client application is deployed; affects redirect URI rules.
#[derive(Clone, Debug, Default, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum ApplicationType {
    #[default]
    Web,
    Native,
}

/// OAuth grant types a client may register.
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum GrantType {
    AuthorizationCode,
    RefreshToken,
}

/// Supported authorization response types (only `code`).
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
#[non_exhaustive]
pub enum ResponseType {
    Code,
}
/// Deserialize a BTreeSet from a sequence, skipping items which fail to
/// deserialize. This is used as a deserialize helper for ClientMetadata to
/// ignore unknown enum variants in a few fields.
fn btreeset_skip_err<'de, D, V>(de: D) -> Result<BTreeSet<V>, D::Error>
where
    D: Deserializer<'de>,
    V: Deserialize<'de> + Hash + Eq + Ord,
{
    use std::marker::PhantomData;

    use serde::de::{SeqAccess, Visitor};

    // Visitor that collects successfully-deserialized elements and drops
    // the rest.
    struct BTreeSetVisitor<V> {
        item: PhantomData<V>,
    }
    impl<'de, V> Visitor<'de> for BTreeSetVisitor<V>
    where
        V: Deserialize<'de> + Hash + Eq + Ord,
    {
        type Value = BTreeSet<V>;

        fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            write!(formatter, "a sequence")
        }

        fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
        where
            A: SeqAccess<'de>,
        {
            let mut set = BTreeSet::new();
            // `transpose` turns Result<Option<V>, E> into Option<Result<V, E>>,
            // so the loop stops at the end of the sequence but keeps going past
            // individual elements that fail to deserialize.
            // NOTE(review): continuing after a failed element relies on the
            // deserializer leaving the sequence in a resumable state — confirm
            // this holds for every format these fields are parsed from.
            while let Some(element) = seq.next_element().transpose() {
                if let Ok(element) = element {
                    set.insert(element);
                }
            }
            Ok(set)
        }
    }
    de.deserialize_seq(BTreeSetVisitor { item: PhantomData })
}
/// Returns true when `subdomain` equals `domain` or is a dot-separated
/// subdomain of it (e.g. `app.example.com` under `example.com`).
///
/// Uses `strip_suffix` instead of building a `.{domain}` string, avoiding an
/// allocation per call; `badexample.com` still does not match `example.com`
/// because the character immediately before the suffix must be a literal `.`.
fn is_subdomain(subdomain: &str, domain: &str) -> bool {
    subdomain == domain
        || subdomain
            .strip_suffix(domain)
            .is_some_and(|prefix| prefix.ends_with('.'))
}
-162
View File
@@ -1,162 +0,0 @@
use std::{collections::BTreeSet, fmt::Debug, hash::Hash, mem::discriminant};
use regex::Regex;
use ruma::OwnedDeviceId;
use serde::{Deserialize, Serialize};
use url::Url;
use super::client_metadata::ResponseType;
/// Query parameters of an authorization-endpoint request.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AuthorizationCodeQuery {
    pub response_type: ResponseType,
    pub client_id: String,
    /// Must match one of the client's registered redirect URIs.
    pub redirect_uri: Url,
    /// Raw space-separated scope string; parsed via `RawScopes::to_scopes`.
    pub scope: RawScopes,
    /// Opaque client state, echoed back on redirect.
    pub state: String,
    #[serde(default)]
    pub response_mode: ResponseMode,
    /// PKCE challenge (compared against base64url(SHA-256(verifier)) at the
    /// token endpoint).
    pub code_challenge: String,
    pub code_challenge_method: CodeChallengeMethod,
    #[serde(default)]
    pub prompt: Option<Prompt>,
}
/// Where authorization response parameters are delivered on redirect.
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
#[non_exhaustive]
pub enum ResponseMode {
    #[default]
    // default for `code` response type, see https://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#:~:text=Client%2E-,For,encoding%2E,-See
    Query,
    Fragment,
}

/// PKCE code challenge methods; only S256 is supported.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[non_exhaustive]
pub enum CodeChallengeMethod {
    S256,
}

/// The `prompt` hint; unrecognized values deserialize to `Unknown`.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
#[non_exhaustive]
pub enum Prompt {
    Create,
    #[serde(other)]
    Unknown,
}
/// A scope a client may be granted.
#[derive(Debug, Clone, Deserialize, Serialize, PartialOrd, Ord)]
pub enum Scope {
    /// Access bound to a specific device.
    Device(OwnedDeviceId),
    /// Client-server API access.
    ClientApi,
}
// NOTE(review): equality and hashing compare only the discriminant, so any
// two `Device` scopes are `==` even with different ids, while the derived
// `Ord` distinguishes them — a `BTreeSet` (Ord-based) can therefore hold
// several "equal" Device scopes. Confirm this asymmetry is intentional.
impl PartialEq for Scope {
    fn eq(&self, other: &Self) -> bool { discriminant(self) == discriminant(other) }
}
impl Eq for Scope {}
impl Hash for Scope {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) { discriminant(self).hash(state); }
}
impl std::fmt::Display for Scope {
    /// Render as the Matrix scope URN string understood by clients.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let urn = match self {
            | Self::ClientApi => "urn:matrix:client:api:*".to_owned(),
            | Self::Device(device_id) => format!("urn:matrix:client:device:{device_id}"),
        };
        f.write_str(&urn)
    }
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct RawScopes(String);
impl RawScopes {
pub fn to_scopes(&self) -> Result<BTreeSet<Scope>, String> {
let client_api_token_regex =
Regex::new(r"urn:matrix:(client|org.matrix.msc2967.client):api:\*").unwrap();
let device_token_regex = Regex::new(
r"urn:matrix:(client|org.matrix.msc2967.client):device:([a-zA-Z0-9-._~]{5,})",
)
.unwrap();
let mut scopes = BTreeSet::new();
for token in self.0.split(' ') {
let scope_was_new = {
if client_api_token_regex.is_match(token) {
scopes.insert(Scope::ClientApi)
} else if let Some(captures) = device_token_regex.captures(token) {
scopes.insert(Scope::Device(captures.get(2).unwrap().as_str().into()))
} else if token == "openid" {
// TODO(unspecced): Element sets this scope but doesn't use it for anything
true
} else {
return Err(format!("Invalid scope: {token}"));
}
};
if !scope_was_new {
return Err("Scope was specified more than once".to_owned());
}
}
Ok(scopes)
}
}
/// Parameters appended to the redirect URI after authorization.
#[derive(Serialize)]
pub struct AuthorizationCodeResponse {
    /// Echoed client state, opaque to the server.
    pub state: String,
    /// Single-use authorization code.
    pub code: String,
}

/// Body of a token-endpoint request, discriminated by `grant_type`.
#[derive(Deserialize)]
#[serde(tag = "grant_type", rename_all = "snake_case")]
pub enum TokenRequest {
    AuthorizationCode {
        code: String,
        redirect_uri: Url,
        client_id: String,
        /// PKCE verifier matching the earlier `code_challenge`.
        code_verifier: String,
    },
    RefreshToken {
        client_id: String,
        refresh_token: String,
    },
}

impl TokenRequest {
    /// The client id common to both grant types.
    #[must_use]
    pub fn client_id(&self) -> &str {
        match self {
            | Self::AuthorizationCode { client_id, .. }
            | Self::RefreshToken { client_id, .. } => client_id,
        }
    }
}

/// Successful token-endpoint response.
#[derive(Serialize)]
pub struct TokenResponse {
    pub access_token: String,
    pub token_type: TokenType,
    /// Access-token lifetime in seconds.
    pub expires_in: u64,
    pub refresh_token: String,
    /// Space-separated granted scopes.
    pub scope: String,
}

#[derive(Serialize)]
pub enum TokenType {
    Bearer,
}

/// Body of a token revocation request.
#[derive(Deserialize)]
pub struct RevokeTokenRequest {
    pub token: String,
}
-503
View File
@@ -1,503 +0,0 @@
use std::{
collections::{BTreeSet, HashMap},
sync::{Arc, Mutex},
time::{Duration, SystemTime},
};
use base64::Engine;
use conduwuit::{
Err, Result, err, info,
utils::{self, hash::sha256},
};
use database::{Deserialized, Json, Map};
use itertools::Itertools;
use ruma::{DeviceId, OwnedDeviceId, OwnedUserId, UserId};
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
Dep,
oauth::{
client_metadata::{ApplicationType, ClientMetadata, ResponseType},
grant::{
AuthorizationCodeQuery, AuthorizationCodeResponse, CodeChallengeMethod, ResponseMode,
Scope, TokenRequest, TokenResponse, TokenType,
},
},
users,
};
pub mod client_metadata;
pub mod grant;
/// OAuth service: client registration, authorization-code and refresh-token
/// grants, session bookkeeping, and short-lived action tickets.
pub struct Service {
    services: Services,
    db: Data,
    // In-memory, time-limited action tickets keyed by user localpart.
    tickets: Mutex<HashMap<String, HashMap<OAuthTicket, SystemTime>>>,
    // Authorization codes awaiting exchange at the token endpoint.
    // tokio Mutex because it is held across awaits in `issue_token`.
    pending_code_grants: tokio::sync::Mutex<HashMap<String, PendingCodeGrant>>,
}

/// Database maps used by this service.
struct Data {
    clientid_clientmetadata: Arc<Map>,
    userdeviceid_oauthsessioninfo: Arc<Map>,
    refreshtoken_refreshtokeninfo: Arc<Map>,
}

struct Services {
    users: Dep<users::Service>,
}

/// Per-device OAuth session state, stored under (user_id, device_id).
#[derive(Debug, Deserialize, Serialize)]
pub struct SessionInfo {
    pub client_id: String,
    pub scopes: BTreeSet<Scope>,
    // Only the most recently issued refresh token is valid (rotation).
    current_refresh_token: String,
}

/// Reverse index entry from a refresh token to the session it belongs to.
#[derive(Debug, Deserialize, Serialize)]
struct RefreshTokenInfo {
    client_id: String,
    user_id: OwnedUserId,
    device_id: OwnedDeviceId,
}
/// An authorization code granted to a client, waiting to be exchanged for
/// tokens at the token endpoint.
struct PendingCodeGrant {
    authorizing_user: OwnedUserId,
    requested_scopes: BTreeSet<Scope>,
    client_name: Option<String>,
    expected_client_id: String,
    expected_redirect_uri: Url,
    // PKCE challenge the token request's verifier must hash to.
    code_challenge: String,
    requested_at: SystemTime,
}

impl PendingCodeGrant {
    /// Codes expire one minute after issuance.
    const MAX_AGE: Duration = Duration::from_mins(1);
    const RANDOM_CODE_LENGTH: usize = 32;

    /// Generate a fresh random authorization code.
    #[must_use]
    pub(crate) fn generate_code() -> String { utils::random_string(Self::RANDOM_CODE_LENGTH) }

    /// True when the grant belongs to `client_id` and has not expired.
    #[must_use]
    pub(crate) fn is_valid_for(&self, client_id: &str) -> bool {
        let now = SystemTime::now();
        self.expected_client_id == client_id
            && now
                .duration_since(self.requested_at)
                .is_ok_and(|age| age < Self::MAX_AGE)
    }
}
/// A time-limited grant for a client to perform some sensitive action.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum OAuthTicket {
    CrossSigningReset,
}

impl OAuthTicket {
    /// Tickets expire ten minutes after issuance.
    const MAX_AGE: Duration = Duration::from_mins(10);

    /// Path at which a ticket of this kind is issued.
    #[must_use]
    pub fn ticket_issue_path(&self) -> &'static str {
        match self {
            | Self::CrossSigningReset => "/account/cross_signing_reset",
        }
    }
}
impl crate::Service for Service {
    /// Wire up the database maps and service dependencies.
    fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
        Ok(Arc::new(Self {
            services: Services {
                users: args.depend::<users::Service>("users"),
            },
            db: Data {
                clientid_clientmetadata: args.db["clientid_clientmetadata"].clone(),
                userdeviceid_oauthsessioninfo: args.db["userdeviceid_oauthsessioninfo"].clone(),
                refreshtoken_refreshtokeninfo: args.db["refreshtoken_refreshtokeninfo"].clone(),
            },
            tickets: Mutex::default(),
            pending_code_grants: tokio::sync::Mutex::default(),
        }))
    }

    fn name(&self) -> &str { crate::service::make_name(std::module_path!()) }
}
impl Service {
    /// Lifetime of issued access tokens; sessions outlive this via the
    /// refresh flow.
    const ACCESS_TOKEN_MAX_AGE: Duration = Duration::from_hours(1);
    /// Length of generated access and refresh tokens.
    const RANDOM_TOKEN_LENGTH: usize = 32;

    /// Generate a random access or refresh token.
    fn generate_token() -> String { utils::random_string(Self::RANDOM_TOKEN_LENGTH) }
    /// Register (or re-register) an OAuth client from its metadata.
    ///
    /// The client id is the base64 of the SHA-256 of the metadata JSON, so
    /// registering identical metadata is idempotent and yields the same id.
    ///
    /// # Errors
    /// Returns a static message when the metadata fails validation.
    pub async fn register_client(
        &self,
        metadata: &ClientMetadata,
    ) -> Result<String, &'static str> {
        metadata.validate()?;
        // Content-addressed id; serialization of an already-deserialized
        // metadata struct should not fail, hence the unwrap.
        let client_id = base64::prelude::BASE64_STANDARD
            .encode(sha256::hash(serde_json::to_string(metadata).unwrap().as_bytes()));
        // NOTE(review): assumes `Map::exists` returns Err when the key is
        // absent, i.e. metadata is only written on first registration —
        // confirm against the database layer's contract.
        if self
            .db
            .clientid_clientmetadata
            .exists(&client_id)
            .await
            .is_err()
        {
            self.db
                .clientid_clientmetadata
                .raw_put(&client_id, Json(metadata.clone()));
        }
        Ok(client_id)
    }
    /// Fetch the registered metadata for `client_id`, if any.
    pub async fn get_client_metadata(&self, client_id: &str) -> Option<ClientMetadata> {
        self.db
            .clientid_clientmetadata
            .get(client_id)
            .await
            .deserialized()
            .ok()
    }

    /// Fetch the OAuth session state for a (user, device) pair, if the
    /// device was created through an OAuth grant.
    pub async fn get_session_info_for_device(
        &self,
        user_id: &UserId,
        device_id: &DeviceId,
    ) -> Option<SessionInfo> {
        self.db
            .userdeviceid_oauthsessioninfo
            .qry(&(user_id, device_id))
            .await
            .deserialized::<SessionInfo>()
            .ok()
    }
    /// Validate an authorization request and, on success, record a pending
    /// code grant and return the redirect URI (carrying code and state) to
    /// send the user back to.
    ///
    /// # Errors
    /// Returns a human-readable message when the client, response type,
    /// code challenge method, redirect URI, or scopes are invalid.
    pub async fn request_authorization_code(
        &self,
        authorizing_user: OwnedUserId,
        query: AuthorizationCodeQuery,
    ) -> Result<String, String> {
        let Some(client_metadata) = self.get_client_metadata(&query.client_id).await else {
            return Err("Invalid client ID".to_owned());
        };
        // Only `code` is supported, and the client must have registered it.
        if !(client_metadata
            .response_types
            .contains(&query.response_type)
            && matches!(query.response_type, ResponseType::Code))
        {
            return Err("Invalid response type".to_owned());
        }
        if !matches!(query.code_challenge_method, CodeChallengeMethod::S256) {
            return Err("Invalid code challenge type".to_owned());
        }
        {
            let mut stripped_uri = query.redirect_uri.clone();
            if client_metadata.application_type == ApplicationType::Native
                && query
                    .redirect_uri
                    .host_str()
                    .is_some_and(|host| ClientMetadata::ACCEPTABLE_LOCALHOSTS.contains(&host))
            {
                // Remove the port from localhost redirect URIs for native applications when
                // checking if it's valid
                // (set_port only fails for URLs without a host; host_str() was
                // just checked above, so this unwrap cannot trip.)
                stripped_uri.set_port(None).unwrap();
            }
            if !client_metadata.redirect_uris.contains(&stripped_uri) {
                return Err("Invalid redirect URI".to_owned());
            }
        }
        let requested_scopes = query.scope.to_scopes()?;
        // Where the response parameters go: URL query vs fragment.
        let redirect_uri_query_separator = match query.response_mode {
            | ResponseMode::Fragment => '#',
            | ResponseMode::Query => '?',
        };
        let code = PendingCodeGrant::generate_code();
        info!(
            client_id = &query.client_id,
            client_name = &client_metadata.client_name,
            ?requested_scopes,
            ?authorizing_user,
            "Issuing oauth authorization code"
        );
        let redirect_uri = format!(
            "{}{}{}",
            query.redirect_uri,
            redirect_uri_query_separator,
            serde_urlencoded::to_string(AuthorizationCodeResponse {
                state: query.state,
                code: code.clone(),
            })
            .unwrap(),
        );
        // Held until the client exchanges the code at the token endpoint,
        // or it expires after PendingCodeGrant::MAX_AGE.
        let pending_grant = PendingCodeGrant {
            authorizing_user,
            requested_scopes,
            client_name: client_metadata.client_name,
            expected_client_id: query.client_id,
            expected_redirect_uri: query.redirect_uri,
            code_challenge: query.code_challenge,
            requested_at: SystemTime::now(),
        };
        self.pending_code_grants
            .lock()
            .await
            .insert(code, pending_grant);
        Ok(redirect_uri)
    }
    /// Exchange an authorization code (with PKCE verification) or a refresh
    /// token for a new token response.
    ///
    /// # Errors
    /// Fails when the code is unknown/expired, belongs to another client,
    /// the redirect URI differs, or the PKCE verifier does not match.
    pub async fn issue_token(&self, request: TokenRequest) -> Result<TokenResponse> {
        match request {
            | TokenRequest::AuthorizationCode {
                code,
                redirect_uri,
                client_id,
                code_verifier,
            } => {
                let mut pending_grants = self.pending_code_grants.lock().await;
                // Codes are single-use: remove first, validate after.
                let Some(pending_grant) = pending_grants
                    .remove(&code)
                    .filter(|grant| grant.is_valid_for(&client_id))
                else {
                    return Err!("Invalid code");
                };
                if redirect_uri != pending_grant.expected_redirect_uri {
                    return Err!("Unexpected redirect uri");
                }
                // PKCE: base64url(SHA-256(verifier)) must equal the stored challenge.
                let expected_code_challenge =
                    base64::prelude::BASE64_URL_SAFE_NO_PAD.encode(sha256::hash(&code_verifier));
                if expected_code_challenge != pending_grant.code_challenge {
                    return Err!("Invalid code challenge");
                }
                self.create_session(
                    pending_grant.authorizing_user,
                    pending_grant.requested_scopes,
                    pending_grant.client_name,
                    client_id,
                )
                .await
            },
            | TokenRequest::RefreshToken { client_id, refresh_token } =>
                self.refresh_session(client_id, refresh_token).await,
        }
    }
    /// Revoke a token by logging out the device it belongs to.
    ///
    /// The token may be either a refresh token (looked up via the reverse
    /// index) or an access token (looked up via the users service).
    ///
    /// # Errors
    /// Fails when the token matches neither kind.
    pub async fn revoke_token(&self, token: String) -> Result<()> {
        let (user_id, device_id) = if let Ok(refresh_token_info) = self
            .db
            .refreshtoken_refreshtokeninfo
            .get(&token)
            .await
            .deserialized::<RefreshTokenInfo>()
        {
            (refresh_token_info.user_id, refresh_token_info.device_id)
        } else if let Some((user_id, device_id, _)) =
            self.services.users.find_from_token(&token).await
        {
            (user_id, device_id)
        } else {
            return Err!("Invalid token");
        };
        // This will also call [`Self::remove_session`]
        self.services
            .users
            .remove_device(&user_id, &device_id)
            .await;
        Ok(())
    }
    /// Create a device and OAuth session for `authorizing_user` after a
    /// successful authorization-code exchange.
    ///
    /// The device id is taken from the granted `Device` scope; its absence
    /// is an error. Stores the session info and a refresh-token reverse
    /// index, then returns the token response.
    async fn create_session(
        &self,
        authorizing_user: OwnedUserId,
        requested_scopes: BTreeSet<Scope>,
        client_name: Option<String>,
        client_id: String,
    ) -> Result<TokenResponse> {
        let access_token = Self::generate_token();
        let refresh_token = Self::generate_token();
        // The session is bound to the device id carried by the Device scope.
        let device_id = requested_scopes
            .iter()
            .find_map(|scope| {
                if let Scope::Device(device_id) = scope {
                    Some(device_id)
                } else {
                    None
                }
            })
            .ok_or_else(|| err!("No device ID scope supplied"))?;
        self.services
            .users
            .create_device(
                &authorizing_user,
                device_id,
                &access_token,
                Some(Self::ACCESS_TOKEN_MAX_AGE),
                client_name,
                None,
            )
            .await?;
        self.db.userdeviceid_oauthsessioninfo.put(
            (&authorizing_user, device_id),
            Json(SessionInfo {
                client_id: client_id.clone(),
                current_refresh_token: refresh_token.clone(),
                scopes: requested_scopes.clone(),
            }),
        );
        // Reverse index so refresh/revoke can find the session by token.
        self.db.refreshtoken_refreshtokeninfo.raw_put(
            &refresh_token,
            Json(RefreshTokenInfo {
                client_id: client_id.clone(),
                user_id: authorizing_user.clone(),
                device_id: device_id.to_owned(),
            }),
        );
        info!(
            ?client_id,
            ?authorizing_user,
            ?device_id,
            ?requested_scopes,
            "Created new oauth session"
        );
        Ok(TokenResponse {
            access_token,
            token_type: TokenType::Bearer,
            expires_in: Self::ACCESS_TOKEN_MAX_AGE.as_secs(),
            scope: requested_scopes.iter().join(" "),
            refresh_token,
        })
    }
    /// Rotate tokens for an existing session using a refresh token: issues a
    /// fresh access token and refresh token, invalidating the old refresh
    /// token.
    async fn refresh_session(
        &self,
        client_id: String,
        refresh_token: String,
    ) -> Result<TokenResponse> {
        let Some(refresh_token_info) = self
            .db
            .refreshtoken_refreshtokeninfo
            .get(&refresh_token)
            .await
            .deserialized::<RefreshTokenInfo>()
            .ok()
        else {
            return Err!("Invalid refresh token");
        };
        // NOTE(review): these assertions (and the expect below) panic on
        // mismatch or missing state — confirm they cannot be reached with a
        // client-supplied token for another client, otherwise this is a
        // remotely triggerable panic.
        assert_eq!(&client_id, &refresh_token_info.client_id, "refresh token client id mismatch");
        let mut session_info = self
            .get_session_info_for_device(
                &refresh_token_info.user_id,
                &refresh_token_info.device_id,
            )
            .await
            .expect("session info should exist");
        assert_eq!(&client_id, &session_info.client_id, "session info client id mismatch");
        let new_access_token = Self::generate_token();
        let new_refresh_token = Self::generate_token();
        let scope = session_info.scopes.iter().join(" ");
        // Rotation: only the newest refresh token stays valid.
        session_info
            .current_refresh_token
            .clone_from(&new_refresh_token);
        self.services
            .users
            .set_token(
                &refresh_token_info.user_id,
                &refresh_token_info.device_id,
                &new_access_token,
                Some(Self::ACCESS_TOKEN_MAX_AGE),
            )
            .await?;
        self.db.userdeviceid_oauthsessioninfo.put(
            (&refresh_token_info.user_id, &refresh_token_info.device_id),
            Json(session_info),
        );
        self.db.refreshtoken_refreshtokeninfo.remove(&refresh_token);
        // The old token is consumed; dropping the binding makes accidental
        // reuse below a compile error.
        drop(refresh_token);
        self.db
            .refreshtoken_refreshtokeninfo
            .raw_put(&new_refresh_token, Json(refresh_token_info));
        Ok(TokenResponse {
            access_token: new_access_token,
            token_type: TokenType::Bearer,
            expires_in: Self::ACCESS_TOKEN_MAX_AGE.as_secs(),
            scope,
            refresh_token: new_refresh_token,
        })
    }
    /// Remove the OAuth session (and its refresh token) for a device, if
    /// one exists.
    pub async fn remove_session(&self, user_id: &UserId, device_id: &DeviceId) {
        let session_info = self.get_session_info_for_device(user_id, device_id).await;
        if let Some(session_info) = session_info {
            self.db
                .refreshtoken_refreshtokeninfo
                .remove(&session_info.current_refresh_token);
            self.db
                .userdeviceid_oauthsessioninfo
                .del((user_id, device_id));
            info!(?user_id, ?device_id, "Removed OAuth session");
        }
    }

    /// Issue a ticket for `localpart` to perform some action.
    pub fn issue_ticket(&self, localpart: String, ticket: OAuthTicket) {
        self.tickets
            .lock()
            .unwrap()
            .entry(localpart)
            .or_default()
            .insert(ticket, SystemTime::now());
    }

    /// Try to consume an unexpired ticket for `localpart`.
    ///
    /// The ticket is removed even when it turns out to be expired; in that
    /// case this returns false.
    pub fn try_consume_ticket(&self, localpart: &str, ticket: OAuthTicket) -> bool {
        let now = SystemTime::now();
        self.tickets
            .lock()
            .unwrap()
            .get_mut(localpart)
            .and_then(|tickets| tickets.remove(&ticket))
            .is_some_and(|issued| {
                now.duration_since(issued)
                    .is_ok_and(|duration| duration < OAuthTicket::MAX_AGE)
            })
    }
}
+68
View File
@@ -0,0 +1,68 @@
use std::{
sync::Arc,
time::{Duration, SystemTime},
};
use conduwuit::utils::{ReadyExt, stream::TryExpect};
use database::{Database, Deserialized, Json, Map};
use ruma::{OwnedUserId, UserId};
use serde::{Deserialize, Serialize};
/// Database table backing the password-reset service.
pub(super) struct Data {
	// Maps a raw reset-token string to its JSON-serialized `ResetTokenInfo`.
	passwordresettoken_info: Arc<Map>,
}
/// Metadata stored alongside each password-reset token.
#[derive(Debug, Serialize, Deserialize)]
pub struct ResetTokenInfo {
	/// The user whose password this token may reset.
	pub user: OwnedUserId,
	/// When the token was created; used by `is_valid` to enforce expiry.
	pub issued_at: SystemTime,
}
impl ResetTokenInfo {
	/// How long a reset token stays usable after issuance (one hour).
	const MAX_TOKEN_AGE: Duration = Duration::from_hours(1);

	/// Returns `true` while the token is younger than [`Self::MAX_TOKEN_AGE`].
	///
	/// A token whose `issued_at` lies in the future (clock rollback) is
	/// treated as invalid.
	pub fn is_valid(&self) -> bool {
		match SystemTime::now().duration_since(self.issued_at) {
			| Ok(age) => age < Self::MAX_TOKEN_AGE,
			| Err(_) => false,
		}
	}
}
impl Data {
	/// Open the password-reset table from the shared database handle.
	pub(super) fn new(db: &Arc<Database>) -> Self {
		Self {
			passwordresettoken_info: db["passwordresettoken_info"].clone(),
		}
	}

	/// Associate a reset token with its info in the database.
	pub(super) fn save_token(&self, token: &str, info: &ResetTokenInfo) {
		self.passwordresettoken_info.raw_put(token, Json(info));
	}

	/// Lookup the info for a reset token.
	///
	/// Returns `None` both when the token is unknown and when the stored
	/// value fails to deserialize.
	pub(super) async fn lookup_token_info(&self, token: &str) -> Option<ResetTokenInfo> {
		self.passwordresettoken_info
			.get(token)
			.await
			.deserialized()
			.ok()
	}

	/// Find a user's existing reset token, if any.
	///
	/// NOTE: scans the whole table looking for a matching `user`; acceptable
	/// while reset tokens are few and short-lived.
	pub(super) async fn find_token_for_user(
		&self,
		user: &UserId,
	) -> Option<(String, ResetTokenInfo)> {
		self.passwordresettoken_info
			.stream::<'_, String, ResetTokenInfo>()
			.expect_ok()
			.ready_find(|(_, info)| info.user == user)
			.await
	}

	/// Remove a reset token.
	pub(super) fn remove_token(&self, token: &str) { self.passwordresettoken_info.remove(token); }
}
+111
View File
@@ -0,0 +1,111 @@
mod data;
use std::{sync::Arc, time::SystemTime};
use conduwuit::{Err, Result, utils};
use data::{Data, ResetTokenInfo};
use ruma::OwnedUserId;
use crate::{
Dep, globals,
users::{self, HashedPassword},
};
/// Path of the server's password-reset endpoint, used to build reset links.
pub const PASSWORD_RESET_PATH: &str = "/_continuwuity/account/reset_password";
/// Query-string parameter carrying the reset token in reset links.
pub const RESET_TOKEN_QUERY_PARAM: &str = "token";
/// Length, in characters, of generated reset tokens.
const RESET_TOKEN_LENGTH: usize = 32;
/// Issues, validates, and consumes password-reset tokens for local users.
pub struct Service {
	// Persistent token storage.
	db: Data,
	// Handles to sibling services (users, globals).
	services: Services,
}
/// Sibling services this service depends on.
struct Services {
	users: Dep<users::Service>,
	globals: Dep<globals::Service>,
}
/// A reset token that was valid (present and unexpired) at the moment it was
/// issued or checked. Note that it may still expire before being consumed.
#[derive(Debug)]
pub struct ValidResetToken {
	/// The raw token string as stored in the database.
	pub token: String,
	/// The stored metadata: owning user and issuance time.
	pub info: ResetTokenInfo,
}
impl crate::Service for Service {
	/// Construct the service, opening its database table and resolving
	/// dependencies on the `users` and `globals` services.
	fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
		Ok(Arc::new(Self {
			db: Data::new(args.db),
			services: Services {
				users: args.depend::<users::Service>("users"),
				globals: args.depend::<globals::Service>("globals"),
			},
		}))
	}

	// Service name derived from this module's path.
	fn name(&self) -> &str { crate::service::make_name(std::module_path!()) }
}
impl Service {
	/// Generate a random string suitable to be used as a password reset token.
	#[must_use]
	pub fn generate_token_string() -> String { utils::random_string(RESET_TOKEN_LENGTH) }

	/// Issue a password reset token for `user`, who must be a local user with
	/// the `password` origin.
	///
	/// Any previously issued token for the user is revoked first, so at most
	/// one reset token exists per user at a time.
	pub async fn issue_token(&self, user_id: OwnedUserId) -> Result<ValidResetToken> {
		// Reset links only make sense for accounts this server controls.
		if !self.services.globals.user_is_local(&user_id) {
			return Err!("Cannot issue a password reset token for remote user {user_id}");
		}
		if user_id == self.services.globals.server_user {
			return Err!("Cannot issue a password reset token for the server user");
		}
		if self.services.users.is_deactivated(&user_id).await? {
			return Err!("Cannot issue a password reset token for deactivated user {user_id}");
		}

		// Revoke any outstanding token so it cannot be used after the new
		// one has been issued.
		if let Some((existing_token, _)) = self.db.find_token_for_user(&user_id).await {
			self.db.remove_token(&existing_token);
		}

		let token = Self::generate_token_string();
		let info = ResetTokenInfo {
			user: user_id,
			issued_at: SystemTime::now(),
		};
		self.db.save_token(&token, &info);

		Ok(ValidResetToken { token, info })
	}

	/// Check if `token` represents a valid, non-expired password reset token.
	///
	/// Expired tokens encountered during lookup are removed from the
	/// database as a form of lazy garbage collection.
	pub async fn check_token(&self, token: &str) -> Option<ValidResetToken> {
		self.db.lookup_token_info(token).await.and_then(|info| {
			if info.is_valid() {
				Some(ValidResetToken { token: token.to_owned(), info })
			} else {
				self.db.remove_token(token);
				None
			}
		})
	}

	/// Consume the supplied valid token, using it to change its user's password
	/// to `new_password`.
	///
	/// The token is single-use and is removed whether or not the reset
	/// succeeds.
	///
	/// # Errors
	///
	/// Returns an error if the token expired between validation and
	/// consumption, or if hashing the new password fails.
	pub async fn consume_token(
		&self,
		ValidResetToken { token, info }: ValidResetToken,
		new_password: &str,
	) -> Result<()> {
		// Remove first so the token cannot be replayed regardless of outcome.
		self.db.remove_token(&token);

		// Re-check expiry: the token may have aged out since `check_token`.
		// Previously an expired token was silently accepted as a no-op
		// success, leaving the caller unaware the password was never changed.
		if !info.is_valid() {
			return Err!("Password reset token has expired");
		}

		self.services
			.users
			.set_password(&info.user, Some(HashedPassword::new(new_password)?));

		Ok(())
	}
}
+1 -1
View File
@@ -100,7 +100,7 @@ pub async fn get_presence(&self, user_id: &UserId) -> Result<PresenceEvent> {
/// Pings the presence of the given user in the given room, setting the
/// specified state.
pub async fn ping_presence(&self, user_id: &UserId, new_state: &PresenceState) -> Result<()> {
const REFRESH_TIMEOUT: u64 = 60 * 1000 * 4;
const REFRESH_TIMEOUT: u64 = 60 * 1000;
let last_presence = self.db.get_presence(user_id).await;
let state_changed = match last_presence {
+1 -1
View File
@@ -48,7 +48,7 @@ pub fn is_valid(&self) -> bool {
impl std::fmt::Display for DatabaseTokenInfo {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Token created by {} and used {} times. ", self.creator, self.uses)?;
write!(f, "Token created by {} and used {} times. ", &self.creator, self.uses)?;
if let Some(expires) = &self.expires {
write!(f, "{expires}.")?;
} else {
+3 -4
View File
@@ -10,7 +10,6 @@
stream::{iter, once},
};
use ruma::OwnedUserId;
use serde::{Deserialize, Serialize};
use crate::{Dep, config, firstrun};
@@ -28,7 +27,7 @@ struct Services {
}
/// A validated registration token which may be used to create an account.
#[derive(Debug, Deserialize, Serialize)]
#[derive(Debug)]
pub struct ValidToken {
pub token: String,
pub source: ValidTokenSource,
@@ -36,7 +35,7 @@ pub struct ValidToken {
impl std::fmt::Display for ValidToken {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "`{}` --- {}", self.token, self.source)
write!(f, "`{}` --- {}", self.token, &self.source)
}
}
@@ -45,7 +44,7 @@ fn eq(&self, other: &str) -> bool { self.token == other }
}
/// The source of a valid database token.
#[derive(Debug, Deserialize, Serialize)]
#[derive(Debug)]
pub enum ValidTokenSource {
/// The static token set in the homeserver's config file.
Config,
+4 -12
View File
@@ -117,12 +117,8 @@ fn actual_dest_1(host_port: FedDest) -> Result<FedDest> {
async fn actual_dest_2(&self, dest: &ServerName, cache: bool, pos: usize) -> Result<FedDest> {
debug!("2: Hostname with included port");
let (host, port) = dest.as_str().split_at(pos);
self.conditional_query_and_cache(
host,
port.trim_start_matches(':').parse::<u16>().unwrap_or(8448),
cache,
)
.await?;
self.conditional_query_and_cache(host, port.parse::<u16>().unwrap_or(8448), cache)
.await?;
Ok(FedDest::Named(
host.to_owned(),
@@ -167,12 +163,8 @@ async fn actual_dest_3_2(
) -> Result<FedDest> {
debug!("3.2: Hostname with port in .well-known file");
let (host, port) = delegated.split_at(pos);
self.conditional_query_and_cache(
host,
port.trim_start_matches(':').parse::<u16>().unwrap_or(8448),
cache,
)
.await?;
self.conditional_query_and_cache(host, port.parse::<u16>().unwrap_or(8448), cache)
.await?;
Ok(FedDest::Named(
host.to_owned(),
@@ -80,7 +80,7 @@ pub(super) async fn fetch_and_handle_outliers<'a, Pdu, Events>(
{
// Exponential backoff
const MIN_DURATION: u64 = 60 * 2;
const MAX_DURATION: u64 = 60 * 60;
const MAX_DURATION: u64 = 60 * 60 * 8;
if continue_exponential_backoff_secs(
MIN_DURATION,
MAX_DURATION,
@@ -215,17 +215,6 @@ pub async fn handle_incoming_pdu<'a>(
.get_room_create_event(room_id)
.await;
let start_time = Instant::now();
self.federation_handletime
.write()
.insert(room_id.into(), (event_id.to_owned(), start_time));
defer! {{
self.federation_handletime
.write()
.remove(room_id);
}};
let (incoming_pdu, val) = self
.handle_outlier_pdu(origin, create_event, event_id, room_id, value, false)
.await?;
@@ -292,6 +281,17 @@ pub async fn handle_incoming_pdu<'a>(
.await?;
// Done with prev events, now handling the incoming event
let start_time = Instant::now();
self.federation_handletime
.write()
.insert(room_id.into(), (event_id.to_owned(), start_time));
defer! {{
self.federation_handletime
.write()
.remove(room_id);
}};
self.upgrade_outlier_to_timeline_pdu(incoming_pdu, val, create_event, origin, room_id)
.boxed()
.await
@@ -133,8 +133,6 @@ pub(super) async fn handle_outlier_pdu<'a, Pdu>(
.filter(|id| !auth_events.contains_key(*id))
.collect::<Vec<_>>();
if !still_missing.is_empty() {
// Don't reject: this could be a temporary condition
// TODO: use get_missing_events?
return Err!(Request(InvalidParam(
"Could not fetch all auth events for outlier event {event_id}, still missing: \
{still_missing:?}"
@@ -165,10 +163,6 @@ pub(super) async fn handle_outlier_pdu<'a, Pdu>(
v.insert(auth_event);
},
| hash_map::Entry::Occupied(_) => {
self.services
.outlier
.add_pdu_outlier(pdu_event.event_id(), &incoming_pdu);
self.services.pdu_metadata.mark_event_rejected(event_id);
return Err!(Request(InvalidParam(
"Auth event's type and state_key combination exists multiple times: {}, {}",
auth_event.kind,
@@ -183,10 +177,6 @@ pub(super) async fn handle_outlier_pdu<'a, Pdu>(
auth_events_by_key.get(&(StateEventType::RoomCreate, String::new().into())),
Some(_) | None
) {
self.services.pdu_metadata.mark_event_rejected(event_id);
self.services
.outlier
.add_pdu_outlier(pdu_event.event_id(), &incoming_pdu);
return Err!(Request(InvalidParam("Incoming event refers to wrong create event.")));
}
@@ -195,7 +185,6 @@ pub(super) async fn handle_outlier_pdu<'a, Pdu>(
ready(auth_events_by_key.get(&key).map(ToOwned::to_owned))
};
// PDU check: 3
let auth_check = state_res::event_auth::auth_check(
&room_version_rules,
&pdu_event,
@@ -207,13 +196,7 @@ pub(super) async fn handle_outlier_pdu<'a, Pdu>(
.map_err(|e| err!(Request(Forbidden("Auth check failed: {e:?}"))))?;
if !auth_check {
self.services.pdu_metadata.mark_event_rejected(event_id);
self.services
.outlier
.add_pdu_outlier(pdu_event.event_id(), &incoming_pdu);
return Err!(Request(Forbidden(
"Event authorisation fails based on event's claimed auth events"
)));
return Err!(Request(Forbidden("Auth check failed")));
}
trace!("Validation successful.");
@@ -46,7 +46,7 @@ pub(super) async fn handle_prev_pdu<'a, Pdu>(
{
// Exponential backoff
const MIN_DURATION: u64 = 5 * 60;
const MAX_DURATION: u64 = 60 * 60;
const MAX_DURATION: u64 = 60 * 60 * 24;
if continue_exponential_backoff_secs(MIN_DURATION, MAX_DURATION, time.elapsed(), *tries) {
debug!(
?tries,
@@ -5,7 +5,7 @@
};
use conduwuit::{
Result, debug, err, error, implement,
Result, debug, err, implement,
matrix::{Event, StateMap},
trace,
utils::stream::{BroadbandExt, IterStream, ReadyExt, TryBroadbandExt, TryWidebandExt},
@@ -121,7 +121,6 @@ pub(super) async fn state_at_incoming_resolved<Pdu>(
.state_resolution(room_version_rules, fork_states.iter(), &auth_chain_sets)
.boxed()
.await
.inspect_err(|e| error!("State resolution failed: {e:?}"))
else {
return Ok(None);
};
@@ -1,18 +1,14 @@
use std::{borrow::Borrow, collections::BTreeMap, sync::Arc, time::Instant};
use std::{borrow::Borrow, collections::BTreeMap, iter::once, sync::Arc, time::Instant};
use conduwuit::{
Err, Result, debug, debug_info, debug_warn, err, implement, is_equal_to,
Err, Result, debug, debug_info, err, implement, info, is_equal_to,
matrix::{Event, EventTypeExt, PduEvent, StateKey, state_res},
trace,
utils::{
IterStream,
stream::{BroadbandExt, ReadyExt},
},
utils::stream::{BroadbandExt, ReadyExt},
warn,
};
use futures::{FutureExt, StreamExt, future::ready};
use ruma::{CanonicalJsonValue, RoomId, ServerName, events::StateEventType};
use tokio::join;
use super::get_room_version_rules;
use crate::rooms::{
@@ -42,33 +38,13 @@ pub(super) async fn upgrade_outlier_to_timeline_pdu<Pdu>(
return Ok(Some(pduid));
}
let (rejected, soft_failed) = join!(
self.services
.pdu_metadata
.is_event_rejected(incoming_pdu.event_id()),
self.services
.pdu_metadata
.is_event_soft_failed(incoming_pdu.event_id())
);
if rejected {
return Err!(Request(InvalidParam("Event has been rejected")));
} else if soft_failed {
return Err!(Request(InvalidParam("Event has been soft-failed")));
}
// If any of the auth events are rejected, this event is also rejected.
for aid in incoming_pdu.auth_events() {
if self.services.pdu_metadata.is_event_rejected(aid).await {
// TODO: debug_warn instead of warn
warn!(
"Rejecting incoming event {} which depends on rejected auth event {aid}",
incoming_pdu.event_id()
);
self.services
.pdu_metadata
.mark_event_rejected(incoming_pdu.event_id());
return Err!(Request(InvalidParam("Event has rejected auth event: {aid}")));
}
if self
.services
.pdu_metadata
.is_event_soft_failed(incoming_pdu.event_id())
.await
{
return Err!(Request(InvalidParam("Event has been soft failed")));
}
debug!(
@@ -119,7 +95,6 @@ pub(super) async fn upgrade_outlier_to_timeline_pdu<Pdu>(
event_id = %incoming_pdu.event_id,
"Running initial auth check"
);
// PDU check: 5
let auth_check = state_res::event_auth::auth_check(
&room_version_rules,
&incoming_pdu,
@@ -131,12 +106,7 @@ pub(super) async fn upgrade_outlier_to_timeline_pdu<Pdu>(
.map_err(|e| err!(Request(Forbidden("Auth check failed: {e:?}"))))?;
if !auth_check {
self.services
.pdu_metadata
.mark_event_rejected(incoming_pdu.event_id());
return Err!(Request(Forbidden(
"Event authorisation fails based on the state before the event"
)));
return Err!(Request(Forbidden("Event has failed auth check with state at the event.")));
}
debug!(
@@ -165,7 +135,6 @@ pub(super) async fn upgrade_outlier_to_timeline_pdu<Pdu>(
event_id = %incoming_pdu.event_id,
"Running auth check with claimed state auth"
);
// PDU check: 6
let auth_check = state_res::event_auth::auth_check(
&room_version_rules,
&incoming_pdu,
@@ -175,12 +144,6 @@ pub(super) async fn upgrade_outlier_to_timeline_pdu<Pdu>(
)
.await
.map_err(|e| err!(Request(Forbidden("Auth check failed: {e:?}"))))?;
if !auth_check {
warn!(
event_id = %incoming_pdu.event_id,
"Event authentication fails based on the current state of the room"
);
}
// Soft fail check before doing state res
debug!(
@@ -190,22 +153,16 @@ pub(super) async fn upgrade_outlier_to_timeline_pdu<Pdu>(
let mut soft_fail = match (auth_check, incoming_pdu.redacts_id(&room_version_rules)) {
| (false, _) => true,
| (true, None) => false,
| (true, Some(redact_id)) => {
if !self
| (true, Some(redact_id)) =>
!self
.services
.state_accessor
.user_can_redact(&redact_id, incoming_pdu.sender(), room_id, true)
.await?
{
warn!(redacts = %redact_id, "User is not allowed to redact event");
true
} else {
false
}
},
.await?,
};
// 13. Use state resolution to find new room state
// We start looking at current room state now, so lets lock the room
trace!(
room_id = %room_id,
@@ -213,6 +170,36 @@ pub(super) async fn upgrade_outlier_to_timeline_pdu<Pdu>(
);
let state_lock = self.services.state.mutex.lock(room_id).await;
// Now we calculate the set of extremities this room has after the incoming
// event has been applied. We start with the previous extremities (aka leaves)
trace!("Calculating extremities");
let mut extremities: Vec<_> = self
.services
.state
.get_forward_extremities(room_id)
.ready_filter(|event_id| {
// Remove any that are referenced by this incoming event's prev_events
!incoming_pdu.prev_events().any(is_equal_to!(event_id))
})
.broad_filter_map(|event_id| async move {
// Only keep those extremities were not referenced yet
self.services
.pdu_metadata
.is_event_referenced(room_id, &event_id)
.await
.eq(&false)
.then_some(event_id)
})
.collect()
.await;
extremities.push(incoming_pdu.event_id().to_owned());
debug!(
"Retained {} extremities checked against {} prev_events",
extremities.len(),
incoming_pdu.prev_events().count()
);
let state_ids_compressed: Arc<CompressedState> = self
.services
.state_compressor
@@ -307,88 +294,81 @@ pub(super) async fn upgrade_outlier_to_timeline_pdu<Pdu>(
.is_event_soft_failed(&redact_id)
.await
{
// TODO: This should avoid pushing the event to the timeline instead of using
// soft-fails as a hack
warn!(
redact_id = %redact_id,
"Redaction is for a soft-failed event"
"Redaction is for a soft-failed event, soft failing the redaction"
);
soft_fail = true;
}
}
}
trace!("Appending pdu to timeline");
let mut extremities: Vec<_> = self
.services
.state
.get_forward_extremities(room_id)
.collect()
.await;
if !soft_fail {
// Per https://spec.matrix.org/unstable/server-server-api/#soft-failure, soft-failed events
// are not added as forward extremities.
// Now we calculate the set of extremities this room has after the incoming
// event has been applied. We start with the previous extremities (aka leaves)
trace!("Calculating extremities");
extremities = extremities
.into_iter()
.stream()
.ready_filter(|event_id| {
// Remove any that are referenced by this incoming event's prev_events
!incoming_pdu.prev_events().any(is_equal_to!(event_id))
})
.broad_filter_map(|event_id| async move {
// Only keep those extremities were not referenced yet
self.services
.pdu_metadata
.is_event_referenced(room_id, &event_id)
.await
.eq(&false)
.then_some(event_id)
})
.collect::<Vec<_>>()
.await;
extremities.push(incoming_pdu.event_id().to_owned());
debug!(
"Retained {} extremities checked against {} prev_events",
extremities.len(),
incoming_pdu.prev_events().count()
// 14. Check if the event passes auth based on the "current state" of the room,
// if not soft fail it
if soft_fail {
info!(
event_id = %incoming_pdu.event_id,
"Soft failing event"
);
assert!(!extremities.is_empty(), "extremities must not empty");
// assert!(extremities.is_empty(), "soft_fail extremities empty");
let extremities = extremities.iter().map(Borrow::borrow);
debug_assert!(extremities.clone().count() > 0, "extremities not empty");
self.services
.timeline
.append_incoming_pdu(
&incoming_pdu,
val,
extremities,
state_ids_compressed,
soft_fail,
&state_lock,
room_id,
)
.await?;
// Soft fail, we keep the event as an outlier but don't add it to the timeline
self.services
.pdu_metadata
.mark_event_soft_failed(incoming_pdu.event_id());
warn!(
event_id = %incoming_pdu.event_id,
"Event was soft failed"
);
return Err!(Request(InvalidParam("Event has been soft failed")));
}
// Now that the event has passed all auth it is added into the timeline.
// We use the `state_at_event` instead of `state_after` so we accurately
// represent the state for this event.
trace!("Appending pdu to timeline");
let extremities = extremities
.iter()
.map(Borrow::borrow)
.chain(once(incoming_pdu.event_id()));
debug_assert!(extremities.clone().count() > 0, "extremities not empty");
let pdu_id = self
.services
.timeline
.append_incoming_pdu(
&incoming_pdu,
val,
extremities.iter().map(Borrow::borrow),
extremities,
state_ids_compressed,
soft_fail,
&state_lock,
room_id,
)
.await?;
if soft_fail {
self.services
.pdu_metadata
.mark_event_soft_failed(incoming_pdu.event_id());
debug_warn!(
elapsed = ?timer.elapsed(),
"Event has been soft-failed",
);
} else {
debug_info!(
elapsed = ?timer.elapsed(),
"Accepted",
);
}
// Event has passed all auth/stateres checks
drop(state_lock);
debug_info!(
elapsed = ?timer.elapsed(),
"Accepted",
);
Ok(pdu_id)
}
+5 -8
View File
@@ -34,7 +34,7 @@
use crate::{
Dep, antispam, globals,
rooms::{
metadata, outlier, pdu_metadata, short,
metadata, outlier, short,
state::{self, RoomMutexGuard},
state_accessor, state_cache,
state_compressor::{self, CompressedState, HashSetCompressStateEvent},
@@ -54,7 +54,6 @@ struct Services {
globals: Dep<globals::Service>,
metadata: Dep<metadata::Service>,
outlier: Dep<outlier::Service>,
pdu_metadata: Dep<pdu_metadata::Service>,
sending: Dep<sending::Service>,
server_keys: Dep<server_keys::Service>,
short: Dep<short::Service>,
@@ -74,9 +73,8 @@ fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
db: args.db.clone(),
antispam: args.depend::<antispam::Service>("antispam"),
globals: args.depend::<globals::Service>("globals"),
metadata: args.depend::<metadata::Service>("rooms::metadata"),
metadata: args.depend::<metadata::Service>("metadata"),
outlier: args.depend::<outlier::Service>("rooms::outlier"),
pdu_metadata: args.depend::<pdu_metadata::Service>("rooms::pdu_metadata"),
sending: args.depend::<sending::Service>("sending"),
server_keys: args.depend::<server_keys::Service>("server_keys"),
short: args.depend::<short::Service>("rooms::short"),
@@ -242,6 +240,7 @@ async fn join_local_room(
let mut content = RoomMemberEventContent::new(MembershipState::Join);
content.displayname = self.services.users.displayname(sender_user).await.ok();
content.avatar_url = self.services.users.avatar_url(sender_user).await.ok();
content.blurhash = self.services.users.blurhash(sender_user).await.ok();
content.reason.clone_from(&reason);
content.join_authorized_via_users_server = auth_user;
@@ -287,7 +286,7 @@ async fn join_local_room(
}
#[tracing::instrument(skip_all, fields(%sender_user, %room_id), name = "join_remote_room", level = "info")]
pub async fn join_remote_room(
async fn join_remote_room(
&self,
sender_user: &UserId,
room_id: &RoomId,
@@ -295,7 +294,6 @@ pub async fn join_remote_room(
servers: &[OwnedServerName],
state_lock: RoomMutexGuard,
) -> Result {
// public so the admin command force-join-room-remotely works
info!("Joining {room_id} over federation.");
let (make_join_response, remote_server) = self
@@ -353,6 +351,7 @@ pub async fn join_remote_room(
let mut join_content = RoomMemberEventContent::new(MembershipState::Join);
join_content.displayname = self.services.users.displayname(sender_user).await.ok();
join_content.avatar_url = self.services.users.avatar_url(sender_user).await.ok();
join_content.blurhash = self.services.users.blurhash(sender_user).await.ok();
join_content.reason = reason;
join_content
.join_authorized_via_users_server
@@ -515,7 +514,6 @@ pub async fn join_remote_room(
return state;
}
self.services.outlier.add_pdu_outlier(&event_id, &value);
self.services.pdu_metadata.clear_pdu_markers(&event_id);
if let Some(state_key) = &pdu.state_key {
let shortstatekey = self
.services
@@ -547,7 +545,6 @@ pub async fn join_remote_room(
.ready_for_each(|(event_id, value)| {
trace!(%event_id, "Adding PDU as an outlier from send_join auth_chain");
self.services.outlier.add_pdu_outlier(&event_id, &value);
self.services.pdu_metadata.clear_pdu_markers(&event_id);
})
.await;
-24
View File
@@ -26,7 +26,6 @@ pub(super) struct Data {
tofrom_relation: Arc<Map>,
referencedevents: Arc<Map>,
softfailedeventids: Arc<Map>,
rejectedeventids: Arc<Map>,
services: Services,
}
@@ -41,7 +40,6 @@ pub(super) fn new(args: &crate::Args<'_>) -> Self {
tofrom_relation: db["tofrom_relation"].clone(),
referencedevents: db["referencedevents"].clone(),
softfailedeventids: db["softfailedeventids"].clone(),
rejectedeventids: db["rejectedeventids"].clone(),
services: Services {
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
},
@@ -120,29 +118,7 @@ pub(super) fn mark_event_soft_failed(&self, event_id: &EventId) {
self.softfailedeventids.insert(event_id, []);
}
pub(super) fn unmark_event_soft_failed(&self, event_id: &EventId) {
self.softfailedeventids.remove(event_id);
}
pub(super) async fn is_event_soft_failed(&self, event_id: &EventId) -> bool {
self.softfailedeventids.get(event_id).await.is_ok()
}
pub(super) fn mark_event_rejected(&self, event_id: &EventId) {
self.rejectedeventids.insert(event_id, []);
}
pub(super) fn unmark_event_rejected(&self, event_id: &EventId) {
self.rejectedeventids.remove(event_id);
}
pub(super) async fn is_event_rejected(&self, event_id: &EventId) -> bool {
self.rejectedeventids.get(event_id).await.is_ok()
}
/// Removes any soft-fail or rejection markers applied to the target PDU
pub(super) fn clear_pdu_markers(&self, event_id: &EventId) {
self.unmark_event_rejected(event_id);
self.unmark_event_soft_failed(event_id);
}
}
-24
View File
@@ -140,28 +140,4 @@ pub fn mark_event_soft_failed(&self, event_id: &EventId) {
pub async fn is_event_soft_failed(&self, event_id: &EventId) -> bool {
self.db.is_event_soft_failed(event_id).await
}
pub async fn is_event_rejected(&self, event_id: &EventId) -> bool {
self.db.is_event_rejected(event_id).await
}
pub fn mark_event_rejected(&self, event_id: &EventId) {
self.db.mark_event_rejected(event_id);
}
pub fn unmark_event_soft_failed(&self, event_id: &EventId) {
self.db.unmark_event_soft_failed(event_id);
}
pub fn unmark_event_rejected(&self, event_id: &EventId) {
self.db.unmark_event_rejected(event_id);
}
/// Returns true if the event is neither soft-failed nor rejected.
pub async fn is_event_accepted(&self, event_id: &EventId) -> bool {
!self.db.is_event_rejected(event_id).await
&& !self.db.is_event_soft_failed(event_id).await
}
pub fn clear_pdu_markers(&self, event_id: &EventId) { self.db.clear_pdu_markers(event_id); }
}
+9 -1
View File
@@ -53,7 +53,15 @@ pub async fn append_incoming_pdu<'a, Leaves>(
.await?;
if soft_fail {
// Nothing else to do with a soft-failed event.
self.services
.pdu_metadata
.mark_as_referenced(room_id, pdu.prev_events.iter().map(AsRef::as_ref));
// self.services
// .state
// .set_forward_extremities(room_id, new_room_leaves, state_lock)
// .await;
return Ok(None);
}
-60
View File
@@ -127,63 +127,3 @@ pub async fn get_token_shortstatehash(
.await
.deserialized()
}
/// Count how many sync tokens exist for a room without deleting them
///
/// This is useful for dry runs to see how many tokens would be deleted
#[implement(Service)]
pub async fn count_room_tokens(&self, room_id: &RoomId) -> Result<usize> {
use futures::TryStreamExt;
let shortroomid = self.services.short.get_shortroomid(room_id).await?;
// Create a prefix to search by - all entries for this room will start with its
// short ID
let prefix = &[shortroomid];
// Collect all keys into a Vec and count them
let keys = self
.db
.roomsynctoken_shortstatehash
.keys_prefix_raw(prefix)
.map_ok(|_| ()) // We only need to count, not store the keys
.try_collect::<Vec<_>>()
.await?;
Ok(keys.len())
}
/// Delete all sync tokens associated with a room
///
/// This helps clean up the database as these tokens are never otherwise removed
#[implement(Service)]
pub async fn delete_room_tokens(&self, room_id: &RoomId) -> Result<usize> {
use futures::TryStreamExt;
let shortroomid = self.services.short.get_shortroomid(room_id).await?;
// Create a prefix to search by - all entries for this room will start with its
// short ID
let prefix = &[shortroomid];
// Collect all keys into a Vec first, then delete them
let keys = self
.db
.roomsynctoken_shortstatehash
.keys_prefix_raw(prefix)
.map_ok(|key| {
// Clone the key since we can't store references in the Vec
Vec::from(key)
})
.try_collect::<Vec<_>>()
.await?;
// Delete each key individually
for key in &keys {
self.db.roomsynctoken_shortstatehash.del(key);
}
let count = keys.len();
Ok(count)
}
+4 -4
View File
@@ -11,8 +11,8 @@
account_data, admin, announcements, antispam, appservice, client, config, emergency,
federation, firstrun, globals, key_backups, mailer,
manager::Manager,
media, moderation, oauth, presence, pusher, registration_tokens, resolver, rooms, sending,
server_keys,
media, moderation, password_reset, presence, pusher, registration_tokens, resolver, rooms,
sending, server_keys,
service::{self, Args, Map, Service},
sync, threepid, transactions, uiaa, users,
};
@@ -27,7 +27,7 @@ pub struct Services {
pub globals: Arc<globals::Service>,
pub key_backups: Arc<key_backups::Service>,
pub media: Arc<media::Service>,
pub oauth: Arc<oauth::Service>,
pub password_reset: Arc<password_reset::Service>,
pub mailer: Arc<mailer::Service>,
pub presence: Arc<presence::Service>,
pub pusher: Arc<pusher::Service>,
@@ -84,7 +84,7 @@ macro_rules! build {
globals: build!(globals::Service),
key_backups: build!(key_backups::Service),
media: build!(media::Service),
oauth: build!(oauth::Service),
password_reset: build!(password_reset::Service),
mailer: build!(mailer::Service),
presence: build!(presence::Service),
pusher: build!(pusher::Service),
+7 -29
View File
@@ -9,9 +9,8 @@
ClientSecret, OwnedClientSecret, OwnedSessionId, SessionId,
api::error::{ErrorKind, LimitExceededErrorData},
};
use tokio::sync::MutexGuard;
pub mod session;
mod session;
use crate::{
Args, Dep, config,
@@ -27,7 +26,6 @@ pub struct Service {
ratelimiter: DefaultKeyedRateLimiter<Address>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EmailRequirement {
/// Users may change their email, but cannot remove it entirely.
Required,
@@ -221,12 +219,13 @@ pub async fn try_validate_session(
Ok(())
}
/// Get a validated validation session.
pub async fn get_valid_session(
/// Consume a validated validation session, removing it from the database
/// and returning the newly validated email address.
pub async fn consume_valid_session(
&self,
session_id: &SessionId,
client_secret: &ClientSecret,
) -> Result<ValidSession<'_>, Cow<'static, str>> {
) -> Result<Address, Cow<'static, str>> {
let mut sessions = self.sessions.lock().await;
let Some(session) = sessions.get_session(session_id) else {
@@ -236,13 +235,9 @@ pub async fn get_valid_session(
if session.client_secret == client_secret
&& matches!(session.validation_state, ValidationState::Validated)
{
let email = session.email.clone();
let session = sessions.remove_session(session_id);
Ok(ValidSession {
email,
session_id: session_id.to_owned(),
sessions,
})
Ok(session.email)
} else {
Err("This email address has not been validated. Did you use the link that was sent \
to you?"
@@ -318,20 +313,3 @@ pub async fn get_localpart_for_email(&self, email: &Address) -> Option<String> {
.ok()
}
}
pub struct ValidSession<'lock> {
pub email: Address,
session_id: OwnedSessionId,
sessions: MutexGuard<'lock, ValidationSessions>,
}
impl ValidSession<'_> {
/// Consume this session, removing it from the database and releasing the
/// lock it holds.
#[must_use]
pub fn consume(mut self) -> Address {
self.sessions.remove_session(&self.session_id);
self.email
}
}
+5 -5
View File
@@ -8,14 +8,14 @@
use ruma::{ClientSecret, OwnedClientSecret, OwnedSessionId, SessionId};
#[derive(Default)]
pub struct ValidationSessions {
pub(super) struct ValidationSessions {
sessions: HashMap<OwnedSessionId, ValidationSession>,
client_secrets: HashMap<OwnedClientSecret, OwnedSessionId>,
}
/// A pending or completed email validation session.
#[derive(Debug)]
pub struct ValidationSession {
pub(crate) struct ValidationSession {
/// The session's ID
pub session_id: OwnedSessionId,
/// The client's supplied client secret
@@ -28,7 +28,7 @@ pub struct ValidationSession {
/// The state of an email validation session.
#[derive(Debug)]
pub enum ValidationState {
pub(crate) enum ValidationState {
/// The session is waiting for this validation token to be provided
Pending(ValidationToken),
/// The session has been validated
@@ -36,7 +36,7 @@ pub enum ValidationState {
}
#[derive(Clone, Debug)]
pub struct ValidationToken {
pub(crate) struct ValidationToken {
pub token: String,
pub issued_at: SystemTime,
}
@@ -69,7 +69,7 @@ impl ValidationSessions {
const RANDOM_SID_LENGTH: usize = 16;
#[must_use]
pub fn generate_session_id() -> OwnedSessionId {
pub(super) fn generate_session_id() -> OwnedSessionId {
SessionId::parse(utils::random_string(Self::RANDOM_SID_LENGTH)).unwrap()
}
+156 -302
View File
@@ -7,7 +7,7 @@
use conduwuit::{Err, Error, Result, error, utils};
use lettre::Address;
use ruma::{
DeviceId, UserId,
UserId,
api::{
client::uiaa::{
AuthData, AuthFlow, AuthType, EmailIdentity, EmailUserIdentifier,
@@ -16,19 +16,11 @@
},
error::{ErrorKind, StandardErrorBody},
},
assign,
};
use serde_json::{
json,
value::{RawValue, to_raw_value},
};
use serde_json::value::RawValue;
use tokio::sync::Mutex;
use crate::{
Dep, config, globals,
oauth::{self, OAuthTicket},
registration_tokens, threepid, users,
};
use crate::{Dep, config, globals, registration_tokens, threepid, users};
pub struct Service {
services: Services,
@@ -41,7 +33,6 @@ struct Services {
config: Dep<config::Service>,
registration_tokens: Dep<registration_tokens::Service>,
threepid: Dep<threepid::Service>,
oauth: Dep<oauth::Service>,
}
impl crate::Service for Service {
@@ -54,7 +45,6 @@ fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
registration_tokens: args
.depend::<registration_tokens::Service>("registration_tokens"),
threepid: args.depend::<threepid::Service>("threepid"),
oauth: args.depend::<oauth::Service>("oauth"),
},
uiaa_sessions: Mutex::new(HashMap::new()),
}))
@@ -64,56 +54,8 @@ fn name(&self) -> &str { crate::service::make_name(std::module_path!()) }
}
struct UiaaSession {
session_metadata: UiaaSessionMetadata,
info: UiaaInfo,
}
#[derive(Clone)]
enum UiaaSessionMetadata {
Legacy {
identity: Identity,
},
OAuth {
localpart: String,
ticket: OAuthTicket,
},
}
impl UiaaSessionMetadata {
fn into_identity(self) -> Identity {
match self {
| Self::Legacy { identity } => identity,
| Self::OAuth { localpart, .. } =>
assign!(Identity::default(), { localpart: Some(localpart) }),
}
}
}
/// Information about the user which is initiating this UIAA session.
pub struct UiaaInitiator<'a> {
user_id: &'a UserId,
device_id: &'a DeviceId,
oauth_ticket: Option<OAuthTicket>,
}
impl<'a> UiaaInitiator<'a> {
#[must_use]
pub fn new(user_id: &'a UserId, device_id: &'a DeviceId) -> Self {
Self { user_id, device_id, oauth_ticket: None }
}
#[must_use]
pub fn with_oauth_ticket(
user_id: &'a UserId,
device_id: &'a DeviceId,
oauth_ticket: OAuthTicket,
) -> Self {
Self {
user_id,
device_id,
oauth_ticket: Some(oauth_ticket),
}
}
identity: Identity,
}
/// Information about the authenticated user's identity.
@@ -164,7 +106,7 @@ impl Identity {
/// Create an Identity with the localpart of the provided user ID
/// and all other fields set to None.
#[must_use]
fn from_user_id(user_id: &UserId) -> Self {
pub fn from_user_id(user_id: &UserId) -> Self {
Self {
localpart: Some(user_id.localpart().to_owned()),
..Default::default()
@@ -182,11 +124,11 @@ pub async fn authenticate(
auth: &Option<AuthData>,
flows: Vec<AuthFlow>,
params: Box<RawValue>,
initiator: Option<UiaaInitiator<'_>>,
identity: Option<Identity>,
) -> Result<Identity> {
match auth.as_ref() {
| None => {
let info = self.create_session(flows, params, initiator).await?;
let info = self.create_session(flows, params, identity).await;
Err(Error::Uiaa(info))
},
@@ -198,8 +140,8 @@ pub async fn authenticate(
// session if they want to start the UIAA exchange with existing
// authentication data. If that happens, we create a new session
// here.
self.create_session(flows, params, initiator)
.await?
self.create_session(flows, params, identity)
.await
.session
.unwrap()
.into()
@@ -219,15 +161,13 @@ pub async fn authenticate(
pub async fn authenticate_password(
&self,
auth: &Option<AuthData>,
user_id: &UserId,
device_id: &DeviceId,
oauth_ticket: Option<OAuthTicket>,
identity: Option<Identity>,
) -> Result<Identity> {
self.authenticate(
auth,
vec![AuthFlow::new(vec![AuthType::Password])],
Box::default(),
Some(UiaaInitiator { user_id, device_id, oauth_ticket }),
identity,
)
.await
}
@@ -243,84 +183,20 @@ async fn create_session(
&self,
flows: Vec<AuthFlow>,
params: Box<RawValue>,
initiator: Option<UiaaInitiator<'_>>,
) -> Result<UiaaInfo> {
identity: Option<Identity>,
) -> UiaaInfo {
let mut uiaa_sessions = self.uiaa_sessions.lock().await;
let session_id = utils::random_string(Self::SESSION_ID_LENGTH);
let mut info = assign::assign!(UiaaInfo::new(flows), {params: Some(params)});
info.session = Some(session_id.clone());
let mut info = assign!(UiaaInfo::new(flows), { params: Some(params), session: Some(session_id.clone()) });
uiaa_sessions.insert(session_id, UiaaSession {
info: info.clone(),
identity: identity.unwrap_or_default(),
});
let session_metadata = if let Some(initiator) = initiator {
let is_oauth = self
.services
.oauth
.get_session_info_for_device(initiator.user_id, initiator.device_id)
.await
.is_some();
if is_oauth {
if let Some(oauth_ticket) = initiator.oauth_ticket {
let ticket_url = self
.services
.config
.get_client_domain()
.join(&format!(
"{}{}",
conduwuit_core::ROUTE_PREFIX,
oauth_ticket.ticket_issue_path()
))
.unwrap();
info.flows = vec![AuthFlow::new(vec![AuthType::OAuth])];
info.params = Some(
to_raw_value(&json!({
AuthType::OAuth.as_str(): {
"url": ticket_url,
},
// TODO(compat): This is necessary for older versions of matrix-rust-sdk
"org.matrix.cross_signing_reset": {
"url": ticket_url,
}
}))
.unwrap(),
);
UiaaSessionMetadata::OAuth {
localpart: initiator.user_id.localpart().to_owned(),
ticket: oauth_ticket,
}
} else {
return Err!(Request(Forbidden(
"Clients authorized with OAuth cannot use this route."
)));
}
} else {
UiaaSessionMetadata::Legacy {
identity: Identity::from_user_id(initiator.user_id),
}
}
} else {
UiaaSessionMetadata::Legacy { identity: Identity::default() }
};
// Legacy sessions aren't available if OAuth is required
if matches!(&session_metadata, UiaaSessionMetadata::Legacy { .. })
&& !self
.services
.config
.oauth
.compatibility_mode
.uiaa_available()
{
return Err!(Request(Unrecognized(
"User-interactive authentication is unavailable on this server"
)));
}
uiaa_sessions.insert(session_id, UiaaSession { session_metadata, info: info.clone() });
Ok(info)
info
}
/// Proceed with UIAA authentication given a client's authorization data.
@@ -349,7 +225,7 @@ async fn continue_session(
}
let completed = {
let UiaaSession { session_metadata, info } = session.get_mut();
let UiaaSession { info, identity } = session.get_mut();
let auth_type = auth.auth_type().expect("auth type should be set");
@@ -382,12 +258,12 @@ async fn continue_session(
// If the provided stage hasn't already been completed, check it for completion
if !completed_stages.contains(auth_type.as_str()) {
match self.check_stage(auth, session_metadata.clone()).await {
| Ok((completed_stage, updated_metadata)) => {
match self.check_stage(auth, identity.clone()).await {
| Ok((completed_stage, updated_identity)) => {
info.auth_error = None;
completed_stages.insert(completed_stage.to_string());
info.completed.push(completed_stage);
*session_metadata = updated_metadata;
*identity = updated_identity;
},
| Err(error) => {
info.auth_error = Some(error);
@@ -403,9 +279,9 @@ async fn continue_session(
if completed {
// This session is complete, remove it and return success
let (_, UiaaSession { session_metadata, .. }) = session.remove_entry();
let (_, UiaaSession { identity, .. }) = session.remove_entry();
Ok(Ok(session_metadata.into_identity()))
Ok(Ok(identity))
} else {
// The client needs to try again, return the updated session
Ok(Err(session.get().info.clone()))
@@ -419,174 +295,152 @@ async fn continue_session(
async fn check_stage(
&self,
auth: &AuthData,
mut session_metadata: UiaaSessionMetadata,
) -> Result<(AuthType, UiaaSessionMetadata), StandardErrorBody> {
// Note: This function takes ownership of `session_metadata` because mutations
// to the identity (if it's a legacy session) must not be applied unless
// checking the stage succeeds. The updated identity is returned as part of
// the Ok value, and `continue_session` handles saving it to `uiaa_sessions`.
mut identity: Identity,
) -> Result<(AuthType, Identity), StandardErrorBody> {
// Note: This function takes ownership of `identity` because mutations to the
// identity must not be applied unless checking the stage succeeds. The
// updated identity is returned as part of the Ok value, and
// `continue_session` handles saving it to `uiaa_sessions`.
//
// This also means it's fine to mutate `identity` at any point in this function,
// because those mutations won't be saved unless the function returns Ok.
let completed_auth_type = match &mut session_metadata {
| UiaaSessionMetadata::OAuth { localpart, ticket } => {
// m.oauth is the only valid stage for oauth sessions
assert!(
matches!(auth, AuthData::OAuth(_)),
"got non-oauth auth data for oauth session"
);
match auth {
| AuthData::Dummy(_) => Ok(AuthType::Dummy),
| AuthData::EmailIdentity(EmailIdentity {
thirdparty_id_creds: ThirdpartyIdCredentials { client_secret, sid, .. },
..
}) => {
match self
.services
.threepid
.consume_valid_session(sid, client_secret)
.await
{
| Ok(email) => {
if let Some(localpart) =
self.services.threepid.get_localpart_for_email(&email).await
{
identity.try_set_localpart(localpart)?;
}
if self.services.oauth.try_consume_ticket(localpart, *ticket) {
Ok(AuthType::OAuth)
identity.try_set_email(email)?;
Ok(AuthType::EmailIdentity)
},
| Err(message) => Err(StandardErrorBody::new(
ErrorKind::ThreepidAuthFailed,
message.into_owned(),
)),
}
},
#[allow(clippy::useless_let_if_seq)]
| AuthData::Password(Password { identifier, password, .. }) => {
let user_id_or_localpart = match identifier {
| UserIdentifier::Matrix(MatrixUserIdentifier { user, .. }) =>
user.to_owned(),
| UserIdentifier::Email(EmailUserIdentifier { address, .. }) => {
let Ok(email) = Address::try_from(address.to_owned()) else {
return Err(StandardErrorBody::new(
ErrorKind::InvalidParam,
"Email is malformed".to_owned(),
));
};
if let Some(localpart) =
self.services.threepid.get_localpart_for_email(&email).await
{
identity.try_set_email(email)?;
localpart
} else {
return Err(StandardErrorBody::new(
ErrorKind::Forbidden,
"Invalid identifier or password".to_owned(),
));
}
},
| _ =>
return Err(StandardErrorBody::new(
ErrorKind::Unrecognized,
"Identifier type not recognized".to_owned(),
)),
};
let Ok(user_id) = UserId::parse_with_server_name(
user_id_or_localpart,
self.services.globals.server_name(),
) else {
return Err(StandardErrorBody::new(
ErrorKind::InvalidParam,
"User ID is malformed".to_owned(),
));
};
if self
.services
.users
.check_password(&user_id, password)
.await
.is_ok()
{
identity.try_set_localpart(user_id.localpart().to_owned())?;
Ok(AuthType::Password)
} else {
Err(StandardErrorBody::new(
ErrorKind::Forbidden,
"No OAuth ticket available".to_owned(),
"Invalid identifier or password".to_owned(),
))
}
},
| UiaaSessionMetadata::Legacy { identity } => match auth {
| AuthData::Dummy(_) => Ok(AuthType::Dummy),
| AuthData::EmailIdentity(EmailIdentity {
thirdparty_id_creds: ThirdpartyIdCredentials { client_secret, sid, .. },
..
}) => {
match self
.services
.threepid
.get_valid_session(sid, client_secret)
.await
{
| Ok(session) => {
let email = session.consume();
| AuthData::ReCaptcha(ReCaptcha { response, .. }) => {
let Some(ref private_site_key) = self.services.config.recaptcha_private_site_key
else {
return Err(StandardErrorBody::new(
ErrorKind::Forbidden,
"ReCaptcha is not configured".to_owned(),
));
};
if let Some(localpart) =
self.services.threepid.get_localpart_for_email(&email).await
{
identity.try_set_localpart(localpart)?;
}
identity.try_set_email(email)?;
Ok(AuthType::EmailIdentity)
},
| Err(message) => Err(StandardErrorBody::new(
ErrorKind::ThreepidAuthFailed,
message.into_owned(),
)),
}
},
#[allow(clippy::useless_let_if_seq)]
| AuthData::Password(Password { identifier, password, .. }) => {
let user_id_or_localpart = match identifier {
| UserIdentifier::Matrix(MatrixUserIdentifier { user, .. }) =>
user.to_owned(),
| UserIdentifier::Email(EmailUserIdentifier { address, .. }) => {
let Ok(email) = Address::try_from(address.to_owned()) else {
return Err(StandardErrorBody::new(
ErrorKind::InvalidParam,
"Email is malformed".to_owned(),
));
};
if let Some(localpart) =
self.services.threepid.get_localpart_for_email(&email).await
{
identity.try_set_email(email)?;
localpart
} else {
return Err(StandardErrorBody::new(
ErrorKind::Forbidden,
"Invalid identifier or password".to_owned(),
));
}
},
| _ =>
return Err(StandardErrorBody::new(
ErrorKind::Unrecognized,
"Identifier type not recognized".to_owned(),
)),
};
let Ok(user_id) = UserId::parse_with_server_name(
user_id_or_localpart,
self.services.globals.server_name(),
) else {
return Err(StandardErrorBody::new(
ErrorKind::InvalidParam,
"User ID is malformed".to_owned(),
));
};
if self
.services
.users
.check_password(&user_id, password)
.await
.is_ok()
{
identity.try_set_localpart(user_id.localpart().to_owned())?;
Ok(AuthType::Password)
} else {
match recaptcha_verify::verify_v3(private_site_key, response, None).await {
| Ok(()) => Ok(AuthType::ReCaptcha),
| Err(e) => {
error!("ReCaptcha verification failed: {e:?}");
Err(StandardErrorBody::new(
ErrorKind::Forbidden,
"Invalid identifier or password".to_owned(),
"ReCaptcha verification failed".to_owned(),
))
}
},
| AuthData::ReCaptcha(ReCaptcha { response, .. }) => {
let Some(ref private_site_key) =
self.services.config.recaptcha_private_site_key
else {
return Err(StandardErrorBody::new(
ErrorKind::Forbidden,
"ReCaptcha is not configured".to_owned(),
));
};
match recaptcha_verify::verify_v3(private_site_key, response, None).await {
| Ok(()) => Ok(AuthType::ReCaptcha),
| Err(e) => {
error!("ReCaptcha verification failed: {e:?}");
Err(StandardErrorBody::new(
ErrorKind::Forbidden,
"ReCaptcha verification failed".to_owned(),
))
},
}
},
| AuthData::RegistrationToken(RegistrationToken { token, .. }) => {
let token = token.trim().to_owned();
if let Some(valid_token) = self
.services
.registration_tokens
.validate_token(token)
.await
{
self.services
.registration_tokens
.mark_token_as_used(valid_token);
Ok(AuthType::RegistrationToken)
} else {
Err(StandardErrorBody::new(
ErrorKind::Forbidden,
"Invalid registration token".to_owned(),
))
}
},
| AuthData::Terms(_) => Ok(AuthType::Terms),
| _ => Err(StandardErrorBody::new(
ErrorKind::Unrecognized,
"Unsupported stage type".into(),
)),
},
}
},
}?;
| AuthData::RegistrationToken(RegistrationToken { token, .. }) => {
let token = token.trim().to_owned();
Ok((completed_auth_type, session_metadata))
if let Some(valid_token) = self
.services
.registration_tokens
.validate_token(token)
.await
{
self.services
.registration_tokens
.mark_token_as_used(valid_token);
Ok(AuthType::RegistrationToken)
} else {
Err(StandardErrorBody::new(
ErrorKind::Forbidden,
"Invalid registration token".to_owned(),
))
}
},
| AuthData::Terms(_) => Ok(AuthType::Terms),
| _ => Err(StandardErrorBody::new(
ErrorKind::Unrecognized,
"Unsupported stage type".into(),
)),
}
.map(|auth_type| (auth_type, identity))
}
}
+1 -1
View File
@@ -54,7 +54,6 @@ pub async fn set_dehydrated_device(&self, user_id: &UserId, request: Request) ->
user_id,
&request.device_id,
"",
None,
request.initial_device_display_name.clone(),
None,
)
@@ -139,6 +138,7 @@ pub async fn get_dehydrated_device_id(&self, user_id: &UserId) -> Result<OwnedDe
level = "debug",
skip_all,
fields(%user_id),
ret,
)]
pub async fn get_dehydrated_device(&self, user_id: &UserId) -> Result<DehydratedDevice> {
self.db
+34 -334
View File
@@ -1,21 +1,13 @@
pub(super) mod dehydrated_device;
use std::{
collections::BTreeMap,
mem,
net::IpAddr,
sync::Arc,
time::{Duration, SystemTime},
};
use std::{collections::BTreeMap, mem, net::IpAddr, sync::Arc};
use conduwuit::{
Err, Error, Result, debug_error, debug_warn, err, info, trace,
Err, Error, Result, Server, debug_error, debug_warn, err, trace,
utils::{self, ReadyExt, stream::TryIgnore, string::Unquoted},
warn,
};
use database::{Deserialized, Ignore, Interfix, Json, Map};
use futures::{FutureExt, Stream, StreamExt, TryFutureExt};
use lettre::Address;
use futures::{Stream, StreamExt, TryFutureExt};
use ruma::{
DeviceId, MilliSecondsSinceUnixEpoch, OneTimeKeyAlgorithm, OneTimeKeyId, OneTimeKeyName,
OwnedDeviceId, OwnedKeyId, OwnedMxcUri, OwnedOneTimeKeyId, OwnedUserId, RoomId, UInt, UserId,
@@ -26,24 +18,15 @@
encryption::{CrossSigningKey, DeviceKeys, OneTimeKey},
events::{
AnyToDeviceEvent, GlobalAccountDataEventType, ignored_user_list::IgnoredUserListEvent,
push_rules::PushRulesEvent, room::message::RoomMessageEventContent,
},
push::Ruleset,
serde::Raw,
uint,
};
use ruminuwuity::invite_permission_config::{FilterLevel, InvitePermissionConfigEvent};
use serde::{Deserialize, Serialize};
use serde_json::json;
use tracing::error;
use crate::{
Dep, account_data, admin,
appservice::{self, RegistrationInfo},
config, firstrun, globals, oauth,
rooms::{self, alias, membership},
threepid,
};
use crate::{Dep, account_data, admin, appservice, globals, rooms};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserSuspension {
@@ -58,7 +41,6 @@ pub struct UserSuspension {
/// A password hash. This is only for use when setting a user's password,
/// if the hash needs to be kept around for a while without keeping the password
/// in memory.
#[derive(Serialize, Deserialize)]
pub struct HashedPassword(String);
impl HashedPassword {
@@ -69,30 +51,19 @@ pub fn new(password: &str) -> Result<Self> {
}
}
/// The status of an access token.
pub enum AccessTokenStatus {
Valid,
Expired,
}
pub struct Service {
services: Services,
db: Data,
}
struct Services {
server: Arc<Server>,
account_data: Dep<account_data::Service>,
admin: Dep<admin::Service>,
alias: Dep<alias::Service>,
appservice: Dep<appservice::Service>,
config: Dep<config::Service>,
firstrun: Dep<firstrun::Service>,
globals: Dep<globals::Service>,
membership: Dep<membership::Service>,
oauth: Dep<oauth::Service>,
state_accessor: Dep<rooms::state_accessor::Service>,
state_cache: Dep<rooms::state_cache::Service>,
threepid: Dep<threepid::Service>,
}
struct Data {
@@ -104,11 +75,11 @@ struct Data {
logintoken_expiresatuserid: Arc<Map>,
todeviceid_events: Arc<Map>,
token_userdeviceid: Arc<Map>,
userdeviceid_tokenexpires: Arc<Map>,
userdeviceid_metadata: Arc<Map>,
userdeviceid_token: Arc<Map>,
userfilterid_filter: Arc<Map>,
userid_avatarurl: Arc<Map>,
userid_blurhash: Arc<Map>,
userid_dehydrateddevice: Arc<Map>,
userid_devicelistversion: Arc<Map>,
userid_displayname: Arc<Map>,
@@ -127,19 +98,14 @@ impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
services: Services {
server: args.server.clone(),
account_data: args.depend::<account_data::Service>("account_data"),
admin: args.depend::<admin::Service>("admin"),
alias: args.depend::<alias::Service>("alias"),
appservice: args.depend::<appservice::Service>("appservice"),
config: args.depend::<config::Service>("config"),
firstrun: args.depend::<firstrun::Service>("firstrun"),
globals: args.depend::<globals::Service>("globals"),
membership: args.depend::<membership::Service>("membership"),
oauth: args.depend::<oauth::Service>("oauth"),
state_accessor: args
.depend::<rooms::state_accessor::Service>("rooms::state_accessor"),
state_cache: args.depend::<rooms::state_cache::Service>("rooms::state_cache"),
threepid: args.depend::<threepid::Service>("threepid"),
},
db: Data {
keychangeid_userid: args.db["keychangeid_userid"].clone(),
@@ -154,6 +120,7 @@ fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
userdeviceid_token: args.db["userdeviceid_token"].clone(),
userfilterid_filter: args.db["userfilterid_filter"].clone(),
userid_avatarurl: args.db["userid_avatarurl"].clone(),
userid_blurhash: args.db["userid_blurhash"].clone(),
userid_dehydrateddevice: args.db["userid_dehydrateddevice"].clone(),
userid_devicelistversion: args.db["userid_devicelistversion"].clone(),
userid_displayname: args.db["userid_displayname"].clone(),
@@ -166,7 +133,6 @@ fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
userid_selfsigningkeyid: args.db["userid_selfsigningkeyid"].clone(),
userid_usersigningkeyid: args.db["userid_usersigningkeyid"].clone(),
useridprofilekey_value: args.db["useridprofilekey_value"].clone(),
userdeviceid_tokenexpires: args.db["userdeviceid_tokenexpires"].clone(),
},
}))
}
@@ -228,239 +194,12 @@ pub async fn create(&self, user_id: &UserId, password: Option<HashedPassword>) -
Ok(())
}
/// Create a new account for a local human or bot user.
pub async fn create_local_account(
&self,
user_id: &UserId,
password: HashedPassword,
email: Option<Address>,
) {
self.create(user_id, Some(password))
.await
.expect("should be able to save a new local user. what happened?");
// Set an initial display name
{
let mut displayname = user_id.localpart().to_owned();
let suffix = &self.services.config.new_user_displayname_suffix;
if !suffix.is_empty() {
displayname.push(' ');
displayname.push_str(suffix);
}
self.set_displayname(user_id, Some(displayname));
};
// Set default push rules
self.services
.account_data
.update(
None,
user_id,
GlobalAccountDataEventType::PushRules.to_string().into(),
&serde_json::to_value(PushRulesEvent::new(
Ruleset::server_default(user_id).into(),
))
.expect("should be able to serialize push rules"),
)
.await
.expect("should be able to update account data");
// If the user registered with an email, associate it with their account.
if let Some(email) = email {
// This may fail if the email is already in use, but we should have already
// checked that when we sent the validation email, so ignoring the error is
// acceptable here in the rare case that an email is sniped by another user
// between the validation email being sent and the account being created.
let _ = self
.services
.threepid
.associate_localpart_email(user_id.localpart(), &email)
.await;
}
// Attempt to empower the first user and disable first-run mode.
let was_first_user = self.services.firstrun.empower_first_user(user_id).await;
// If the registering user was not the first and we're suspending users on
// register, suspend them.
if !was_first_user && self.services.config.suspend_on_register {
// Note that we can still do auto joins for suspended users
self.suspend_account(user_id, &self.services.globals.server_user)
.await;
// And send an @room notice to the admin room, to prompt admins to review the
// new user and ideally unsuspend them if deemed appropriate.
if self.services.config.admin_room_notices {
self.services
.admin
.send_loud_message(RoomMessageEventContent::text_plain(format!(
"User {user_id} has been suspended as they are not the first user on \
this server. Please review and unsuspend them if appropriate."
)))
.await
.ok();
}
}
// Autojoin the user to the configured autojoin rooms
for room in &self.services.config.auto_join_rooms {
let Ok(room_id) = self.services.alias.resolve(room).await else {
error!(
"Failed to resolve room alias to room ID when attempting to auto join \
{room}, skipping"
);
continue;
};
if !self
.services
.state_cache
.server_in_room(self.services.globals.server_name(), &room_id)
.await
{
warn!(
"Skipping room {room} to automatically join as we have never joined before."
);
continue;
}
if let Some(room_server_name) = room.server_name() {
match self
.services
.membership
.join_room(
user_id,
&room_id,
Some("Automatically joining this room upon registration".to_owned()),
&[
self.services.globals.server_name().to_owned(),
room_server_name.to_owned(),
],
)
.boxed()
.await
{
| Err(e) => {
// don't return this error so we don't fail registrations
error!(
"Failed to automatically join room {room} for user {user_id}: {e}"
);
},
| _ => {
info!("Automatically joined room {room} for user {user_id}");
},
}
}
}
info!("Created new user account for {user_id}");
}
pub async fn determine_registration_user_id(
&self,
supplied_username: Option<String>,
email: Option<&Address>,
appservice_info: Option<&RegistrationInfo>,
) -> Result<OwnedUserId> {
const RANDOM_USER_ID_LENGTH: usize = 10;
let emergency_mode_enabled = self.services.config.emergency_password.is_some();
let supplied_username = supplied_username.or_else(|| {
// If the user didn't supply a username but did supply an email, use
// the email's user part to avoid falling back to a random username
email.map(|address| address.user().to_owned())
});
if let Some(supplied_username) = supplied_username {
// The user gets to pick their username. Do some validation to make sure it's
// acceptable.
// Don't allow registration with forbidden usernames.
if self
.services
.globals
.forbidden_usernames()
.is_match(&supplied_username)
&& !emergency_mode_enabled
{
return Err!(Request(Forbidden("Username is forbidden")));
}
// Create and validate the user ID
let user_id = match UserId::parse_with_server_name(
&supplied_username,
self.services.globals.server_name(),
) {
| Ok(user_id) => {
if let Err(e) = user_id.validate_strict() {
// Unless we are in emergency mode, we should follow synapse's behaviour
// on not allowing things like spaces and UTF-8 characters in
// usernames
if !emergency_mode_enabled {
return Err!(Request(InvalidUsername(debug_warn!(
"Username {supplied_username} contains disallowed characters or \
spaces: {e}"
))));
}
}
// Don't allow registration with user IDs that aren't local
if !self.services.globals.user_is_local(&user_id) {
return Err!(Request(InvalidUsername(
"Username {supplied_username} is not local to this server"
)));
}
user_id
},
| Err(e) => {
return Err!(Request(InvalidUsername(debug_warn!(
"Username {supplied_username} is not valid: {e}"
))));
},
};
if self.exists(&user_id).await {
return Err!(Request(UserInUse("User ID is not available.")));
}
// Check that the user ID is/is not in an appservice's namespace
if let Some(appservice_info) = appservice_info {
if !appservice_info.is_user_match(&user_id) && !emergency_mode_enabled {
return Err!(Request(Exclusive(
"Username is not in this appservice's namespace."
)));
}
} else if self
.services
.appservice
.is_exclusive_user_id(&user_id)
.await && !emergency_mode_enabled
{
return Err!(Request(Exclusive("Username is reserved by an appservice.")));
}
Ok(user_id)
} else {
// The user didn't specify a username. Generate a username for
// them.
loop {
let user_id = UserId::parse_with_server_name(
utils::random_string(RANDOM_USER_ID_LENGTH).to_lowercase(),
self.services.globals.server_name(),
)
.unwrap();
if !self.exists(&user_id).await {
break Ok(user_id);
}
}
}
}
// /// Create a new account for a local human or bot user.
// pub async fn create_local_account(
// &self,
// username: String,
// password:
// )
/// Deactivate account
pub async fn deactivate_account(&self, user_id: &UserId) -> Result<()> {
@@ -600,42 +339,8 @@ pub async fn is_active_local(&self, user_id: &UserId) -> bool {
pub async fn count(&self) -> usize { self.db.userid_password.count().await }
/// Find out which user an access token belongs to.
pub async fn find_from_token(
&self,
token: &str,
) -> Option<(OwnedUserId, OwnedDeviceId, AccessTokenStatus)> {
let user = self
.db
.token_userdeviceid
.get(token)
.await
.deserialized()
.ok();
// Check if the token has expired
if let Some((user_id, device_id)) = user {
if let Some(expires) = self
.db
.userdeviceid_tokenexpires
.qry(&(&user_id, &device_id))
.await
.deserialized::<u64>()
.ok()
.map(Duration::from_secs)
{
let expires_at = SystemTime::UNIX_EPOCH
.checked_add(expires)
.expect("expiry time should not overflow SystemTime");
if SystemTime::now() > expires_at {
return Some((user_id, device_id, AccessTokenStatus::Expired));
}
}
Some((user_id, device_id, AccessTokenStatus::Valid))
} else {
None
}
pub async fn find_from_token(&self, token: &str) -> Result<(OwnedUserId, OwnedDeviceId)> {
self.db.token_userdeviceid.get(token).await.deserialized()
}
/// Returns an iterator over all users on this homeserver.
@@ -725,13 +430,26 @@ pub fn set_avatar_url(&self, user_id: &UserId, avatar_url: Option<OwnedMxcUri>)
}
}
/// Get the blurhash of a user.
pub async fn blurhash(&self, user_id: &UserId) -> Result<String> {
self.db.userid_blurhash.get(user_id).await.deserialized()
}
/// Sets a new avatar_url or removes it if avatar_url is None.
pub fn set_blurhash(&self, user_id: &UserId, blurhash: Option<String>) {
if let Some(blurhash) = blurhash {
self.db.userid_blurhash.insert(user_id, blurhash);
} else {
self.db.userid_blurhash.remove(user_id);
}
}
/// Adds a new device to a user.
pub async fn create_device(
&self,
user_id: &UserId,
device_id: &DeviceId,
token: &str,
token_max_age: Option<Duration>,
initial_device_display_name: Option<String>,
client_ip: Option<String>,
) -> Result<()> {
@@ -749,8 +467,7 @@ pub async fn create_device(
increment(&self.db.userid_devicelistversion, user_id.as_bytes());
self.db.userdeviceid_metadata.put(key, Json(device));
self.set_token(user_id, device_id, token, token_max_age)
.await
self.set_token(user_id, device_id, token).await
}
/// Removes a device from a user.
@@ -766,7 +483,6 @@ pub async fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) {
if let Ok(old_token) = self.db.userdeviceid_token.qry(&userdeviceid).await {
self.db.userdeviceid_token.del(userdeviceid);
self.db.token_userdeviceid.remove(&old_token);
self.db.userdeviceid_tokenexpires.del(userdeviceid);
}
// Remove todevice events
@@ -780,9 +496,6 @@ pub async fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) {
// TODO: Remove onetimekeys
// Remove OAuth session information
self.services.oauth.remove_session(user_id, device_id).await;
increment(&self.db.userid_devicelistversion, user_id.as_bytes());
self.db.userdeviceid_metadata.del(userdeviceid);
@@ -838,7 +551,6 @@ pub async fn set_token(
user_id: &UserId,
device_id: &DeviceId,
token: &str,
token_max_age: Option<Duration>,
) -> Result<()> {
let key = (user_id, device_id);
if self.db.userdeviceid_metadata.qry(&key).await.is_err() {
@@ -865,7 +577,6 @@ pub async fn set_token(
// Remove old token
if let Ok(old_token) = self.db.userdeviceid_token.qry(&key).await {
self.db.token_userdeviceid.remove(&old_token);
self.db.userdeviceid_tokenexpires.remove(&old_token);
// It will be removed from userdeviceid_token by the insert later
}
@@ -873,18 +584,6 @@ pub async fn set_token(
self.db.userdeviceid_token.put_raw(key, token);
self.db.token_userdeviceid.raw_put(token, key);
if let Some(max_age) = token_max_age {
let expires = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.expect("system time should not be before the epoch")
.saturating_add(max_age)
.as_secs();
self.db.userdeviceid_tokenexpires.put(key, expires);
} else {
self.db.userdeviceid_tokenexpires.del(key);
}
Ok(())
}
@@ -1555,7 +1254,7 @@ pub async fn get_filter(
pub fn create_openid_token(&self, user_id: &UserId, token: &str) -> Result<u64> {
use std::num::Saturating as Sat;
let expires_in = self.services.config.openid_token_ttl;
let expires_in = self.services.server.config.openid_token_ttl;
let expires_at = Sat(utils::millis_since_unix_epoch()) + Sat(expires_in) * Sat(1000);
let mut value = expires_at.0.to_be_bytes().to_vec();
@@ -1599,7 +1298,7 @@ pub async fn find_from_openid_token(&self, token: &str) -> Result<OwnedUserId> {
pub fn create_login_token(&self, user_id: &UserId, token: &str) -> u64 {
use std::num::Saturating as Sat;
let expires_in = self.services.config.login_token_ttl;
let expires_in = self.services.server.config.login_token_ttl;
let expires_at = Sat(utils::millis_since_unix_epoch()) + Sat(expires_in);
let value = (expires_at.0, user_id);
@@ -1680,6 +1379,7 @@ pub fn set_profile_key(
pub async fn clear_profile(&self, user_id: &UserId) {
self.set_displayname(user_id, None);
self.set_avatar_url(user_id, None);
self.set_blurhash(user_id, None);
self.all_profile_keys(user_id)
.ready_for_each(|(key, _)| self.set_profile_key(user_id, &key, None))
.await;
-11
View File
@@ -22,8 +22,6 @@ crate-type = [
conduwuit-build-metadata.workspace = true
conduwuit-service.workspace = true
conduwuit-core.workspace = true
conduwuit-database.workspace = true
conduwuit-api.workspace = true
async-trait.workspace = true
askama.workspace = true
axum.workspace = true
@@ -37,18 +35,9 @@ ruma.workspace = true
thiserror.workspace = true
tower-http.workspace = true
serde.workspace = true
serde_json.workspace = true
lettre.workspace = true
memory-serve = "2.1.0"
validator = { version = "0.20.0", features = ["derive"] }
tower-sec-fetch = { version = "0.1.2", features = ["tracing"] }
tower-sessions = { version = "0.15.0", default-features = false, features = ["axum-core"] }
tower-sessions-core = { version = "0.15.0", features = ["deletion-task"] }
serde_urlencoded.workspace = true
url.workspace = true
recaptcha-verify = { version = "0.2.0", default-features = false }
reqwest_recaptcha = { package = "reqwest", version = "0.12.28", default-features = false, features = ["rustls-tls-native-roots-no-provider"] } # As long as recaptcha-verify's reqwest is outdated
form_urlencoded = "1.2.2"
[build-dependencies]
memory-serve = "2.1.0"
-48
View File
@@ -1,48 +0,0 @@
use axum::{
extract::{FromRequest, FromRequestParts, Request},
http::{Method, request::Parts},
};
use serde::de::DeserializeOwned;
use crate::WebError;
/// An extractor which deserializes a struct from a POST request's body.
/// For GET requests the struct will be None.
#[derive(Debug, Clone, Copy, Default)]
#[must_use]
pub(crate) struct PostForm<T>(pub Option<T>);

impl<T, S> FromRequest<S> for PostForm<T>
where
	T: DeserializeOwned,
	S: Send + Sync,
{
	type Rejection = WebError;

	async fn from_request(req: Request, state: &S) -> Result<Self, Self::Rejection> {
		// Anything other than POST carries no form body for us: yield None.
		if req.method() != Method::POST {
			return Ok(Self(None));
		}

		let axum::Form(parsed) = axum::Form::from_request(req, state).await?;
		Ok(Self(Some(parsed)))
	}
}
/// An extractor which wraps another extractor and converts its errors into
/// `WebError`s.
pub(crate) struct Expect<E>(pub E);
impl<E, S, R> FromRequestParts<S> for Expect<E>
where
E: FromRequestParts<S, Rejection = R>,
WebError: From<R>,
S: Send + Sync,
{
type Rejection = WebError;
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
Ok(Self(E::from_request_parts(parts, state).await?))
}
}
+15 -60
View File
@@ -1,34 +1,25 @@
use std::{any::Any, sync::Once, time::Duration};
use std::any::Any;
use askama::Template;
use axum::{
Router,
extract::rejection::{FormRejection, PathRejection, QueryRejection},
http::StatusCode,
middleware::from_fn_with_state,
response::{Html, IntoResponse, Redirect, Response},
extract::rejection::{FormRejection, QueryRejection},
http::{HeaderValue, StatusCode, header},
response::{Html, IntoResponse, Response},
};
use conduwuit_service::{Services, state};
use tower_http::catch_panic::CatchPanicLayer;
use conduwuit_service::state;
use tower_http::{catch_panic::CatchPanicLayer, set_header::SetResponseHeaderLayer};
use tower_sec_fetch::SecFetchLayer;
use tower_sessions::{ExpiredDeletion, SessionManagerLayer, cookie::SameSite};
use crate::{
pages::TemplateContext,
session::{LoginQuery, store::RocksDbSessionStore},
};
use crate::pages::TemplateContext;
mod extract;
mod pages;
mod session;
type State = state::State;
const CATASTROPHIC_FAILURE: &str = "cat-astrophic failure! we couldn't even render the error template. \
please contact the team @ https://continuwuity.org";
const ROUTE_PREFIX: &str = conduwuit_core::ROUTE_PREFIX;
#[derive(Debug, thiserror::Error)]
enum WebError {
#[error("Failed to validate form body: {0}")]
@@ -38,16 +29,10 @@ enum WebError {
#[error("{0}")]
FormRejection(#[from] FormRejection),
#[error("{0}")]
PathRejection(#[from] PathRejection),
#[error("{0}")]
BadRequest(String),
#[error("This page does not exist.")]
NotFound,
#[error("You are not allowed to request this page: {0}")]
Forbidden(String),
#[error("You must log in to access this page")]
LoginRequired(LoginQuery),
#[error("Failed to render template: {0}")]
Render(#[from] askama::Error),
@@ -67,26 +52,12 @@ struct Error {
context: TemplateContext,
}
if let Self::LoginRequired(query) = self {
return Redirect::to(&format!(
"{}/account/login?{}",
ROUTE_PREFIX,
serde_urlencoded::to_string(query).unwrap()
))
.into_response();
}
let status = match &self {
| Self::ValidationError(_)
| Self::BadRequest(_)
| Self::QueryRejection(_)
| Self::FormRejection(_)
| Self::InternalError(_) => StatusCode::BAD_REQUEST,
| Self::FormRejection(_) => StatusCode::BAD_REQUEST,
| Self::NotFound => StatusCode::NOT_FOUND,
| Self::Forbidden(_) => StatusCode::FORBIDDEN,
| Self::LoginRequired(_) => {
unreachable!("LoginRequired is handled earlier")
},
| _ => StatusCode::INTERNAL_SERVER_ERROR,
};
@@ -96,7 +67,6 @@ struct Error {
context: TemplateContext {
// Statically set false to prevent error pages from being indexed.
allow_indexing: false,
csp_nonce: String::new(),
},
};
@@ -108,39 +78,21 @@ struct Error {
}
}
static STORE_CLEANUP_TASK: Once = Once::new();
pub fn build(services: &Services) -> Router<state::State> {
pub fn build() -> Router<state::State> {
#[allow(clippy::wildcard_imports)]
use pages::*;
let store = RocksDbSessionStore::new(&services.db);
STORE_CLEANUP_TASK.call_once(|| {
services.server.runtime().spawn(
store
.clone()
.continuously_delete_expired(Duration::from_hours(1)),
);
});
Router::new()
.merge(index::build())
.nest(
"/_continuwuity/",
Router::new()
.nest("/account/", account::build())
.merge(debug::build())
.nest("/oauth2/", oauth::build())
.merge(resources::build())
.merge(password_reset::build())
.merge(debug::build())
.merge(threepid::build())
.fallback(async || WebError::NotFound),
)
.layer(
SessionManagerLayer::new(store)
.with_name("_c10y_session")
.with_same_site(SameSite::Lax),
)
.layer(CatchPanicLayer::custom(|panic: Box<dyn Any + Send + 'static>| {
let details = if let Some(s) = panic.downcast_ref::<String>() {
s.clone()
@@ -152,7 +104,10 @@ pub fn build(services: &Services) -> Router<state::State> {
WebError::Panic(details).into_response()
}))
.layer(from_fn_with_state(services.config.clone(), template_context_middleware))
.layer(SetResponseHeaderLayer::if_not_present(
header::CONTENT_SECURITY_POLICY,
HeaderValue::from_static("default-src 'self'; img-src 'self' data:;"),
))
.layer(SecFetchLayer::new(|policy| {
policy.allow_safe_methods().reject_missing_metadata();
}))
@@ -1,47 +0,0 @@
use axum::{Extension, Router, extract::State, routing::on};
use conduwuit_service::oauth::OAuthTicket;
use crate::{
extract::PostForm,
pages::{GET_POST, Result, TemplateContext, components::UserCard},
response,
session::{LoginTarget, User},
template,
};
/// Routes for the cross-signing reset page: GET shows the confirmation
/// form, POST performs the reset.
pub(crate) fn build() -> Router<crate::State> {
	let routes = Router::new();
	routes.route("/", on(GET_POST, route_cross_signing_reset))
}
// Template backing the cross-signing reset page; rendered with the current
// user's card and one of the body states below.
template! {
	struct CrossSigningReset use "cross_signing_reset.html.j2" {
		user_card: UserCard,
		body: CrossSigningResetBody
	}
}

// Which stage of the reset flow the template should render.
#[derive(Debug)]
enum CrossSigningResetBody {
	// Initial GET: show the confirmation form.
	Form,
	// POST handled: a reset ticket was issued.
	Success,
}
async fn route_cross_signing_reset(
State(services): State<crate::State>,
Extension(context): Extension<TemplateContext>,
user: User,
PostForm(form): PostForm<()>,
) -> Result {
let user_id = user.expect_recent(LoginTarget::CrossSigningReset)?;
let user_card = UserCard::for_local_user(&services, user_id.clone()).await;
if form.is_some() {
services
.oauth
.issue_ticket(user_id.localpart().to_owned(), OAuthTicket::CrossSigningReset);
response!(CrossSigningReset::new(context, user_card, CrossSigningResetBody::Success))
} else {
response!(CrossSigningReset::new(context, user_card, CrossSigningResetBody::Form))
}
}
-129
View File
@@ -1,129 +0,0 @@
use axum::{Extension, Router, extract::State, routing::on};
use conduwuit_api::client::full_user_deactivate;
use futures::StreamExt;
use ruma::{OwnedRoomId, OwnedUserId, UserId};
use tower_sessions::Session;
use validator::{Validate, ValidationError, ValidationErrors};
use crate::{
extract::PostForm,
form,
pages::{
GET_POST, Result, TemplateContext,
components::{UserCard, form::Form},
},
response,
session::{LoginTarget, User},
template,
};
/// Routes for the account-deactivation page: GET shows the form, POST
/// performs the deactivation.
pub(crate) fn build() -> Router<crate::State> {
	let routes = Router::new();
	routes.route("/", on(GET_POST, route_deactivate))
}
// Template for the deactivation page.
template! {
	struct Deactivate use "deactivate.html.j2" {
		body: DeactivateBody
	}
}

// State rendered by the deactivation page.
#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
enum DeactivateBody {
	// Deactivation is disabled by server configuration.
	Unavailable,
	// Confirmation form (possibly re-rendered with validation errors).
	Form {
		user_id: OwnedUserId,
		user_card: UserCard,
		form: Form<'static>,
	},
	// The account has been deactivated.
	Success,
}

// Password + explicit-consent form guarding deactivation.
form! {
	struct DeactivateForm {
		password: String where {
			input_type: "password",
			label: "Enter your password to confirm",
			autocomplete: "current-password"
		},
		#[validate(required(message = "This checkbox must be checked"))]
		confirm: Option<String> where {
			input_type: "checkbox",
			label: "I understand that deactivating my account cannot be undone."
		}
		submit: "Deactivate my account",
		slowdown: true
	}
}
/// GET renders the deactivation form; POST validates the password and, on
/// success, deactivates the account and clears the web session.
async fn route_deactivate(
	State(services): State<crate::State>,
	Extension(context): Extension<TemplateContext>,
	user: User,
	session: Session,
	PostForm(form): PostForm<DeactivateForm>,
) -> Result {
	// Deactivation is irreversible: require a recent login.
	let user_id = user.expect_recent(LoginTarget::Deactivate)?;
	let user_card = UserCard::for_local_user(&services, user_id.clone()).await;

	// The configuration switch wins over everything else.
	if !services.config.allow_deactivation {
		return response!(Deactivate::new(context, DeactivateBody::Unavailable));
	}

	let body = match form {
		| None => DeactivateBody::Form {
			user_id,
			user_card,
			form: DeactivateForm::build(context.clone()),
		},
		| Some(form) => match validate_deactivate_form(&services, &user_id, form).await {
			| Err(err) => DeactivateBody::Form {
				user_id,
				user_card,
				form: DeactivateForm::with_errors(context.clone(), err),
			},
			| Ok(()) => {
				// Collect every joined room so the deactivation can leave them.
				let joined: Vec<OwnedRoomId> = services
					.rooms
					.state_cache
					.rooms_joined(&user_id)
					.collect()
					.await;
				full_user_deactivate(&services, &user_id, &joined).await?;
				// The account is gone; drop the web session too.
				session.clear().await;
				DeactivateBody::Success
			},
		},
	};

	response!(Deactivate::new(context, body))
}
/// Runs the derive-based field validation, then checks the submitted
/// password against the account. Returns field-keyed errors on failure.
async fn validate_deactivate_form(
	services: &crate::State,
	user_id: &UserId,
	form: DeactivateForm,
) -> Result<(), ValidationErrors> {
	// Derive-based checks first (required checkbox, etc.).
	form.validate()?;

	let password_ok = services
		.users
		.check_password(user_id, &form.password)
		.await
		.is_ok();
	if password_ok {
		return Ok(());
	}

	// Attach the failure to the password field so the form re-renders it.
	let mut errors = ValidationErrors::new();
	errors.add(
		"password",
		ValidationError::new("wrong").with_message("Incorrect password".into()),
	);
	Err(errors)
}
-126
View File
@@ -1,126 +0,0 @@
use axum::{
Extension, Router,
extract::{Path, State},
routing::{get, on},
};
use conduwuit_service::oauth::{SessionInfo, client_metadata::ClientMetadata};
use futures::StreamExt;
use ruma::OwnedDeviceId;
use serde::{Deserialize, Serialize};
use crate::{
WebError,
extract::{Expect, PostForm},
pages::{
GET_POST, Result, TemplateContext,
components::{ClientScopes, DeviceCard, DeviceCardStyle},
},
response,
session::{LoginTarget, User},
template,
};
/// Device pages: a detail view and a removal confirmation flow, both keyed
/// by the device id in the path.
pub(crate) fn build() -> Router<crate::State> {
	let routes = Router::new().route("/{device}/", get(get_device_info));
	routes.route("/{device}/remove", on(GET_POST, route_remove_device))
}
// Template for the device-detail page; shows the device card plus, when the
// device belongs to an OAuth session, the owning client's metadata.
template! {
	struct DeviceInfo use "device_info.html.j2" {
		device_card: DeviceCard,
		client_metadata: Option<(ClientMetadata, SessionInfo)>
	}
}
/// Renders the detail page for one of the user's devices, including OAuth
/// client information when the device was created through an OAuth session.
async fn get_device_info(
	State(services): State<crate::State>,
	Extension(context): Extension<TemplateContext>,
	user: User,
	Expect(Path(query)): Expect<Path<DevicePath>>,
) -> Result {
	let user_id = user.expect(LoginTarget::RemoveDevice(query.clone()))?;

	let device = match services
		.users
		.get_device_metadata(&user_id, &query.device)
		.await
	{
		| Ok(device) => device,
		| Err(_) => return response!(WebError::BadRequest("Unknown device".to_owned())),
	};

	// Resolve the OAuth client owning this device, if any; either lookup
	// failing short-circuits to None.
	let client_metadata = async {
		let session = services
			.oauth
			.get_session_info_for_device(&user_id, &device.device_id)
			.await?;
		let metadata = services
			.oauth
			.get_client_metadata(&session.client_id)
			.await?;
		Some((metadata, session))
	}
	.await;

	let device_card =
		DeviceCard::for_device(&services, &user_id, device, DeviceCardStyle::Detailed).await;

	response!(DeviceInfo::new(context, device_card, client_metadata))
}
// Template for the device-removal confirmation page.
template! {
	struct RemoveDevice use "remove_device.html.j2" {
		body: RemoveDeviceBody
	}
}

// State rendered by the removal page.
#[derive(Debug)]
enum RemoveDeviceBody {
	// Confirmation form; `last_device` flags that no other device remains.
	Form {
		device_card: Box<DeviceCard>,
		last_device: bool,
	},
	// The device has been removed.
	Success,
}

// Path parameters for the `/{device}/...` routes above.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub(crate) struct DevicePath {
	pub device: OwnedDeviceId,
}
async fn route_remove_device(
State(services): State<crate::State>,
Extension(context): Extension<TemplateContext>,
user: User,
Expect(Path(query)): Expect<Path<DevicePath>>,
PostForm(form): PostForm<()>,
) -> Result {
let user_id = user.expect_recent(LoginTarget::RemoveDevice(query.clone()))?;
let Ok(device) = services
.users
.get_device_metadata(&user_id, &query.device)
.await
else {
return response!(WebError::BadRequest("Unknown device".to_owned()));
};
if form.is_some() {
services
.users
.remove_device(&user_id, &device.device_id)
.await;
response!(RemoveDevice::new(context, RemoveDeviceBody::Success))
} else {
let device_card =
DeviceCard::for_device(&services, &user_id, device, DeviceCardStyle::Minimal).await;
let last_device = services.users.all_devices_metadata(&user_id).count().await <= 1;
response!(RemoveDevice::new(context, RemoveDeviceBody::Form {
device_card: Box::new(device_card),
last_device
}))
}
}
-210
View File
@@ -1,210 +0,0 @@
use axum::{
Extension, Router,
extract::{Query, State},
routing::{get, on, post},
};
use conduwuit_core::warn;
use conduwuit_service::{mailer::messages, threepid::session::ValidationSessions};
use lettre::{Address, message::Mailbox};
use ruma::{ClientSecret, OwnedClientSecret, OwnedSessionId};
use serde::{Deserialize, Serialize};
use crate::{
WebError,
extract::{Expect, PostForm},
form,
pages::{
GET_POST, Result, TemplateContext,
account::ThreepidQuery,
components::{UserCard, form::Form},
},
response,
session::{LoginTarget, User},
template,
};
/// Email (threepid) endpoints: the change form, the validation-link landing
/// page, and removal of the stored address.
pub(crate) fn build() -> Router<crate::State> {
	let routes = Router::new()
		.route("/change/", on(GET_POST, route_change_email))
		.route("/change/validate", get(get_change_email_validate));
	routes.route("/change/delete", post(post_delete_email))
}
// Template for the change-email form page.
template! {
	struct ChangeEmail use "change_email.html.j2" {
		user_card: UserCard,
		email: Option<String>,
		form: Form<'static>,
		may_remove: bool
	}
}

// Single-field form asking for the new address.
form! {
	struct ChangeEmailForm {
		email: Address where {
			input_type: "email",
			label: "Email address"
		}
		submit: "Change email"
	}
}

// Template for the validation-pending / validated page.
template! {
	struct ChangeEmailValidate use "change_email_validate.html.j2" {
		user_card: UserCard,
		body: ChangeEmailValidateBody
	}
}

// Template confirming the stored address was removed.
template! {
	struct DeleteEmail use "delete_email.html.j2" {
		user_card: UserCard
	}
}

// State rendered by the validation page.
#[derive(Debug)]
enum ChangeEmailValidateBody {
	// Waiting for the user to follow the emailed link; `validation_error`
	// re-renders the page with an error notice.
	ValidationPending {
		session_id: OwnedSessionId,
		client_secret: OwnedClientSecret,
		validation_error: bool,
	},
	// The new address has been associated with the account.
	Success,
}
/// GET renders the change-email form (with the currently associated address,
/// if any); POST sends a validation email to the submitted address and shows
/// the pending-validation page.
async fn route_change_email(
	State(services): State<crate::State>,
	Extension(context): Extension<TemplateContext>,
	user: User,
	PostForm(form): PostForm<ChangeEmailForm>,
) -> Result {
	// Changing the email is sensitive: require a recent login.
	let user_id = user.expect_recent(LoginTarget::ChangeEmail)?;
	let Some(form) = form else {
		// GET: show the form together with the current address, if any.
		return response!(ChangeEmail::new(
			context.clone(),
			UserCard::for_local_user(&services, user_id.clone()).await,
			services
				.threepid
				.get_email_for_localpart(user_id.localpart())
				.await
				.map(|address| address.to_string()),
			ChangeEmailForm::build(context),
			services.threepid.email_requirement().may_remove(),
		));
	};
	let client_secret = ClientSecret::new();
	let session_id = {
		let display_name = services.users.displayname(&user_id).await.ok();
		match services
			.threepid
			.send_validation_email(
				Mailbox::new(display_name, form.email.clone()),
				|verification_link| messages::ChangeEmail {
					server_name: services.globals.server_name().as_str(),
					user_id: Some(&user_id),
					verification_link,
				},
				&client_secret,
				// NOTE(review): presumably a send-attempt counter — confirm
				// against send_validation_email's signature.
				0,
			)
			.await
		{
			| Ok(session_id) => session_id,
			| Err(err) => {
				// If we couldn't send an email, generate a random session ID so
				// the response is indistinguishable from the success case and
				// does not give the failure away; log the real cause instead.
				warn!(
					"Failed to send email change message for {user_id} to {}: {err}",
					form.email
				);
				ValidationSessions::generate_session_id()
			},
		}
	};
	// POST: always render the pending-validation page, regardless of whether
	// the email was actually sent (see above).
	response!(ChangeEmailValidate::new(
		context,
		UserCard::for_local_user(&services, user_id).await,
		ChangeEmailValidateBody::ValidationPending {
			session_id,
			client_secret,
			validation_error: false
		}
	))
}
// Query parameters of the validation link; flattens the shared
// client_secret/session_id pair from the account module.
#[derive(Deserialize, Serialize)]
struct ChangeEmailQuery {
	#[serde(flatten)]
	threepid: ThreepidQuery,
}
/// Landing page for the emailed validation link: checks the threepid session
/// referenced by the query and, if valid, associates the new address with
/// the account.
async fn get_change_email_validate(
	State(services): State<crate::State>,
	Extension(context): Extension<TemplateContext>,
	Expect(Query(ChangeEmailQuery {
		threepid: ThreepidQuery { client_secret, session_id },
	})): Expect<Query<ChangeEmailQuery>>,
	user: User,
) -> Result {
	let user_id = user.expect(LoginTarget::ChangeEmail)?;
	let user_card = UserCard::for_local_user(&services, user_id.clone()).await;
	if !services.threepid.email_requirement().may_change() {
		return Err(WebError::Forbidden("You may not change your email address.".to_owned()));
	}
	// An unknown/expired session re-renders the pending page with an error
	// notice rather than failing outright.
	let Ok(session) = services
		.threepid
		.get_valid_session(&session_id, &client_secret)
		.await
	else {
		return response!(ChangeEmailValidate::new(
			context,
			user_card,
			ChangeEmailValidateBody::ValidationPending {
				session_id,
				client_secret,
				validation_error: true
			}
		));
	};
	// Consume the one-shot session, yielding the address it was created for.
	let new_email = session.consume();
	if let Err(err) = services
		.threepid
		.associate_localpart_email(user_id.localpart(), &new_email)
		.await
	{
		return response!(BadRequest(err.message()));
	}
	response!(ChangeEmailValidate::new(context, user_card, ChangeEmailValidateBody::Success))
}
/// POST handler that detaches the stored email address from the account.
async fn post_delete_email(
	State(services): State<crate::State>,
	Extension(context): Extension<TemplateContext>,
	user: User,
) -> Result {
	let user_id = user.expect(LoginTarget::ChangeEmail)?;
	let user_card = UserCard::for_local_user(&services, user_id.clone()).await;

	// Policy check: removal may be disallowed by configuration.
	let may_remove = services.threepid.email_requirement().may_remove();
	if !may_remove {
		return Err(WebError::Forbidden("You may not remove your email address.".to_owned()));
	}

	// Best-effort removal; ignore the outcome, the address may already be gone.
	let _ = services
		.threepid
		.disassociate_localpart_email(user_id.localpart())
		.await;

	response!(DeleteEmail::new(context, user_card))
}

Some files were not shown because too many files have changed in this diff Show More